/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or (with -fcilkplus) names a Cilk runtime frame helper.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
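
/* As a worked instance of the contract above: for an access like
   ((char *) p)[3] where P is known to be 16-byte aligned, the walk
   above roughly yields *ALIGNP == 128 and *BITPOSP == 24, i.e. 128
   bits divides the access address minus a 3-byte (24-bit) offset.  */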

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
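
/* For example, if Q is P + 2 where P is known to be 8-byte aligned,
   get_pointer_alignment_1 (Q, ...) stores align == 64 and bitpos == 16,
   and get_pointer_alignment (Q) therefore degrades the usable alignment
   to least_bit_hwi (16) == 16 bits, i.e. two bytes.  */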

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
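
/* E.g. string_length ("ab\0cd", 1, 5) returns 2, and for the byte
   sequence { 0x61, 0, 0x62, 0, 0, 0 } (a little-endian two-byte "ab"),
   string_length (ptr, 2, 3) returns 2 because the third element is the
   first one whose bytes are all zero.  */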

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
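
/* E.g. c_strlen applied to the STRING_CST "hello" yields ssize_int (5);
   applied to "foo\0bar" with a constant byte offset of 4 it yields
   ssize_int (3), the distance from the offset to the next null.  */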

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
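
/* For instance, on typical targets (8-bit units, matching byte and word
   endianness, ASCII host chars), c_readstr ("abcd", SImode) produces the
   constant 0x61626364 when big-endian and 0x64636261 when little-endian:
   the loop above places STR[0] in the most or least significant byte
   respectively.  */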

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and store that value in the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
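
/* Thus __builtin_frame_address (1) expands to one trip around the loop
   above (a dynamic-chain load) followed by FRAME_ADDR_RTX, while
   __builtin_return_address (0) expands, on targets that do not define
   RETURN_ADDR_RTX, to a load from the word after the frame address.  */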

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
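
/* To summarize the layout established above, the __builtin_setjmp buffer
   holds, in Pmode-word units:
     word 0   the frame pointer (targetm.builtin_setjmp_frame_value)
     word 1   the address of RECEIVER_LABEL
     word 2+  the machine-dependent stack save area (sa_mode)
   expand_builtin_longjmp below reads these words back at the same
   offsets.  */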

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the const call expr arg iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
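
/* For example, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly two pointer arguments (rejecting null
   constants where the callee is declared nonnull), while
   validate_arglist (exp, POINTER_TYPE, 0) accepts one pointer argument
   followed by anything, as in expand_builtin_prefetch below.  */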

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
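
/* So a call such as __builtin_prefetch (p, 1, 3) becomes the target's
   prefetch-for-write, high-locality pattern when one exists, and
   otherwise only evaluates the address argument for its side effects.  */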

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
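
/* The resulting block layout is therefore: the incoming arg pointer,
   then (when it is passed explicitly) the structure value address, then
   one slot per argument register, each rounded up to its mode's
   alignment and recorded in apply_args_mode.  */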

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1729 /* Perform an untyped return. */
1731 static void
1732 expand_builtin_return (rtx result)
1734 int size, align, regno;
1735 machine_mode mode;
1736 rtx reg;
1737 rtx_insn *call_fusage = 0;
1739 result = convert_memory_address (Pmode, result);
1741 apply_result_size ();
1742 result = gen_rtx_MEM (BLKmode, result);
1744 if (targetm.have_untyped_return ())
1746 rtx vector = result_vector (0, result);
1747 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1748 emit_barrier ();
1749 return;
1752 /* Restore the return value and note that each value is used. */
1753 size = 0;
1754 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1755 if ((mode = apply_result_mode[regno]) != VOIDmode)
1757 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1758 if (size % align != 0)
1759 size = CEIL (size, align) * align;
1760 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1761 emit_move_insn (reg, adjust_address (result, mode, size));
1763 push_to_sequence (call_fusage);
1764 emit_use (reg);
1765 call_fusage = get_insns ();
1766 end_sequence ();
1767 size += GET_MODE_SIZE (mode);
1770 /* Put the USE insns before the return. */
1771 emit_insn (call_fusage);
1773 /* Return whatever values were restored by jumping directly to the end
1774    of the function.  */
1775 expand_naked_return ();
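/* Illustrative sketch (not part of GCC itself): the expanders above
   implement the GNU C untyped-call extension.  A forwarding wrapper
   might use it as follows, where target_fn is a hypothetical callee
   and 64 an assumed upper bound on the argument-block size:

     void *wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }

   __builtin_apply_args, __builtin_apply and __builtin_return are
   expanded by expand_builtin_apply_args, expand_builtin_apply and
   expand_builtin_return respectively.  */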
1778 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1780 static enum type_class
1781 type_to_class (tree type)
1783 switch (TREE_CODE (type))
1785 case VOID_TYPE: return void_type_class;
1786 case INTEGER_TYPE: return integer_type_class;
1787 case ENUMERAL_TYPE: return enumeral_type_class;
1788 case BOOLEAN_TYPE: return boolean_type_class;
1789 case POINTER_TYPE: return pointer_type_class;
1790 case REFERENCE_TYPE: return reference_type_class;
1791 case OFFSET_TYPE: return offset_type_class;
1792 case REAL_TYPE: return real_type_class;
1793 case COMPLEX_TYPE: return complex_type_class;
1794 case FUNCTION_TYPE: return function_type_class;
1795 case METHOD_TYPE: return method_type_class;
1796 case RECORD_TYPE: return record_type_class;
1797 case UNION_TYPE:
1798 case QUAL_UNION_TYPE: return union_type_class;
1799 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1800 ? string_type_class : array_type_class);
1801 case LANG_TYPE: return lang_type_class;
1802 default: return no_type_class;
1806 /* Expand a call EXP to __builtin_classify_type. */
1808 static rtx
1809 expand_builtin_classify_type (tree exp)
1811 if (call_expr_nargs (exp))
1812 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1813 return GEN_INT (no_type_class);
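/* Illustrative examples of the classification above; the result is a
   compile-time constant, which is why a plain GEN_INT suffices:

     __builtin_classify_type (42)     evaluates to integer_type_class
     __builtin_classify_type (3.14)   evaluates to real_type_class
     __builtin_classify_type ("abc")  evaluates to pointer_type_class
     (a string literal passed as an argument decays to a pointer)  */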
1816 /* This helper macro, meant to be used in mathfn_built_in below,
1817 determines which among a set of three builtin math functions is
1818 appropriate for a given type mode. The `F' and `L' cases are
1819 automatically generated from the `double' case. */
1820 #define CASE_MATHFN(MATHFN) \
1821 CASE_CFN_##MATHFN: \
1822 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1823 fcodel = BUILT_IN_##MATHFN##L ; break;
1824 /* Similar to above, but appends _R after any F/L suffix. */
1825 #define CASE_MATHFN_REENT(MATHFN) \
1826 case CFN_BUILT_IN_##MATHFN##_R: \
1827 case CFN_BUILT_IN_##MATHFN##F_R: \
1828 case CFN_BUILT_IN_##MATHFN##L_R: \
1829 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1830 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1832 /* Return a function equivalent to FN but operating on floating-point
1833 values of type TYPE, or END_BUILTINS if no such function exists.
1834 This is purely an operation on function codes; it does not guarantee
1835 that the target actually has an implementation of the function. */
1837 static built_in_function
1838 mathfn_built_in_2 (tree type, combined_fn fn)
1840 built_in_function fcode, fcodef, fcodel;
1842 switch (fn)
1844 CASE_MATHFN (ACOS)
1845 CASE_MATHFN (ACOSH)
1846 CASE_MATHFN (ASIN)
1847 CASE_MATHFN (ASINH)
1848 CASE_MATHFN (ATAN)
1849 CASE_MATHFN (ATAN2)
1850 CASE_MATHFN (ATANH)
1851 CASE_MATHFN (CBRT)
1852 CASE_MATHFN (CEIL)
1853 CASE_MATHFN (CEXPI)
1854 CASE_MATHFN (COPYSIGN)
1855 CASE_MATHFN (COS)
1856 CASE_MATHFN (COSH)
1857 CASE_MATHFN (DREM)
1858 CASE_MATHFN (ERF)
1859 CASE_MATHFN (ERFC)
1860 CASE_MATHFN (EXP)
1861 CASE_MATHFN (EXP10)
1862 CASE_MATHFN (EXP2)
1863 CASE_MATHFN (EXPM1)
1864 CASE_MATHFN (FABS)
1865 CASE_MATHFN (FDIM)
1866 CASE_MATHFN (FLOOR)
1867 CASE_MATHFN (FMA)
1868 CASE_MATHFN (FMAX)
1869 CASE_MATHFN (FMIN)
1870 CASE_MATHFN (FMOD)
1871 CASE_MATHFN (FREXP)
1872 CASE_MATHFN (GAMMA)
1873 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1874 CASE_MATHFN (HUGE_VAL)
1875 CASE_MATHFN (HYPOT)
1876 CASE_MATHFN (ILOGB)
1877 CASE_MATHFN (ICEIL)
1878 CASE_MATHFN (IFLOOR)
1879 CASE_MATHFN (INF)
1880 CASE_MATHFN (IRINT)
1881 CASE_MATHFN (IROUND)
1882 CASE_MATHFN (ISINF)
1883 CASE_MATHFN (J0)
1884 CASE_MATHFN (J1)
1885 CASE_MATHFN (JN)
1886 CASE_MATHFN (LCEIL)
1887 CASE_MATHFN (LDEXP)
1888 CASE_MATHFN (LFLOOR)
1889 CASE_MATHFN (LGAMMA)
1890 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1891 CASE_MATHFN (LLCEIL)
1892 CASE_MATHFN (LLFLOOR)
1893 CASE_MATHFN (LLRINT)
1894 CASE_MATHFN (LLROUND)
1895 CASE_MATHFN (LOG)
1896 CASE_MATHFN (LOG10)
1897 CASE_MATHFN (LOG1P)
1898 CASE_MATHFN (LOG2)
1899 CASE_MATHFN (LOGB)
1900 CASE_MATHFN (LRINT)
1901 CASE_MATHFN (LROUND)
1902 CASE_MATHFN (MODF)
1903 CASE_MATHFN (NAN)
1904 CASE_MATHFN (NANS)
1905 CASE_MATHFN (NEARBYINT)
1906 CASE_MATHFN (NEXTAFTER)
1907 CASE_MATHFN (NEXTTOWARD)
1908 CASE_MATHFN (POW)
1909 CASE_MATHFN (POWI)
1910 CASE_MATHFN (POW10)
1911 CASE_MATHFN (REMAINDER)
1912 CASE_MATHFN (REMQUO)
1913 CASE_MATHFN (RINT)
1914 CASE_MATHFN (ROUND)
1915 CASE_MATHFN (SCALB)
1916 CASE_MATHFN (SCALBLN)
1917 CASE_MATHFN (SCALBN)
1918 CASE_MATHFN (SIGNBIT)
1919 CASE_MATHFN (SIGNIFICAND)
1920 CASE_MATHFN (SIN)
1921 CASE_MATHFN (SINCOS)
1922 CASE_MATHFN (SINH)
1923 CASE_MATHFN (SQRT)
1924 CASE_MATHFN (TAN)
1925 CASE_MATHFN (TANH)
1926 CASE_MATHFN (TGAMMA)
1927 CASE_MATHFN (TRUNC)
1928 CASE_MATHFN (Y0)
1929 CASE_MATHFN (Y1)
1930 CASE_MATHFN (YN)
1932 default:
1933 return END_BUILTINS;
1936 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1937 return fcode;
1938 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1939 return fcodef;
1940 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1941 return fcodel;
1942 else
1943 return END_BUILTINS;
1946 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1947 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1948 otherwise use the explicit declaration. If we can't do the conversion,
1949 return null. */
1951 static tree
1952 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1954 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1955 if (fcode2 == END_BUILTINS)
1956 return NULL_TREE;
1958 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1959 return NULL_TREE;
1961 return builtin_decl_explicit (fcode2);
1964 /* Like mathfn_built_in_1, but always use the implicit array. */
1966 tree
1967 mathfn_built_in (tree type, combined_fn fn)
1969 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1972 /* Like mathfn_built_in_1, but take a built_in_function and
1973 always use the implicit array. */
1975 tree
1976 mathfn_built_in (tree type, enum built_in_function fn)
1978 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
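/* A sketch of how the lookup behaves, assuming the usual C math
   declarations are available:

     mathfn_built_in (float_type_node, BUILT_IN_SQRT)
       returns the implicit declaration of sqrtf
     mathfn_built_in (long_double_type_node, BUILT_IN_SQRT)
       returns the implicit declaration of sqrtl

   The F/L suffix is chosen from TYPE alone; the function code passed
   in only selects the function family.  */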
1981 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1982 return its code, otherwise return IFN_LAST. Note that this function
1983 only tests whether the function is defined in internal-fn.def, not whether
1984 it is actually available on the target. */
1986 internal_fn
1987 associated_internal_fn (tree fndecl)
1989 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1990 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1991 switch (DECL_FUNCTION_CODE (fndecl))
1993 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1994 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1995 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1997 #include "internal-fn.def"
1999 CASE_FLT_FN (BUILT_IN_POW10):
2000 return IFN_EXP10;
2002 CASE_FLT_FN (BUILT_IN_DREM):
2003 return IFN_REMAINDER;
2005 CASE_FLT_FN (BUILT_IN_SCALBN):
2006 CASE_FLT_FN (BUILT_IN_SCALBLN):
2007 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2008 return IFN_LDEXP;
2009 return IFN_LAST;
2011 default:
2012 return IFN_LAST;
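/* Example of the special cases above: for the double variant of
   scalbn on a target whose double is IEEE binary64,
   REAL_MODE_FORMAT (...)->b is 2 and the builtin maps to IFN_LDEXP;
   for a decimal floating-point mode the radix check fails and
   IFN_LAST is returned, so the library call is kept.  */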
2016 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2017 on the current target by a call to an internal function, return the
2018 code of that internal function, otherwise return IFN_LAST. The caller
2019 is responsible for ensuring that any side-effects of the built-in
2020 call are dealt with correctly. E.g. if CALL sets errno, the caller
2021 must decide that the errno result isn't needed or make it available
2022 in some other way. */
2024 internal_fn
2025 replacement_internal_fn (gcall *call)
2027 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2029 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2030 if (ifn != IFN_LAST)
2032 tree_pair types = direct_internal_fn_types (ifn, call);
2033 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2034 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2035 return ifn;
2038 return IFN_LAST;
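/* Typical use (a sketch): for a gcall to __builtin_sqrt in a block
   optimized for speed, on a target with a direct sqrt optab, this
   returns IFN_SQRT.  Per the comment above, the caller must still
   account for errno, e.g. by only substituting the internal function
   when -fno-math-errno is in effect.  */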
2041 /* Expand a call to the builtin trinary math functions (fma).
2042 Return NULL_RTX if a normal call should be emitted rather than expanding the
2043 function in-line. EXP is the expression that is a call to the builtin
2044 function; if convenient, the result should be placed in TARGET.
2045 SUBTARGET may be used as the target for computing one of EXP's
2046 operands. */
2048 static rtx
2049 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2051 optab builtin_optab;
2052 rtx op0, op1, op2, result;
2053 rtx_insn *insns;
2054 tree fndecl = get_callee_fndecl (exp);
2055 tree arg0, arg1, arg2;
2056 machine_mode mode;
2058 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2059 return NULL_RTX;
2061 arg0 = CALL_EXPR_ARG (exp, 0);
2062 arg1 = CALL_EXPR_ARG (exp, 1);
2063 arg2 = CALL_EXPR_ARG (exp, 2);
2065 switch (DECL_FUNCTION_CODE (fndecl))
2067 CASE_FLT_FN (BUILT_IN_FMA):
2068 builtin_optab = fma_optab; break;
2069 default:
2070 gcc_unreachable ();
2073 /* Make a suitable register to place result in. */
2074 mode = TYPE_MODE (TREE_TYPE (exp));
2076 /* Before working hard, check whether the instruction is available. */
2077 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2078 return NULL_RTX;
2080 result = gen_reg_rtx (mode);
2082 /* Always stabilize the argument list. */
2083 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2084 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2085 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2087 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2088 op1 = expand_normal (arg1);
2089 op2 = expand_normal (arg2);
2091 start_sequence ();
2093 /* Compute into RESULT.
2094 Set RESULT to wherever the result comes back. */
2095 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2096 result, 0);
2098 /* If we were unable to expand via the builtin, stop the sequence
2099 (without outputting the insns) and call the library function
2100 with the stabilized argument list.  */
2101 if (result == 0)
2103 end_sequence ();
2104 return expand_call (exp, target, target == const0_rtx);
2107 /* Output the entire sequence. */
2108 insns = get_insns ();
2109 end_sequence ();
2110 emit_insn (insns);
2112 return result;
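/* For example, a call such as

     double d = __builtin_fma (a, b, c);

   (a * b + c with a single rounding) is expanded through fma_optab
   when the target implements it; otherwise the code above ends the
   sequence and the stabilized call goes out to the libm fma.  */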
2115 /* Expand a call to the builtin sin and cos math functions.
2116 Return NULL_RTX if a normal call should be emitted rather than expanding the
2117 function in-line. EXP is the expression that is a call to the builtin
2118 function; if convenient, the result should be placed in TARGET.
2119 SUBTARGET may be used as the target for computing one of EXP's
2120 operands. */
2122 static rtx
2123 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2125 optab builtin_optab;
2126 rtx op0;
2127 rtx_insn *insns;
2128 tree fndecl = get_callee_fndecl (exp);
2129 machine_mode mode;
2130 tree arg;
2132 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2133 return NULL_RTX;
2135 arg = CALL_EXPR_ARG (exp, 0);
2137 switch (DECL_FUNCTION_CODE (fndecl))
2139 CASE_FLT_FN (BUILT_IN_SIN):
2140 CASE_FLT_FN (BUILT_IN_COS):
2141 builtin_optab = sincos_optab; break;
2142 default:
2143 gcc_unreachable ();
2146 /* Make a suitable register to place result in. */
2147 mode = TYPE_MODE (TREE_TYPE (exp));
2149 /* Check if sincos insn is available, otherwise fallback
2150 to sin or cos insn. */
2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 switch (DECL_FUNCTION_CODE (fndecl))
2154 CASE_FLT_FN (BUILT_IN_SIN):
2155 builtin_optab = sin_optab; break;
2156 CASE_FLT_FN (BUILT_IN_COS):
2157 builtin_optab = cos_optab; break;
2158 default:
2159 gcc_unreachable ();
2162 /* Before working hard, check whether the instruction is available. */
2163 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2165 rtx result = gen_reg_rtx (mode);
2167 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2168 need to expand the argument again. This way, we will not perform
2169 side-effects more than once.  */
2170 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2172 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2174 start_sequence ();
2176 /* Compute into RESULT.
2177 Set RESULT to wherever the result comes back. */
2178 if (builtin_optab == sincos_optab)
2180 int ok;
2182 switch (DECL_FUNCTION_CODE (fndecl))
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2186 break;
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2189 break;
2190 default:
2191 gcc_unreachable ();
2193 gcc_assert (ok);
2195 else
2196 result = expand_unop (mode, builtin_optab, op0, result, 0);
2198 if (result != 0)
2200 /* Output the entire sequence. */
2201 insns = get_insns ();
2202 end_sequence ();
2203 emit_insn (insns);
2204 return result;
2207 /* If we were unable to expand via the builtin, stop the sequence
2208 (without outputting the insns) and call the library function
2209 with the stabilized argument list.  */
2210 end_sequence ();
2213 return expand_call (exp, target, target == const0_rtx);
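/* Net effect of the strategy above (a sketch): for __builtin_sin the
   combined sincos insn is preferred, with the cosine output simply
   discarded; failing that, a dedicated sin insn is tried, and if
   neither exists the stabilized call is emitted as a normal libcall
   to sin.  */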
2216 /* Given an interclass math builtin decl FNDECL and its argument ARG
2217 return an RTL instruction code that implements the functionality.
2218 If that isn't possible or available return CODE_FOR_nothing. */
2220 static enum insn_code
2221 interclass_mathfn_icode (tree arg, tree fndecl)
2223 bool errno_set = false;
2224 optab builtin_optab = unknown_optab;
2225 machine_mode mode;
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISNORMAL:
2234 case BUILT_IN_ISFINITE:
2235 CASE_FLT_FN (BUILT_IN_FINITE):
2236 case BUILT_IN_FINITED32:
2237 case BUILT_IN_FINITED64:
2238 case BUILT_IN_FINITED128:
2239 case BUILT_IN_ISINFD32:
2240 case BUILT_IN_ISINFD64:
2241 case BUILT_IN_ISINFD128:
2242 /* These builtins have no optabs (yet). */
2243 break;
2244 default:
2245 gcc_unreachable ();
2248 /* There's no easy way to detect the case we need to set EDOM. */
2249 if (flag_errno_math && errno_set)
2250 return CODE_FOR_nothing;
2252 /* Optab mode depends on the mode of the input argument. */
2253 mode = TYPE_MODE (TREE_TYPE (arg));
2255 if (builtin_optab)
2256 return optab_handler (builtin_optab, mode);
2257 return CODE_FOR_nothing;
2260 /* Expand a call to one of the builtin math functions that operate on
2261 a floating point argument and output an integer result (ilogb, isinf,
2262 isnan, etc.).
2263 Return 0 if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
2265 function; if convenient, the result should be placed in TARGET. */
2267 static rtx
2268 expand_builtin_interclass_mathfn (tree exp, rtx target)
2270 enum insn_code icode = CODE_FOR_nothing;
2271 rtx op0;
2272 tree fndecl = get_callee_fndecl (exp);
2273 machine_mode mode;
2274 tree arg;
2276 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2279 arg = CALL_EXPR_ARG (exp, 0);
2280 icode = interclass_mathfn_icode (arg, fndecl);
2281 mode = TYPE_MODE (TREE_TYPE (arg));
2283 if (icode != CODE_FOR_nothing)
2285 struct expand_operand ops[1];
2286 rtx_insn *last = get_last_insn ();
2287 tree orig_arg = arg;
2289 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2290 need to expand the argument again. This way, we will not perform
2291 side-effects more than once.  */
2292 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2294 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2296 if (mode != GET_MODE (op0))
2297 op0 = convert_to_mode (mode, op0, 0);
2299 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2300 if (maybe_legitimize_operands (icode, 0, 1, ops)
2301 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2302 return ops[0].value;
2304 delete_insns_since (last);
2305 CALL_EXPR_ARG (exp, 0) = orig_arg;
2308 return NULL_RTX;
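/* For instance, a call

     int r = __builtin_isinf (x);

   goes through interclass_mathfn_icode above; if the target defines
   an isinf pattern for the mode of X, the classification is emitted
   in-line, otherwise NULL_RTX is returned here and a normal call
   results.  */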
2311 /* Expand a call to the builtin sincos math function.
2312 Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function. */
2316 static rtx
2317 expand_builtin_sincos (tree exp)
2319 rtx op0, op1, op2, target1, target2;
2320 machine_mode mode;
2321 tree arg, sinp, cosp;
2322 int result;
2323 location_t loc = EXPR_LOCATION (exp);
2324 tree alias_type, alias_off;
2326 if (!validate_arglist (exp, REAL_TYPE,
2327 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2328 return NULL_RTX;
2330 arg = CALL_EXPR_ARG (exp, 0);
2331 sinp = CALL_EXPR_ARG (exp, 1);
2332 cosp = CALL_EXPR_ARG (exp, 2);
2334 /* Make a suitable register to place result in. */
2335 mode = TYPE_MODE (TREE_TYPE (arg));
2337 /* Check if sincos insn is available, otherwise emit the call. */
2338 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2339 return NULL_RTX;
2341 target1 = gen_reg_rtx (mode);
2342 target2 = gen_reg_rtx (mode);
2344 op0 = expand_normal (arg);
2345 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2346 alias_off = build_int_cst (alias_type, 0);
2347 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2348 sinp, alias_off));
2349 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 cosp, alias_off));
2352 /* Compute into target1 and target2.
2353 Set TARGET to wherever the result comes back. */
2354 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2355 gcc_assert (result);
2357 /* Move target1 and target2 to the memory locations indicated
2358 by op1 and op2. */
2359 emit_move_insn (op1, target1);
2360 emit_move_insn (op2, target2);
2362 return const0_rtx;
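/* The expansion above services calls of the form

     double s, c;
     sincos (x, &s, &c);

   computing both values with a single sincos insn and storing them
   through the two pointer arguments; const0_rtx is returned because
   sincos has a void result.  */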
2365 /* Expand a call to the internal cexpi builtin to the sincos math function.
2366 EXP is the expression that is a call to the builtin function; if convenient,
2367 the result should be placed in TARGET. */
2369 static rtx
2370 expand_builtin_cexpi (tree exp, rtx target)
2372 tree fndecl = get_callee_fndecl (exp);
2373 tree arg, type;
2374 machine_mode mode;
2375 rtx op0, op1, op2;
2376 location_t loc = EXPR_LOCATION (exp);
2378 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2379 return NULL_RTX;
2381 arg = CALL_EXPR_ARG (exp, 0);
2382 type = TREE_TYPE (arg);
2383 mode = TYPE_MODE (TREE_TYPE (arg));
2385 /* Try expanding via a sincos optab, fall back to emitting a libcall
2386 to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2387 is only generated from sincos or cexp, or when either of them is available.  */
2388 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2390 op1 = gen_reg_rtx (mode);
2391 op2 = gen_reg_rtx (mode);
2393 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2395 /* Compute into op1 and op2. */
2396 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2398 else if (targetm.libc_has_function (function_sincos))
2400 tree call, fn = NULL_TREE;
2401 tree top1, top2;
2402 rtx op1a, op2a;
2404 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2405 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2406 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2407 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2409 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2410 else
2411 gcc_unreachable ();
2413 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2414 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2415 op1a = copy_addr_to_reg (XEXP (op1, 0));
2416 op2a = copy_addr_to_reg (XEXP (op2, 0));
2417 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2418 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2420 /* Make sure not to fold the sincos call again. */
2421 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2422 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2423 call, 3, arg, top1, top2));
2425 else
2427 tree call, fn = NULL_TREE, narg;
2428 tree ctype = build_complex_type (type);
2430 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2431 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2433 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2435 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2436 else
2437 gcc_unreachable ();
2439 /* If we don't have a decl for cexp, create one.  This is the
2440 friendliest fallback if the user calls __builtin_cexpi
2441 on a target without full C99 function support.  */
2442 if (fn == NULL_TREE)
2444 tree fntype;
2445 const char *name = NULL;
2447 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2448 name = "cexpf";
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2450 name = "cexp";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2452 name = "cexpl";
2454 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2455 fn = build_fn_decl (name, fntype);
2458 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2459 build_real (type, dconst0), arg);
2461 /* Make sure not to fold the cexp call again. */
2462 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2463 return expand_expr (build_call_nary (ctype, call, 1, narg),
2464 target, VOIDmode, EXPAND_NORMAL);
2467 /* Now build the proper return type. */
2468 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2469 make_tree (TREE_TYPE (arg), op2),
2470 make_tree (TREE_TYPE (arg), op1)),
2471 target, VOIDmode, EXPAND_NORMAL);
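/* cexpi computes cos (x) + i*sin (x), i.e. cexp (I*x).  That identity
   is what the final fallback above relies on: it builds the complex
   argument COMPLEX_EXPR (0.0, x) and calls cexp on it, while the
   earlier paths use a sincos insn or the sincos library function.  */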
2474 /* Conveniently construct a function call expression. FNDECL names the
2475 function to be called, N is the number of arguments, and the "..."
2476 parameters are the argument expressions.  Unlike build_call_expr,
2477 this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2479 static tree
2480 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2482 va_list ap;
2483 tree fntype = TREE_TYPE (fndecl);
2484 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2486 va_start (ap, n);
2487 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2488 va_end (ap);
2489 SET_EXPR_LOCATION (fn, loc);
2490 return fn;
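/* Usage sketch: the fallback paths later in this file build calls
   such as

     exp = build_call_nofold_loc (loc, fndecl, 1, arg);

   where fndecl stands for whatever declaration the caller obtained
   (e.g. floor), yielding a bare CALL_EXPR that will not be folded
   back into the very builtin being expanded.  */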
2493 /* Expand a call to one of the builtin rounding functions gcc defines
2494 as an extension (lfloor and lceil). As these are gcc extensions we
2495 do not need to worry about setting errno to EDOM.
2496 If expanding via optab fails, lower expression to (int)(floor(x)).
2497 EXP is the expression that is a call to the builtin function;
2498 if convenient, the result should be placed in TARGET. */
2500 static rtx
2501 expand_builtin_int_roundingfn (tree exp, rtx target)
2503 convert_optab builtin_optab;
2504 rtx op0, tmp;
2505 rtx_insn *insns;
2506 tree fndecl = get_callee_fndecl (exp);
2507 enum built_in_function fallback_fn;
2508 tree fallback_fndecl;
2509 machine_mode mode;
2510 tree arg;
2512 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2513 gcc_unreachable ();
2515 arg = CALL_EXPR_ARG (exp, 0);
2517 switch (DECL_FUNCTION_CODE (fndecl))
2519 CASE_FLT_FN (BUILT_IN_ICEIL):
2520 CASE_FLT_FN (BUILT_IN_LCEIL):
2521 CASE_FLT_FN (BUILT_IN_LLCEIL):
2522 builtin_optab = lceil_optab;
2523 fallback_fn = BUILT_IN_CEIL;
2524 break;
2526 CASE_FLT_FN (BUILT_IN_IFLOOR):
2527 CASE_FLT_FN (BUILT_IN_LFLOOR):
2528 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2529 builtin_optab = lfloor_optab;
2530 fallback_fn = BUILT_IN_FLOOR;
2531 break;
2533 default:
2534 gcc_unreachable ();
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (exp));
2540 target = gen_reg_rtx (mode);
2542 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2543 need to expand the argument again. This way, we will not perform
2544 side-effects more than once.  */
2545 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2547 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2549 start_sequence ();
2551 /* Compute into TARGET. */
2552 if (expand_sfix_optab (target, op0, builtin_optab))
2554 /* Output the entire sequence. */
2555 insns = get_insns ();
2556 end_sequence ();
2557 emit_insn (insns);
2558 return target;
2561 /* If we were unable to expand via the builtin, stop the sequence
2562 (without outputting the insns). */
2563 end_sequence ();
2565 /* Fall back to floating point rounding optab. */
2566 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2568 /* For non-C99 targets we may end up without a fallback fndecl here
2569 if the user called __builtin_lfloor directly. In this case emit
2570 a call to the floor/ceil variants nevertheless.  This should give
2571 the best user experience on targets lacking full C99 support.  */
2572 if (fallback_fndecl == NULL_TREE)
2574 tree fntype;
2575 const char *name = NULL;
2577 switch (DECL_FUNCTION_CODE (fndecl))
2579 case BUILT_IN_ICEIL:
2580 case BUILT_IN_LCEIL:
2581 case BUILT_IN_LLCEIL:
2582 name = "ceil";
2583 break;
2584 case BUILT_IN_ICEILF:
2585 case BUILT_IN_LCEILF:
2586 case BUILT_IN_LLCEILF:
2587 name = "ceilf";
2588 break;
2589 case BUILT_IN_ICEILL:
2590 case BUILT_IN_LCEILL:
2591 case BUILT_IN_LLCEILL:
2592 name = "ceill";
2593 break;
2594 case BUILT_IN_IFLOOR:
2595 case BUILT_IN_LFLOOR:
2596 case BUILT_IN_LLFLOOR:
2597 name = "floor";
2598 break;
2599 case BUILT_IN_IFLOORF:
2600 case BUILT_IN_LFLOORF:
2601 case BUILT_IN_LLFLOORF:
2602 name = "floorf";
2603 break;
2604 case BUILT_IN_IFLOORL:
2605 case BUILT_IN_LFLOORL:
2606 case BUILT_IN_LLFLOORL:
2607 name = "floorl";
2608 break;
2609 default:
2610 gcc_unreachable ();
2613 fntype = build_function_type_list (TREE_TYPE (arg),
2614 TREE_TYPE (arg), NULL_TREE);
2615 fallback_fndecl = build_fn_decl (name, fntype);
2618 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2620 tmp = expand_normal (exp);
2621 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2623 /* Truncate the result of floating point optab to integer
2624 via expand_fix (). */
2625 target = gen_reg_rtx (mode);
2626 expand_fix (target, tmp, 0);
2628 return target;
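/* Net effect of the fallback path above (a sketch): on a target with
   no lfloor pattern, a source-level

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of l = (long) floor (x); the floor
   call is expanded normally and expand_fix performs the final
   truncation.  */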
2631 /* Expand a call to one of the builtin math functions doing integer
2632 conversion (lrint).
2633 Return 0 if a normal call should be emitted rather than expanding the
2634 function in-line. EXP is the expression that is a call to the builtin
2635 function; if convenient, the result should be placed in TARGET. */
2637 static rtx
2638 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2640 convert_optab builtin_optab;
2641 rtx op0;
2642 rtx_insn *insns;
2643 tree fndecl = get_callee_fndecl (exp);
2644 tree arg;
2645 machine_mode mode;
2646 enum built_in_function fallback_fn = BUILT_IN_NONE;
2648 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2649 gcc_unreachable ();
2651 arg = CALL_EXPR_ARG (exp, 0);
2653 switch (DECL_FUNCTION_CODE (fndecl))
2655 CASE_FLT_FN (BUILT_IN_IRINT):
2656 fallback_fn = BUILT_IN_LRINT;
2657 gcc_fallthrough ();
2658 CASE_FLT_FN (BUILT_IN_LRINT):
2659 CASE_FLT_FN (BUILT_IN_LLRINT):
2660 builtin_optab = lrint_optab;
2661 break;
2663 CASE_FLT_FN (BUILT_IN_IROUND):
2664 fallback_fn = BUILT_IN_LROUND;
2665 gcc_fallthrough ();
2666 CASE_FLT_FN (BUILT_IN_LROUND):
2667 CASE_FLT_FN (BUILT_IN_LLROUND):
2668 builtin_optab = lround_optab;
2669 break;
2671 default:
2672 gcc_unreachable ();
2675 /* There's no easy way to detect the case we need to set EDOM. */
2676 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2677 return NULL_RTX;
2679 /* Make a suitable register to place result in. */
2680 mode = TYPE_MODE (TREE_TYPE (exp));
2682 /* Expand in-line only when errno handling is not required.  */
2683 if (!flag_errno_math)
2685 rtx result = gen_reg_rtx (mode);
2687 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2688 need to expand the argument again. This way, we will not perform
2689 side-effects more than once.  */
2690 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2692 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2694 start_sequence ();
2696 if (expand_sfix_optab (result, op0, builtin_optab))
2698 /* Output the entire sequence. */
2699 insns = get_insns ();
2700 end_sequence ();
2701 emit_insn (insns);
2702 return result;
2705 /* If we were unable to expand via the builtin, stop the sequence
2706 (without outputting the insns) and call the library function
2707 with the stabilized argument list.  */
2708 end_sequence ();
2711 if (fallback_fn != BUILT_IN_NONE)
2713 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2714 targets, (int) round (x) should never be transformed into
2715 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2716 a call to lround in the hope that the target provides at least some
2717 C99 functions.  This should give the best user experience on
2718 targets lacking full C99 support.  */
2719 tree fallback_fndecl = mathfn_built_in_1
2720 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2722 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2723 fallback_fndecl, 1, arg);
2725 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2726 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2727 return convert_to_mode (mode, target, 0);
2730 return expand_call (exp, target, target == const0_rtx);
2733 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2734 a normal call should be emitted rather than expanding the function
2735 in-line. EXP is the expression that is a call to the builtin
2736 function; if convenient, the result should be placed in TARGET. */
2738 static rtx
2739 expand_builtin_powi (tree exp, rtx target)
2741 tree arg0, arg1;
2742 rtx op0, op1;
2743 machine_mode mode;
2744 machine_mode mode2;
2746 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2747 return NULL_RTX;
2749 arg0 = CALL_EXPR_ARG (exp, 0);
2750 arg1 = CALL_EXPR_ARG (exp, 1);
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2753 /* Emit a libcall to libgcc. */
2755 /* Mode of the 2nd argument must match that of an int. */
2756 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2758 if (target == NULL_RTX)
2759 target = gen_reg_rtx (mode);
2761 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2762 if (GET_MODE (op0) != mode)
2763 op0 = convert_to_mode (mode, op0, 0);
2764 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2765 if (GET_MODE (op1) != mode2)
2766 op1 = convert_to_mode (mode2, op1, 0);
2768 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2769 target, LCT_CONST, mode, 2,
2770 op0, mode, op1, mode2);
2772 return target;
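/* Example: on a typical target, a call

     double y = __builtin_powi (x, 5);

   becomes a libcall to libgcc's __powidf2 (x, 5), obtained via
   optab_libfunc (powi_optab, DFmode); the exponent is converted to
   the mode of int as enforced above.  */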
2775 /* Expand expression EXP which is a call to the strlen builtin. Return
2776 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
2777 try to get the result in TARGET, if convenient. */
2779 static rtx
2780 expand_builtin_strlen (tree exp, rtx target,
2781 machine_mode target_mode)
2783 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2784 return NULL_RTX;
2785 else
2787 struct expand_operand ops[4];
2788 rtx pat;
2789 tree len;
2790 tree src = CALL_EXPR_ARG (exp, 0);
2791 rtx src_reg;
2792 rtx_insn *before_strlen;
2793 machine_mode insn_mode = target_mode;
2794 enum insn_code icode = CODE_FOR_nothing;
2795 unsigned int align;
2797 /* If the length can be computed at compile-time, return it. */
2798 len = c_strlen (src, 0);
2799 if (len)
2800 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2802 /* If the length can be computed at compile-time and is a constant
2803 integer, but there are side-effects in src, evaluate
2804 src for side-effects, then return len.
2805 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2806 can be optimized into: i++; x = 3; */
2807 len = c_strlen (src, 1);
2808 if (len && TREE_CODE (len) == INTEGER_CST)
2810 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2811 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2814 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2816 /* If SRC is not a pointer type, don't do this operation inline. */
2817 if (align == 0)
2818 return NULL_RTX;
2820 /* Bail out if we can't compute strlen in the right mode. */
2821 while (insn_mode != VOIDmode)
2823 icode = optab_handler (strlen_optab, insn_mode);
2824 if (icode != CODE_FOR_nothing)
2825 break;
2827 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2829 if (insn_mode == VOIDmode)
2830 return NULL_RTX;
2832 /* Make a place to hold the source address. We will not expand
2833 the actual source until we are sure that the expansion will
2834 not fail -- there are trees that cannot be expanded twice. */
2835 src_reg = gen_reg_rtx (Pmode);
2837 /* Mark the beginning of the strlen sequence so we can emit the
2838 source operand later. */
2839 before_strlen = get_last_insn ();
2841 create_output_operand (&ops[0], target, insn_mode);
2842 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2843 create_integer_operand (&ops[2], 0);
2844 create_integer_operand (&ops[3], align);
2845 if (!maybe_expand_insn (icode, 4, ops))
2846 return NULL_RTX;
2848 /* Now that we are assured of success, expand the source. */
2849 start_sequence ();
2850 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2851 if (pat != src_reg)
2853 #ifdef POINTERS_EXTEND_UNSIGNED
2854 if (GET_MODE (pat) != Pmode)
2855 pat = convert_to_mode (Pmode, pat,
2856 POINTERS_EXTEND_UNSIGNED);
2857 #endif
2858 emit_move_insn (src_reg, pat);
2860 pat = get_insns ();
2861 end_sequence ();
2863 if (before_strlen)
2864 emit_insn_after (pat, before_strlen);
2865 else
2866 emit_insn_before (pat, get_insns ());
2868 /* Return the value in the proper mode for this function. */
2869 if (GET_MODE (ops[0].value) == target_mode)
2870 target = ops[0].value;
2871 else if (target != 0)
2872 convert_move (target, ops[0].value, 0);
2873 else
2874 target = convert_to_mode (target_mode, ops[0].value, 0);
2876 return target;
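/* Two concrete outcomes of the expansion above: strlen ("hello")
   folds to the constant 5 via the first c_strlen call, while a
   non-constant argument is expanded in-line only when the target
   defines a strlen pattern in a sufficiently wide integer mode;
   otherwise NULL_RTX is returned and a normal call is emitted.  */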
2880 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
2881 bytes from the constant string DATA + OFFSET and return them as a
2882 target constant.  */
2884 static rtx
2885 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2886 machine_mode mode)
2888 const char *str = (const char *) data;
2890 gcc_assert (offset >= 0
2891 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2892 <= strlen (str) + 1));
2894 return c_readstr (str + offset, mode);
2897 /* LEN specifies the length of the block for the memcpy/memset operation.
2898 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
2899 In some cases we can make a very likely guess at the maximum size,
2900 which we then store in PROBABLE_MAX_SIZE.  */
2902 static void
2903 determine_block_size (tree len, rtx len_rtx,
2904 unsigned HOST_WIDE_INT *min_size,
2905 unsigned HOST_WIDE_INT *max_size,
2906 unsigned HOST_WIDE_INT *probable_max_size)
2908 if (CONST_INT_P (len_rtx))
2910 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2911 return;
2913 else
2915 wide_int min, max;
2916 enum value_range_type range_type = VR_UNDEFINED;
2918 /* Determine bounds from the type. */
2919 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2920 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2921 else
2922 *min_size = 0;
2923 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2924 *probable_max_size = *max_size
2925 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2926 else
2927 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2929 if (TREE_CODE (len) == SSA_NAME)
2930 range_type = get_range_info (len, &min, &max);
2931 if (range_type == VR_RANGE)
2933 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2934 *min_size = min.to_uhwi ();
2935 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2936 *probable_max_size = *max_size = max.to_uhwi ();
2938 else if (range_type == VR_ANTI_RANGE)
2940 /* An anti-range 0...N lets us determine the minimal size as N+1.  */
2941 if (min == 0)
2943 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2944 *min_size = max.to_uhwi () + 1;
2946 /* Code like
2948 int n;
2949 if (n < 100)
2950 memcpy (a, b, n)
2952 produces an anti-range allowing negative values of N.  We can
2953 still use that information and guess that N is not negative.  */
2955 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2956 *probable_max_size = min.to_uhwi () - 1;
2959 gcc_checking_assert (*max_size <=
2960 (unsigned HOST_WIDE_INT)
2961 GET_MODE_MASK (GET_MODE (len_rtx)));
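/* A sketch of the range derivation, assuming value-range info on the
   SSA length survives to expansion:

     if (n >= 8 && n <= 32)
       __builtin_memcpy (d, s, n);

   Here the VR_RANGE [8, 32] on N yields min_size = 8 and max_size =
   probable_max_size = 32, letting the block-move expander choose a
   strategy suited to small copies.  */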
2964 /* Helper function to do the actual work for expand_builtin_memcpy. */
2966 static rtx
2967 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2969 const char *src_str;
2970 unsigned int src_align = get_pointer_alignment (src);
2971 unsigned int dest_align = get_pointer_alignment (dest);
2972 rtx dest_mem, src_mem, dest_addr, len_rtx;
2973 HOST_WIDE_INT expected_size = -1;
2974 unsigned int expected_align = 0;
2975 unsigned HOST_WIDE_INT min_size;
2976 unsigned HOST_WIDE_INT max_size;
2977 unsigned HOST_WIDE_INT probable_max_size;
2979 /* If DEST is not a pointer type, call the normal function. */
2980 if (dest_align == 0)
2981 return NULL_RTX;
2983 /* If SRC is not a pointer type, don't do this
2984 operation in-line.  */
2985 if (src_align == 0)
2986 return NULL_RTX;
2988 if (currently_expanding_gimple_stmt)
2989 stringop_block_profile (currently_expanding_gimple_stmt,
2990 &expected_align, &expected_size);
2992 if (expected_align < dest_align)
2993 expected_align = dest_align;
2994 dest_mem = get_memory_rtx (dest, len);
2995 set_mem_align (dest_mem, dest_align);
2996 len_rtx = expand_normal (len);
2997 determine_block_size (len, len_rtx, &min_size, &max_size,
2998 &probable_max_size);
2999 src_str = c_getstr (src);
3001 /* If SRC is a string constant and block move would be done
3002 by pieces, we can avoid loading the string from memory
3003 and instead store only the computed constants.  */
3004 if (src_str
3005 && CONST_INT_P (len_rtx)
3006 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3007 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3008 CONST_CAST (char *, src_str),
3009 dest_align, false))
3011 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3012 builtin_memcpy_read_str,
3013 CONST_CAST (char *, src_str),
3014 dest_align, false, 0);
3015 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3016 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3017 return dest_mem;
3020 src_mem = get_memory_rtx (src, len);
3021 set_mem_align (src_mem, src_align);
3023 /* Copy word part most expediently. */
3024 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3025 CALL_EXPR_TAILCALL (exp)
3026 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3027 expected_align, expected_size,
3028 min_size, max_size, probable_max_size);
3030 if (dest_addr == 0)
3032 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3033 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3036 return dest_addr;
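/* Illustrative effect of the string-constant path above: for

     __builtin_memcpy (buf, "hi", 3);

   the bytes are fetched by builtin_memcpy_read_str and emitted as
   immediate stores (subject to can_store_by_pieces), so the "hi"
   literal never has to be loaded from memory at run time.  */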
3039 /* Try to verify that the sizes and lengths of the arguments to a string
3040 manipulation function given by EXP are within valid bounds and that
3041 the operation does not lead to buffer overflow. Arguments other than
3042 EXP may be null. When non-null, the arguments have the following
3043 meaning:
3044 SIZE is the user-supplied size argument to the function (such as in
3045 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)).  It specifies the exact
3046 number of bytes to write.
3047 MAXLEN is the user-supplied bound on the length of the source sequence
3048 (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3049 of bytes to write.
3050 SRC is the source string (such as in strcpy(d, s)) when the expression
3051 EXP is a string function call (as opposed to a memory call like memcpy).
3052 As an exception, SRC can also be an integer denoting the precomputed
3053 size of the source string or object (for functions like memcpy).
3054 OBJSIZE is the size of the destination object specified by the last
3055 argument to the _chk builtins, typically resulting from the expansion
3056 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3057 OBJSIZE).
3059 When SIZE is null, MAXLEN is checked to verify that it doesn't exceed
3060 SIZE_MAX.
3062 If the call is successfully verified as safe from buffer overflow
3063 the function returns true, otherwise false.  */
3065 static bool
3066 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3068 /* The size of the largest object is half the address space, or
3069 SSIZE_MAX. (This is way too permissive.) */
3070 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3072 tree slen = NULL_TREE;
3074 tree range[2] = { NULL_TREE, NULL_TREE };
3076 /* Set to true when the exact number of bytes written by a string
3077 function like strcpy is not known and the only thing that is
3078 known is that it must be at least one (for the terminating nul). */
3079 bool at_least_one = false;
3080 if (src)
3082 /* SRC is normally a pointer to string but as a special case
3083 it can be an integer denoting the length of a string. */
3084 if (POINTER_TYPE_P (TREE_TYPE (src)))
3086 /* Try to determine the range of lengths the source string
3087 refers to. If it can be determined and is less than
3088 the upper bound given by MAXLEN add one to it for
3089 the terminating nul. Otherwise, set it to one for
3090 the same reason, or to MAXLEN as appropriate. */
3091 get_range_strlen (src, range);
3092 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3094 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3095 range[0] = range[1] = maxlen;
3096 else
3097 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3098 range[0], size_one_node);
3100 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3101 range[1] = maxlen;
3102 else if (!integer_all_onesp (range[1]))
3103 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3104 range[1], size_one_node);
3106 slen = range[0];
3108 else
3110 at_least_one = true;
3111 slen = size_one_node;
3114 else
3115 slen = src;
3118 if (!size && !maxlen)
3120 /* When the only available piece of data is the object size
3121 there is nothing to do. */
3122 if (!slen)
3123 return true;
3125 /* Otherwise, when the length of the source sequence is known
3126 (as with strlen), set SIZE to it.  */
3127 if (!range[0])
3128 size = slen;
3131 if (!objsize)
3132 objsize = maxobjsize;
3134 /* The SIZE is exact if it's non-null, constant, and in range of
3135 unsigned HOST_WIDE_INT. */
3136 bool exactsize = size && tree_fits_uhwi_p (size);
3138 if (size)
3139 get_size_range (size, range);
3141 /* First check the number of bytes to be written against the maximum
3142 object size. */
3143 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3145 location_t loc = tree_nonartificial_location (exp);
3146 loc = expansion_point_location_if_in_system_header (loc);
3148 if (range[0] == range[1])
3149 warning_at (loc, opt,
3150 "%K%qD specified size %E "
3151 "exceeds maximum object size %E",
3152 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3153 else
3154 warning_at (loc, opt,
3155 "%K%qD specified size between %E and %E "
3156 "exceeds maximum object size %E",
3157 exp, get_callee_fndecl (exp),
3158 range[0], range[1], maxobjsize);
3159 return false;
3162 /* Next check the number of bytes to be written against the destination
3163 object size. */
3164 if (range[0] || !exactsize || integer_all_onesp (size))
3166 if (range[0]
3167 && ((tree_fits_uhwi_p (objsize)
3168 && tree_int_cst_lt (objsize, range[0]))
3169 || (tree_fits_uhwi_p (size)
3170 && tree_int_cst_lt (size, range[0]))))
3172 location_t loc = tree_nonartificial_location (exp);
3173 loc = expansion_point_location_if_in_system_header (loc);
3175 if (size == slen && at_least_one)
3177 /* This is a call to strcpy with a destination of 0 size
3178 and a source of unknown length. The call will write
3179 at least one byte past the end of the destination. */
3180 warning_at (loc, opt,
3181 "%K%qD writing %E or more bytes into a region "
3182 "of size %E overflows the destination",
3183 exp, get_callee_fndecl (exp), range[0], objsize);
3185 else if (tree_int_cst_equal (range[0], range[1]))
3186 warning_at (loc, opt,
3187 (integer_onep (range[0])
3188 ? G_("%K%qD writing %E byte into a region "
3189 "of size %E overflows the destination")
3190 : G_("%K%qD writing %E bytes into a region "
3191 "of size %E overflows the destination")),
3192 exp, get_callee_fndecl (exp), range[0], objsize);
3193 else if (tree_int_cst_sign_bit (range[1]))
3195 /* Avoid printing the upper bound if it's invalid. */
3196 warning_at (loc, opt,
3197 "%K%qD writing %E or more bytes into a region "
3198 "of size %E overflows the destination",
3199 exp, get_callee_fndecl (exp), range[0], objsize);
3201 else
3202 warning_at (loc, opt,
3203 "%K%qD writing between %E and %E bytes into "
3204 "a region of size %E overflows the destination",
3205 exp, get_callee_fndecl (exp), range[0], range[1],
3206 objsize);
3208 /* Return error when an overflow has been detected. */
3209 return false;
3213 /* Check the maximum length of the source sequence against the size
3214 of the destination object if known, or against the maximum size
3215 of an object. */
3216 if (maxlen)
3218 get_size_range (maxlen, range);
3220 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3222 location_t loc = tree_nonartificial_location (exp);
3223 loc = expansion_point_location_if_in_system_header (loc);
3225 if (tree_int_cst_lt (maxobjsize, range[0]))
3227 /* Warn about crazy big sizes first since that's more
3228 likely to be meaningful than saying that the bound
3229 is greater than the object size if both are big. */
3230 if (range[0] == range[1])
3231 warning_at (loc, opt,
3232 "%K%qD specified bound %E "
3233 "exceeds maximum object size %E",
3234 exp, get_callee_fndecl (exp),
3235 range[0], maxobjsize);
3236 else
3237 warning_at (loc, opt,
3238 "%K%qD specified bound between %E and %E "
3239 "exceeds maximum object size %E",
3240 exp, get_callee_fndecl (exp),
3241 range[0], range[1], maxobjsize);
3243 return false;
3246 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3248 if (tree_int_cst_equal (range[0], range[1]))
3249 warning_at (loc, opt,
3250 "%K%qD specified bound %E "
3251 "exceeds destination size %E",
3252 exp, get_callee_fndecl (exp),
3253 range[0], objsize);
3254 else
3255 warning_at (loc, opt,
3256 "%K%qD specified bound between %E and %E "
3257 "exceeds destination size %E",
3258 exp, get_callee_fndecl (exp),
3259 range[0], range[1], objsize);
3260 return false;
3265 if (slen
3266 && slen == src
3267 && size && range[0]
3268 && tree_int_cst_lt (slen, range[0]))
3270 location_t loc = tree_nonartificial_location (exp);
3272 if (tree_int_cst_equal (range[0], range[1]))
3273 warning_at (loc, opt,
3274 (tree_int_cst_equal (range[0], integer_one_node)
3275 ? G_("%K%qD reading %E byte from a region of size %E")
3276 : G_("%K%qD reading %E bytes from a region of size %E")),
3277 exp, get_callee_fndecl (exp), range[0], slen);
3278 else if (tree_int_cst_sign_bit (range[1]))
3280 /* Avoid printing the upper bound if it's invalid. */
3281 warning_at (loc, opt,
3282 "%K%qD reading %E or more bytes from a region "
3283 "of size %E",
3284 exp, get_callee_fndecl (exp), range[0], slen);
3286 else
3287 warning_at (loc, opt,
3288 "%K%qD reading between %E and %E bytes from a region "
3289 "of size %E",
3290 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3291 return false;
3294 return true;
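/* Example diagnostic from the checks above, using the wording of the
   warning_at calls: given

     char d[3];
     __builtin_memcpy (d, s, 5);

   with -Wstringop-overflow enabled, the destination check emits

     warning: 'memcpy' writing 5 bytes into a region of size 3
     overflows the destination  */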
3297 /* Helper to compute the size of the object referenced by the DEST
3298 expression, which must be of pointer type, using Object Size type
3299 OSTYPE (only the least significant 2 bits are used). Return
3300 the size of the object if successful or NULL when the size cannot
3301 be determined. */
3303 static inline tree
3304 compute_objsize (tree dest, int ostype)
3306 unsigned HOST_WIDE_INT size;
3307 if (compute_builtin_object_size (dest, ostype & 3, &size))
3308 return build_int_cst (sizetype, size);
3310 return NULL_TREE;
3313 /* Helper to determine and check the sizes of the source and the destination
3314 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3315 call expression, DEST is the destination argument, SRC is the source
3316 argument or null, and LEN is the number of bytes. Use Object Size type-0
3317 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3318 (no overflow or invalid sizes), false otherwise. */
3320 static bool
3321 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3323 if (!warn_stringop_overflow)
3324 return true;
3326 /* For functions like memset and memcpy that operate on raw memory
3327 try to determine the size of the largest source and destination
3328 object using type-0 Object Size regardless of the object size
3329 type specified by the option. */
3330 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3331 tree dstsize = compute_objsize (dest, 0);
3333 return check_sizes (OPT_Wstringop_overflow_, exp,
3334 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3337 /* Validate memchr arguments without performing any expansion.
3338 Return NULL_RTX. */
3340 static rtx
3341 expand_builtin_memchr (tree exp, rtx)
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3347 tree arg1 = CALL_EXPR_ARG (exp, 0);
3348 tree len = CALL_EXPR_ARG (exp, 2);
3350 /* Diagnose calls where the specified length exceeds the size
3351 of the object. */
3352 if (warn_stringop_overflow)
3354 tree size = compute_objsize (arg1, 0);
3355 check_sizes (OPT_Wstringop_overflow_,
3356 exp, len, /*maxlen=*/NULL_TREE,
3357 size, /*objsize=*/NULL_TREE);
3360 return NULL_RTX;
3363 /* Expand a call EXP to the memcpy builtin.
3364 Return NULL_RTX if we failed, the caller should emit a normal call,
3365 otherwise try to get the result in TARGET, if convenient (and in
3366 mode MODE if that's convenient). */
3368 static rtx
3369 expand_builtin_memcpy (tree exp, rtx target)
3371 if (!validate_arglist (exp,
3372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 return NULL_RTX;
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3379 check_memop_sizes (exp, dest, src, len);
3381 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3384 /* Check a call EXP to the memmove built-in for validity.
3385 Return NULL_RTX on both success and failure. */
3387 static rtx
3388 expand_builtin_memmove (tree exp, rtx)
3390 if (!validate_arglist (exp,
3391 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3392 return NULL_RTX;
3394 tree dest = CALL_EXPR_ARG (exp, 0);
3395 tree src = CALL_EXPR_ARG (exp, 1);
3396 tree len = CALL_EXPR_ARG (exp, 2);
3398 check_memop_sizes (exp, dest, src, len);
3400 return NULL_RTX;
3403 /* Expand an instrumented call EXP to the memcpy builtin.
3404 Return NULL_RTX if we failed, the caller should emit a normal call,
3405 otherwise try to get the result in TARGET, if convenient (and in
3406 mode MODE if that's convenient). */
3408 static rtx
3409 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3411 if (!validate_arglist (exp,
3412 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3413 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3414 INTEGER_TYPE, VOID_TYPE))
3415 return NULL_RTX;
3416 else
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 2);
3420 tree len = CALL_EXPR_ARG (exp, 4);
3421 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3423 /* Return src bounds with the result. */
3424 if (res)
3426 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3427 expand_normal (CALL_EXPR_ARG (exp, 1)));
3428 res = chkp_join_splitted_slot (res, bnd);
3430 return res;
3434 /* Expand a call EXP to the mempcpy builtin.
3435 Return NULL_RTX if we failed; the caller should emit a normal call,
3436 otherwise try to get the result in TARGET, if convenient (and in
3437 mode MODE if that's convenient). If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 stpcpy. */
3442 static rtx
3443 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3449 tree dest = CALL_EXPR_ARG (exp, 0);
3450 tree src = CALL_EXPR_ARG (exp, 1);
3451 tree len = CALL_EXPR_ARG (exp, 2);
3453 /* Avoid expanding mempcpy into memcpy when the call is determined
3454 to overflow the buffer. This also prevents the same overflow
3455 from being diagnosed again when expanding memcpy. */
3456 if (!check_memop_sizes (exp, dest, src, len))
3457 return NULL_RTX;
3459 return expand_builtin_mempcpy_args (dest, src, len,
3460 target, mode, /*endp=*/ 1,
3461 exp);
3464 /* Expand an instrumented call EXP to the mempcpy builtin.
3465 Return NULL_RTX if we failed, the caller should emit a normal call,
3466 otherwise try to get the result in TARGET, if convenient (and in
3467 mode MODE if that's convenient). */
3469 static rtx
3470 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3472 if (!validate_arglist (exp,
3473 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3474 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3475 INTEGER_TYPE, VOID_TYPE))
3476 return NULL_RTX;
3477 else
3479 tree dest = CALL_EXPR_ARG (exp, 0);
3480 tree src = CALL_EXPR_ARG (exp, 2);
3481 tree len = CALL_EXPR_ARG (exp, 4);
3482 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3483 mode, 1, exp);
3485 /* Return src bounds with the result. */
3486 if (res)
3488 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3489 expand_normal (CALL_EXPR_ARG (exp, 1)));
3490 res = chkp_join_splitted_slot (res, bnd);
3492 return res;
3496 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3497 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3498 so that this can also be called without constructing an actual CALL_EXPR.
3499 The other arguments and return value are the same as for
3500 expand_builtin_mempcpy. */
3502 static rtx
3503 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3504 rtx target, machine_mode mode, int endp,
3505 tree orig_exp)
3507 tree fndecl = get_callee_fndecl (orig_exp);
3509 /* If return value is ignored, transform mempcpy into memcpy. */
3510 if (target == const0_rtx
3511 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3512 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3514 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3515 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3516 dest, src, len);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3519 else if (target == const0_rtx
3520 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3522 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3523 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3524 dest, src, len);
3525 return expand_expr (result, target, mode, EXPAND_NORMAL);
3527 else
3529 const char *src_str;
3530 unsigned int src_align = get_pointer_alignment (src);
3531 unsigned int dest_align = get_pointer_alignment (dest);
3532 rtx dest_mem, src_mem, len_rtx;
3534 /* If either SRC or DEST is not a pointer type, don't do this
3535 operation in-line. */
3536 if (dest_align == 0 || src_align == 0)
3537 return NULL_RTX;
3539 /* If LEN is not constant, call the normal function. */
3540 if (! tree_fits_uhwi_p (len))
3541 return NULL_RTX;
3543 len_rtx = expand_normal (len);
3544 src_str = c_getstr (src);
3546 /* If SRC is a string constant and block move would be done
3547 by pieces, we can avoid loading the string from memory
3548 and need only store the computed constants. */
3549 if (src_str
3550 && CONST_INT_P (len_rtx)
3551 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3552 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3553 CONST_CAST (char *, src_str),
3554 dest_align, false))
3556 dest_mem = get_memory_rtx (dest, len);
3557 set_mem_align (dest_mem, dest_align);
3558 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3559 builtin_memcpy_read_str,
3560 CONST_CAST (char *, src_str),
3561 dest_align, false, endp);
3562 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3567 if (CONST_INT_P (len_rtx)
3568 && can_move_by_pieces (INTVAL (len_rtx),
3569 MIN (dest_align, src_align)))
3571 dest_mem = get_memory_rtx (dest, len);
3572 set_mem_align (dest_mem, dest_align);
3573 src_mem = get_memory_rtx (src, len);
3574 set_mem_align (src_mem, src_align);
3575 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3576 MIN (dest_align, src_align), endp);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3582 return NULL_RTX;
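/* Editor's sketch (hypothetical helper, not GCC code): the ENDP
   convention used by expand_builtin_mempcpy_args above and by
   expand_movstr below, modeled in plain C.  */

static char *
endp_model (char *dest, size_t n, int endp)
{
  if (endp == 0)
    return dest;         /* memcpy/strcpy style: the destination.  */
  if (endp == 1)
    return dest + n;     /* mempcpy style: one past the last byte.  */
  return dest + n - 1;   /* stpcpy style: the terminating NUL.  */
}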
3586 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3587 we failed; the caller should emit a normal call, otherwise try to
3588 get the result in TARGET, if convenient. If ENDP is 0, return the
3589 destination pointer; if ENDP is 1, return the end pointer, a la
3590 mempcpy; and if ENDP is 2, return the end pointer minus one, a la
3591 stpcpy. */
3593 static rtx
3594 expand_movstr (tree dest, tree src, rtx target, int endp)
3596 struct expand_operand ops[3];
3597 rtx dest_mem;
3598 rtx src_mem;
3600 if (!targetm.have_movstr ())
3601 return NULL_RTX;
3603 dest_mem = get_memory_rtx (dest, NULL);
3604 src_mem = get_memory_rtx (src, NULL);
3605 if (!endp)
3607 target = force_reg (Pmode, XEXP (dest_mem, 0));
3608 dest_mem = replace_equiv_address (dest_mem, target);
3611 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3612 create_fixed_operand (&ops[1], dest_mem);
3613 create_fixed_operand (&ops[2], src_mem);
3614 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3615 return NULL_RTX;
3617 if (endp && target != const0_rtx)
3619 target = ops[0].value;
3620 /* movstr is supposed to set end to the address of the NUL
3621 terminator. If the caller requested a mempcpy-like return value,
3622 adjust it. */
3623 if (endp == 1)
3625 rtx tem = plus_constant (GET_MODE (target),
3626 gen_lowpart (GET_MODE (target), target), 1);
3627 emit_move_insn (target, force_operand (tem, NULL_RTX));
3630 return target;
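/* Editor's note: movstr sets its output operand to the address of the
   copied NUL terminator, i.e. the ENDP == 2 (stpcpy) convention; the
   plus_constant adjustment above converts that to the ENDP == 1
   (mempcpy) convention by adding one.  */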
3633 /* Do some very basic size validation of a call to the strcat builtin
3634 given by EXP. Return NULL_RTX to have the built-in expand to a call
3635 to the library function. */
3637 static rtx
3638 expand_builtin_strcat (tree exp, rtx)
3640 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3641 || !warn_stringop_overflow)
3642 return NULL_RTX;
3644 tree dest = CALL_EXPR_ARG (exp, 0);
3645 tree src = CALL_EXPR_ARG (exp, 1);
3647 /* There is no way here to determine the length of the string in
3648 the destination to which the SRC string is being appended, so
3649 just diagnose cases when the source string is longer than
3650 the destination object. */
3652 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3654 check_sizes (OPT_Wstringop_overflow_,
3655 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3657 return NULL_RTX;
3660 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3661 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3662 try to get the result in TARGET, if convenient (and in mode MODE if that's
3663 convenient). */
3665 static rtx
3666 expand_builtin_strcpy (tree exp, rtx target)
3668 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3669 return NULL_RTX;
3671 tree dest = CALL_EXPR_ARG (exp, 0);
3672 tree src = CALL_EXPR_ARG (exp, 1);
3674 if (warn_stringop_overflow)
3676 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3677 check_sizes (OPT_Wstringop_overflow_,
3678 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3681 return expand_builtin_strcpy_args (dest, src, target);
3684 /* Helper function to do the actual work for expand_builtin_strcpy. The
3685 arguments to the builtin_strcpy call DEST and SRC are broken out
3686 so that this can also be called without constructing an actual CALL_EXPR.
3687 The other arguments and return value are the same as for
3688 expand_builtin_strcpy. */
3690 static rtx
3691 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3693 return expand_movstr (dest, src, target, /*endp=*/0);
3696 /* Expand a call EXP to the stpcpy builtin.
3697 Return NULL_RTX if we failed; the caller should emit a normal call,
3698 otherwise try to get the result in TARGET, if convenient (and in
3699 mode MODE if that's convenient). */
3701 static rtx
3702 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3704 tree dst, src;
3705 location_t loc = EXPR_LOCATION (exp);
3707 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3708 return NULL_RTX;
3710 dst = CALL_EXPR_ARG (exp, 0);
3711 src = CALL_EXPR_ARG (exp, 1);
3713 if (warn_stringop_overflow)
3715 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3716 check_sizes (OPT_Wstringop_overflow_,
3717 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3720 /* If return value is ignored, transform stpcpy into strcpy. */
3721 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3723 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3724 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3725 return expand_expr (result, target, mode, EXPAND_NORMAL);
3727 else
3729 tree len, lenp1;
3730 rtx ret;
3732 /* Ensure we get an actual string whose length can be evaluated at
3733 compile-time, not an expression containing a string. This is
3734 because the latter will potentially produce pessimized code
3735 when used to produce the return value. */
3736 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3737 return expand_movstr (dst, src, target, /*endp=*/2);
3739 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3740 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3741 target, mode, /*endp=*/2,
3742 exp);
3744 if (ret)
3745 return ret;
3747 if (TREE_CODE (len) == INTEGER_CST)
3749 rtx len_rtx = expand_normal (len);
3751 if (CONST_INT_P (len_rtx))
3753 ret = expand_builtin_strcpy_args (dst, src, target);
3755 if (ret)
3757 if (! target)
3759 if (mode != VOIDmode)
3760 target = gen_reg_rtx (mode);
3761 else
3762 target = gen_reg_rtx (GET_MODE (ret));
3764 if (GET_MODE (target) != GET_MODE (ret))
3765 ret = gen_lowpart (GET_MODE (target), ret);
3767 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3768 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3769 gcc_assert (ret);
3771 return target;
3776 return expand_movstr (dst, src, target, /*endp=*/2);
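/* Editor's sketch (hypothetical helper, not GCC code): the identity the
   expansion above relies on.  stpcpy returns the address of the copied
   NUL, which is why it can be expanded as mempcpy with ENDP == 2.  */

static char *
stpcpy_model (char *dest, const char *src)
{
  size_t n = strlen (src);
  memcpy (dest, src, n + 1);   /* Copy the bytes including the NUL.  */
  return dest + n;             /* mempcpy (dest, src, n + 1) minus 1.  */
}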
3780 /* Check a call EXP to the stpncpy built-in for validity.
3781 Return NULL_RTX on both success and failure. */
3783 static rtx
3784 expand_builtin_stpncpy (tree exp, rtx)
3786 if (!validate_arglist (exp,
3787 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3788 || !warn_stringop_overflow)
3789 return NULL_RTX;
3791 /* The source and destination of the call. */
3792 tree dest = CALL_EXPR_ARG (exp, 0);
3793 tree src = CALL_EXPR_ARG (exp, 1);
3795 /* The exact number of bytes to write (not the maximum). */
3796 tree len = CALL_EXPR_ARG (exp, 2);
3798 /* The size of the destination object. */
3799 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3801 check_sizes (OPT_Wstringop_overflow_,
3802 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3804 return NULL_RTX;
3807 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3808 bytes from constant string DATA + OFFSET and return them as a target
3809 constant. */
3811 static rtx
3812 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3813 machine_mode mode)
3815 const char *str = (const char *) data;
3817 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3818 return const0_rtx;
3820 return c_readstr (str + offset, mode);
3823 /* Helper to check the sizes of sequences and the destination of calls
3824 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3825 success (no overflow or invalid sizes), false otherwise. */
3827 static bool
3828 check_strncat_sizes (tree exp, tree objsize)
3830 tree dest = CALL_EXPR_ARG (exp, 0);
3831 tree src = CALL_EXPR_ARG (exp, 1);
3832 tree maxlen = CALL_EXPR_ARG (exp, 2);
3834 /* Try to determine the range of lengths that the source expression
3835 refers to. */
3836 tree lenrange[2];
3837 get_range_strlen (src, lenrange);
3839 /* Try to verify that the destination is big enough for the shortest
3840 string. */
3842 if (!objsize && warn_stringop_overflow)
3844 /* If it hasn't been provided by __strncat_chk, try to determine
3845 the size of the destination object into which the source is
3846 being copied. */
3847 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3850 /* Add one for the terminating nul. */
3851 tree srclen = (lenrange[0]
3852 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3853 size_one_node)
3854 : NULL_TREE);
3856 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3857 nul, so the specified upper bound should never be equal to (or greater
3858 than) the size of the destination. */
3859 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3860 && tree_int_cst_equal (objsize, maxlen))
3862 location_t loc = tree_nonartificial_location (exp);
3863 loc = expansion_point_location_if_in_system_header (loc);
3865 warning_at (loc, OPT_Wstringop_overflow_,
3866 "%K%qD specified bound %E equals destination size",
3867 exp, get_callee_fndecl (exp), maxlen);
3869 return false;
3872 if (!srclen
3873 || (maxlen && tree_fits_uhwi_p (maxlen)
3874 && tree_fits_uhwi_p (srclen)
3875 && tree_int_cst_lt (maxlen, srclen)))
3876 srclen = maxlen;
3878 /* The number of bytes to write is LEN, but check_sizes will also
3879 check SRCLEN if LEN's value isn't known. */
3880 return check_sizes (OPT_Wstringop_overflow_,
3881 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3884 /* Similar to expand_builtin_strcat, do some very basic size validation
3885 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3886 the built-in expand to a call to the library function. */
3888 static rtx
3889 expand_builtin_strncat (tree exp, rtx)
3891 if (!validate_arglist (exp,
3892 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3893 || !warn_stringop_overflow)
3894 return NULL_RTX;
3896 tree dest = CALL_EXPR_ARG (exp, 0);
3897 tree src = CALL_EXPR_ARG (exp, 1);
3898 /* The upper bound on the number of bytes to write. */
3899 tree maxlen = CALL_EXPR_ARG (exp, 2);
3900 /* The length of the source sequence. */
3901 tree slen = c_strlen (src, 1);
3903 /* Try to determine the range of lengths that the source expression
3904 refers to. */
3905 tree lenrange[2];
3906 if (slen)
3907 lenrange[0] = lenrange[1] = slen;
3908 else
3909 get_range_strlen (src, lenrange);
3911 /* Try to verify that the destination is big enough for the shortest
3912 string. First try to determine the size of the destination object
3913 into which the source is being copied. */
3914 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3916 /* Add one for the terminating nul. */
3917 tree srclen = (lenrange[0]
3918 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3919 size_one_node)
3920 : NULL_TREE);
3922 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3923 nul, so the specified upper bound should never be equal to (or greater
3924 than) the size of the destination. */
3925 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3926 && tree_int_cst_equal (destsize, maxlen))
3928 location_t loc = tree_nonartificial_location (exp);
3929 loc = expansion_point_location_if_in_system_header (loc);
3931 warning_at (loc, OPT_Wstringop_overflow_,
3932 "%K%qD specified bound %E equals destination size",
3933 exp, get_callee_fndecl (exp), maxlen);
3935 return NULL_RTX;
3938 if (!srclen
3939 || (maxlen && tree_fits_uhwi_p (maxlen)
3940 && tree_fits_uhwi_p (srclen)
3941 && tree_int_cst_lt (maxlen, srclen)))
3942 srclen = maxlen;
3944 /* The number of bytes to write is LEN, but check_sizes will also
3945 check SRCLEN if LEN's value isn't known. */
3946 check_sizes (OPT_Wstringop_overflow_,
3947 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3949 return NULL_RTX;
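/* Editor's illustration of the misuse diagnosed above: strncat always
   appends a NUL after at most N copied bytes, so a bound equal to the
   destination size can overflow by one byte:

     char buf[8] = "";
     strncat (buf, s, sizeof buf);                     diagnosed
     strncat (buf, s, sizeof buf - strlen (buf) - 1);  correct  */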
3952 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3953 NULL_RTX if we failed; the caller should emit a normal call. */
3955 static rtx
3956 expand_builtin_strncpy (tree exp, rtx target)
3958 location_t loc = EXPR_LOCATION (exp);
3960 if (validate_arglist (exp,
3961 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3963 tree dest = CALL_EXPR_ARG (exp, 0);
3964 tree src = CALL_EXPR_ARG (exp, 1);
3965 /* The number of bytes to write (not the maximum). */
3966 tree len = CALL_EXPR_ARG (exp, 2);
3967 /* The length of the source sequence. */
3968 tree slen = c_strlen (src, 1);
3970 if (warn_stringop_overflow)
3972 tree destsize = compute_objsize (dest,
3973 warn_stringop_overflow - 1);
3975 /* The number of bytes to write is LEN, but check_sizes will also
3976 check SLEN if LEN's value isn't known. */
3977 check_sizes (OPT_Wstringop_overflow_,
3978 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3981 /* We must be passed a constant LEN and a SRC with a known constant length. */
3982 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3983 return NULL_RTX;
3985 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3987 /* We're required to pad with trailing zeros if the requested
3988 len is greater than strlen(s2)+1. In that case try to
3989 use store_by_pieces; if it fails, punt. */
3990 if (tree_int_cst_lt (slen, len))
3992 unsigned int dest_align = get_pointer_alignment (dest);
3993 const char *p = c_getstr (src);
3994 rtx dest_mem;
3996 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3997 || !can_store_by_pieces (tree_to_uhwi (len),
3998 builtin_strncpy_read_str,
3999 CONST_CAST (char *, p),
4000 dest_align, false))
4001 return NULL_RTX;
4003 dest_mem = get_memory_rtx (dest, len);
4004 store_by_pieces (dest_mem, tree_to_uhwi (len),
4005 builtin_strncpy_read_str,
4006 CONST_CAST (char *, p), dest_align, false, 0);
4007 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4008 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4009 return dest_mem;
4012 return NULL_RTX;
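/* Editor's sketch (hypothetical helper, not GCC code): the padding rule
   handled above.  When LEN exceeds strlen (SRC), strncpy must fill the
   rest of the destination with zeros.  */

static void
strncpy_model (char *dest, const char *src, size_t n)
{
  size_t slen = strlen (src);
  if (slen >= n)
    memcpy (dest, src, n);   /* Truncated copy; no NUL is written.  */
  else
    {
      memcpy (dest, src, slen + 1);                /* String plus NUL.  */
      memset (dest + slen + 1, 0, n - slen - 1);   /* Zero padding.  */
    }
}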
4015 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4016 bytes from constant string DATA + OFFSET and return them as a target
4017 constant. */
4019 static rtx
4020 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4021 machine_mode mode)
4023 const char *c = (const char *) data;
4024 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4026 memset (p, *c, GET_MODE_SIZE (mode));
4028 return c_readstr (p, mode);
4031 /* Callback routine for store_by_pieces. Return the RTL of a register
4032 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4033 char value given in the RTL register data. For example, if mode is
4034 4 bytes wide, return the RTL for 0x01010101*data. */
4036 static rtx
4037 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4038 machine_mode mode)
4040 rtx target, coeff;
4041 size_t size;
4042 char *p;
4044 size = GET_MODE_SIZE (mode);
4045 if (size == 1)
4046 return (rtx) data;
4048 p = XALLOCAVEC (char, size);
4049 memset (p, 1, size);
4050 coeff = c_readstr (p, mode);
4052 target = convert_to_mode (mode, (rtx) data, 1);
4053 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4054 return force_reg (mode, target);
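/* Editor's sketch (hypothetical helper, not GCC code): the coefficient
   trick used above, shown for a 4-byte mode.  Multiplying the byte by
   0x01010101 replicates it into every byte of the word.  */

static unsigned int
broadcast_byte_model (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;   /* 0xAB becomes 0xABABABAB.  */
}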
4057 /* Expand expression EXP, which is a call to the memset builtin. Return
4058 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4059 try to get the result in TARGET, if convenient (and in mode MODE if that's
4060 convenient). */
4062 static rtx
4063 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4065 if (!validate_arglist (exp,
4066 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4067 return NULL_RTX;
4069 tree dest = CALL_EXPR_ARG (exp, 0);
4070 tree val = CALL_EXPR_ARG (exp, 1);
4071 tree len = CALL_EXPR_ARG (exp, 2);
4073 check_memop_sizes (exp, dest, NULL_TREE, len);
4075 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4078 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4079 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4080 try to get the result in TARGET, if convenient (and in mode MODE if that's
4081 convenient). */
4083 static rtx
4084 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4086 if (!validate_arglist (exp,
4087 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4088 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4089 return NULL_RTX;
4090 else
4092 tree dest = CALL_EXPR_ARG (exp, 0);
4093 tree val = CALL_EXPR_ARG (exp, 2);
4094 tree len = CALL_EXPR_ARG (exp, 3);
4095 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4097 /* Return dest bounds with the result. */
4098 if (res)
4100 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4101 expand_normal (CALL_EXPR_ARG (exp, 1)));
4102 res = chkp_join_splitted_slot (res, bnd);
4104 return res;
4108 /* Helper function to do the actual work for expand_builtin_memset. The
4109 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4110 so that this can also be called without constructing an actual CALL_EXPR.
4111 The other arguments and return value are the same as for
4112 expand_builtin_memset. */
4114 static rtx
4115 expand_builtin_memset_args (tree dest, tree val, tree len,
4116 rtx target, machine_mode mode, tree orig_exp)
4118 tree fndecl, fn;
4119 enum built_in_function fcode;
4120 machine_mode val_mode;
4121 char c;
4122 unsigned int dest_align;
4123 rtx dest_mem, dest_addr, len_rtx;
4124 HOST_WIDE_INT expected_size = -1;
4125 unsigned int expected_align = 0;
4126 unsigned HOST_WIDE_INT min_size;
4127 unsigned HOST_WIDE_INT max_size;
4128 unsigned HOST_WIDE_INT probable_max_size;
4130 dest_align = get_pointer_alignment (dest);
4132 /* If DEST is not a pointer type, don't do this operation in-line. */
4133 if (dest_align == 0)
4134 return NULL_RTX;
4136 if (currently_expanding_gimple_stmt)
4137 stringop_block_profile (currently_expanding_gimple_stmt,
4138 &expected_align, &expected_size);
4140 if (expected_align < dest_align)
4141 expected_align = dest_align;
4143 /* If the LEN parameter is zero, return DEST. */
4144 if (integer_zerop (len))
4146 /* Evaluate and ignore VAL in case it has side-effects. */
4147 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4148 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4151 /* Stabilize the arguments in case we fail. */
4152 dest = builtin_save_expr (dest);
4153 val = builtin_save_expr (val);
4154 len = builtin_save_expr (len);
4156 len_rtx = expand_normal (len);
4157 determine_block_size (len, len_rtx, &min_size, &max_size,
4158 &probable_max_size);
4159 dest_mem = get_memory_rtx (dest, len);
4160 val_mode = TYPE_MODE (unsigned_char_type_node);
4162 if (TREE_CODE (val) != INTEGER_CST)
4164 rtx val_rtx;
4166 val_rtx = expand_normal (val);
4167 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4169 /* Assume that we can memset by pieces if we can store
4170 the coefficients by pieces (in the required modes).
4171 We can't pass builtin_memset_gen_str as that emits RTL. */
4172 c = 1;
4173 if (tree_fits_uhwi_p (len)
4174 && can_store_by_pieces (tree_to_uhwi (len),
4175 builtin_memset_read_str, &c, dest_align,
4176 true))
4178 val_rtx = force_reg (val_mode, val_rtx);
4179 store_by_pieces (dest_mem, tree_to_uhwi (len),
4180 builtin_memset_gen_str, val_rtx, dest_align,
4181 true, 0);
4183 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4184 dest_align, expected_align,
4185 expected_size, min_size, max_size,
4186 probable_max_size))
4187 goto do_libcall;
4189 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4190 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4191 return dest_mem;
4194 if (target_char_cast (val, &c))
4195 goto do_libcall;
4197 if (c)
4199 if (tree_fits_uhwi_p (len)
4200 && can_store_by_pieces (tree_to_uhwi (len),
4201 builtin_memset_read_str, &c, dest_align,
4202 true))
4203 store_by_pieces (dest_mem, tree_to_uhwi (len),
4204 builtin_memset_read_str, &c, dest_align, true, 0);
4205 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4206 gen_int_mode (c, val_mode),
4207 dest_align, expected_align,
4208 expected_size, min_size, max_size,
4209 probable_max_size))
4210 goto do_libcall;
4212 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4213 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4214 return dest_mem;
4217 set_mem_align (dest_mem, dest_align);
4218 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4219 CALL_EXPR_TAILCALL (orig_exp)
4220 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4221 expected_align, expected_size,
4222 min_size, max_size,
4223 probable_max_size);
4225 if (dest_addr == 0)
4227 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4228 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4231 return dest_addr;
4233 do_libcall:
4234 fndecl = get_callee_fndecl (orig_exp);
4235 fcode = DECL_FUNCTION_CODE (fndecl);
4236 if (fcode == BUILT_IN_MEMSET
4237 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4238 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4239 dest, val, len);
4240 else if (fcode == BUILT_IN_BZERO)
4241 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4242 dest, len);
4243 else
4244 gcc_unreachable ();
4245 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4246 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4247 return expand_call (fn, target, target == const0_rtx);
4250 /* Expand expression EXP, which is a call to the bzero builtin. Return
4251 NULL_RTX if we failed; the caller should emit a normal call. */
4253 static rtx
4254 expand_builtin_bzero (tree exp)
4256 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4259 tree dest = CALL_EXPR_ARG (exp, 0);
4260 tree size = CALL_EXPR_ARG (exp, 1);
4262 check_memop_sizes (exp, dest, NULL_TREE, size);
4264 /* New argument list transforming bzero(ptr x, int y) to
4265 memset(ptr x, int 0, size_t y). This is done this way
4266 so that if it isn't expanded inline, we fall back to
4267 calling bzero instead of memset. */
4269 location_t loc = EXPR_LOCATION (exp);
4271 return expand_builtin_memset_args (dest, integer_zero_node,
4272 fold_convert_loc (loc,
4273 size_type_node, size),
4274 const0_rtx, VOIDmode, exp);
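/* Editor's note: the net effect of the transformation above is that

     bzero (p, n);

   expands exactly like

     (void) memset (p, 0, (size_t) n);

   while keeping bzero as the library fallback if inline expansion
   fails.  */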
4277 /* Try to expand cmpstr operation ICODE with the given operands.
4278 Return the result rtx on success, otherwise return null. */
4280 static rtx
4281 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4282 HOST_WIDE_INT align)
4284 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4286 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4287 target = NULL_RTX;
4289 struct expand_operand ops[4];
4290 create_output_operand (&ops[0], target, insn_mode);
4291 create_fixed_operand (&ops[1], arg1_rtx);
4292 create_fixed_operand (&ops[2], arg2_rtx);
4293 create_integer_operand (&ops[3], align);
4294 if (maybe_expand_insn (icode, 4, ops))
4295 return ops[0].value;
4296 return NULL_RTX;
4299 /* Expand expression EXP, which is a call to the memcmp built-in function.
4300 Return NULL_RTX if we failed and the caller should emit a normal call,
4301 otherwise try to get the result in TARGET, if convenient.
4302 RESULT_EQ is true if we can relax the returned value to be either zero
4303 or nonzero, without caring about the sign. */
4305 static rtx
4306 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4310 return NULL_RTX;
4312 tree arg1 = CALL_EXPR_ARG (exp, 0);
4313 tree arg2 = CALL_EXPR_ARG (exp, 1);
4314 tree len = CALL_EXPR_ARG (exp, 2);
4316 /* Diagnose calls where the specified length exceeds the size of either
4317 object. */
4318 if (warn_stringop_overflow)
4320 tree size = compute_objsize (arg1, 0);
4321 if (check_sizes (OPT_Wstringop_overflow_,
4322 exp, len, /*maxlen=*/NULL_TREE,
4323 size, /*objsize=*/NULL_TREE))
4325 size = compute_objsize (arg2, 0);
4326 check_sizes (OPT_Wstringop_overflow_,
4327 exp, len, /*maxlen=*/NULL_TREE,
4328 size, /*objsize=*/NULL_TREE);
4332 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4333 location_t loc = EXPR_LOCATION (exp);
4335 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4336 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4338 /* If we don't know the alignment of either pointer, call the function. */
4339 if (arg1_align == 0 || arg2_align == 0)
4340 return NULL_RTX;
4342 rtx arg1_rtx = get_memory_rtx (arg1, len);
4343 rtx arg2_rtx = get_memory_rtx (arg2, len);
4344 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4346 /* Set MEM_SIZE as appropriate. */
4347 if (CONST_INT_P (len_rtx))
4349 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4350 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4353 by_pieces_constfn constfn = NULL;
4355 const char *src_str = c_getstr (arg2);
4356 if (result_eq && src_str == NULL)
4358 src_str = c_getstr (arg1);
4359 if (src_str != NULL)
4360 std::swap (arg1_rtx, arg2_rtx);
4363 /* If SRC is a string constant and the block comparison would be done
4364 by pieces, we can avoid loading the string from memory
4365 and need only use the computed constants. */
4366 if (src_str
4367 && CONST_INT_P (len_rtx)
4368 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4369 constfn = builtin_memcpy_read_str;
4371 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4372 TREE_TYPE (len), target,
4373 result_eq, constfn,
4374 CONST_CAST (char *, src_str));
4376 if (result)
4378 /* Return the value in the proper mode for this function. */
4379 if (GET_MODE (result) == mode)
4380 return result;
4382 if (target != 0)
4384 convert_move (target, result, 0);
4385 return target;
4388 return convert_to_mode (mode, result, 0);
4391 return NULL_RTX;
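/* Editor's note: RESULT_EQ corresponds to uses such as

     if (memcmp (a, b, n) == 0) ...

   where only zero versus nonzero matters, so the expander may return
   any nonzero value on a mismatch instead of a signed ordering.  */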
4394 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4395 if we failed; the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4398 static rtx
4399 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4401 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4402 return NULL_RTX;
4404 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4405 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4406 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4408 rtx arg1_rtx, arg2_rtx;
4409 tree fndecl, fn;
4410 tree arg1 = CALL_EXPR_ARG (exp, 0);
4411 tree arg2 = CALL_EXPR_ARG (exp, 1);
4412 rtx result = NULL_RTX;
4414 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4415 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4417 /* If we don't know the alignment of either pointer, call the function. */
4418 if (arg1_align == 0 || arg2_align == 0)
4419 return NULL_RTX;
4421 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4422 arg1 = builtin_save_expr (arg1);
4423 arg2 = builtin_save_expr (arg2);
4425 arg1_rtx = get_memory_rtx (arg1, NULL);
4426 arg2_rtx = get_memory_rtx (arg2, NULL);
4428 /* Try to call cmpstrsi. */
4429 if (cmpstr_icode != CODE_FOR_nothing)
4430 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4431 MIN (arg1_align, arg2_align));
4433 /* Try to determine at least one length and call cmpstrnsi. */
4434 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4436 tree len;
4437 rtx arg3_rtx;
4439 tree len1 = c_strlen (arg1, 1);
4440 tree len2 = c_strlen (arg2, 1);
4442 if (len1)
4443 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4444 if (len2)
4445 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4447 /* If we don't have a constant length for the first, use the length
4448 of the second, if we know it. We don't require a constant for
4449 this case; some cost analysis could be done if both are available
4450 but neither is constant. For now, assume they're equally cheap,
4451 unless one has side effects. If both strings have constant lengths,
4452 use the smaller. */
4454 if (!len1)
4455 len = len2;
4456 else if (!len2)
4457 len = len1;
4458 else if (TREE_SIDE_EFFECTS (len1))
4459 len = len2;
4460 else if (TREE_SIDE_EFFECTS (len2))
4461 len = len1;
4462 else if (TREE_CODE (len1) != INTEGER_CST)
4463 len = len2;
4464 else if (TREE_CODE (len2) != INTEGER_CST)
4465 len = len1;
4466 else if (tree_int_cst_lt (len1, len2))
4467 len = len1;
4468 else
4469 len = len2;
4471 /* If both arguments have side effects, we cannot optimize. */
4472 if (len && !TREE_SIDE_EFFECTS (len))
4474 arg3_rtx = expand_normal (len);
4475 result = expand_cmpstrn_or_cmpmem
4476 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4477 arg3_rtx, MIN (arg1_align, arg2_align));
4481 if (result)
4483 /* Return the value in the proper mode for this function. */
4484 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4485 if (GET_MODE (result) == mode)
4486 return result;
4487 if (target == 0)
4488 return convert_to_mode (mode, result, 0);
4489 convert_move (target, result, 0);
4490 return target;
4493 /* Expand the library call ourselves using a stabilized argument
4494 list to avoid evaluating the function's arguments twice. */
4495 fndecl = get_callee_fndecl (exp);
4496 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4497 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4498 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4499 return expand_call (fn, target, target == const0_rtx);
4501 return NULL_RTX;
4504 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4505 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4506 the result in TARGET, if convenient. */
4508 static rtx
4509 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4510 ATTRIBUTE_UNUSED machine_mode mode)
4512 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4514 if (!validate_arglist (exp,
4515 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4516 return NULL_RTX;
4518 /* If c_strlen can determine an expression for one of the string
4519 lengths, and it doesn't have side effects, then emit cmpstrnsi
4520 using length MIN(strlen(string)+1, arg3). */
4521 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4522 if (cmpstrn_icode != CODE_FOR_nothing)
4524 tree len, len1, len2, len3;
4525 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4526 rtx result;
4527 tree fndecl, fn;
4528 tree arg1 = CALL_EXPR_ARG (exp, 0);
4529 tree arg2 = CALL_EXPR_ARG (exp, 1);
4530 tree arg3 = CALL_EXPR_ARG (exp, 2);
4532 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4533 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4535 len1 = c_strlen (arg1, 1);
4536 len2 = c_strlen (arg2, 1);
4538 if (len1)
4539 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4540 if (len2)
4541 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4543 len3 = fold_convert_loc (loc, sizetype, arg3);
4545 /* If we don't have a constant length for the first, use the length
4546 of the second, if we know it. If neither string is constant length,
4547 use the given length argument. We don't require a constant for
4548 this case; some cost analysis could be done if both are available
4549 but neither is constant. For now, assume they're equally cheap,
4550 unless one has side effects. If both strings have constant lengths,
4551 use the smaller. */
4553 if (!len1 && !len2)
4554 len = len3;
4555 else if (!len1)
4556 len = len2;
4557 else if (!len2)
4558 len = len1;
4559 else if (TREE_SIDE_EFFECTS (len1))
4560 len = len2;
4561 else if (TREE_SIDE_EFFECTS (len2))
4562 len = len1;
4563 else if (TREE_CODE (len1) != INTEGER_CST)
4564 len = len2;
4565 else if (TREE_CODE (len2) != INTEGER_CST)
4566 len = len1;
4567 else if (tree_int_cst_lt (len1, len2))
4568 len = len1;
4569 else
4570 len = len2;
4572 /* If we are not using the given length, we must incorporate it here.
4573 The actual new length parameter will be MIN(len,arg3) in this case. */
4574 if (len != len3)
4575 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4576 arg1_rtx = get_memory_rtx (arg1, len);
4577 arg2_rtx = get_memory_rtx (arg2, len);
4578 arg3_rtx = expand_normal (len);
4579 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4580 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4581 MIN (arg1_align, arg2_align));
4582 if (result)
4584 /* Return the value in the proper mode for this function. */
4585 mode = TYPE_MODE (TREE_TYPE (exp));
4586 if (GET_MODE (result) == mode)
4587 return result;
4588 if (target == 0)
4589 return convert_to_mode (mode, result, 0);
4590 convert_move (target, result, 0);
4591 return target;
4594 /* Expand the library call ourselves using a stabilized argument
4595 list to avoid evaluating the function's arguments twice. */
4596 fndecl = get_callee_fndecl (exp);
4597 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4598 arg1, arg2, len);
4599 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4600 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4601 return expand_call (fn, target, target == const0_rtx);
4603 return NULL_RTX;
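/* Editor's note: the length selection above means that with a constant
   string argument the comparison length becomes MIN (strlen (s) + 1, n);
   e.g. strncmp (x, "ab", 100) never needs to inspect more than three
   bytes of X.  */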
4606 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4607 if that's convenient. */
4609 rtx
4610 expand_builtin_saveregs (void)
4612 rtx val;
4613 rtx_insn *seq;
4615 /* Don't do __builtin_saveregs more than once in a function.
4616 Save the result of the first call and reuse it. */
4617 if (saveregs_value != 0)
4618 return saveregs_value;
4620 /* When this function is called, it means that registers must be
4621 saved on entry to this function. So we migrate the call to the
4622 first insn of this function. */
4624 start_sequence ();
4626 /* Do whatever the machine needs done in this case. */
4627 val = targetm.calls.expand_builtin_saveregs ();
4629 seq = get_insns ();
4630 end_sequence ();
4632 saveregs_value = val;
4634 /* Put the insns after the NOTE that starts the function. If this
4635 is inside a start_sequence, make the outer-level insn chain current, so
4636 the code is placed at the start of the function. */
4637 push_topmost_sequence ();
4638 emit_insn_after (seq, entry_of_function ());
4639 pop_topmost_sequence ();
4641 return val;
4644 /* Expand a call to __builtin_next_arg. */
4646 static rtx
4647 expand_builtin_next_arg (void)
4649 /* Checking arguments is already done in fold_builtin_next_arg,
4650 which must be called before this function. */
4651 return expand_binop (ptr_mode, add_optab,
4652 crtl->args.internal_arg_pointer,
4653 crtl->args.arg_offset_rtx,
4654 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4657 /* Make it easier for the backends by protecting the valist argument
4658 from multiple evaluations. */
4660 static tree
4661 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4663 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4665 /* The current way of determining the type of valist is completely
4666 bogus. We should have the information on the va builtin instead. */
4667 if (!vatype)
4668 vatype = targetm.fn_abi_va_list (cfun->decl);
4670 if (TREE_CODE (vatype) == ARRAY_TYPE)
4672 if (TREE_SIDE_EFFECTS (valist))
4673 valist = save_expr (valist);
4675 /* For this case, the backends will be expecting a pointer to
4676 vatype, but it's possible we've actually been given an array
4677 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4678 So fix it. */
4679 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4681 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4682 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4685 else
4687 tree pt = build_pointer_type (vatype);
4689 if (! needs_lvalue)
4691 if (! TREE_SIDE_EFFECTS (valist))
4692 return valist;
4694 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4695 TREE_SIDE_EFFECTS (valist) = 1;
4698 if (TREE_SIDE_EFFECTS (valist))
4699 valist = save_expr (valist);
4700 valist = fold_build2_loc (loc, MEM_REF,
4701 vatype, valist, build_int_cst (pt, 0));
4704 return valist;
4707 /* The "standard" definition of va_list is void*. */
4709 tree
4710 std_build_builtin_va_list (void)
4712 return ptr_type_node;
4715 /* The "standard" abi va_list is va_list_type_node. */
4717 tree
4718 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4720 return va_list_type_node;
4723 /* The "standard" type of va_list is va_list_type_node. */
4725 tree
4726 std_canonical_va_list_type (tree type)
4728 tree wtype, htype;
4730 wtype = va_list_type_node;
4731 htype = type;
4733 if (TREE_CODE (wtype) == ARRAY_TYPE)
4735 /* If va_list is an array type, the argument may have decayed
4736 to a pointer type, e.g. by being passed to another function.
4737 In that case, unwrap both types so that we can compare the
4738 underlying records. */
4739 if (TREE_CODE (htype) == ARRAY_TYPE
4740 || POINTER_TYPE_P (htype))
4742 wtype = TREE_TYPE (wtype);
4743 htype = TREE_TYPE (htype);
4746 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4747 return va_list_type_node;
4749 return NULL_TREE;
4752 /* The "standard" implementation of va_start: just assign `nextarg' to
4753 the variable. */
4755 void
4756 std_expand_builtin_va_start (tree valist, rtx nextarg)
4758 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4759 convert_move (va_r, nextarg, 0);
4761 /* We do not have any valid bounds for the pointer, so
4762 just store zero bounds for it. */
4763 if (chkp_function_instrumented_p (current_function_decl))
4764 chkp_expand_bounds_reset_for_mem (valist,
4765 make_tree (TREE_TYPE (valist),
4766 nextarg));
4769 /* Expand EXP, a call to __builtin_va_start. */
4771 static rtx
4772 expand_builtin_va_start (tree exp)
4774 rtx nextarg;
4775 tree valist;
4776 location_t loc = EXPR_LOCATION (exp);
4778 if (call_expr_nargs (exp) < 2)
4780 error_at (loc, "too few arguments to function %<va_start%>");
4781 return const0_rtx;
4784 if (fold_builtin_next_arg (exp, true))
4785 return const0_rtx;
4787 nextarg = expand_builtin_next_arg ();
4788 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4790 if (targetm.expand_builtin_va_start)
4791 targetm.expand_builtin_va_start (valist, nextarg);
4792 else
4793 std_expand_builtin_va_start (valist, nextarg);
4795 return const0_rtx;
4798 /* Expand EXP, a call to __builtin_va_end. */
4800 static rtx
4801 expand_builtin_va_end (tree exp)
4803 tree valist = CALL_EXPR_ARG (exp, 0);
4805 /* Evaluate for side effects, if needed. I hate macros that don't
4806 do that. */
4807 if (TREE_SIDE_EFFECTS (valist))
4808 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4810 return const0_rtx;
4813 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4814 builtin rather than just as an assignment in stdarg.h because of the
4815 nastiness of array-type va_list types. */
4817 static rtx
4818 expand_builtin_va_copy (tree exp)
4820 tree dst, src, t;
4821 location_t loc = EXPR_LOCATION (exp);
4823 dst = CALL_EXPR_ARG (exp, 0);
4824 src = CALL_EXPR_ARG (exp, 1);
4826 dst = stabilize_va_list_loc (loc, dst, 1);
4827 src = stabilize_va_list_loc (loc, src, 0);
4829 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4831 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4833 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4834 TREE_SIDE_EFFECTS (t) = 1;
4835 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4837 else
4839 rtx dstb, srcb, size;
4841 /* Evaluate to pointers. */
4842 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4843 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4844 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4845 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4847 dstb = convert_memory_address (Pmode, dstb);
4848 srcb = convert_memory_address (Pmode, srcb);
4850 /* "Dereference" to BLKmode memories. */
4851 dstb = gen_rtx_MEM (BLKmode, dstb);
4852 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4853 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4854 srcb = gen_rtx_MEM (BLKmode, srcb);
4855 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4856 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4858 /* Copy. */
4859 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4862 return const0_rtx;
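/* Editor's note: for pointer-style va_list types the expansion above is
   equivalent to the plain assignment

     dst = src;

   whereas array-style ABIs need the block copy, which is why va_copy
   must be a builtin rather than an assignment in stdarg.h.  */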
4865 /* Expand a call to one of the builtin functions __builtin_frame_address or
4866 __builtin_return_address. */
4868 static rtx
4869 expand_builtin_frame_address (tree fndecl, tree exp)
4871 /* The argument must be a nonnegative integer constant.
4872 It counts the number of frames to scan up the stack.
4873 The value is either the frame pointer value or the return
4874 address saved in that frame. */
4875 if (call_expr_nargs (exp) == 0)
4876 /* Warning about missing arg was already issued. */
4877 return const0_rtx;
4878 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4880 error ("invalid argument to %qD", fndecl);
4881 return const0_rtx;
4883 else
4885 /* Number of frames to scan up the stack. */
4886 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4888 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4890 /* Some ports cannot access arbitrary stack frames. */
4891 if (tem == NULL)
4893 warning (0, "unsupported argument to %qD", fndecl);
4894 return const0_rtx;
4897 if (count)
4899 /* Warn since no effort is made to ensure that any frame
4900 beyond the current one exists or can be safely reached. */
4901 warning (OPT_Wframe_address, "calling %qD with "
4902 "a nonzero argument is unsafe", fndecl);
4905 /* For __builtin_frame_address, return what we've got. */
4906 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4907 return tem;
4909 if (!REG_P (tem)
4910 && ! CONSTANT_P (tem))
4911 tem = copy_addr_to_reg (tem);
4912 return tem;
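/* Editor's illustration: only a zero argument is reliably safe:

     void *fp = __builtin_frame_address (0);    this frame
     void *ra = __builtin_return_address (0);   this call's return

   Nonzero counts walk up the stack with no guarantee that the requested
   frame exists or is reachable, hence the -Wframe-address warning
   above.  */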
4916 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4917 failed and the caller should emit a normal call. */
4919 static rtx
4920 expand_builtin_alloca (tree exp)
4922 rtx op0;
4923 rtx result;
4924 unsigned int align;
4925 tree fndecl = get_callee_fndecl (exp);
4926 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4927 == BUILT_IN_ALLOCA_WITH_ALIGN);
4928 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4929 bool valid_arglist
4930 = (alloca_with_align
4931 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4932 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4934 if (!valid_arglist)
4935 return NULL_RTX;
4937 if ((alloca_with_align && !warn_vla_limit)
4938 || (!alloca_with_align && !warn_alloca_limit))
4940 /* -Walloca-larger-than and -Wvla-larger-than settings override
4941 the more general -Walloc-size-larger-than, so unless either of
4942 the former options is specified, check the alloca arguments for
4943 overflow. */
4944 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4945 int idx[] = { 0, -1 };
4946 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4949 /* Compute the argument. */
4950 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4952 /* Compute the alignment. */
4953 align = (alloca_with_align
4954 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4955 : BIGGEST_ALIGNMENT);
4957 /* Allocate the desired space. If the allocation stems from the declaration
4958 of a variable-sized object, it cannot accumulate. */
4959 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4960 result = convert_memory_address (ptr_mode, result);
4962 return result;
4965 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second
4966 argument of the call with virtual_stack_dynamic_rtx, because the asan pass
4967 emits a dummy value for the second parameter, relying on this function to
4968 perform the replacement. See the motivation in the comment for the
4969 handle_builtin_stack_restore function. */
4971 static rtx
4972 expand_asan_emit_allocas_unpoison (tree exp)
4974 tree arg0 = CALL_EXPR_ARG (exp, 0);
4975 rtx top = expand_expr (arg0, NULL_RTX, GET_MODE (virtual_stack_dynamic_rtx),
4976 EXPAND_NORMAL);
4977 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4978 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
4979 TYPE_MODE (pointer_sized_int_node),
4980 virtual_stack_dynamic_rtx,
4981 TYPE_MODE (pointer_sized_int_node));
4982 return ret;
4985 /* Expand a call to bswap builtin in EXP.
4986 Return NULL_RTX if a normal call should be emitted rather than expanding the
4987 function in-line. If convenient, the result should be placed in TARGET.
4988 SUBTARGET may be used as the target for computing one of EXP's operands. */
4990 static rtx
4991 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4992 rtx subtarget)
4994 tree arg;
4995 rtx op0;
4997 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4998 return NULL_RTX;
5000 arg = CALL_EXPR_ARG (exp, 0);
5001 op0 = expand_expr (arg,
5002 subtarget && GET_MODE (subtarget) == target_mode
5003 ? subtarget : NULL_RTX,
5004 target_mode, EXPAND_NORMAL);
5005 if (GET_MODE (op0) != target_mode)
5006 op0 = convert_to_mode (target_mode, op0, 1);
5008 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5010 gcc_assert (target);
5012 return convert_to_mode (target_mode, target, 1);
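/* Editor's sketch (hypothetical helper, not GCC code): what bswap_optab
   computes for a 32-bit mode, namely a full byte reversal.  */

static unsigned int
bswap32_model (unsigned int x)
{
  return ((x & 0x000000ffu) << 24)
         | ((x & 0x0000ff00u) << 8)
         | ((x & 0x00ff0000u) >> 8)
         | ((x & 0xff000000u) >> 24);
}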
5015 /* Expand a call to a unary builtin in EXP.
5016 Return NULL_RTX if a normal call should be emitted rather than expanding the
5017 function in-line. If convenient, the result should be placed in TARGET.
5018 SUBTARGET may be used as the target for computing one of EXP's operands. */
5020 static rtx
5021 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5022 rtx subtarget, optab op_optab)
5024 rtx op0;
5026 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5027 return NULL_RTX;
5029 /* Compute the argument. */
5030 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5031 (subtarget
5032 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5033 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5034 VOIDmode, EXPAND_NORMAL);
5035 /* Compute op, into TARGET if possible.
5036 Set TARGET to wherever the result comes back. */
5037 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5038 op_optab, op0, target, op_optab != clrsb_optab);
5039 gcc_assert (target);
5041 return convert_to_mode (target_mode, target, 0);
5044 /* Expand a call to __builtin_expect. We just return our argument
5045 as the builtin_expect semantics should already have been applied by
5046 the tree branch prediction pass. */
5048 static rtx
5049 expand_builtin_expect (tree exp, rtx target)
5051 tree arg;
5053 if (call_expr_nargs (exp) < 2)
5054 return const0_rtx;
5055 arg = CALL_EXPR_ARG (exp, 0);
5057 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5058 /* When guessing was done, the hints should already have been stripped away. */
5059 gcc_assert (!flag_guess_branch_prob
5060 || optimize == 0 || seen_error ());
5061 return target;
5064 /* Expand a call to __builtin_assume_aligned. We just return our first
5065 argument as the builtin_assume_aligned semantics should already have
5066 been applied by CCP. */
5068 static rtx
5069 expand_builtin_assume_aligned (tree exp, rtx target)
5071 if (call_expr_nargs (exp) < 2)
5072 return const0_rtx;
5073 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5074 EXPAND_NORMAL);
5075 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5076 && (call_expr_nargs (exp) < 3
5077 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5078 return target;
5081 void
5082 expand_builtin_trap (void)
5084 if (targetm.have_trap ())
5086 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5087 /* For trap insns, when not accumulating outgoing args, force a
5088 REG_ARGS_SIZE note to prevent crossjumping of calls with
5089 different arg sizes. */
5090 if (!ACCUMULATE_OUTGOING_ARGS)
5091 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5093 else
5095 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5096 tree call_expr = build_call_expr (fn, 0);
5097 expand_call (call_expr, NULL_RTX, false);
5100 emit_barrier ();
5103 /* Expand a call to __builtin_unreachable. We do nothing except emit
5104 a barrier saying that control flow will not pass here.
5106 It is the responsibility of the program being compiled to ensure
5107 that control flow never reaches __builtin_unreachable. */
5108 static void
5109 expand_builtin_unreachable (void)
5111 emit_barrier ();
5114 /* Expand EXP, a call to fabs, fabsf or fabsl.
5115 Return NULL_RTX if a normal call should be emitted rather than expanding
5116 the function inline. If convenient, the result should be placed
5117 in TARGET. SUBTARGET may be used as the target for computing
5118 the operand. */
5120 static rtx
5121 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5123 machine_mode mode;
5124 tree arg;
5125 rtx op0;
5127 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5128 return NULL_RTX;
5130 arg = CALL_EXPR_ARG (exp, 0);
5131 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5132 mode = TYPE_MODE (TREE_TYPE (arg));
5133 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5134 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5137 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5138 Return NULL if a normal call should be emitted rather than expanding the
5139 function inline. If convenient, the result should be placed in TARGET.
5140 SUBTARGET may be used as the target for computing the operand. */
5142 static rtx
5143 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5145 rtx op0, op1;
5146 tree arg;
5148 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5149 return NULL_RTX;
5151 arg = CALL_EXPR_ARG (exp, 0);
5152 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5154 arg = CALL_EXPR_ARG (exp, 1);
5155 op1 = expand_normal (arg);
5157 return expand_copysign (op0, op1, target);
5160 /* Expand a call to __builtin___clear_cache. */
5162 static rtx
5163 expand_builtin___clear_cache (tree exp)
5165 if (!targetm.code_for_clear_cache)
5167 #ifdef CLEAR_INSN_CACHE
5168 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5169 does something. Just do the default expansion to a call to
5170 __clear_cache(). */
5171 return NULL_RTX;
5172 #else
5173 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5174 does nothing. There is no need to call it. Do nothing. */
5175 return const0_rtx;
5176 #endif /* CLEAR_INSN_CACHE */
5179 /* We have a "clear_cache" insn, and it will handle everything. */
5180 tree begin, end;
5181 rtx begin_rtx, end_rtx;
5183 /* We must not expand to a library call. If we did, any
5184 fallback library function in libgcc that might contain a call to
5185 __builtin___clear_cache() would recurse infinitely. */
5186 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5188 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5189 return const0_rtx;
5192 if (targetm.have_clear_cache ())
5194 struct expand_operand ops[2];
5196 begin = CALL_EXPR_ARG (exp, 0);
5197 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5199 end = CALL_EXPR_ARG (exp, 1);
5200 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5202 create_address_operand (&ops[0], begin_rtx);
5203 create_address_operand (&ops[1], end_rtx);
5204 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5205 return const0_rtx;
5207 return const0_rtx;
5210 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5212 static rtx
5213 round_trampoline_addr (rtx tramp)
5215 rtx temp, addend, mask;
5217 /* If we don't need too much alignment, we'll have been guaranteed
5218 proper alignment by get_trampoline_type. */
5219 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5220 return tramp;
5222 /* Round address up to desired boundary. */
5223 temp = gen_reg_rtx (Pmode);
5224 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5225 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5227 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5228 temp, 0, OPTAB_LIB_WIDEN);
5229 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5230 temp, 0, OPTAB_LIB_WIDEN);
5232 return tramp;
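/* Editor's sketch (hypothetical helper, not GCC code): the round-up
   computed above in RTL, as plain integer arithmetic for a
   power-of-two ALIGN given in bytes.  */

static unsigned long
round_up_model (unsigned long addr, unsigned long align)
{
  /* E.g. addr == 0x1003 and align == 16 give 0x1010.  */
  return (addr + align - 1) & ~(align - 1);
}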
5235 static rtx
5236 expand_builtin_init_trampoline (tree exp, bool onstack)
5238 tree t_tramp, t_func, t_chain;
5239 rtx m_tramp, r_tramp, r_chain, tmp;
5241 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5242 POINTER_TYPE, VOID_TYPE))
5243 return NULL_RTX;
5245 t_tramp = CALL_EXPR_ARG (exp, 0);
5246 t_func = CALL_EXPR_ARG (exp, 1);
5247 t_chain = CALL_EXPR_ARG (exp, 2);
5249 r_tramp = expand_normal (t_tramp);
5250 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5251 MEM_NOTRAP_P (m_tramp) = 1;
5253 /* If ONSTACK, the TRAMP argument should be the address of a field
5254 within the local function's FRAME decl. Either way, let's see if
5255 we can fill in the MEM_ATTRs for this memory. */
5256 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5257 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5259 /* Creator of a heap trampoline is responsible for making sure the
5260 address is aligned to at least STACK_BOUNDARY. Normally malloc
5261 will ensure this anyhow. */
5262 tmp = round_trampoline_addr (r_tramp);
5263 if (tmp != r_tramp)
5265 m_tramp = change_address (m_tramp, BLKmode, tmp);
5266 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5267 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5270 /* The FUNC argument should be the address of the nested function.
5271 Extract the actual function decl to pass to the hook. */
5272 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5273 t_func = TREE_OPERAND (t_func, 0);
5274 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5276 r_chain = expand_normal (t_chain);
5278 /* Generate insns to initialize the trampoline. */
5279 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5281 if (onstack)
5283 trampolines_created = 1;
5285 if (targetm.calls.custom_function_descriptors != 0)
5286 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5287 "trampoline generated for nested function %qD", t_func);
5290 return const0_rtx;
5293 static rtx
5294 expand_builtin_adjust_trampoline (tree exp)
5296 rtx tramp;
5298 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5299 return NULL_RTX;
5301 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5302 tramp = round_trampoline_addr (tramp);
5303 if (targetm.calls.trampoline_adjust_address)
5304 tramp = targetm.calls.trampoline_adjust_address (tramp);
5306 return tramp;
5309 /* Expand a call to the builtin descriptor initialization routine.
5310 A descriptor is made up of a couple of pointers to the static
5311 chain and the code entry in this order. */
5313 static rtx
5314 expand_builtin_init_descriptor (tree exp)
5316 tree t_descr, t_func, t_chain;
5317 rtx m_descr, r_descr, r_func, r_chain;
5319 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5320 VOID_TYPE))
5321 return NULL_RTX;
5323 t_descr = CALL_EXPR_ARG (exp, 0);
5324 t_func = CALL_EXPR_ARG (exp, 1);
5325 t_chain = CALL_EXPR_ARG (exp, 2);
5327 r_descr = expand_normal (t_descr);
5328 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5329 MEM_NOTRAP_P (m_descr) = 1;
5331 r_func = expand_normal (t_func);
5332 r_chain = expand_normal (t_chain);
5334 /* Generate insns to initialize the descriptor. */
5335 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5336 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5337 POINTER_SIZE / BITS_PER_UNIT), r_func);
5339 return const0_rtx;
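/* Editor's sketch: the two emit_move_insn calls above populate the
   descriptor as two consecutive pointer-sized slots, static chain
   first, matching the comment before this function.  A hypothetical C
   view of the memory written (field names are illustrative):

     struct descriptor
     {
       void *static_chain;   -- stored at offset 0
       void *entry_point;    -- stored at POINTER_SIZE / BITS_PER_UNIT
     };  */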
5342 /* Expand a call to the builtin descriptor adjustment routine. */
5344 static rtx
5345 expand_builtin_adjust_descriptor (tree exp)
5347 rtx tramp;
5349 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5350 return NULL_RTX;
5352 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5354 /* Unalign the descriptor to allow runtime identification. */
5355 tramp = plus_constant (ptr_mode, tramp,
5356 targetm.calls.custom_function_descriptors);
5358 return force_operand (tramp, NULL_RTX);
5361 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5362 function. The function first checks whether the back end provides
5363 an insn to implement signbit for the respective mode. If not, it
5364 checks whether the floating point format of the value is such that
5365 the sign bit can be extracted. If that is not the case, error out.
5366 EXP is the expression that is a call to the builtin function; if
5367 convenient, the result should be placed in TARGET. */
5368 static rtx
5369 expand_builtin_signbit (tree exp, rtx target)
5371 const struct real_format *fmt;
5372 machine_mode fmode, imode, rmode;
5373 tree arg;
5374 int word, bitpos;
5375 enum insn_code icode;
5376 rtx temp;
5377 location_t loc = EXPR_LOCATION (exp);
5379 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5380 return NULL_RTX;
5382 arg = CALL_EXPR_ARG (exp, 0);
5383 fmode = TYPE_MODE (TREE_TYPE (arg));
5384 rmode = TYPE_MODE (TREE_TYPE (exp));
5385 fmt = REAL_MODE_FORMAT (fmode);
5387 arg = builtin_save_expr (arg);
5389 /* Expand the argument yielding an RTX expression. */
5390 temp = expand_normal (arg);
5392 /* Check if the back end provides an insn that handles signbit for the
5393 argument's mode. */
5394 icode = optab_handler (signbit_optab, fmode);
5395 if (icode != CODE_FOR_nothing)
5397 rtx_insn *last = get_last_insn ();
5398 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5399 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5400 return target;
5401 delete_insns_since (last);
5404 /* For floating point formats without a sign bit, implement signbit
5405 as "ARG < 0.0". */
5406 bitpos = fmt->signbit_ro;
5407 if (bitpos < 0)
5409 /* But we can't do this if the format supports signed zero. */
5410 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5412 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5413 build_real (TREE_TYPE (arg), dconst0));
5414 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5417 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5419 imode = int_mode_for_mode (fmode);
5420 gcc_assert (imode != BLKmode);
5421 temp = gen_lowpart (imode, temp);
5423 else
5425 imode = word_mode;
5426 /* Handle targets with different FP word orders. */
5427 if (FLOAT_WORDS_BIG_ENDIAN)
5428 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5429 else
5430 word = bitpos / BITS_PER_WORD;
5431 temp = operand_subword_force (temp, word, fmode);
5432 bitpos = bitpos % BITS_PER_WORD;
5435 /* Force the intermediate word_mode (or narrower) result into a
5436 register. This avoids attempting to create paradoxical SUBREGs
5437 of floating point modes below. */
5438 temp = force_reg (imode, temp);
5440 /* If the bitpos is within the "result mode" lowpart, the operation
5441 can be implemented with a single bitwise AND. Otherwise, we need
5442 a right shift and an AND. */
5444 if (bitpos < GET_MODE_BITSIZE (rmode))
5446 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5448 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5449 temp = gen_lowpart (rmode, temp);
5450 temp = expand_binop (rmode, and_optab, temp,
5451 immed_wide_int_const (mask, rmode),
5452 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5454 else
5456 /* Perform a logical right shift to place the signbit in the least
5457 significant bit, then truncate the result to the desired mode
5458 and mask just this bit. */
5459 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5460 temp = gen_lowpart (rmode, temp);
5461 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5462 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5465 return temp;
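/* Editor's sketch: for IEEE double on a 64-bit target (sign in bit 63)
   the shift-and-mask fallback above computes the equivalent of this
   illustrative C (my_signbit is a hypothetical name; the builtin itself
   is spelled __builtin_signbit):

     int my_signbit (double x)
     {
       unsigned long long bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return (bits >> 63) & 1;
     }  */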
5468 /* Expand fork or exec calls. TARGET is the desired target of the
5469 call. EXP is the call. FN is the
5470 identifier of the actual function. IGNORE is nonzero if the
5471 value is to be ignored. */
5473 static rtx
5474 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5476 tree id, decl;
5477 tree call;
5479 /* If we are not profiling, just call the function. */
5480 if (!profile_arc_flag)
5481 return NULL_RTX;
5483 /* Otherwise call the wrapper. This should be equivalent for the rest of
5484 the compiler, so the code does not diverge, and the wrapper may run the
5485 code necessary for keeping the profiling sane. */
5487 switch (DECL_FUNCTION_CODE (fn))
5489 case BUILT_IN_FORK:
5490 id = get_identifier ("__gcov_fork");
5491 break;
5493 case BUILT_IN_EXECL:
5494 id = get_identifier ("__gcov_execl");
5495 break;
5497 case BUILT_IN_EXECV:
5498 id = get_identifier ("__gcov_execv");
5499 break;
5501 case BUILT_IN_EXECLP:
5502 id = get_identifier ("__gcov_execlp");
5503 break;
5505 case BUILT_IN_EXECLE:
5506 id = get_identifier ("__gcov_execle");
5507 break;
5509 case BUILT_IN_EXECVP:
5510 id = get_identifier ("__gcov_execvp");
5511 break;
5513 case BUILT_IN_EXECVE:
5514 id = get_identifier ("__gcov_execve");
5515 break;
5517 default:
5518 gcc_unreachable ();
5521 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5522 FUNCTION_DECL, id, TREE_TYPE (fn));
5523 DECL_EXTERNAL (decl) = 1;
5524 TREE_PUBLIC (decl) = 1;
5525 DECL_ARTIFICIAL (decl) = 1;
5526 TREE_NOTHROW (decl) = 1;
5527 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5528 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5529 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5530 return expand_call (call, target, ignore);
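/* Editor's note: under -fprofile-arcs the rewrite above means that a
   plain source-level call such as

     pid_t pid = fork ();

   is emitted as a call to __gcov_fork instead, which performs the
   profiling bookkeeping described in the comment above before invoking
   the real fork.  */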
5535 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5536 the pointer in these functions is void*, the tree optimizers may remove
5537 casts. The mode computed in expand_builtin isn't reliable either, due
5538 to __sync_bool_compare_and_swap.
5540 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5541 group of builtins. This gives us log2 of the mode size. */
5543 static inline machine_mode
5544 get_builtin_sync_mode (int fcode_diff)
5546 /* The size is not negotiable, so ask not to get BLKmode in return
5547 if the target indicates that a smaller size would be better. */
5548 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
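/* Editor's example: for BUILT_IN_SYNC_FETCH_AND_ADD_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 = 2, so
   the mode requested is BITS_PER_UNIT << 2 = 32 bits, i.e. SImode on
   typical targets.  */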
5551 /* Expand the memory expression LOC and return the appropriate memory operand
5552 for the builtin_sync operations. */
5554 static rtx
5555 get_builtin_sync_mem (tree loc, machine_mode mode)
5557 rtx addr, mem;
5559 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5560 addr = convert_memory_address (Pmode, addr);
5562 /* Note that we explicitly do not want any alias information for this
5563 memory, so that we kill all other live memories. Otherwise we don't
5564 satisfy the full barrier semantics of the intrinsic. */
5565 mem = validize_mem (gen_rtx_MEM (mode, addr));
5567 /* The alignment needs to be at least that of the mode. */
5568 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5569 get_pointer_alignment (loc)));
5570 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5571 MEM_VOLATILE_P (mem) = 1;
5573 return mem;
5576 /* Make sure an argument is in the right mode.
5577 EXP is the tree argument.
5578 MODE is the mode it should be in. */
5580 static rtx
5581 expand_expr_force_mode (tree exp, machine_mode mode)
5583 rtx val;
5584 machine_mode old_mode;
5586 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5587 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5588 of CONST_INTs, where we know the old_mode only from the call argument. */
5590 old_mode = GET_MODE (val);
5591 if (old_mode == VOIDmode)
5592 old_mode = TYPE_MODE (TREE_TYPE (exp));
5593 val = convert_modes (mode, old_mode, val, 1);
5594 return val;
5598 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5599 EXP is the CALL_EXPR. CODE is the rtx code
5600 that corresponds to the arithmetic or logical operation from the name;
5601 an exception here is that NOT actually means NAND. TARGET is an optional
5602 place for us to store the results; AFTER is true if this is the
5603 fetch_and_xxx form. */
5605 static rtx
5606 expand_builtin_sync_operation (machine_mode mode, tree exp,
5607 enum rtx_code code, bool after,
5608 rtx target)
5610 rtx val, mem;
5611 location_t loc = EXPR_LOCATION (exp);
5613 if (code == NOT && warn_sync_nand)
5615 tree fndecl = get_callee_fndecl (exp);
5616 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5618 static bool warned_f_a_n, warned_n_a_f;
5620 switch (fcode)
5622 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5623 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5624 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5625 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5626 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5627 if (warned_f_a_n)
5628 break;
5630 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5631 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5632 warned_f_a_n = true;
5633 break;
5635 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5636 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5637 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5638 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5639 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5640 if (warned_n_a_f)
5641 break;
5643 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5644 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5645 warned_n_a_f = true;
5646 break;
5648 default:
5649 gcc_unreachable ();
5653 /* Expand the operands. */
5654 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5655 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5657 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5658 after);
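/* Editor's note on the GCC 4.4 semantics warning above: since GCC 4.4
   the NAND builtins compute ~(old & val) rather than the older
   ~old & val.  Illustrative use (v and old are hypothetical names):

     int v = 0xff;
     int old = __sync_fetch_and_nand (&v, 0x0f);
     -- old == 0xff, v == ~(0xff & 0x0f) == 0xfffffff0  */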
5661 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5662 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5663 true if this is the boolean form. TARGET is a place for us to store the
5664 results; this is NOT optional if IS_BOOL is true. */
5666 static rtx
5667 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5668 bool is_bool, rtx target)
5670 rtx old_val, new_val, mem;
5671 rtx *pbool, *poval;
5673 /* Expand the operands. */
5674 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5675 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5676 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5678 pbool = poval = NULL;
5679 if (target != const0_rtx)
5681 if (is_bool)
5682 pbool = &target;
5683 else
5684 poval = &target;
5686 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5687 false, MEMMODEL_SYNC_SEQ_CST,
5688 MEMMODEL_SYNC_SEQ_CST))
5689 return NULL_RTX;
5691 return target;
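/* Editor's sketch of the two source-level forms handled above
   (word/expected/desired are illustrative names):

     bool ok  = __sync_bool_compare_and_swap (&word, expected, desired);
     long old = __sync_val_compare_and_swap  (&word, expected, desired);

   Both act as full barriers, matching the MEMMODEL_SYNC_SEQ_CST models
   passed to expand_atomic_compare_and_swap.  */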
5694 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5695 general form is actually an atomic exchange, and some targets only
5696 support a reduced form with the second argument being a constant 1.
5697 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5698 the results. */
5700 static rtx
5701 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5702 rtx target)
5704 rtx val, mem;
5706 /* Expand the operands. */
5707 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5708 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5710 return expand_sync_lock_test_and_set (target, mem, val);
5713 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5715 static void
5716 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5718 rtx mem;
5720 /* Expand the operands. */
5721 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5723 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
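/* Editor's sketch: together the two builtins above implement the classic
   acquire/release spinlock idiom (lock is an illustrative variable):

     static int lock;
     while (__sync_lock_test_and_set (&lock, 1))  -- acquire
       ;
     -- ...critical section...
     __sync_lock_release (&lock);                 -- stores 0 with release

   which matches the MEMMODEL_SYNC_RELEASE store emitted above.  */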
5726 /* Given an integer representing an ``enum memmodel'', verify its
5727 correctness and return the memory model enum. */
5729 static enum memmodel
5730 get_memmodel (tree exp)
5732 rtx op;
5733 unsigned HOST_WIDE_INT val;
5734 source_location loc
5735 = expansion_point_location_if_in_system_header (input_location);
5737 /* If the parameter is not a constant, it's a run time value so we'll just
5738 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5739 if (TREE_CODE (exp) != INTEGER_CST)
5740 return MEMMODEL_SEQ_CST;
5742 op = expand_normal (exp);
5744 val = INTVAL (op);
5745 if (targetm.memmodel_check)
5746 val = targetm.memmodel_check (val);
5747 else if (val & ~MEMMODEL_MASK)
5749 warning_at (loc, OPT_Winvalid_memory_model,
5750 "unknown architecture specifier in memory model to builtin");
5751 return MEMMODEL_SEQ_CST;
5754 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5755 if (memmodel_base (val) >= MEMMODEL_LAST)
5757 warning_at (loc, OPT_Winvalid_memory_model,
5758 "invalid memory model argument to builtin");
5759 return MEMMODEL_SEQ_CST;
5762 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5763 be conservative and promote consume to acquire. */
5764 if (val == MEMMODEL_CONSUME)
5765 val = MEMMODEL_ACQUIRE;
5767 return (enum memmodel) val;
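/* Editor's note: the constant validated above is the value of the
   __ATOMIC_* macros, 0 through 5 (RELAXED, CONSUME, ACQUIRE, RELEASE,
   ACQ_REL, SEQ_CST), possibly with target-specific bits outside
   MEMMODEL_MASK.  For example

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);

   reaches this function with INTEGER_CST 3, and __ATOMIC_CONSUME (1) is
   promoted to __ATOMIC_ACQUIRE (2) by the PR 59448 workaround above.  */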
5770 /* Expand the __atomic_exchange intrinsic:
5771 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5772 EXP is the CALL_EXPR.
5773 TARGET is an optional place for us to store the results. */
5775 static rtx
5776 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5778 rtx val, mem;
5779 enum memmodel model;
5781 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5783 if (!flag_inline_atomics)
5784 return NULL_RTX;
5786 /* Expand the operands. */
5787 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5788 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5790 return expand_atomic_exchange (target, mem, val, model);
5793 /* Expand the __atomic_compare_exchange intrinsic:
5794 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5795 TYPE desired, BOOL weak,
5796 enum memmodel success,
5797 enum memmodel failure)
5798 EXP is the CALL_EXPR.
5799 TARGET is an optional place for us to store the results. */
5801 static rtx
5802 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5803 rtx target)
5805 rtx expect, desired, mem, oldval;
5806 rtx_code_label *label;
5807 enum memmodel success, failure;
5808 tree weak;
5809 bool is_weak;
5810 source_location loc
5811 = expansion_point_location_if_in_system_header (input_location);
5813 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5814 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5816 if (failure > success)
5818 warning_at (loc, OPT_Winvalid_memory_model,
5819 "failure memory model cannot be stronger than success "
5820 "memory model for %<__atomic_compare_exchange%>");
5821 success = MEMMODEL_SEQ_CST;
5824 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5826 warning_at (loc, OPT_Winvalid_memory_model,
5827 "invalid failure memory model for "
5828 "%<__atomic_compare_exchange%>");
5829 failure = MEMMODEL_SEQ_CST;
5830 success = MEMMODEL_SEQ_CST;
5834 if (!flag_inline_atomics)
5835 return NULL_RTX;
5837 /* Expand the operands. */
5838 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5840 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5841 expect = convert_memory_address (Pmode, expect);
5842 expect = gen_rtx_MEM (mode, expect);
5843 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5845 weak = CALL_EXPR_ARG (exp, 3);
5846 is_weak = false;
5847 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5848 is_weak = true;
5850 if (target == const0_rtx)
5851 target = NULL;
5853 /* Lest the rtl backend create a race condition with an improper store
5854 to memory, always create a new pseudo for OLDVAL. */
5855 oldval = NULL;
5857 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5858 is_weak, success, failure))
5859 return NULL_RTX;
5861 /* Conditionally store back to EXPECT, lest we create a race condition
5862 with an improper store to memory. */
5863 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5864 the normal case where EXPECT is totally private, i.e. a register. At
5865 which point the store can be unconditional. */
5866 label = gen_label_rtx ();
5867 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5868 GET_MODE (target), 1, label);
5869 emit_move_insn (expect, oldval);
5870 emit_label (label);
5872 return target;
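/* Editor's sketch: the conditional store-back above is what makes the
   usual source-level CAS loop work (v/expected are illustrative names):

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;  -- on failure, expected was refreshed from memory

   hence EXPECT is updated only when the compare-and-swap fails.  */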
5875 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5876 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5877 call. The weak parameter must be dropped to match the expected parameter
5878 list and the expected argument changed from value to pointer to memory
5879 slot. */
5881 static void
5882 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5884 unsigned int z;
5885 vec<tree, va_gc> *vec;
5887 vec_alloc (vec, 5);
5888 vec->quick_push (gimple_call_arg (call, 0));
5889 tree expected = gimple_call_arg (call, 1);
5890 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5891 TREE_TYPE (expected));
5892 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5893 if (expd != x)
5894 emit_move_insn (x, expd);
5895 tree v = make_tree (TREE_TYPE (expected), x);
5896 vec->quick_push (build1 (ADDR_EXPR,
5897 build_pointer_type (TREE_TYPE (expected)), v));
5898 vec->quick_push (gimple_call_arg (call, 2));
5899 /* Skip the boolean weak parameter. */
5900 for (z = 4; z < 6; z++)
5901 vec->quick_push (gimple_call_arg (call, z));
5902 built_in_function fncode
5903 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5904 + exact_log2 (GET_MODE_SIZE (mode)));
5905 tree fndecl = builtin_decl_explicit (fncode);
5906 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5907 fndecl);
5908 tree exp = build_call_vec (boolean_type_node, fn, vec);
5909 tree lhs = gimple_call_lhs (call);
5910 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5911 if (lhs)
5913 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5914 if (GET_MODE (boolret) != mode)
5915 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5916 x = force_reg (mode, x);
5917 write_complex_part (target, boolret, true);
5918 write_complex_part (target, x, false);
5922 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5924 void
5925 expand_ifn_atomic_compare_exchange (gcall *call)
5927 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5928 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5929 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5930 rtx expect, desired, mem, oldval, boolret;
5931 enum memmodel success, failure;
5932 tree lhs;
5933 bool is_weak;
5934 source_location loc
5935 = expansion_point_location_if_in_system_header (gimple_location (call));
5937 success = get_memmodel (gimple_call_arg (call, 4));
5938 failure = get_memmodel (gimple_call_arg (call, 5));
5940 if (failure > success)
5942 warning_at (loc, OPT_Winvalid_memory_model,
5943 "failure memory model cannot be stronger than success "
5944 "memory model for %<__atomic_compare_exchange%>");
5945 success = MEMMODEL_SEQ_CST;
5948 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5950 warning_at (loc, OPT_Winvalid_memory_model,
5951 "invalid failure memory model for "
5952 "%<__atomic_compare_exchange%>");
5953 failure = MEMMODEL_SEQ_CST;
5954 success = MEMMODEL_SEQ_CST;
5957 if (!flag_inline_atomics)
5959 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5960 return;
5963 /* Expand the operands. */
5964 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5966 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5967 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5969 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5971 boolret = NULL;
5972 oldval = NULL;
5974 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5975 is_weak, success, failure))
5977 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5978 return;
5981 lhs = gimple_call_lhs (call);
5982 if (lhs)
5984 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5985 if (GET_MODE (boolret) != mode)
5986 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5987 write_complex_part (target, boolret, true);
5988 write_complex_part (target, oldval, false);
5992 /* Expand the __atomic_load intrinsic:
5993 TYPE __atomic_load (TYPE *object, enum memmodel)
5994 EXP is the CALL_EXPR.
5995 TARGET is an optional place for us to store the results. */
5997 static rtx
5998 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6000 rtx mem;
6001 enum memmodel model;
6003 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6004 if (is_mm_release (model) || is_mm_acq_rel (model))
6006 source_location loc
6007 = expansion_point_location_if_in_system_header (input_location);
6008 warning_at (loc, OPT_Winvalid_memory_model,
6009 "invalid memory model for %<__atomic_load%>");
6010 model = MEMMODEL_SEQ_CST;
6013 if (!flag_inline_atomics)
6014 return NULL_RTX;
6016 /* Expand the operand. */
6017 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6019 return expand_atomic_load (target, mem, model);
6023 /* Expand the __atomic_store intrinsic:
6024 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6025 EXP is the CALL_EXPR.
6026 TARGET is an optional place for us to store the results. */
6028 static rtx
6029 expand_builtin_atomic_store (machine_mode mode, tree exp)
6031 rtx mem, val;
6032 enum memmodel model;
6034 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6035 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6036 || is_mm_release (model)))
6038 source_location loc
6039 = expansion_point_location_if_in_system_header (input_location);
6040 warning_at (loc, OPT_Winvalid_memory_model,
6041 "invalid memory model for %<__atomic_store%>");
6042 model = MEMMODEL_SEQ_CST;
6045 if (!flag_inline_atomics)
6046 return NULL_RTX;
6048 /* Expand the operands. */
6049 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6050 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6052 return expand_atomic_store (mem, val, model, false);
6055 /* Expand the __atomic_fetch_XXX intrinsic:
6056 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6057 EXP is the CALL_EXPR.
6058 TARGET is an optional place for us to store the results.
6059 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6060 FETCH_AFTER is true if returning the result of the operation.
6061 FETCH_AFTER is false if returning the value before the operation.
6062 IGNORE is true if the result is not used.
6063 EXT_CALL is the correct builtin for an external call if this cannot be
6064 resolved to an instruction sequence. */
6066 static rtx
6067 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6068 enum rtx_code code, bool fetch_after,
6069 bool ignore, enum built_in_function ext_call)
6071 rtx val, mem, ret;
6072 enum memmodel model;
6073 tree fndecl;
6074 tree addr;
6076 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6078 /* Expand the operands. */
6079 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6080 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6082 /* Only try generating instructions if inlining is turned on. */
6083 if (flag_inline_atomics)
6085 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6086 if (ret)
6087 return ret;
6090 /* Return if a different routine isn't needed for the library call. */
6091 if (ext_call == BUILT_IN_NONE)
6092 return NULL_RTX;
6094 /* Change the call to the specified function. */
6095 fndecl = get_callee_fndecl (exp);
6096 addr = CALL_EXPR_FN (exp);
6097 STRIP_NOPS (addr);
6099 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6100 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6102 /* If we will emit code after the call, the call cannot be a tail call.
6103 If it is emitted as a tail call, a barrier is emitted after it, and
6104 then all trailing code is removed. */
6105 if (!ignore)
6106 CALL_EXPR_TAILCALL (exp) = 0;
6108 /* Expand the call here so we can emit trailing code. */
6109 ret = expand_call (exp, target, ignore);
6111 /* Replace the original function just in case it matters. */
6112 TREE_OPERAND (addr, 0) = fndecl;
6114 /* Then issue the arithmetic correction to return the right result. */
6115 if (!ignore)
6117 if (code == NOT)
6119 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6120 OPTAB_LIB_WIDEN);
6121 ret = expand_simple_unop (mode, NOT, ret, target, true);
6123 else
6124 ret = expand_simple_binop (mode, code, ret, val, target, true,
6125 OPTAB_LIB_WIDEN);
6127 return ret;
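/* Editor's note on the arithmetic correction above: the external
   __atomic_fetch_OP routine returns the pre-operation value, so the
   OP-and-fetch result is re-derived from it; e.g. for NAND the AND +
   NOT pair emitted above computes

     ret = ~(ret & val);

   and the plain binary codes compute ret = ret OP val.  */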
6130 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6132 void
6133 expand_ifn_atomic_bit_test_and (gcall *call)
6135 tree ptr = gimple_call_arg (call, 0);
6136 tree bit = gimple_call_arg (call, 1);
6137 tree flag = gimple_call_arg (call, 2);
6138 tree lhs = gimple_call_lhs (call);
6139 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6140 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6141 enum rtx_code code;
6142 optab optab;
6143 struct expand_operand ops[5];
6145 gcc_assert (flag_inline_atomics);
6147 if (gimple_call_num_args (call) == 4)
6148 model = get_memmodel (gimple_call_arg (call, 3));
6150 rtx mem = get_builtin_sync_mem (ptr, mode);
6151 rtx val = expand_expr_force_mode (bit, mode);
6153 switch (gimple_call_internal_fn (call))
6155 case IFN_ATOMIC_BIT_TEST_AND_SET:
6156 code = IOR;
6157 optab = atomic_bit_test_and_set_optab;
6158 break;
6159 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6160 code = XOR;
6161 optab = atomic_bit_test_and_complement_optab;
6162 break;
6163 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6164 code = AND;
6165 optab = atomic_bit_test_and_reset_optab;
6166 break;
6167 default:
6168 gcc_unreachable ();
6171 if (lhs == NULL_TREE)
6173 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6174 val, NULL_RTX, true, OPTAB_DIRECT);
6175 if (code == AND)
6176 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6177 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6178 return;
6181 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6182 enum insn_code icode = direct_optab_handler (optab, mode);
6183 gcc_assert (icode != CODE_FOR_nothing);
6184 create_output_operand (&ops[0], target, mode);
6185 create_fixed_operand (&ops[1], mem);
6186 create_convert_operand_to (&ops[2], val, mode, true);
6187 create_integer_operand (&ops[3], model);
6188 create_integer_operand (&ops[4], integer_onep (flag));
6189 if (maybe_expand_insn (icode, 5, ops))
6190 return;
6192 rtx bitval = val;
6193 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6194 val, NULL_RTX, true, OPTAB_DIRECT);
6195 rtx maskval = val;
6196 if (code == AND)
6197 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6198 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6199 code, model, false);
6200 if (integer_onep (flag))
6202 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6203 NULL_RTX, true, OPTAB_DIRECT);
6204 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6205 true, OPTAB_DIRECT);
6207 else
6208 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6209 OPTAB_DIRECT);
6210 if (result != target)
6211 emit_move_insn (target, result);
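/* Editor's sketch of the fallback just above: when no
   atomic_bit_test_and_* pattern exists, the tested bit is recovered
   from an ordinary fetch-op, e.g. for IFN_ATOMIC_BIT_TEST_AND_SET
   (word/bit/model are illustrative names):

     unsigned mask   = 1u << bit;
     unsigned old    = __atomic_fetch_or (&word, mask, model);
     unsigned result = (old >> bit) & 1;  -- or old & mask when any
                                          -- non-zero value will do  */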
6214 /* Expand an atomic clear operation.
6215 void __atomic_clear (BOOL *obj, enum memmodel)
6216 EXP is the call expression. */
6218 static rtx
6219 expand_builtin_atomic_clear (tree exp)
6221 machine_mode mode;
6222 rtx mem, ret;
6223 enum memmodel model;
6225 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6226 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6227 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6229 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6231 source_location loc
6232 = expansion_point_location_if_in_system_header (input_location);
6233 warning_at (loc, OPT_Winvalid_memory_model,
6234 "invalid memory model for %<__atomic_store%>");
6235 model = MEMMODEL_SEQ_CST;
6238 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6239 Failing that, a store is issued by __atomic_store. The only way this can
6240 fail is if the bool type is larger than a word size. Unlikely, but
6241 handle it anyway for completeness. Assume a single threaded model since
6242 there is no atomic support in this case, and no barriers are required. */
6243 ret = expand_atomic_store (mem, const0_rtx, model, true);
6244 if (!ret)
6245 emit_move_insn (mem, const0_rtx);
6246 return const0_rtx;
6249 /* Expand an atomic test_and_set operation.
6250 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6251 EXP is the call expression. */
6253 static rtx
6254 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6256 rtx mem;
6257 enum memmodel model;
6258 machine_mode mode;
6260 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6261 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6262 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6264 return expand_atomic_test_and_set (target, mem, model);
6268 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6269 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6271 static tree
6272 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6274 int size;
6275 machine_mode mode;
6276 unsigned int mode_align, type_align;
6278 if (TREE_CODE (arg0) != INTEGER_CST)
6279 return NULL_TREE;
6281 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6282 mode = mode_for_size (size, MODE_INT, 0);
6283 mode_align = GET_MODE_ALIGNMENT (mode);
6285 if (TREE_CODE (arg1) == INTEGER_CST)
6287 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6289 /* Either this argument is null, or it's a fake pointer encoding
6290 the alignment of the object. */
6291 val = least_bit_hwi (val);
6292 val *= BITS_PER_UNIT;
6294 if (val == 0 || mode_align < val)
6295 type_align = mode_align;
6296 else
6297 type_align = val;
6299 else
6301 tree ttype = TREE_TYPE (arg1);
6303 /* This function is usually invoked and folded immediately by the front
6304 end before anything else has a chance to look at it. The pointer
6305 parameter at this point is usually cast to a void *, so check for that
6306 and look past the cast. */
6307 if (CONVERT_EXPR_P (arg1)
6308 && POINTER_TYPE_P (ttype)
6309 && VOID_TYPE_P (TREE_TYPE (ttype))
6310 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6311 arg1 = TREE_OPERAND (arg1, 0);
6313 ttype = TREE_TYPE (arg1);
6314 gcc_assert (POINTER_TYPE_P (ttype));
6316 /* Get the underlying type of the object. */
6317 ttype = TREE_TYPE (ttype);
6318 type_align = TYPE_ALIGN (ttype);
6321 /* If the object has smaller alignment, the lock free routines cannot
6322 be used. */
6323 if (type_align < mode_align)
6324 return boolean_false_node;
6326 /* Check if a compare_and_swap pattern exists for the mode which represents
6327 the required size. The pattern is not allowed to fail, so the existence
6328 of the pattern indicates support is present. Also require that an
6329 atomic load exists for the required size. */
6330 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6331 return boolean_true_node;
6332 else
6333 return boolean_false_node;
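/* Editor's example: with a 32-bit compare-and-swap pattern available,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true (a null ARG1 means typical alignment), while

     __atomic_always_lock_free (sizeof (int), (void *) 2)

   folds to false, because the fake pointer encodes 2-byte alignment and
   2 * BITS_PER_UNIT is below the mode alignment.  */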
6336 /* Return true if the parameters to call EXP represent an object which will
6337 always generate lock free instructions. The first argument represents the
6338 size of the object, and the second parameter is a pointer to the object
6339 itself. If NULL is passed for the object, then the result is based on
6340 typical alignment for an object of the specified size. Otherwise return
6341 false. */
6343 static rtx
6344 expand_builtin_atomic_always_lock_free (tree exp)
6346 tree size;
6347 tree arg0 = CALL_EXPR_ARG (exp, 0);
6348 tree arg1 = CALL_EXPR_ARG (exp, 1);
6350 if (TREE_CODE (arg0) != INTEGER_CST)
6352 error ("non-constant argument 1 to __atomic_always_lock_free");
6353 return const0_rtx;
6356 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6357 if (size == boolean_true_node)
6358 return const1_rtx;
6359 return const0_rtx;
6362 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6363 is lock free on this architecture. */
6365 static tree
6366 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6368 if (!flag_inline_atomics)
6369 return NULL_TREE;
6371 /* If it isn't always lock free, don't generate a result. */
6372 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6373 return boolean_true_node;
6375 return NULL_TREE;
6378 /* Return true if the parameters to call EXP represent an object which will
6379 always generate lock free instructions. The first argument represents the
6380 size of the object, and the second parameter is a pointer to the object
6381 itself. If NULL is passed for the object, then the result is based on
6382 typical alignment for an object of the specified size. Otherwise return
6383 NULL.  */
6385 static rtx
6386 expand_builtin_atomic_is_lock_free (tree exp)
6388 tree size;
6389 tree arg0 = CALL_EXPR_ARG (exp, 0);
6390 tree arg1 = CALL_EXPR_ARG (exp, 1);
6392 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6394 error ("non-integer argument 1 to __atomic_is_lock_free");
6395 return NULL_RTX;
6398 if (!flag_inline_atomics)
6399 return NULL_RTX;
6401 /* If the value is known at compile time, return the RTX for it. */
6402 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6403 if (size == boolean_true_node)
6404 return const1_rtx;
6406 return NULL_RTX;
6409 /* Expand the __atomic_thread_fence intrinsic:
6410 void __atomic_thread_fence (enum memmodel)
6411 EXP is the CALL_EXPR. */
6413 static void
6414 expand_builtin_atomic_thread_fence (tree exp)
6416 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6417 expand_mem_thread_fence (model);
6420 /* Expand the __atomic_signal_fence intrinsic:
6421 void __atomic_signal_fence (enum memmodel)
6422 EXP is the CALL_EXPR. */
6424 static void
6425 expand_builtin_atomic_signal_fence (tree exp)
6427 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6428 expand_mem_signal_fence (model);
6431 /* Expand the __sync_synchronize intrinsic. */
6433 static void
6434 expand_builtin_sync_synchronize (void)
6436 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6439 static rtx
6440 expand_builtin_thread_pointer (tree exp, rtx target)
6442 enum insn_code icode;
6443 if (!validate_arglist (exp, VOID_TYPE))
6444 return const0_rtx;
6445 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6446 if (icode != CODE_FOR_nothing)
6448 struct expand_operand op;
6449 /* If the target is not suitable then create a new target. */
6450 if (target == NULL_RTX
6451 || !REG_P (target)
6452 || GET_MODE (target) != Pmode)
6453 target = gen_reg_rtx (Pmode);
6454 create_output_operand (&op, target, Pmode);
6455 expand_insn (icode, 1, &op);
6456 return target;
6458 error ("__builtin_thread_pointer is not supported on this target");
6459 return const0_rtx;
6462 static void
6463 expand_builtin_set_thread_pointer (tree exp)
6465 enum insn_code icode;
6466 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6467 return;
6468 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6469 if (icode != CODE_FOR_nothing)
6471 struct expand_operand op;
6472 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6473 Pmode, EXPAND_NORMAL);
6474 create_input_operand (&op, val, Pmode);
6475 expand_insn (icode, 1, &op);
6476 return;
6478 error ("__builtin_set_thread_pointer is not supported on this target");
6482 /* Emit code to restore the current value of stack. */
6484 static void
6485 expand_stack_restore (tree var)
6487 rtx_insn *prev;
6488 rtx sa = expand_normal (var);
6490 sa = convert_memory_address (Pmode, sa);
6492 prev = get_last_insn ();
6493 emit_stack_restore (SAVE_BLOCK, sa);
6495 record_new_stack_level ();
6497 fixup_args_size_notes (prev, get_last_insn (), 0);
6500 /* Emit code to save the current value of stack. */
6502 static rtx
6503 expand_stack_save (void)
6505 rtx ret = NULL_RTX;
6507 emit_stack_save (SAVE_BLOCK, &ret);
6508 return ret;
6512 /* Expand an expression EXP that calls a built-in function,
6513 with result going to TARGET if that's convenient
6514 (and in mode MODE if that's convenient).
6515 SUBTARGET may be used as the target for computing one of EXP's operands.
6516 IGNORE is nonzero if the value is to be ignored. */
6519 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6520 int ignore)
6522 tree fndecl = get_callee_fndecl (exp);
6523 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6524 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6525 int flags;
6527 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6528 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6530 /* When ASan is enabled, we don't want to expand some memory/string
6531 builtins and rely on libsanitizer's hooks. This allows us to avoid
6532 redundant checks and be sure, that possible overflow will be detected
6533 by ASan. */
6535 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6536 return expand_call (exp, target, ignore);
6538 /* When not optimizing, generate calls to library functions for a certain
6539 set of builtins. */
6540 if (!optimize
6541 && !called_as_built_in (fndecl)
6542 && fcode != BUILT_IN_FORK
6543 && fcode != BUILT_IN_EXECL
6544 && fcode != BUILT_IN_EXECV
6545 && fcode != BUILT_IN_EXECLP
6546 && fcode != BUILT_IN_EXECLE
6547 && fcode != BUILT_IN_EXECVP
6548 && fcode != BUILT_IN_EXECVE
6549 && fcode != BUILT_IN_ALLOCA
6550 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6551 && fcode != BUILT_IN_FREE
6552 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6553 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6554 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6555 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6556 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6557 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6558 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6559 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6560 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6561 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6562 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6563 && fcode != BUILT_IN_CHKP_BNDRET)
6564 return expand_call (exp, target, ignore);
6566 /* The built-in function expanders test for target == const0_rtx
6567 to determine whether the function's result will be ignored. */
6568 if (ignore)
6569 target = const0_rtx;
6571 /* If the result of a pure or const built-in function is ignored, and
6572 none of its arguments are volatile, we can avoid expanding the
6573 built-in call and just evaluate the arguments for side-effects. */
6574 if (target == const0_rtx
6575 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6576 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6578 bool volatilep = false;
6579 tree arg;
6580 call_expr_arg_iterator iter;
6582 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6583 if (TREE_THIS_VOLATILE (arg))
6585 volatilep = true;
6586 break;
6589 if (! volatilep)
6591 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6592 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6593 return const0_rtx;
6597 /* expand_builtin_with_bounds is supposed to be used for
6598 instrumented builtin calls. */
6599 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6601 switch (fcode)
6603 CASE_FLT_FN (BUILT_IN_FABS):
6604 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6605 case BUILT_IN_FABSD32:
6606 case BUILT_IN_FABSD64:
6607 case BUILT_IN_FABSD128:
6608 target = expand_builtin_fabs (exp, target, subtarget);
6609 if (target)
6610 return target;
6611 break;
6613 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6614 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6615 target = expand_builtin_copysign (exp, target, subtarget);
6616 if (target)
6617 return target;
6618 break;
6620 /* Just do a normal library call if we were unable to fold
6621 the values. */
6622 CASE_FLT_FN (BUILT_IN_CABS):
6623 break;
6625 CASE_FLT_FN (BUILT_IN_FMA):
6626 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6627 if (target)
6628 return target;
6629 break;
6631 CASE_FLT_FN (BUILT_IN_ILOGB):
6632 if (! flag_unsafe_math_optimizations)
6633 break;
6634 gcc_fallthrough ();
6635 CASE_FLT_FN (BUILT_IN_ISINF):
6636 CASE_FLT_FN (BUILT_IN_FINITE):
6637 case BUILT_IN_ISFINITE:
6638 case BUILT_IN_ISNORMAL:
6639 target = expand_builtin_interclass_mathfn (exp, target);
6640 if (target)
6641 return target;
6642 break;
6644 CASE_FLT_FN (BUILT_IN_ICEIL):
6645 CASE_FLT_FN (BUILT_IN_LCEIL):
6646 CASE_FLT_FN (BUILT_IN_LLCEIL):
6647 CASE_FLT_FN (BUILT_IN_LFLOOR):
6648 CASE_FLT_FN (BUILT_IN_IFLOOR):
6649 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6650 target = expand_builtin_int_roundingfn (exp, target);
6651 if (target)
6652 return target;
6653 break;
6655 CASE_FLT_FN (BUILT_IN_IRINT):
6656 CASE_FLT_FN (BUILT_IN_LRINT):
6657 CASE_FLT_FN (BUILT_IN_LLRINT):
6658 CASE_FLT_FN (BUILT_IN_IROUND):
6659 CASE_FLT_FN (BUILT_IN_LROUND):
6660 CASE_FLT_FN (BUILT_IN_LLROUND):
6661 target = expand_builtin_int_roundingfn_2 (exp, target);
6662 if (target)
6663 return target;
6664 break;
6666 CASE_FLT_FN (BUILT_IN_POWI):
6667 target = expand_builtin_powi (exp, target);
6668 if (target)
6669 return target;
6670 break;
6672 CASE_FLT_FN (BUILT_IN_CEXPI):
6673 target = expand_builtin_cexpi (exp, target);
6674 gcc_assert (target);
6675 return target;
6677 CASE_FLT_FN (BUILT_IN_SIN):
6678 CASE_FLT_FN (BUILT_IN_COS):
6679 if (! flag_unsafe_math_optimizations)
6680 break;
6681 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6682 if (target)
6683 return target;
6684 break;
6686 CASE_FLT_FN (BUILT_IN_SINCOS):
6687 if (! flag_unsafe_math_optimizations)
6688 break;
6689 target = expand_builtin_sincos (exp);
6690 if (target)
6691 return target;
6692 break;
6694 case BUILT_IN_APPLY_ARGS:
6695 return expand_builtin_apply_args ();
6697 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6698 FUNCTION with a copy of the parameters described by
6699 ARGUMENTS, and ARGSIZE. It returns a block of memory
6700 allocated on the stack into which is stored all the registers
6701 that might possibly be used for returning the result of a
6702 function. ARGUMENTS is the value returned by
6703 __builtin_apply_args. ARGSIZE is the number of bytes of
6704 arguments that must be copied. ??? How should this value be
6705 computed? We'll also need a safe worst case value for varargs
6706 functions. */
6707 case BUILT_IN_APPLY:
6708 if (!validate_arglist (exp, POINTER_TYPE,
6709 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6710 && !validate_arglist (exp, REFERENCE_TYPE,
6711 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6712 return const0_rtx;
6713 else
6715 rtx ops[3];
6717 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6718 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6719 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6721 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6724 /* __builtin_return (RESULT) causes the function to return the
6725 value described by RESULT. RESULT is address of the block of
6726 memory returned by __builtin_apply. */
6727 case BUILT_IN_RETURN:
6728 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6729 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6730 return const0_rtx;
6732 case BUILT_IN_SAVEREGS:
6733 return expand_builtin_saveregs ();
6735 case BUILT_IN_VA_ARG_PACK:
6736 /* All valid uses of __builtin_va_arg_pack () are removed during
6737 inlining. */
6738 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6739 return const0_rtx;
6741 case BUILT_IN_VA_ARG_PACK_LEN:
6742 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6743 inlining. */
6744 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6745 return const0_rtx;
6747 /* Return the address of the first anonymous stack arg. */
6748 case BUILT_IN_NEXT_ARG:
6749 if (fold_builtin_next_arg (exp, false))
6750 return const0_rtx;
6751 return expand_builtin_next_arg ();
6753 case BUILT_IN_CLEAR_CACHE:
6754 target = expand_builtin___clear_cache (exp);
6755 if (target)
6756 return target;
6757 break;
6759 case BUILT_IN_CLASSIFY_TYPE:
6760 return expand_builtin_classify_type (exp);
6762 case BUILT_IN_CONSTANT_P:
6763 return const0_rtx;
6765 case BUILT_IN_FRAME_ADDRESS:
6766 case BUILT_IN_RETURN_ADDRESS:
6767 return expand_builtin_frame_address (fndecl, exp);
6769 /* Returns the address of the area where the structure is returned.
6770 0 otherwise. */
6771 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6772 if (call_expr_nargs (exp) != 0
6773 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6774 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6775 return const0_rtx;
6776 else
6777 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6779 case BUILT_IN_ALLOCA:
6780 case BUILT_IN_ALLOCA_WITH_ALIGN:
6781 target = expand_builtin_alloca (exp);
6782 if (target)
6783 return target;
6784 break;
6786 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6787 return expand_asan_emit_allocas_unpoison (exp);
6789 case BUILT_IN_STACK_SAVE:
6790 return expand_stack_save ();
6792 case BUILT_IN_STACK_RESTORE:
6793 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6794 return const0_rtx;
6796 case BUILT_IN_BSWAP16:
6797 case BUILT_IN_BSWAP32:
6798 case BUILT_IN_BSWAP64:
6799 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6800 if (target)
6801 return target;
6802 break;
6804 CASE_INT_FN (BUILT_IN_FFS):
6805 target = expand_builtin_unop (target_mode, exp, target,
6806 subtarget, ffs_optab);
6807 if (target)
6808 return target;
6809 break;
6811 CASE_INT_FN (BUILT_IN_CLZ):
6812 target = expand_builtin_unop (target_mode, exp, target,
6813 subtarget, clz_optab);
6814 if (target)
6815 return target;
6816 break;
6818 CASE_INT_FN (BUILT_IN_CTZ):
6819 target = expand_builtin_unop (target_mode, exp, target,
6820 subtarget, ctz_optab);
6821 if (target)
6822 return target;
6823 break;
6825 CASE_INT_FN (BUILT_IN_CLRSB):
6826 target = expand_builtin_unop (target_mode, exp, target,
6827 subtarget, clrsb_optab);
6828 if (target)
6829 return target;
6830 break;
6832 CASE_INT_FN (BUILT_IN_POPCOUNT):
6833 target = expand_builtin_unop (target_mode, exp, target,
6834 subtarget, popcount_optab);
6835 if (target)
6836 return target;
6837 break;
6839 CASE_INT_FN (BUILT_IN_PARITY):
6840 target = expand_builtin_unop (target_mode, exp, target,
6841 subtarget, parity_optab);
6842 if (target)
6843 return target;
6844 break;
6846 case BUILT_IN_STRLEN:
6847 target = expand_builtin_strlen (exp, target, target_mode);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_STRCAT:
6853 target = expand_builtin_strcat (exp, target);
6854 if (target)
6855 return target;
6856 break;
6858 case BUILT_IN_STRCPY:
6859 target = expand_builtin_strcpy (exp, target);
6860 if (target)
6861 return target;
6862 break;
6864 case BUILT_IN_STRNCAT:
6865 target = expand_builtin_strncat (exp, target);
6866 if (target)
6867 return target;
6868 break;
6870 case BUILT_IN_STRNCPY:
6871 target = expand_builtin_strncpy (exp, target);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_STPCPY:
6877 target = expand_builtin_stpcpy (exp, target, mode);
6878 if (target)
6879 return target;
6880 break;
6882 case BUILT_IN_STPNCPY:
6883 target = expand_builtin_stpncpy (exp, target);
6884 if (target)
6885 return target;
6886 break;
6888 case BUILT_IN_MEMCHR:
6889 target = expand_builtin_memchr (exp, target);
6890 if (target)
6891 return target;
6892 break;
6894 case BUILT_IN_MEMCPY:
6895 target = expand_builtin_memcpy (exp, target);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_MEMMOVE:
6901 target = expand_builtin_memmove (exp, target);
6902 if (target)
6903 return target;
6904 break;
6906 case BUILT_IN_MEMPCPY:
6907 target = expand_builtin_mempcpy (exp, target, mode);
6908 if (target)
6909 return target;
6910 break;
6912 case BUILT_IN_MEMSET:
6913 target = expand_builtin_memset (exp, target, mode);
6914 if (target)
6915 return target;
6916 break;
6918 case BUILT_IN_BZERO:
6919 target = expand_builtin_bzero (exp);
6920 if (target)
6921 return target;
6922 break;
6924 case BUILT_IN_STRCMP:
6925 target = expand_builtin_strcmp (exp, target);
6926 if (target)
6927 return target;
6928 break;
6930 case BUILT_IN_STRNCMP:
6931 target = expand_builtin_strncmp (exp, target, mode);
6932 if (target)
6933 return target;
6934 break;
6936 case BUILT_IN_BCMP:
6937 case BUILT_IN_MEMCMP:
6938 case BUILT_IN_MEMCMP_EQ:
6939 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6940 if (target)
6941 return target;
6942 if (fcode == BUILT_IN_MEMCMP_EQ)
6944 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6945 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6947 break;
6949 case BUILT_IN_SETJMP:
6950 /* This should have been lowered to the builtins below. */
6951 gcc_unreachable ();
6953 case BUILT_IN_SETJMP_SETUP:
6954 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6955 and the receiver label. */
6956 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6958 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6959 VOIDmode, EXPAND_NORMAL);
6960 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6961 rtx_insn *label_r = label_rtx (label);
6963 /* This is copied from the handling of non-local gotos. */
6964 expand_builtin_setjmp_setup (buf_addr, label_r);
6965 nonlocal_goto_handler_labels
6966 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6967 nonlocal_goto_handler_labels);
6968 /* ??? Do not let expand_label treat us as such since we would
6969 not want to be both on the list of non-local labels and on
6970 the list of forced labels. */
6971 FORCED_LABEL (label) = 0;
6972 return const0_rtx;
6974 break;
6976 case BUILT_IN_SETJMP_RECEIVER:
6977 /* __builtin_setjmp_receiver is passed the receiver label. */
6978 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6980 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6981 rtx_insn *label_r = label_rtx (label);
6983 expand_builtin_setjmp_receiver (label_r);
6984 return const0_rtx;
6986 break;
6988 /* __builtin_longjmp is passed a pointer to an array of five words.
6989 It's similar to the C library longjmp function but works with
6990 __builtin_setjmp above. */
6991 case BUILT_IN_LONGJMP:
6992 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6994 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6995 VOIDmode, EXPAND_NORMAL);
6996 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6998 if (value != const1_rtx)
7000 error ("%<__builtin_longjmp%> second argument must be 1");
7001 return const0_rtx;
7004 expand_builtin_longjmp (buf_addr, value);
7005 return const0_rtx;
7007 break;
7009 case BUILT_IN_NONLOCAL_GOTO:
7010 target = expand_builtin_nonlocal_goto (exp);
7011 if (target)
7012 return target;
7013 break;
7015 /* This updates the setjmp buffer that is its argument with the value
7016 of the current stack pointer. */
7017 case BUILT_IN_UPDATE_SETJMP_BUF:
7018 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7020 rtx buf_addr
7021 = expand_normal (CALL_EXPR_ARG (exp, 0));
7023 expand_builtin_update_setjmp_buf (buf_addr);
7024 return const0_rtx;
7026 break;
7028 case BUILT_IN_TRAP:
7029 expand_builtin_trap ();
7030 return const0_rtx;
7032 case BUILT_IN_UNREACHABLE:
7033 expand_builtin_unreachable ();
7034 return const0_rtx;
7036 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7037 case BUILT_IN_SIGNBITD32:
7038 case BUILT_IN_SIGNBITD64:
7039 case BUILT_IN_SIGNBITD128:
7040 target = expand_builtin_signbit (exp, target);
7041 if (target)
7042 return target;
7043 break;
7045 /* Various hooks for the DWARF 2 __throw routine. */
7046 case BUILT_IN_UNWIND_INIT:
7047 expand_builtin_unwind_init ();
7048 return const0_rtx;
7049 case BUILT_IN_DWARF_CFA:
7050 return virtual_cfa_rtx;
7051 #ifdef DWARF2_UNWIND_INFO
7052 case BUILT_IN_DWARF_SP_COLUMN:
7053 return expand_builtin_dwarf_sp_column ();
7054 case BUILT_IN_INIT_DWARF_REG_SIZES:
7055 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7056 return const0_rtx;
7057 #endif
7058 case BUILT_IN_FROB_RETURN_ADDR:
7059 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7060 case BUILT_IN_EXTRACT_RETURN_ADDR:
7061 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7062 case BUILT_IN_EH_RETURN:
7063 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7064 CALL_EXPR_ARG (exp, 1));
7065 return const0_rtx;
7066 case BUILT_IN_EH_RETURN_DATA_REGNO:
7067 return expand_builtin_eh_return_data_regno (exp);
7068 case BUILT_IN_EXTEND_POINTER:
7069 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7070 case BUILT_IN_EH_POINTER:
7071 return expand_builtin_eh_pointer (exp);
7072 case BUILT_IN_EH_FILTER:
7073 return expand_builtin_eh_filter (exp);
7074 case BUILT_IN_EH_COPY_VALUES:
7075 return expand_builtin_eh_copy_values (exp);
7077 case BUILT_IN_VA_START:
7078 return expand_builtin_va_start (exp);
7079 case BUILT_IN_VA_END:
7080 return expand_builtin_va_end (exp);
7081 case BUILT_IN_VA_COPY:
7082 return expand_builtin_va_copy (exp);
7083 case BUILT_IN_EXPECT:
7084 return expand_builtin_expect (exp, target);
7085 case BUILT_IN_ASSUME_ALIGNED:
7086 return expand_builtin_assume_aligned (exp, target);
7087 case BUILT_IN_PREFETCH:
7088 expand_builtin_prefetch (exp);
7089 return const0_rtx;
7091 case BUILT_IN_INIT_TRAMPOLINE:
7092 return expand_builtin_init_trampoline (exp, true);
7093 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7094 return expand_builtin_init_trampoline (exp, false);
7095 case BUILT_IN_ADJUST_TRAMPOLINE:
7096 return expand_builtin_adjust_trampoline (exp);
7098 case BUILT_IN_INIT_DESCRIPTOR:
7099 return expand_builtin_init_descriptor (exp);
7100 case BUILT_IN_ADJUST_DESCRIPTOR:
7101 return expand_builtin_adjust_descriptor (exp);
7103 case BUILT_IN_FORK:
7104 case BUILT_IN_EXECL:
7105 case BUILT_IN_EXECV:
7106 case BUILT_IN_EXECLP:
7107 case BUILT_IN_EXECLE:
7108 case BUILT_IN_EXECVP:
7109 case BUILT_IN_EXECVE:
7110 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7111 if (target)
7112 return target;
7113 break;
7115 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7116 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7117 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7118 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7119 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7120 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7121 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7122 if (target)
7123 return target;
7124 break;
7126 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7127 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7128 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7129 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7130 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7131 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7132 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7133 if (target)
7134 return target;
7135 break;
7137 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7138 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7139 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7140 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7141 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7142 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7143 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7144 if (target)
7145 return target;
7146 break;
7148 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7149 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7150 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7151 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7152 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7153 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7154 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7155 if (target)
7156 return target;
7157 break;
7159 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7160 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7161 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7162 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7163 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7164 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7165 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7166 if (target)
7167 return target;
7168 break;
7170 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7171 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7172 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7173 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7174 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7175 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7176 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7177 if (target)
7178 return target;
7179 break;
7181 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7182 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7183 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7184 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7185 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7186 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7187 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7188 if (target)
7189 return target;
7190 break;
7192 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7193 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7194 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7195 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7196 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7197 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7198 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7199 if (target)
7200 return target;
7201 break;
7203 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7204 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7205 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7206 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7207 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7208 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7209 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7210 if (target)
7211 return target;
7212 break;
7214 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7215 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7216 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7217 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7218 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7219 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7220 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7221 if (target)
7222 return target;
7223 break;
7225 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7226 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7227 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7228 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7229 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7230 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7231 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7232 if (target)
7233 return target;
7234 break;
7236 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7237 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7238 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7239 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7240 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7241 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7242 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7243 if (target)
7244 return target;
7245 break;
7247 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7248 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7249 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7250 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7251 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7252 if (mode == VOIDmode)
7253 mode = TYPE_MODE (boolean_type_node);
7254 if (!target || !register_operand (target, mode))
7255 target = gen_reg_rtx (mode);
7257 mode = get_builtin_sync_mode
7258 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7259 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7260 if (target)
7261 return target;
7262 break;
7264 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7265 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7266 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7267 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7268 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7269 mode = get_builtin_sync_mode
7270 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7271 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7272 if (target)
7273 return target;
7274 break;
7276 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7277 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7278 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7279 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7280 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7281 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7282 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7283 if (target)
7284 return target;
7285 break;
7287 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7288 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7289 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7290 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7291 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7292 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7293 expand_builtin_sync_lock_release (mode, exp);
7294 return const0_rtx;
7296 case BUILT_IN_SYNC_SYNCHRONIZE:
7297 expand_builtin_sync_synchronize ();
7298 return const0_rtx;
7300 case BUILT_IN_ATOMIC_EXCHANGE_1:
7301 case BUILT_IN_ATOMIC_EXCHANGE_2:
7302 case BUILT_IN_ATOMIC_EXCHANGE_4:
7303 case BUILT_IN_ATOMIC_EXCHANGE_8:
7304 case BUILT_IN_ATOMIC_EXCHANGE_16:
7305 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7306 target = expand_builtin_atomic_exchange (mode, exp, target);
7307 if (target)
7308 return target;
7309 break;
7311 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7312 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7313 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7314 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7315 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7317 unsigned int nargs, z;
7318 vec<tree, va_gc> *vec;
7320 mode =
7321 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7322 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7323 if (target)
7324 return target;
7326 /* If this is turned into an external library call, the weak parameter
7327 must be dropped to match the expected parameter list. */
7328 nargs = call_expr_nargs (exp);
7329 vec_alloc (vec, nargs - 1);
7330 for (z = 0; z < 3; z++)
7331 vec->quick_push (CALL_EXPR_ARG (exp, z));
7332 /* Skip the boolean weak parameter. */
7333 for (z = 4; z < 6; z++)
7334 vec->quick_push (CALL_EXPR_ARG (exp, z));
7335 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7336 break;
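      /* A sketch of the rebuild above: the six-argument builtin call
	   __atomic_compare_exchange_4 (ptr, expected, desired, weak,
					success, failure)
	 becomes the five-argument
	   __atomic_compare_exchange_4 (ptr, expected, desired,
					success, failure)
	 so that it matches the external library routine, which takes no
	 weak argument.  */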
7339 case BUILT_IN_ATOMIC_LOAD_1:
7340 case BUILT_IN_ATOMIC_LOAD_2:
7341 case BUILT_IN_ATOMIC_LOAD_4:
7342 case BUILT_IN_ATOMIC_LOAD_8:
7343 case BUILT_IN_ATOMIC_LOAD_16:
7344 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7345 target = expand_builtin_atomic_load (mode, exp, target);
7346 if (target)
7347 return target;
7348 break;
7350 case BUILT_IN_ATOMIC_STORE_1:
7351 case BUILT_IN_ATOMIC_STORE_2:
7352 case BUILT_IN_ATOMIC_STORE_4:
7353 case BUILT_IN_ATOMIC_STORE_8:
7354 case BUILT_IN_ATOMIC_STORE_16:
7355 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7356 target = expand_builtin_atomic_store (mode, exp);
7357 if (target)
7358 return const0_rtx;
7359 break;
7361 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7362 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7363 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7364 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7365 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7367 enum built_in_function lib;
7368 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7369 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7370 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7371 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7372 ignore, lib);
7373 if (target)
7374 return target;
7375 break;
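      /* For example, for __atomic_add_fetch_4 the fallback computed above
	 is BUILT_IN_ATOMIC_FETCH_ADD_4: if the op-then-fetch form cannot
	 be expanded inline, expand_builtin_atomic_fetch_op can emit the
	 fetch-then-op library call instead and re-apply the addition to
	 its result.  */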
7377 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7378 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7379 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7380 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7381 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7383 enum built_in_function lib;
7384 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7385 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7386 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7387 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7388 ignore, lib);
7389 if (target)
7390 return target;
7391 break;
7393 case BUILT_IN_ATOMIC_AND_FETCH_1:
7394 case BUILT_IN_ATOMIC_AND_FETCH_2:
7395 case BUILT_IN_ATOMIC_AND_FETCH_4:
7396 case BUILT_IN_ATOMIC_AND_FETCH_8:
7397 case BUILT_IN_ATOMIC_AND_FETCH_16:
7399 enum built_in_function lib;
7400 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7401 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7402 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7403 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7404 ignore, lib);
7405 if (target)
7406 return target;
7407 break;
7409 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7410 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7411 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7412 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7413 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7415 enum built_in_function lib;
7416 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7417 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7418 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7419 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7420 ignore, lib);
7421 if (target)
7422 return target;
7423 break;
7425 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7426 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7427 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7428 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7429 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7431 enum built_in_function lib;
7432 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7433 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7434 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7435 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7436 ignore, lib);
7437 if (target)
7438 return target;
7439 break;
7441 case BUILT_IN_ATOMIC_OR_FETCH_1:
7442 case BUILT_IN_ATOMIC_OR_FETCH_2:
7443 case BUILT_IN_ATOMIC_OR_FETCH_4:
7444 case BUILT_IN_ATOMIC_OR_FETCH_8:
7445 case BUILT_IN_ATOMIC_OR_FETCH_16:
7447 enum built_in_function lib;
7448 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7449 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7450 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7451 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7452 ignore, lib);
7453 if (target)
7454 return target;
7455 break;
7457 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7458 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7459 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7460 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7461 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7462 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7463 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7464 ignore, BUILT_IN_NONE);
7465 if (target)
7466 return target;
7467 break;
7469 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7470 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7471 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7472 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7473 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7474 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7475 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7476 ignore, BUILT_IN_NONE);
7477 if (target)
7478 return target;
7479 break;
7481 case BUILT_IN_ATOMIC_FETCH_AND_1:
7482 case BUILT_IN_ATOMIC_FETCH_AND_2:
7483 case BUILT_IN_ATOMIC_FETCH_AND_4:
7484 case BUILT_IN_ATOMIC_FETCH_AND_8:
7485 case BUILT_IN_ATOMIC_FETCH_AND_16:
7486 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7487 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7488 ignore, BUILT_IN_NONE);
7489 if (target)
7490 return target;
7491 break;
7493 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7494 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7495 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7496 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7497 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7498 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7499 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7500 ignore, BUILT_IN_NONE);
7501 if (target)
7502 return target;
7503 break;
7505 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7506 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7507 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7508 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7509 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7510 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7511 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7512 ignore, BUILT_IN_NONE);
7513 if (target)
7514 return target;
7515 break;
7517 case BUILT_IN_ATOMIC_FETCH_OR_1:
7518 case BUILT_IN_ATOMIC_FETCH_OR_2:
7519 case BUILT_IN_ATOMIC_FETCH_OR_4:
7520 case BUILT_IN_ATOMIC_FETCH_OR_8:
7521 case BUILT_IN_ATOMIC_FETCH_OR_16:
7522 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7523 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7524 ignore, BUILT_IN_NONE);
7525 if (target)
7526 return target;
7527 break;
7529 case BUILT_IN_ATOMIC_TEST_AND_SET:
7530 return expand_builtin_atomic_test_and_set (exp, target);
7532 case BUILT_IN_ATOMIC_CLEAR:
7533 return expand_builtin_atomic_clear (exp);
7535 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7536 return expand_builtin_atomic_always_lock_free (exp);
7538 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7539 target = expand_builtin_atomic_is_lock_free (exp);
7540 if (target)
7541 return target;
7542 break;
7544 case BUILT_IN_ATOMIC_THREAD_FENCE:
7545 expand_builtin_atomic_thread_fence (exp);
7546 return const0_rtx;
7548 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7549 expand_builtin_atomic_signal_fence (exp);
7550 return const0_rtx;
7552 case BUILT_IN_OBJECT_SIZE:
7553 return expand_builtin_object_size (exp);
7555 case BUILT_IN_MEMCPY_CHK:
7556 case BUILT_IN_MEMPCPY_CHK:
7557 case BUILT_IN_MEMMOVE_CHK:
7558 case BUILT_IN_MEMSET_CHK:
7559 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7560 if (target)
7561 return target;
7562 break;
7564 case BUILT_IN_STRCPY_CHK:
7565 case BUILT_IN_STPCPY_CHK:
7566 case BUILT_IN_STRNCPY_CHK:
7567 case BUILT_IN_STPNCPY_CHK:
7568 case BUILT_IN_STRCAT_CHK:
7569 case BUILT_IN_STRNCAT_CHK:
7570 case BUILT_IN_SNPRINTF_CHK:
7571 case BUILT_IN_VSNPRINTF_CHK:
7572 maybe_emit_chk_warning (exp, fcode);
7573 break;
7575 case BUILT_IN_SPRINTF_CHK:
7576 case BUILT_IN_VSPRINTF_CHK:
7577 maybe_emit_sprintf_chk_warning (exp, fcode);
7578 break;
7580 case BUILT_IN_FREE:
7581 if (warn_free_nonheap_object)
7582 maybe_emit_free_warning (exp);
7583 break;
7585 case BUILT_IN_THREAD_POINTER:
7586 return expand_builtin_thread_pointer (exp, target);
7588 case BUILT_IN_SET_THREAD_POINTER:
7589 expand_builtin_set_thread_pointer (exp);
7590 return const0_rtx;
7592 case BUILT_IN_CILK_DETACH:
7593 expand_builtin_cilk_detach (exp);
7594 return const0_rtx;
7596 case BUILT_IN_CILK_POP_FRAME:
7597 expand_builtin_cilk_pop_frame (exp);
7598 return const0_rtx;
7600 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7601 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7602 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7603 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7604 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7605 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7606 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7607 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7608 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7609 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7610 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7611 /* We allow user CHKP builtins if Pointer Bounds
7612 Checker is off. */
7613 if (!chkp_function_instrumented_p (current_function_decl))
7615 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7616 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7617 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7618 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7619 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7620 return expand_normal (CALL_EXPR_ARG (exp, 0));
7621 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7622 return expand_normal (size_zero_node);
7623 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7624 return expand_normal (size_int (-1));
7625 else
7626 return const0_rtx;
7628 /* FALLTHROUGH */
7630 case BUILT_IN_CHKP_BNDMK:
7631 case BUILT_IN_CHKP_BNDSTX:
7632 case BUILT_IN_CHKP_BNDCL:
7633 case BUILT_IN_CHKP_BNDCU:
7634 case BUILT_IN_CHKP_BNDLDX:
7635 case BUILT_IN_CHKP_BNDRET:
7636 case BUILT_IN_CHKP_INTERSECT:
7637 case BUILT_IN_CHKP_NARROW:
7638 case BUILT_IN_CHKP_EXTRACT_LOWER:
7639 case BUILT_IN_CHKP_EXTRACT_UPPER:
7640 /* Software implementation of Pointer Bounds Checker is NYI.
7641 Target support is required. */
7642 error ("Your target platform does not support -fcheck-pointer-bounds");
7643 break;
7645 case BUILT_IN_ACC_ON_DEVICE:
7646 /* Do library call, if we failed to expand the builtin when
7647 folding. */
7648 break;
7650 default: /* just do library call, if unknown builtin */
7651 break;
7654 /* The switch statement above can drop through to cause the function
7655 to be called normally. */
7656 return expand_call (exp, target, ignore);
7659 /* Similar to expand_builtin but is used for instrumented calls. */
7661 rtx
7662 expand_builtin_with_bounds (tree exp, rtx target,
7663 rtx subtarget ATTRIBUTE_UNUSED,
7664 machine_mode mode, int ignore)
7666 tree fndecl = get_callee_fndecl (exp);
7667 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7669 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7671 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7672 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7674 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7675 && fcode < END_CHKP_BUILTINS);
7677 switch (fcode)
7679 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7680 target = expand_builtin_memcpy_with_bounds (exp, target);
7681 if (target)
7682 return target;
7683 break;
7685 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7686 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7687 if (target)
7688 return target;
7689 break;
7691 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7692 target = expand_builtin_memset_with_bounds (exp, target, mode);
7693 if (target)
7694 return target;
7695 break;
7697 default:
7698 break;
7701 /* The switch statement above can drop through to cause the function
7702 to be called normally. */
7703 return expand_call (exp, target, ignore);
7706 /* Determine whether a tree node represents a call to a built-in
7707 function. If the tree T is a call to a built-in function with
7708 the right number of arguments of the appropriate types, return
7709 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7710 Otherwise the return value is END_BUILTINS. */
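/* For instance, sqrt (x) with X a double yields BUILT_IN_SQRT, while
   a call whose argument types do not match the builtin's parameter
   list (say, a pointer passed where a double is expected) falls out
   through the checks below and yields END_BUILTINS.  */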
7712 enum built_in_function
7713 builtin_mathfn_code (const_tree t)
7715 const_tree fndecl, arg, parmlist;
7716 const_tree argtype, parmtype;
7717 const_call_expr_arg_iterator iter;
7719 if (TREE_CODE (t) != CALL_EXPR
7720 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7721 return END_BUILTINS;
7723 fndecl = get_callee_fndecl (t);
7724 if (fndecl == NULL_TREE
7725 || TREE_CODE (fndecl) != FUNCTION_DECL
7726 || ! DECL_BUILT_IN (fndecl)
7727 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7728 return END_BUILTINS;
7730 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7731 init_const_call_expr_arg_iterator (t, &iter);
7732 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7734 /* If a function doesn't take a variable number of arguments,
7735 the last element in the list will have type `void'. */
7736 parmtype = TREE_VALUE (parmlist);
7737 if (VOID_TYPE_P (parmtype))
7739 if (more_const_call_expr_args_p (&iter))
7740 return END_BUILTINS;
7741 return DECL_FUNCTION_CODE (fndecl);
7744 if (! more_const_call_expr_args_p (&iter))
7745 return END_BUILTINS;
7747 arg = next_const_call_expr_arg (&iter);
7748 argtype = TREE_TYPE (arg);
7750 if (SCALAR_FLOAT_TYPE_P (parmtype))
7752 if (! SCALAR_FLOAT_TYPE_P (argtype))
7753 return END_BUILTINS;
7755 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7757 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7758 return END_BUILTINS;
7760 else if (POINTER_TYPE_P (parmtype))
7762 if (! POINTER_TYPE_P (argtype))
7763 return END_BUILTINS;
7765 else if (INTEGRAL_TYPE_P (parmtype))
7767 if (! INTEGRAL_TYPE_P (argtype))
7768 return END_BUILTINS;
7770 else
7771 return END_BUILTINS;
7774 /* Variable-length argument list. */
7775 return DECL_FUNCTION_CODE (fndecl);
7778 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7779 evaluate to a constant. */
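/* For example, __builtin_constant_p (3 * 4) folds to 1 here, while a
   plain variable argument yields NULL_TREE, leaving later passes free
   to still prove the value constant.  */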
7781 static tree
7782 fold_builtin_constant_p (tree arg)
7784 /* We return 1 for a numeric type that's known to be a constant
7785 value at compile-time or for an aggregate type that's a
7786 literal constant. */
7787 STRIP_NOPS (arg);
7789 /* If we know this is a constant, return the constant 1. */
7790 if (CONSTANT_CLASS_P (arg)
7791 || (TREE_CODE (arg) == CONSTRUCTOR
7792 && TREE_CONSTANT (arg)))
7793 return integer_one_node;
7794 if (TREE_CODE (arg) == ADDR_EXPR)
7796 tree op = TREE_OPERAND (arg, 0);
7797 if (TREE_CODE (op) == STRING_CST
7798 || (TREE_CODE (op) == ARRAY_REF
7799 && integer_zerop (TREE_OPERAND (op, 1))
7800 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7801 return integer_one_node;
7804 /* If this expression has side effects, show we don't know it to be a
7805 constant. Likewise if it's a pointer or aggregate type, since in
7806 those cases we only want literals, as those are only optimized
7807 when generating RTL, not later.
7808 And finally, if we are compiling an initializer, not code, we
7809 need to return a definite result now; there's not going to be any
7810 more optimization done. */
7811 if (TREE_SIDE_EFFECTS (arg)
7812 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7813 || POINTER_TYPE_P (TREE_TYPE (arg))
7814 || cfun == 0
7815 || folding_initializer
7816 || force_folding_builtin_constant_p)
7817 return integer_zero_node;
7819 return NULL_TREE;
7822 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7823 return it as a truthvalue. */
7825 static tree
7826 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7827 tree predictor)
7829 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7831 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7832 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7833 ret_type = TREE_TYPE (TREE_TYPE (fn));
7834 pred_type = TREE_VALUE (arg_types);
7835 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7837 pred = fold_convert_loc (loc, pred_type, pred);
7838 expected = fold_convert_loc (loc, expected_type, expected);
7839 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7840 predictor);
7842 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7843 build_int_cst (ret_type, 0));
7846 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7847 NULL_TREE if no simplification is possible. */
7849 tree
7850 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7852 tree inner, fndecl, inner_arg0;
7853 enum tree_code code;
7855 /* Distribute the expected value over short-circuiting operators.
7856 See through the cast from truthvalue_type_node to long. */
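  /* Roughly: __builtin_expect (a && b, 1) is rewritten below as
       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
     so that each arm of the short-circuit carries the expectation.  */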
7857 inner_arg0 = arg0;
7858 while (CONVERT_EXPR_P (inner_arg0)
7859 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7860 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7861 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7863 /* If this is a builtin_expect within a builtin_expect, keep the
7864 inner one. See through a comparison against a constant. It
7865 might have been added to create a truthvalue. */
7866 inner = inner_arg0;
7868 if (COMPARISON_CLASS_P (inner)
7869 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7870 inner = TREE_OPERAND (inner, 0);
7872 if (TREE_CODE (inner) == CALL_EXPR
7873 && (fndecl = get_callee_fndecl (inner))
7874 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7875 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7876 return arg0;
7878 inner = inner_arg0;
7879 code = TREE_CODE (inner);
7880 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7882 tree op0 = TREE_OPERAND (inner, 0);
7883 tree op1 = TREE_OPERAND (inner, 1);
7885 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7886 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7887 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7889 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7892 /* If the argument isn't invariant then there's nothing else we can do. */
7893 if (!TREE_CONSTANT (inner_arg0))
7894 return NULL_TREE;
7896 /* If we expect that a comparison against the argument will fold to
7897 a constant return the constant. In practice, this means a true
7898 constant or the address of a non-weak symbol. */
7899 inner = inner_arg0;
7900 STRIP_NOPS (inner);
7901 if (TREE_CODE (inner) == ADDR_EXPR)
7905 inner = TREE_OPERAND (inner, 0);
7907 while (TREE_CODE (inner) == COMPONENT_REF
7908 || TREE_CODE (inner) == ARRAY_REF);
7909 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7910 return NULL_TREE;
7913 /* Otherwise, ARG0 already has the proper type for the return value. */
7914 return arg0;
7917 /* Fold a call to __builtin_classify_type with argument ARG. */
7919 static tree
7920 fold_builtin_classify_type (tree arg)
7922 if (arg == 0)
7923 return build_int_cst (integer_type_node, no_type_class);
7925 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7928 /* Fold a call to __builtin_strlen with argument ARG. */
7930 static tree
7931 fold_builtin_strlen (location_t loc, tree type, tree arg)
7933 if (!validate_arg (arg, POINTER_TYPE))
7934 return NULL_TREE;
7935 else
7937 tree len = c_strlen (arg, 0);
7939 if (len)
7940 return fold_convert_loc (loc, type, len);
7942 return NULL_TREE;
7946 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7948 static tree
7949 fold_builtin_inf (location_t loc, tree type, int warn)
7951 REAL_VALUE_TYPE real;
7953 /* __builtin_inff is intended to be usable to define INFINITY on all
7954 targets. If an infinity is not available, INFINITY expands "to a
7955 positive constant of type float that overflows at translation
7956 time", footnote "In this case, using INFINITY will violate the
7957 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7958 Thus we pedwarn to ensure this constraint violation is
7959 diagnosed. */
7960 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7961 pedwarn (loc, 0, "target format does not support infinity");
7963 real_inf (&real);
7964 return build_real (type, real);
7967 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7968 NULL_TREE if no simplification can be made. */
7970 static tree
7971 fold_builtin_sincos (location_t loc,
7972 tree arg0, tree arg1, tree arg2)
7974 tree type;
7975 tree fndecl, call = NULL_TREE;
7977 if (!validate_arg (arg0, REAL_TYPE)
7978 || !validate_arg (arg1, POINTER_TYPE)
7979 || !validate_arg (arg2, POINTER_TYPE))
7980 return NULL_TREE;
7982 type = TREE_TYPE (arg0);
7984 /* Canonicalize sincos to cexpi. */
7985 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7986 if (fn == END_BUILTINS)
7987 return NULL_TREE;
7989 /* Calculate the result when the argument is a constant. */
7990 if (TREE_CODE (arg0) == REAL_CST)
7992 tree complex_type = build_complex_type (type);
7993 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7995 if (!call)
7997 if (!targetm.libc_has_function (function_c99_math_complex)
7998 || !builtin_decl_implicit_p (fn))
7999 return NULL_TREE;
8000 fndecl = builtin_decl_explicit (fn);
8001 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8002 call = builtin_save_expr (call);
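  /* Build, roughly, (*arg1 = __imag__ call, *arg2 = __real__ call):
     sincos (x, &s, &c) becomes a single cexpi (x) whose imaginary part
     supplies the sine and whose real part supplies the cosine.  */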
8005 return build2 (COMPOUND_EXPR, void_type_node,
8006 build2 (MODIFY_EXPR, void_type_node,
8007 build_fold_indirect_ref_loc (loc, arg1),
8008 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8009 build2 (MODIFY_EXPR, void_type_node,
8010 build_fold_indirect_ref_loc (loc, arg2),
8011 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8014 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8015 Return NULL_TREE if no simplification can be made. */
8017 static tree
8018 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8020 if (!validate_arg (arg1, POINTER_TYPE)
8021 || !validate_arg (arg2, POINTER_TYPE)
8022 || !validate_arg (len, INTEGER_TYPE))
8023 return NULL_TREE;
8025 /* If the LEN parameter is zero, return zero. */
8026 if (integer_zerop (len))
8027 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8028 arg1, arg2);
8030 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8031 if (operand_equal_p (arg1, arg2, 0))
8032 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8034 /* If the LEN parameter is one, return an expression corresponding to
8035 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8036 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8038 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8039 tree cst_uchar_ptr_node
8040 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8042 tree ind1
8043 = fold_convert_loc (loc, integer_type_node,
8044 build1 (INDIRECT_REF, cst_uchar_node,
8045 fold_convert_loc (loc,
8046 cst_uchar_ptr_node,
8047 arg1)));
8048 tree ind2
8049 = fold_convert_loc (loc, integer_type_node,
8050 build1 (INDIRECT_REF, cst_uchar_node,
8051 fold_convert_loc (loc,
8052 cst_uchar_ptr_node,
8053 arg2)));
8054 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8057 return NULL_TREE;
8060 /* Fold a call to builtin isascii with argument ARG. */
8062 static tree
8063 fold_builtin_isascii (location_t loc, tree arg)
8065 if (!validate_arg (arg, INTEGER_TYPE))
8066 return NULL_TREE;
8067 else
8069 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8070 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8071 build_int_cst (integer_type_node,
8072 ~ (unsigned HOST_WIDE_INT) 0x7f));
8073 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8074 arg, integer_zero_node);
8078 /* Fold a call to builtin toascii with argument ARG. */
8080 static tree
8081 fold_builtin_toascii (location_t loc, tree arg)
8083 if (!validate_arg (arg, INTEGER_TYPE))
8084 return NULL_TREE;
8086 /* Transform toascii(c) -> (c & 0x7f). */
8087 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8088 build_int_cst (integer_type_node, 0x7f));
8091 /* Fold a call to builtin isdigit with argument ARG. */
8093 static tree
8094 fold_builtin_isdigit (location_t loc, tree arg)
8096 if (!validate_arg (arg, INTEGER_TYPE))
8097 return NULL_TREE;
8098 else
8100 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8101 /* According to the C standard, isdigit is unaffected by locale.
8102 However, it definitely is affected by the target character set. */
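      /* The unsigned subtraction below lets a single comparison cover
	 both bounds: if c < '0' the difference wraps around to a huge
	 value, so (unsigned) (c - '0') <= 9 is false both for c < '0'
	 and for c > '9'.  */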
8103 unsigned HOST_WIDE_INT target_digit0
8104 = lang_hooks.to_target_charset ('0');
8106 if (target_digit0 == 0)
8107 return NULL_TREE;
8109 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8110 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8111 build_int_cst (unsigned_type_node, target_digit0));
8112 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8113 build_int_cst (unsigned_type_node, 9));
8117 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8119 static tree
8120 fold_builtin_fabs (location_t loc, tree arg, tree type)
8122 if (!validate_arg (arg, REAL_TYPE))
8123 return NULL_TREE;
8125 arg = fold_convert_loc (loc, type, arg);
8126 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8129 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8131 static tree
8132 fold_builtin_abs (location_t loc, tree arg, tree type)
8134 if (!validate_arg (arg, INTEGER_TYPE))
8135 return NULL_TREE;
8137 arg = fold_convert_loc (loc, type, arg);
8138 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8141 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8143 static tree
8144 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8146 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8147 if (validate_arg (arg0, REAL_TYPE)
8148 && validate_arg (arg1, REAL_TYPE)
8149 && validate_arg (arg2, REAL_TYPE)
8150 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8151 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8153 return NULL_TREE;
8156 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8158 static tree
8159 fold_builtin_carg (location_t loc, tree arg, tree type)
8161 if (validate_arg (arg, COMPLEX_TYPE)
8162 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8164 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8166 if (atan2_fn)
8168 tree new_arg = builtin_save_expr (arg);
8169 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8170 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8171 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8175 return NULL_TREE;
8178 /* Fold a call to builtin frexp, we can assume the base is 2. */
8180 static tree
8181 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8183 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8184 return NULL_TREE;
8186 STRIP_NOPS (arg0);
8188 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8189 return NULL_TREE;
8191 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8193 /* Proceed if a valid pointer type was passed in. */
8194 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8196 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8197 tree frac, exp;
8199 switch (value->cl)
8201 case rvc_zero:
8202 /* For +-0, return (*exp = 0, +-0). */
8203 exp = integer_zero_node;
8204 frac = arg0;
8205 break;
8206 case rvc_nan:
8207 case rvc_inf:
8208 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8209 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8210 case rvc_normal:
8212 /* Since the frexp function always expects base 2, and in
8213 GCC normalized significands are already in the range
8214 [0.5, 1.0), we have exactly what frexp wants. */
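	  /* E.g. for arg0 == 8.0 the stored significand is 0.5 and
	     REAL_EXP is 4, so frexp (8.0, &e) folds to (*e = 4, 0.5),
	     since 8.0 == 0.5 * 2**4.  */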
8215 REAL_VALUE_TYPE frac_rvt = *value;
8216 SET_REAL_EXP (&frac_rvt, 0);
8217 frac = build_real (rettype, frac_rvt);
8218 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8220 break;
8221 default:
8222 gcc_unreachable ();
8225 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8226 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8227 TREE_SIDE_EFFECTS (arg1) = 1;
8228 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8231 return NULL_TREE;
8234 /* Fold a call to builtin modf. */
8236 static tree
8237 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8239 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8240 return NULL_TREE;
8242 STRIP_NOPS (arg0);
8244 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8245 return NULL_TREE;
8247 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8249 /* Proceed if a valid pointer type was passed in. */
8250 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8252 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8253 REAL_VALUE_TYPE trunc, frac;
8255 switch (value->cl)
8257 case rvc_nan:
8258 case rvc_zero:
8259 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8260 trunc = frac = *value;
8261 break;
8262 case rvc_inf:
8263 /* For +-Inf, return (*arg1 = arg0, +-0). */
8264 frac = dconst0;
8265 frac.sign = value->sign;
8266 trunc = *value;
8267 break;
8268 case rvc_normal:
8269 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8270 real_trunc (&trunc, VOIDmode, value);
8271 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8272 /* If the original number was negative and already
8273 integral, then the fractional part is -0.0. */
8274 if (value->sign && frac.cl == rvc_zero)
8275 frac.sign = value->sign;
8276 break;
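	  /* E.g. modf (-3.5, &i) folds to (*i = -3.0, -0.5); the sign
	     fix above keeps modf (-2.0, &i) returning -0.0 rather than
	     +0.0 for the fractional part.  */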
8279 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8280 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8281 build_real (rettype, trunc));
8282 TREE_SIDE_EFFECTS (arg1) = 1;
8283 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8284 build_real (rettype, frac));
8287 return NULL_TREE;
8290 /* Given a location LOC, an interclass builtin function decl FNDECL
8291 and its single argument ARG, return a folded expression computing
8292 the same, or NULL_TREE if we either couldn't or didn't want to fold
8293 (the latter happens if there's an RTL instruction available). */
8295 static tree
8296 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8298 machine_mode mode;
8300 if (!validate_arg (arg, REAL_TYPE))
8301 return NULL_TREE;
8303 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8304 return NULL_TREE;
8306 mode = TYPE_MODE (TREE_TYPE (arg));
8308 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8310 /* If there is no optab, try generic code. */
8311 switch (DECL_FUNCTION_CODE (fndecl))
8313 tree result;
8315 CASE_FLT_FN (BUILT_IN_ISINF):
8317 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
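	/* get_max_float below writes the largest finite value of MODE as
	   a hexadecimal string (for IEEE double this is DBL_MAX,
	   i.e. (2 - 2**-52) * 2**1023), which real_from_string parses
	   back into R for the comparison.  */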
8318 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8319 tree type = TREE_TYPE (arg);
8320 REAL_VALUE_TYPE r;
8321 char buf[128];
8323 if (is_ibm_extended)
8325 /* NaN and Inf are encoded in the high-order double value
8326 only. The low-order value is not significant. */
8327 type = double_type_node;
8328 mode = DFmode;
8329 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8331 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8332 real_from_string (&r, buf);
8333 result = build_call_expr (isgr_fn, 2,
8334 fold_build1_loc (loc, ABS_EXPR, type, arg),
8335 build_real (type, r));
8336 return result;
8338 CASE_FLT_FN (BUILT_IN_FINITE):
8339 case BUILT_IN_ISFINITE:
8341 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8342 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8343 tree type = TREE_TYPE (arg);
8344 REAL_VALUE_TYPE r;
8345 char buf[128];
8347 if (is_ibm_extended)
8349 /* NaN and Inf are encoded in the high-order double value
8350 only. The low-order value is not significant. */
8351 type = double_type_node;
8352 mode = DFmode;
8353 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8355 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8356 real_from_string (&r, buf);
8357 result = build_call_expr (isle_fn, 2,
8358 fold_build1_loc (loc, ABS_EXPR, type, arg),
8359 build_real (type, r));
8360 /*result = fold_build2_loc (loc, UNGT_EXPR,
8361 TREE_TYPE (TREE_TYPE (fndecl)),
8362 fold_build1_loc (loc, ABS_EXPR, type, arg),
8363 build_real (type, r));
8364 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8365 TREE_TYPE (TREE_TYPE (fndecl)),
8366 result);*/
8367 return result;
8369 case BUILT_IN_ISNORMAL:
8371 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8372 islessequal(fabs(x),DBL_MAX). */
8373 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8374 tree type = TREE_TYPE (arg);
8375 tree orig_arg, max_exp, min_exp;
8376 machine_mode orig_mode = mode;
8377 REAL_VALUE_TYPE rmax, rmin;
8378 char buf[128];
8380 orig_arg = arg = builtin_save_expr (arg);
8381 if (is_ibm_extended)
8383 /* Use double to test the normal range of IBM extended
8384 precision. Emin for IBM extended precision is
8385 different to emin for IEEE double, being 53 higher
8386 since the low double exponent is at least 53 lower
8387 than the high double exponent. */
8388 type = double_type_node;
8389 mode = DFmode;
8390 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8392 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8394 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8395 real_from_string (&rmax, buf);
8396 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8397 real_from_string (&rmin, buf);
8398 max_exp = build_real (type, rmax);
8399 min_exp = build_real (type, rmin);
8401 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8402 if (is_ibm_extended)
8404 /* Testing the high end of the range is done just using
8405 the high double, using the same test as isfinite().
8406 For the subnormal end of the range we first test the
8407 high double, then if its magnitude is equal to the
8408 limit of 0x1p-969, we test whether the low double is
8409 non-zero and opposite sign to the high double. */
8410 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8411 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8412 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8413 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8414 arg, min_exp);
8415 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8416 complex_double_type_node, orig_arg);
8417 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8418 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8419 tree zero = build_real (type, dconst0);
8420 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8421 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8422 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8423 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8424 fold_build3 (COND_EXPR,
8425 integer_type_node,
8426 hilt, logt, lolt));
8427 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8428 eq_min, ok_lo);
8429 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8430 gt_min, eq_min);
8432 else
8434 tree const isge_fn
8435 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8436 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8438 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8439 max_exp, min_exp);
8440 return result;
8442 default:
8443 break;
8446 return NULL_TREE;
8449 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8450 ARG is the argument for the call. */
8452 static tree
8453 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8455 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8457 if (!validate_arg (arg, REAL_TYPE))
8458 return NULL_TREE;
8460 switch (builtin_index)
8462 case BUILT_IN_ISINF:
8463 if (!HONOR_INFINITIES (arg))
8464 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8466 return NULL_TREE;
8468 case BUILT_IN_ISINF_SIGN:
8470 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8471 /* In a boolean context, GCC will fold the inner COND_EXPR to
8472 1. So e.g. "if (isinf_sign(x))" would be folded to just
8473 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8474 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8475 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8476 tree tmp = NULL_TREE;
8478 arg = builtin_save_expr (arg);
8480 if (signbit_fn && isinf_fn)
8482 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8483 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8485 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8486 signbit_call, integer_zero_node);
8487 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8488 isinf_call, integer_zero_node);
8490 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8491 integer_minus_one_node, integer_one_node);
8492 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8493 isinf_call, tmp,
8494 integer_zero_node);
8497 return tmp;
8500 case BUILT_IN_ISFINITE:
8501 if (!HONOR_NANS (arg)
8502 && !HONOR_INFINITIES (arg))
8503 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8505 return NULL_TREE;
8507 case BUILT_IN_ISNAN:
8508 if (!HONOR_NANS (arg))
8509 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8512 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8513 if (is_ibm_extended)
8515 /* NaN and Inf are encoded in the high-order double value
8516 only. The low-order value is not significant. */
8517 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8520 arg = builtin_save_expr (arg);
8521 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8523 default:
8524 gcc_unreachable ();
8528 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8529 This builtin will generate code to return the appropriate floating
8530 point classification depending on the value of the floating point
8531 number passed in. The possible return values must be supplied as
8532 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8533 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8534 one floating point argument which is "type generic". */
8536 static tree
8537 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8539 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8540 arg, type, res, tmp;
8541 machine_mode mode;
8542 REAL_VALUE_TYPE r;
8543 char buf[128];
8545 /* Verify the required arguments in the original call. */
8546 if (nargs != 6
8547 || !validate_arg (args[0], INTEGER_TYPE)
8548 || !validate_arg (args[1], INTEGER_TYPE)
8549 || !validate_arg (args[2], INTEGER_TYPE)
8550 || !validate_arg (args[3], INTEGER_TYPE)
8551 || !validate_arg (args[4], INTEGER_TYPE)
8552 || !validate_arg (args[5], REAL_TYPE))
8553 return NULL_TREE;
8555 fp_nan = args[0];
8556 fp_infinite = args[1];
8557 fp_normal = args[2];
8558 fp_subnormal = args[3];
8559 fp_zero = args[4];
8560 arg = args[5];
8561 type = TREE_TYPE (arg);
8562 mode = TYPE_MODE (type);
8563 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8565 /* fpclassify(x) ->
8566 isnan(x) ? FP_NAN :
8567 (fabs(x) == Inf ? FP_INFINITE :
8568 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8569 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
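  /* For IEEE double, REAL_MODE_FORMAT (mode)->emin is -1021, so the
     string built below is "0x1p-1022", i.e. DBL_MIN, the smallest
     normal value (assuming the usual binary64 format).  */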
8571 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8572 build_real (type, dconst0));
8573 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8574 tmp, fp_zero, fp_subnormal);
8576 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8577 real_from_string (&r, buf);
8578 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8579 arg, build_real (type, r));
8580 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8582 if (HONOR_INFINITIES (mode))
8584 real_inf (&r);
8585 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8586 build_real (type, r));
8587 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8588 fp_infinite, res);
8591 if (HONOR_NANS (mode))
8593 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8594 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8597 return res;
8600 /* Fold a call to an unordered comparison function such as
8601 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8602 being called and ARG0 and ARG1 are the arguments for the call.
8603 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8604 the opposite of the desired result. UNORDERED_CODE is used
8605 for modes that can hold NaNs and ORDERED_CODE is used for
8606 the rest. */
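/* For example, __builtin_isgreater (x, y) arrives here with
   UNORDERED_CODE == UNLE_EXPR and ORDERED_CODE == LE_EXPR, and for a
   NaN-honoring mode folds to the negation of UNLE, i.e.
   !(unordered (x, y) || x <= y).  */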
8608 static tree
8609 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8610 enum tree_code unordered_code,
8611 enum tree_code ordered_code)
8613 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8614 enum tree_code code;
8615 tree type0, type1;
8616 enum tree_code code0, code1;
8617 tree cmp_type = NULL_TREE;
8619 type0 = TREE_TYPE (arg0);
8620 type1 = TREE_TYPE (arg1);
8622 code0 = TREE_CODE (type0);
8623 code1 = TREE_CODE (type1);
8625 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8626 /* Choose the wider of two real types. */
8627 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8628 ? type0 : type1;
8629 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8630 cmp_type = type0;
8631 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8632 cmp_type = type1;
8634 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8635 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8637 if (unordered_code == UNORDERED_EXPR)
8639 if (!HONOR_NANS (arg0))
8640 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8641 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8644 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8645 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8646 fold_build2_loc (loc, code, type, arg0, arg1));
8649 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8650 arithmetic if it can never overflow, or into internal functions that
8651 return both the result of the arithmetic and an overflow flag in
8652 a complex integer result, or some other check for overflow.
8653 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8654 checking part of that. */
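/* As a sketch, __builtin_add_overflow (a, b, &r) becomes roughly
     t = IFN_ADD_OVERFLOW (a, b);      (a complex-integer value)
     r = REALPART (t), overflow = (bool) IMAGPART (t);
   while __builtin_add_overflow_p (a, b, c) keeps only the IMAGPART
   overflow flag.  */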
8656 static tree
8657 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8658 tree arg0, tree arg1, tree arg2)
8660 enum internal_fn ifn = IFN_LAST;
8661 /* The code of the expression corresponding to the type-generic
8662 built-in, or ERROR_MARK for the type-specific ones. */
8663 enum tree_code opcode = ERROR_MARK;
8664 bool ovf_only = false;
8666 switch (fcode)
8668 case BUILT_IN_ADD_OVERFLOW_P:
8669 ovf_only = true;
8670 /* FALLTHRU */
8671 case BUILT_IN_ADD_OVERFLOW:
8672 opcode = PLUS_EXPR;
8673 /* FALLTHRU */
8674 case BUILT_IN_SADD_OVERFLOW:
8675 case BUILT_IN_SADDL_OVERFLOW:
8676 case BUILT_IN_SADDLL_OVERFLOW:
8677 case BUILT_IN_UADD_OVERFLOW:
8678 case BUILT_IN_UADDL_OVERFLOW:
8679 case BUILT_IN_UADDLL_OVERFLOW:
8680 ifn = IFN_ADD_OVERFLOW;
8681 break;
8682 case BUILT_IN_SUB_OVERFLOW_P:
8683 ovf_only = true;
8684 /* FALLTHRU */
8685 case BUILT_IN_SUB_OVERFLOW:
8686 opcode = MINUS_EXPR;
8687 /* FALLTHRU */
8688 case BUILT_IN_SSUB_OVERFLOW:
8689 case BUILT_IN_SSUBL_OVERFLOW:
8690 case BUILT_IN_SSUBLL_OVERFLOW:
8691 case BUILT_IN_USUB_OVERFLOW:
8692 case BUILT_IN_USUBL_OVERFLOW:
8693 case BUILT_IN_USUBLL_OVERFLOW:
8694 ifn = IFN_SUB_OVERFLOW;
8695 break;
8696 case BUILT_IN_MUL_OVERFLOW_P:
8697 ovf_only = true;
8698 /* FALLTHRU */
8699 case BUILT_IN_MUL_OVERFLOW:
8700 opcode = MULT_EXPR;
8701 /* FALLTHRU */
8702 case BUILT_IN_SMUL_OVERFLOW:
8703 case BUILT_IN_SMULL_OVERFLOW:
8704 case BUILT_IN_SMULLL_OVERFLOW:
8705 case BUILT_IN_UMUL_OVERFLOW:
8706 case BUILT_IN_UMULL_OVERFLOW:
8707 case BUILT_IN_UMULLL_OVERFLOW:
8708 ifn = IFN_MUL_OVERFLOW;
8709 break;
8710 default:
8711 gcc_unreachable ();
8714 /* For the "generic" overloads, the first two arguments can have different
8715 types and the last argument determines the target type to use to check
8716 for overflow. The arguments of the other overloads all have the same
8717 type. */
8718 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8720 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8721 arguments are constant, attempt to fold the built-in call into a constant
8722 expression indicating whether or not it detected an overflow. */
8723 if (ovf_only
8724 && TREE_CODE (arg0) == INTEGER_CST
8725 && TREE_CODE (arg1) == INTEGER_CST)
8726 /* Perform the computation in the target type and check for overflow. */
8727 return omit_one_operand_loc (loc, boolean_type_node,
8728 arith_overflowed_p (opcode, type, arg0, arg1)
8729 ? boolean_true_node : boolean_false_node,
8730 arg2);
8732 tree ctype = build_complex_type (type);
8733 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8734 2, arg0, arg1);
8735 tree tgt = save_expr (call);
8736 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8737 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8738 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8740 if (ovf_only)
8741 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8743 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8744 tree store
8745 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8746 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8749 /* Fold a call to __builtin_FILE to a constant string. */
8751 static inline tree
8752 fold_builtin_FILE (location_t loc)
8754 if (const char *fname = LOCATION_FILE (loc))
8755 return build_string_literal (strlen (fname) + 1, fname);
8757 return build_string_literal (1, "");
8760 /* Fold a call to __builtin_FUNCTION to a constant string. */
8762 static inline tree
8763 fold_builtin_FUNCTION ()
8765 const char *name = "";
8767 if (current_function_decl)
8768 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8770 return build_string_literal (strlen (name) + 1, name);
8773 /* Fold a call to __builtin_LINE to an integer constant. */
8775 static inline tree
8776 fold_builtin_LINE (location_t loc, tree type)
8778 return build_int_cst (type, LOCATION_LINE (loc));
8781 /* Fold a call to built-in function FNDECL with 0 arguments.
8782 This function returns NULL_TREE if no simplification was possible. */
8784 static tree
8785 fold_builtin_0 (location_t loc, tree fndecl)
8787 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8788 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8789 switch (fcode)
8791 case BUILT_IN_FILE:
8792 return fold_builtin_FILE (loc);
8794 case BUILT_IN_FUNCTION:
8795 return fold_builtin_FUNCTION ();
8797 case BUILT_IN_LINE:
8798 return fold_builtin_LINE (loc, type);
8800 CASE_FLT_FN (BUILT_IN_INF):
8801 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8802 case BUILT_IN_INFD32:
8803 case BUILT_IN_INFD64:
8804 case BUILT_IN_INFD128:
8805 return fold_builtin_inf (loc, type, true);
8807 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8808 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8809 return fold_builtin_inf (loc, type, false);
8811 case BUILT_IN_CLASSIFY_TYPE:
8812 return fold_builtin_classify_type (NULL_TREE);
8814 default:
8815 break;
8817 return NULL_TREE;
8820 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8821 This function returns NULL_TREE if no simplification was possible. */
8823 static tree
8824 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8826 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8827 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8829 if (TREE_CODE (arg0) == ERROR_MARK)
8830 return NULL_TREE;
8832 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8833 return ret;
8835 switch (fcode)
8837 case BUILT_IN_CONSTANT_P:
8839 tree val = fold_builtin_constant_p (arg0);
8841 /* Gimplification will pull the CALL_EXPR for the builtin out of
8842 an if condition. When not optimizing, we'll not CSE it back.
8843 To avoid link-error regressions, return false now. */
8844 if (!val && !optimize)
8845 val = integer_zero_node;
8847 return val;
8850 case BUILT_IN_CLASSIFY_TYPE:
8851 return fold_builtin_classify_type (arg0);
8853 case BUILT_IN_STRLEN:
8854 return fold_builtin_strlen (loc, type, arg0);
8856 CASE_FLT_FN (BUILT_IN_FABS):
8857 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8858 case BUILT_IN_FABSD32:
8859 case BUILT_IN_FABSD64:
8860 case BUILT_IN_FABSD128:
8861 return fold_builtin_fabs (loc, arg0, type);
8863 case BUILT_IN_ABS:
8864 case BUILT_IN_LABS:
8865 case BUILT_IN_LLABS:
8866 case BUILT_IN_IMAXABS:
8867 return fold_builtin_abs (loc, arg0, type);
8869 CASE_FLT_FN (BUILT_IN_CONJ):
8870 if (validate_arg (arg0, COMPLEX_TYPE)
8871 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8872 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8873 break;
8875 CASE_FLT_FN (BUILT_IN_CREAL):
8876 if (validate_arg (arg0, COMPLEX_TYPE)
8877 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8878 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8879 break;
8881 CASE_FLT_FN (BUILT_IN_CIMAG):
8882 if (validate_arg (arg0, COMPLEX_TYPE)
8883 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8884 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8885 break;
8887 CASE_FLT_FN (BUILT_IN_CARG):
8888 return fold_builtin_carg (loc, arg0, type);
8890 case BUILT_IN_ISASCII:
8891 return fold_builtin_isascii (loc, arg0);
8893 case BUILT_IN_TOASCII:
8894 return fold_builtin_toascii (loc, arg0);
8896 case BUILT_IN_ISDIGIT:
8897 return fold_builtin_isdigit (loc, arg0);
8899 CASE_FLT_FN (BUILT_IN_FINITE):
8900 case BUILT_IN_FINITED32:
8901 case BUILT_IN_FINITED64:
8902 case BUILT_IN_FINITED128:
8903 case BUILT_IN_ISFINITE:
8905 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8906 if (ret)
8907 return ret;
8908 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8911 CASE_FLT_FN (BUILT_IN_ISINF):
8912 case BUILT_IN_ISINFD32:
8913 case BUILT_IN_ISINFD64:
8914 case BUILT_IN_ISINFD128:
8916 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8917 if (ret)
8918 return ret;
8919 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8922 case BUILT_IN_ISNORMAL:
8923 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8925 case BUILT_IN_ISINF_SIGN:
8926 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8928 CASE_FLT_FN (BUILT_IN_ISNAN):
8929 case BUILT_IN_ISNAND32:
8930 case BUILT_IN_ISNAND64:
8931 case BUILT_IN_ISNAND128:
8932 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8934 case BUILT_IN_FREE:
8935 if (integer_zerop (arg0))
8936 return build_empty_stmt (loc);
8937 break;
8939 default:
8940 break;
8943 return NULL_TREE;
8947 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8948 This function returns NULL_TREE if no simplification was possible. */
8950 static tree
8951 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8953 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8954 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8956 if (TREE_CODE (arg0) == ERROR_MARK
8957 || TREE_CODE (arg1) == ERROR_MARK)
8958 return NULL_TREE;
8960 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8961 return ret;
8963 switch (fcode)
8965 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8966 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8967 if (validate_arg (arg0, REAL_TYPE)
8968 && validate_arg (arg1, POINTER_TYPE))
8969 return do_mpfr_lgamma_r (arg0, arg1, type);
8970 break;
8972 CASE_FLT_FN (BUILT_IN_FREXP):
8973 return fold_builtin_frexp (loc, arg0, arg1, type);
8975 CASE_FLT_FN (BUILT_IN_MODF):
8976 return fold_builtin_modf (loc, arg0, arg1, type);
8978 case BUILT_IN_STRSPN:
8979 return fold_builtin_strspn (loc, arg0, arg1);
8981 case BUILT_IN_STRCSPN:
8982 return fold_builtin_strcspn (loc, arg0, arg1);
8984 case BUILT_IN_STRPBRK:
8985 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8987 case BUILT_IN_EXPECT:
8988 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8990 case BUILT_IN_ISGREATER:
8991 return fold_builtin_unordered_cmp (loc, fndecl,
8992 arg0, arg1, UNLE_EXPR, LE_EXPR);
8993 case BUILT_IN_ISGREATEREQUAL:
8994 return fold_builtin_unordered_cmp (loc, fndecl,
8995 arg0, arg1, UNLT_EXPR, LT_EXPR);
8996 case BUILT_IN_ISLESS:
8997 return fold_builtin_unordered_cmp (loc, fndecl,
8998 arg0, arg1, UNGE_EXPR, GE_EXPR);
8999 case BUILT_IN_ISLESSEQUAL:
9000 return fold_builtin_unordered_cmp (loc, fndecl,
9001 arg0, arg1, UNGT_EXPR, GT_EXPR);
9002 case BUILT_IN_ISLESSGREATER:
9003 return fold_builtin_unordered_cmp (loc, fndecl,
9004 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9005 case BUILT_IN_ISUNORDERED:
9006 return fold_builtin_unordered_cmp (loc, fndecl,
9007 arg0, arg1, UNORDERED_EXPR,
9008 NOP_EXPR);
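/* Editorial sketch of the mapping above: each <math.h> comparison
   macro is folded by fold_builtin_unordered_cmp (defined elsewhere
   in this file) to the negation of its unordered counterpart,
   roughly

     isgreater (x, y)  ->  !(x UNLE y)   when NaNs are honored,
     isgreater (x, y)  ->  !(x <= y)     otherwise,

   which is why each case supplies both an UN*_EXPR code and its
   ordered fallback.  */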
9010 /* We do the folding for va_start in the expander. */
9011 case BUILT_IN_VA_START:
9012 break;
9014 case BUILT_IN_OBJECT_SIZE:
9015 return fold_builtin_object_size (arg0, arg1);
9017 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9018 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9020 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9021 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9023 default:
9024 break;
9026 return NULL_TREE;
9029 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9030 and ARG2.
9031 This function returns NULL_TREE if no simplification was possible. */
9033 static tree
9034 fold_builtin_3 (location_t loc, tree fndecl,
9035 tree arg0, tree arg1, tree arg2)
9037 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9038 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9040 if (TREE_CODE (arg0) == ERROR_MARK
9041 || TREE_CODE (arg1) == ERROR_MARK
9042 || TREE_CODE (arg2) == ERROR_MARK)
9043 return NULL_TREE;
9045 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9046 arg0, arg1, arg2))
9047 return ret;
9049 switch (fcode)
9052 CASE_FLT_FN (BUILT_IN_SINCOS):
9053 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9055 CASE_FLT_FN (BUILT_IN_FMA):
9056 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9058 CASE_FLT_FN (BUILT_IN_REMQUO):
9059 if (validate_arg (arg0, REAL_TYPE)
9060 && validate_arg (arg1, REAL_TYPE)
9061 && validate_arg (arg2, POINTER_TYPE))
9062 return do_mpfr_remquo (arg0, arg1, arg2);
9063 break;
9065 case BUILT_IN_MEMCMP:
9066 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9068 case BUILT_IN_EXPECT:
9069 return fold_builtin_expect (loc, arg0, arg1, arg2);
9071 case BUILT_IN_ADD_OVERFLOW:
9072 case BUILT_IN_SUB_OVERFLOW:
9073 case BUILT_IN_MUL_OVERFLOW:
9074 case BUILT_IN_ADD_OVERFLOW_P:
9075 case BUILT_IN_SUB_OVERFLOW_P:
9076 case BUILT_IN_MUL_OVERFLOW_P:
9077 case BUILT_IN_SADD_OVERFLOW:
9078 case BUILT_IN_SADDL_OVERFLOW:
9079 case BUILT_IN_SADDLL_OVERFLOW:
9080 case BUILT_IN_SSUB_OVERFLOW:
9081 case BUILT_IN_SSUBL_OVERFLOW:
9082 case BUILT_IN_SSUBLL_OVERFLOW:
9083 case BUILT_IN_SMUL_OVERFLOW:
9084 case BUILT_IN_SMULL_OVERFLOW:
9085 case BUILT_IN_SMULLL_OVERFLOW:
9086 case BUILT_IN_UADD_OVERFLOW:
9087 case BUILT_IN_UADDL_OVERFLOW:
9088 case BUILT_IN_UADDLL_OVERFLOW:
9089 case BUILT_IN_USUB_OVERFLOW:
9090 case BUILT_IN_USUBL_OVERFLOW:
9091 case BUILT_IN_USUBLL_OVERFLOW:
9092 case BUILT_IN_UMUL_OVERFLOW:
9093 case BUILT_IN_UMULL_OVERFLOW:
9094 case BUILT_IN_UMULLL_OVERFLOW:
9095 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9097 default:
9098 break;
9100 return NULL_TREE;
9103 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9104 arguments. The trailing bool argument (historically IGNORE, true if
9105 the result of the call is ignored) is unused here. This function
9106 returns NULL_TREE if no simplification was possible. */
9108 tree
9109 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9111 tree ret = NULL_TREE;
9113 switch (nargs)
9115 case 0:
9116 ret = fold_builtin_0 (loc, fndecl);
9117 break;
9118 case 1:
9119 ret = fold_builtin_1 (loc, fndecl, args[0]);
9120 break;
9121 case 2:
9122 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9123 break;
9124 case 3:
9125 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9126 break;
9127 default:
9128 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9129 break;
9131 if (ret)
9133 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9134 SET_EXPR_LOCATION (ret, loc);
9135 TREE_NO_WARNING (ret) = 1;
9136 return ret;
9138 return NULL_TREE;
9141 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9142 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9143 of arguments in ARGS to be omitted. OLDNARGS is the number of
9144 elements in ARGS. */
9146 static tree
9147 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9148 int skip, tree fndecl, int n, va_list newargs)
9150 int nargs = oldnargs - skip + n;
9151 tree *buffer;
9153 if (n > 0)
9155 int i, j;
9157 buffer = XALLOCAVEC (tree, nargs);
9158 for (i = 0; i < n; i++)
9159 buffer[i] = va_arg (newargs, tree);
9160 for (j = skip; j < oldnargs; j++, i++)
9161 buffer[i] = args[j];
9163 else
9164 buffer = args + skip;
9166 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9169 /* Return true if FNDECL shouldn't be folded right now.
9170 If a built-in function has an inline attribute always_inline
9171 wrapper, defer folding it after always_inline functions have
9172 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9173 might not be performed. */
9175 bool
9176 avoid_folding_inline_builtin (tree fndecl)
9178 return (DECL_DECLARED_INLINE_P (fndecl)
9179 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9180 && cfun
9181 && !cfun->always_inline_functions_inlined
9182 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
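/* Editorial sketch of what this guards against: with _FORTIFY_SOURCE,
   a C library may wrap a builtin in an always_inline function roughly
   like

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the strcpy call before the wrapper is inlined would skip
   the object-size check, hence the deferral above.  */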
9185 /* A wrapper function for builtin folding that prevents warnings for
9186 "statement without effect" and the like, caused by removing the
9187 call node earlier than the warning is generated. */
9189 tree
9190 fold_call_expr (location_t loc, tree exp, bool ignore)
9192 tree ret = NULL_TREE;
9193 tree fndecl = get_callee_fndecl (exp);
9194 if (fndecl
9195 && TREE_CODE (fndecl) == FUNCTION_DECL
9196 && DECL_BUILT_IN (fndecl)
9197 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9198 yet. Defer folding until we see all the arguments
9199 (after inlining). */
9200 && !CALL_EXPR_VA_ARG_PACK (exp))
9202 int nargs = call_expr_nargs (exp);
9204 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
9205 the last argument is __builtin_va_arg_pack (). Defer folding
9206 even in that case, until arguments are finalized. */
9207 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9209 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9210 if (fndecl2
9211 && TREE_CODE (fndecl2) == FUNCTION_DECL
9212 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9213 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9214 return NULL_TREE;
9217 if (avoid_folding_inline_builtin (fndecl))
9218 return NULL_TREE;
9220 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9221 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9222 CALL_EXPR_ARGP (exp), ignore);
9223 else
9225 tree *args = CALL_EXPR_ARGP (exp);
9226 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9227 if (ret)
9228 return ret;
9231 return NULL_TREE;
9234 /* Fold a CALL_EXPR of type TYPE with FN as the function expression.
9235 N arguments are passed in the array ARGARRAY. Return a folded
9236 expression or NULL_TREE if no simplification was possible. */
9238 tree
9239 fold_builtin_call_array (location_t loc, tree,
9240 tree fn,
9241 int n,
9242 tree *argarray)
9244 if (TREE_CODE (fn) != ADDR_EXPR)
9245 return NULL_TREE;
9247 tree fndecl = TREE_OPERAND (fn, 0);
9248 if (TREE_CODE (fndecl) == FUNCTION_DECL
9249 && DECL_BUILT_IN (fndecl))
9251 /* If last argument is __builtin_va_arg_pack (), arguments to this
9252 function are not finalized yet. Defer folding until they are. */
9253 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9255 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9256 if (fndecl2
9257 && TREE_CODE (fndecl2) == FUNCTION_DECL
9258 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9259 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9260 return NULL_TREE;
9262 if (avoid_folding_inline_builtin (fndecl))
9263 return NULL_TREE;
9264 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9265 return targetm.fold_builtin (fndecl, n, argarray, false);
9266 else
9267 return fold_builtin_n (loc, fndecl, argarray, n, false);
9270 return NULL_TREE;
9273 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9274 along with N new arguments specified as the "..." parameters. SKIP
9275 is the number of arguments in EXP to be omitted. This function is used
9276 to do varargs-to-varargs transformations. */
9278 static tree
9279 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9281 va_list ap;
9282 tree t;
9284 va_start (ap, n);
9285 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9286 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9287 va_end (ap);
9289 return t;
9292 /* Validate a single argument ARG against a tree code CODE representing
9293 a type. Return true when the argument is valid. */
9295 static bool
9296 validate_arg (const_tree arg, enum tree_code code)
9298 if (!arg)
9299 return false;
9300 else if (code == POINTER_TYPE)
9301 return POINTER_TYPE_P (TREE_TYPE (arg));
9302 else if (code == INTEGER_TYPE)
9303 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9304 return code == TREE_CODE (TREE_TYPE (arg));
9307 /* This function validates the types of a function call argument list
9308 against a specified list of tree_codes. If the last specifier is a 0,
9309 that represents an ellipsis; otherwise the last specifier must be a
9310 VOID_TYPE.
9312 This is the GIMPLE version of validate_arglist. Eventually we want to
9313 completely convert builtins.c to work from GIMPLEs and the tree based
9314 validate_arglist will then be removed. */
9316 bool
9317 validate_gimple_arglist (const gcall *call, ...)
9319 enum tree_code code;
9320 bool res = false;
9321 va_list ap;
9322 const_tree arg;
9323 size_t i;
9325 va_start (ap, call);
9326 i = 0;
9330 code = (enum tree_code) va_arg (ap, int);
9331 switch (code)
9333 case 0:
9334 /* This signifies an ellipsis; any further arguments are all ok. */
9335 res = true;
9336 goto end;
9337 case VOID_TYPE:
9338 /* This signifies an endlink, if no arguments remain, return
9339 true, otherwise return false. */
9340 res = (i == gimple_call_num_args (call));
9341 goto end;
9342 default:
9343 /* If no parameters remain or the parameter's code does not
9344 match the specified code, return false. Otherwise continue
9345 checking any remaining arguments. */
9346 arg = gimple_call_arg (call, i++);
9347 if (!validate_arg (arg, code))
9348 goto end;
9349 break;
9352 while (1);
9354 /* We need gotos here since every exit path must funnel through the
9355 single va_end call below. */
9356 end: ;
9357 va_end (ap);
9359 return res;
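/* Editorial usage example: a caller verifying that a gimple call has
   a memcpy-like signature would write

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return false;

   where VOID_TYPE terminates the list and a 0 specifier would stand
   for a trailing "...".  */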
9362 /* Default target-specific builtin expander that does nothing. */
9364 rtx
9365 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9366 rtx target ATTRIBUTE_UNUSED,
9367 rtx subtarget ATTRIBUTE_UNUSED,
9368 machine_mode mode ATTRIBUTE_UNUSED,
9369 int ignore ATTRIBUTE_UNUSED)
9371 return NULL_RTX;
9374 /* Returns true if EXP represents data that would potentially reside
9375 in a readonly section. */
9377 bool
9378 readonly_data_expr (tree exp)
9380 STRIP_NOPS (exp);
9382 if (TREE_CODE (exp) != ADDR_EXPR)
9383 return false;
9385 exp = get_base_address (TREE_OPERAND (exp, 0));
9386 if (!exp)
9387 return false;
9389 /* Make sure we call decl_readonly_section only for trees it
9390 can handle (since it returns true for everything it doesn't
9391 understand). */
9392 if (TREE_CODE (exp) == STRING_CST
9393 || TREE_CODE (exp) == CONSTRUCTOR
9394 || (VAR_P (exp) && TREE_STATIC (exp)))
9395 return decl_readonly_section (exp, 0);
9396 else
9397 return false;
9400 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9401 to the call, and TYPE is its return type.
9403 Return NULL_TREE if no simplification was possible, otherwise return the
9404 simplified form of the call as a tree.
9406 The simplified form may be a constant or other expression which
9407 computes the same value, but in a more efficient manner (including
9408 calls to other builtin functions).
9410 The call may contain arguments which need to be evaluated, but
9411 which are not useful to determine the result of the call. In
9412 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9413 COMPOUND_EXPR will be an argument which must be evaluated.
9414 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9415 COMPOUND_EXPR in the chain will contain the tree for the simplified
9416 form of the builtin function call. */
9418 static tree
9419 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9421 if (!validate_arg (s1, POINTER_TYPE)
9422 || !validate_arg (s2, POINTER_TYPE))
9423 return NULL_TREE;
9424 else
9426 tree fn;
9427 const char *p1, *p2;
9429 p2 = c_getstr (s2);
9430 if (p2 == NULL)
9431 return NULL_TREE;
9433 p1 = c_getstr (s1);
9434 if (p1 != NULL)
9436 const char *r = strpbrk (p1, p2);
9437 tree tem;
9439 if (r == NULL)
9440 return build_int_cst (TREE_TYPE (s1), 0);
9442 /* Return an offset into the constant string argument. */
9443 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9444 return fold_convert_loc (loc, type, tem);
9447 if (p2[0] == '\0')
9448 /* strpbrk(x, "") == NULL.
9449 Evaluate and ignore s1 in case it had side-effects. */
9450 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9452 if (p2[1] != '\0')
9453 return NULL_TREE; /* Really call strpbrk. */
9455 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9456 if (!fn)
9457 return NULL_TREE;
9459 /* New argument list transforming strpbrk(s1, s2) to
9460 strchr(s1, s2[0]). */
9461 return build_call_expr_loc (loc, fn, 2, s1,
9462 build_int_cst (integer_type_node, p2[0]));
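/* Editorial examples of the strpbrk transformations above:

     strpbrk (s, "")         ->  (char *) 0, still evaluating S
     strpbrk (s, "x")        ->  strchr (s, 'x')
     strpbrk ("hello", "lo") ->  "hello" + 2

   Any other form keeps the library call.  */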
9466 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9467 to the call.
9469 Return NULL_TREE if no simplification was possible, otherwise return the
9470 simplified form of the call as a tree.
9472 The simplified form may be a constant or other expression which
9473 computes the same value, but in a more efficient manner (including
9474 calls to other builtin functions).
9476 The call may contain arguments which need to be evaluated, but
9477 which are not useful to determine the result of the call. In
9478 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9479 COMPOUND_EXPR will be an argument which must be evaluated.
9480 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9481 COMPOUND_EXPR in the chain will contain the tree for the simplified
9482 form of the builtin function call. */
9484 static tree
9485 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9487 if (!validate_arg (s1, POINTER_TYPE)
9488 || !validate_arg (s2, POINTER_TYPE))
9489 return NULL_TREE;
9490 else
9492 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9494 /* If either argument is "", the result is 0. */
9495 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9496 /* Evaluate and ignore both arguments in case either one has
9497 side-effects. */
9498 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9499 s1, s2);
9500 return NULL_TREE;
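/* Editorial examples: strspn (s, "") and strspn ("", s) both fold to
   (size_t) 0 while keeping the pointer arguments for their side
   effects, e.g.

     size_t n = strspn (f (), "");

   still calls f () but N is a compile-time 0.  Other cases keep the
   library call.  */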
9504 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9505 to the call.
9507 Return NULL_TREE if no simplification was possible, otherwise return the
9508 simplified form of the call as a tree.
9510 The simplified form may be a constant or other expression which
9511 computes the same value, but in a more efficient manner (including
9512 calls to other builtin functions).
9514 The call may contain arguments which need to be evaluated, but
9515 which are not useful to determine the result of the call. In
9516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9517 COMPOUND_EXPR will be an argument which must be evaluated.
9518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9519 COMPOUND_EXPR in the chain will contain the tree for the simplified
9520 form of the builtin function call. */
9522 static tree
9523 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9525 if (!validate_arg (s1, POINTER_TYPE)
9526 || !validate_arg (s2, POINTER_TYPE))
9527 return NULL_TREE;
9528 else
9530 /* If the first argument is "", the result is 0. */
9531 const char *p1 = c_getstr (s1);
9532 if (p1 && *p1 == '\0')
9534 /* Evaluate and ignore argument s2 in case it has
9535 side-effects. */
9536 return omit_one_operand_loc (loc, size_type_node,
9537 size_zero_node, s2);
9540 /* If the second argument is "", return __builtin_strlen(s1). */
9541 const char *p2 = c_getstr (s2);
9542 if (p2 && *p2 == '\0')
9544 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9546 /* If the replacement _DECL isn't initialized, don't do the
9547 transformation. */
9548 if (!fn)
9549 return NULL_TREE;
9551 return build_call_expr_loc (loc, fn, 1, s1);
9553 return NULL_TREE;
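/* Editorial examples of the strcspn transformations above:

     strcspn ("", s)  ->  (size_t) 0, still evaluating S
     strcspn (s, "")  ->  strlen (s)

   Everything else keeps the library call.  */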
9557 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9558 produced. False otherwise. This is done so that we don't output the error
9559 or warning twice or three times. */
9561 bool
9562 fold_builtin_next_arg (tree exp, bool va_start_p)
9564 tree fntype = TREE_TYPE (current_function_decl);
9565 int nargs = call_expr_nargs (exp);
9566 tree arg;
9567 /* There is a good chance the current input_location points inside the
9568 definition of the va_start macro (perhaps on the token for the
9569 builtin) in a system header, so warnings will not be emitted.
9570 Use the location in real source code. */
9571 source_location current_location =
9572 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9573 NULL);
9575 if (!stdarg_p (fntype))
9577 error ("%<va_start%> used in function with fixed args");
9578 return true;
9581 if (va_start_p)
9583 if (nargs != 2)
9585 error ("wrong number of arguments to function %<va_start%>");
9586 return true;
9588 arg = CALL_EXPR_ARG (exp, 1);
9590 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9591 when we checked the arguments and if needed issued a warning. */
9592 else
9594 if (nargs == 0)
9596 /* Evidently an out of date version of <stdarg.h>; can't validate
9597 va_start's second argument, but can still work as intended. */
9598 warning_at (current_location,
9599 OPT_Wvarargs,
9600 "%<__builtin_next_arg%> called without an argument");
9601 return true;
9603 else if (nargs > 1)
9605 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9606 return true;
9608 arg = CALL_EXPR_ARG (exp, 0);
9611 if (TREE_CODE (arg) == SSA_NAME)
9612 arg = SSA_NAME_VAR (arg);
9614 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9615 or __builtin_next_arg (0) the first time we see it, after checking
9616 the arguments and if needed issuing a warning. */
9617 if (!integer_zerop (arg))
9619 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9621 /* Strip off all nops for the sake of the comparison. This
9622 is not quite the same as STRIP_NOPS. It does more.
9623 We must also strip off INDIRECT_EXPR for C++ reference
9624 parameters. */
9625 while (CONVERT_EXPR_P (arg)
9626 || TREE_CODE (arg) == INDIRECT_REF)
9627 arg = TREE_OPERAND (arg, 0);
9628 if (arg != last_parm)
9630 /* FIXME: Sometimes with the tree optimizers we can end up with
9631 something other than the last argument even though the user
9632 used the last argument. We just warn and set the arg to be the
9633 last argument so that we will not get wrong code because of
9634 it. */
9635 warning_at (current_location,
9636 OPT_Wvarargs,
9637 "second parameter of %<va_start%> not last named argument");
9640 /* Undefined by C99 7.15.1.4p4 (va_start):
9641 "If the parameter parmN is declared with the register storage
9642 class, with a function or array type, or with a type that is
9643 not compatible with the type that results after application of
9644 the default argument promotions, the behavior is undefined." */
9646 else if (DECL_REGISTER (arg))
9648 warning_at (current_location,
9649 OPT_Wvarargs,
9650 "undefined behavior when second parameter of "
9651 "%<va_start%> is declared with %<register%> storage");
9654 /* We want to verify the second parameter just once before the tree
9655 optimizers are run and then avoid keeping it in the tree,
9656 as otherwise we could warn even for correct code like:
9657 void foo (int i, ...)
9658 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9659 if (va_start_p)
9660 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9661 else
9662 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9664 return false;
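/* Editorial example of the checking above, assuming -Wvarargs:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
     }

   draws "second parameter of 'va_start' not last named argument",
   while using va_start in a function with a fixed argument list is a
   hard error.  After the check the second argument is rewritten to 0
   so the diagnostic is not repeated by later passes.  */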
9668 /* Expand a call EXP to __builtin_object_size. */
9670 static rtx
9671 expand_builtin_object_size (tree exp)
9673 tree ost;
9674 int object_size_type;
9675 tree fndecl = get_callee_fndecl (exp);
9677 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9679 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9680 exp, fndecl);
9681 expand_builtin_trap ();
9682 return const0_rtx;
9685 ost = CALL_EXPR_ARG (exp, 1);
9686 STRIP_NOPS (ost);
9688 if (TREE_CODE (ost) != INTEGER_CST
9689 || tree_int_cst_sgn (ost) < 0
9690 || compare_tree_int (ost, 3) > 0)
9692 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9693 exp, fndecl);
9694 expand_builtin_trap ();
9695 return const0_rtx;
9698 object_size_type = tree_to_shwi (ost);
9700 return object_size_type < 2 ? constm1_rtx : const0_rtx;
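/* Editorial example: by the time this expander runs, any call whose
   object size could be computed has already been folded, so what is
   left degenerates to the documented defaults:

     __builtin_object_size (p, 0)  ->  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (types 2 and 3)  */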
9703 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9704 FCODE is the BUILT_IN_* to use.
9705 Return NULL_RTX if we failed; the caller should emit a normal call,
9706 otherwise try to get the result in TARGET, if convenient (and in
9707 mode MODE if that's convenient). */
9709 static rtx
9710 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9711 enum built_in_function fcode)
9713 tree dest, src, len, size;
9715 if (!validate_arglist (exp,
9716 POINTER_TYPE,
9717 fcode == BUILT_IN_MEMSET_CHK
9718 ? INTEGER_TYPE : POINTER_TYPE,
9719 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9720 return NULL_RTX;
9722 dest = CALL_EXPR_ARG (exp, 0);
9723 src = CALL_EXPR_ARG (exp, 1);
9724 len = CALL_EXPR_ARG (exp, 2);
9725 size = CALL_EXPR_ARG (exp, 3);
9727 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9728 exp, len, /*maxlen=*/NULL_TREE,
9729 /*str=*/NULL_TREE, size);
9731 if (!tree_fits_uhwi_p (size))
9732 return NULL_RTX;
9734 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9736 /* Avoid transforming the checking call to an ordinary one when
9737 an overflow has been detected or when the call couldn't be
9738 validated because the size is not constant. */
9739 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9740 return NULL_RTX;
9742 tree fn = NULL_TREE;
9743 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9744 mem{cpy,pcpy,move,set} is available. */
9745 switch (fcode)
9747 case BUILT_IN_MEMCPY_CHK:
9748 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9749 break;
9750 case BUILT_IN_MEMPCPY_CHK:
9751 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9752 break;
9753 case BUILT_IN_MEMMOVE_CHK:
9754 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9755 break;
9756 case BUILT_IN_MEMSET_CHK:
9757 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9758 break;
9759 default:
9760 break;
9763 if (! fn)
9764 return NULL_RTX;
9766 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9767 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9768 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9769 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9771 else if (fcode == BUILT_IN_MEMSET_CHK)
9772 return NULL_RTX;
9773 else
9775 unsigned int dest_align = get_pointer_alignment (dest);
9777 /* If DEST is not a pointer type, call the normal function. */
9778 if (dest_align == 0)
9779 return NULL_RTX;
9781 /* If SRC and DEST are the same (and not volatile), do nothing. */
9782 if (operand_equal_p (src, dest, 0))
9784 tree expr;
9786 if (fcode != BUILT_IN_MEMPCPY_CHK)
9788 /* Evaluate and ignore LEN in case it has side-effects. */
9789 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9790 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9793 expr = fold_build_pointer_plus (dest, len);
9794 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9797 /* __memmove_chk special case. */
9798 if (fcode == BUILT_IN_MEMMOVE_CHK)
9800 unsigned int src_align = get_pointer_alignment (src);
9802 if (src_align == 0)
9803 return NULL_RTX;
9805 /* If src is categorized for a readonly section we can use
9806 normal __memcpy_chk. */
9807 if (readonly_data_expr (src))
9809 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9810 if (!fn)
9811 return NULL_RTX;
9812 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9813 dest, src, len, size);
9814 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9815 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9816 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9819 return NULL_RTX;
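/* Editorial example: with a known-safe length and object size,

     __builtin___memcpy_chk (d, s, 16, 32)  ->  memcpy (d, s, 16)

   whereas a detected overflow, say a length of 32 against an object
   size of 16, returns NULL_RTX above so the __memcpy_chk library
   call (and its runtime check) is emitted instead.  */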
9823 /* Emit warning if a buffer overflow is detected at compile time. */
9825 static void
9826 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9828 /* The source string. */
9829 tree srcstr = NULL_TREE;
9830 /* The size of the destination object. */
9831 tree objsize = NULL_TREE;
9832 /* The string that is being concatenated with (as in __strcat_chk)
9833 or null if it isn't. */
9834 tree catstr = NULL_TREE;
9835 /* The maximum length of the source sequence in a bounded operation
9836 (such as __strncat_chk) or null if the operation isn't bounded
9837 (such as __strcat_chk). */
9838 tree maxlen = NULL_TREE;
9840 switch (fcode)
9842 case BUILT_IN_STRCPY_CHK:
9843 case BUILT_IN_STPCPY_CHK:
9844 srcstr = CALL_EXPR_ARG (exp, 1);
9845 objsize = CALL_EXPR_ARG (exp, 2);
9846 break;
9848 case BUILT_IN_STRCAT_CHK:
9849 /* For __strcat_chk the warning will be emitted only if overflowing
9850 by at least strlen (dest) + 1 bytes. */
9851 catstr = CALL_EXPR_ARG (exp, 0);
9852 srcstr = CALL_EXPR_ARG (exp, 1);
9853 objsize = CALL_EXPR_ARG (exp, 2);
9854 break;
9856 case BUILT_IN_STRNCAT_CHK:
9857 catstr = CALL_EXPR_ARG (exp, 0);
9858 srcstr = CALL_EXPR_ARG (exp, 1);
9859 maxlen = CALL_EXPR_ARG (exp, 2);
9860 objsize = CALL_EXPR_ARG (exp, 3);
9861 break;
9863 case BUILT_IN_STRNCPY_CHK:
9864 case BUILT_IN_STPNCPY_CHK:
9865 srcstr = CALL_EXPR_ARG (exp, 1);
9866 maxlen = CALL_EXPR_ARG (exp, 2);
9867 objsize = CALL_EXPR_ARG (exp, 3);
9868 break;
9870 case BUILT_IN_SNPRINTF_CHK:
9871 case BUILT_IN_VSNPRINTF_CHK:
9872 maxlen = CALL_EXPR_ARG (exp, 1);
9873 objsize = CALL_EXPR_ARG (exp, 3);
9874 break;
9875 default:
9876 gcc_unreachable ();
9879 if (catstr && maxlen)
9881 /* Check __strncat_chk. There is no way to determine the length
9882 of the string to which the source string is being appended so
9883 just warn when the length of the source string is not known. */
9884 check_strncat_sizes (exp, objsize);
9885 return;
9888 check_sizes (OPT_Wstringop_overflow_, exp,
9889 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9892 /* Emit warning if a buffer overflow is detected at compile time
9893 in __sprintf_chk/__vsprintf_chk calls. */
9895 static void
9896 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9898 tree size, len, fmt;
9899 const char *fmt_str;
9900 int nargs = call_expr_nargs (exp);
9902 /* Verify the required arguments in the original call. */
9904 if (nargs < 4)
9905 return;
9906 size = CALL_EXPR_ARG (exp, 2);
9907 fmt = CALL_EXPR_ARG (exp, 3);
9909 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9910 return;
9912 /* Check whether the format is a literal string constant. */
9913 fmt_str = c_getstr (fmt);
9914 if (fmt_str == NULL)
9915 return;
9917 if (!init_target_chars ())
9918 return;
9920 /* If the format doesn't contain % args or %%, we know its size. */
9921 if (strchr (fmt_str, target_percent) == 0)
9922 len = build_int_cstu (size_type_node, strlen (fmt_str));
9923 /* If the format is "%s" and first ... argument is a string literal,
9924 we know it too. */
9925 else if (fcode == BUILT_IN_SPRINTF_CHK
9926 && strcmp (fmt_str, target_percent_s) == 0)
9928 tree arg;
9930 if (nargs < 5)
9931 return;
9932 arg = CALL_EXPR_ARG (exp, 4);
9933 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9934 return;
9936 len = c_strlen (arg, 1);
9937 if (!len || ! tree_fits_uhwi_p (len))
9938 return;
9940 else
9941 return;
9943 /* Add one for the terminating nul. */
9944 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9945 check_sizes (OPT_Wstringop_overflow_,
9946 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
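/* Editorial example of the sprintf_chk checking above:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, sizeof buf, "%s", "hello");

   Here LEN is strlen ("hello") + 1 == 6, which exceeds the object
   size 4, so a -Wstringop-overflow warning is emitted at compile
   time.  */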
9949 /* Emit a warning if free is called with the address of a variable. */
9951 static void
9952 maybe_emit_free_warning (tree exp)
9954 tree arg = CALL_EXPR_ARG (exp, 0);
9956 STRIP_NOPS (arg);
9957 if (TREE_CODE (arg) != ADDR_EXPR)
9958 return;
9960 arg = get_base_address (TREE_OPERAND (arg, 0));
9961 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9962 return;
9964 if (SSA_VAR_P (arg))
9965 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9966 "%Kattempt to free a non-heap object %qD", exp, arg);
9967 else
9968 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9969 "%Kattempt to free a non-heap object", exp);
9972 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9973 if possible. */
9975 static tree
9976 fold_builtin_object_size (tree ptr, tree ost)
9978 unsigned HOST_WIDE_INT bytes;
9979 int object_size_type;
9981 if (!validate_arg (ptr, POINTER_TYPE)
9982 || !validate_arg (ost, INTEGER_TYPE))
9983 return NULL_TREE;
9985 STRIP_NOPS (ost);
9987 if (TREE_CODE (ost) != INTEGER_CST
9988 || tree_int_cst_sgn (ost) < 0
9989 || compare_tree_int (ost, 3) > 0)
9990 return NULL_TREE;
9992 object_size_type = tree_to_shwi (ost);
9994 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9995 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9996 and (size_t) 0 for types 2 and 3. */
9997 if (TREE_SIDE_EFFECTS (ptr))
9998 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10000 if (TREE_CODE (ptr) == ADDR_EXPR)
10002 compute_builtin_object_size (ptr, object_size_type, &bytes);
10003 if (wi::fits_to_tree_p (bytes, size_type_node))
10004 return build_int_cstu (size_type_node, bytes);
10006 else if (TREE_CODE (ptr) == SSA_NAME)
10008 /* If object size is not known yet, delay folding until
10009 later. Maybe subsequent passes will help determining
10010 it. */
10011 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10012 && wi::fits_to_tree_p (bytes, size_type_node))
10013 return build_int_cstu (size_type_node, bytes);
10016 return NULL_TREE;
10019 /* Builtins with folding operations that operate on "..." arguments
10020 need special handling; we need to store the arguments in a convenient
10021 data structure before attempting any folding. Fortunately there are
10022 only a few builtins that fall into this category. FNDECL is the
10023 function, EXP is the CALL_EXPR for the call. */
10025 static tree
10026 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10028 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10029 tree ret = NULL_TREE;
10031 switch (fcode)
10033 case BUILT_IN_FPCLASSIFY:
10034 ret = fold_builtin_fpclassify (loc, args, nargs);
10035 break;
10037 default:
10038 break;
10040 if (ret)
10042 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10043 SET_EXPR_LOCATION (ret, loc);
10044 TREE_NO_WARNING (ret) = 1;
10045 return ret;
10047 return NULL_TREE;
10050 /* Initialize format string characters in the target charset. */
10052 bool
10053 init_target_chars (void)
10055 static bool init;
10056 if (!init)
10058 target_newline = lang_hooks.to_target_charset ('\n');
10059 target_percent = lang_hooks.to_target_charset ('%');
10060 target_c = lang_hooks.to_target_charset ('c');
10061 target_s = lang_hooks.to_target_charset ('s');
10062 if (target_newline == 0 || target_percent == 0 || target_c == 0
10063 || target_s == 0)
10064 return false;
10066 target_percent_c[0] = target_percent;
10067 target_percent_c[1] = target_c;
10068 target_percent_c[2] = '\0';
10070 target_percent_s[0] = target_percent;
10071 target_percent_s[1] = target_s;
10072 target_percent_s[2] = '\0';
10074 target_percent_s_newline[0] = target_percent;
10075 target_percent_s_newline[1] = target_s;
10076 target_percent_s_newline[2] = target_newline;
10077 target_percent_s_newline[3] = '\0';
10079 init = true;
10081 return true;
10084 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10085 and no overflow/underflow occurred. INEXACT is true if M was not
10086 exactly calculated. TYPE is the tree type for the result. This
10087 function assumes that you cleared the MPFR flags and then
10088 calculated M to see if anything subsequently set a flag prior to
10089 entering this function. Return NULL_TREE if any checks fail. */
10091 static tree
10092 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10094 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10095 overflow/underflow occurred. If -frounding-math, proceed iff the
10096 result of calling FUNC was exact. */
10097 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10098 && (!flag_rounding_math || !inexact))
10100 REAL_VALUE_TYPE rr;
10102 real_from_mpfr (&rr, m, type, GMP_RNDN);
10103 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10104 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10105 but the mpfr_t is not, then we underflowed in the
10106 conversion. */
10107 if (real_isfinite (&rr)
10108 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10110 REAL_VALUE_TYPE rmode;
10112 real_convert (&rmode, TYPE_MODE (type), &rr);
10113 /* Proceed iff the specified mode can hold the value. */
10114 if (real_identical (&rmode, &rr))
10115 return build_real (type, rmode);
10118 return NULL_TREE;
10121 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10122 number and no overflow/underflow occurred. INEXACT is true if M
10123 was not exactly calculated. TYPE is the tree type for the result.
10124 This function assumes that you cleared the MPFR flags and then
10125 calculated M to see if anything subsequently set a flag prior to
10126 entering this function. Return NULL_TREE if any checks fail; if
10127 FORCE_CONVERT is true, bypass the checks. */
10129 static tree
10130 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10132 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10133 overflow/underflow occurred. If -frounding-math, proceed iff the
10134 result of calling FUNC was exact. */
10135 if (force_convert
10136 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10137 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10138 && (!flag_rounding_math || !inexact)))
10140 REAL_VALUE_TYPE re, im;
10142 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10143 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10144 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10145 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10146 but the mpfr_t is not, then we underflowed in the
10147 conversion. */
10148 if (force_convert
10149 || (real_isfinite (&re) && real_isfinite (&im)
10150 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10151 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10153 REAL_VALUE_TYPE re_mode, im_mode;
10155 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10156 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10157 /* Proceed iff the specified mode can hold the value. */
10158 if (force_convert
10159 || (real_identical (&re_mode, &re)
10160 && real_identical (&im_mode, &im)))
10161 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10162 build_real (TREE_TYPE (type), im_mode));
10165 return NULL_TREE;
10168 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10169 the pointer *(ARG_QUO) and return the result. The type is taken
10170 from the type of ARG0 and is used for setting the precision of the
10171 calculation and results. */
10173 static tree
10174 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10176 tree const type = TREE_TYPE (arg0);
10177 tree result = NULL_TREE;
10179 STRIP_NOPS (arg0);
10180 STRIP_NOPS (arg1);
10182 /* To proceed, MPFR must exactly represent the target floating point
10183 format, which only happens when the target base equals two. */
10184 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10185 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10186 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10188 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10189 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10191 if (real_isfinite (ra0) && real_isfinite (ra1))
10193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10194 const int prec = fmt->p;
10195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10196 tree result_rem;
10197 long integer_quo;
10198 mpfr_t m0, m1;
10200 mpfr_inits2 (prec, m0, m1, NULL);
10201 mpfr_from_real (m0, ra0, GMP_RNDN);
10202 mpfr_from_real (m1, ra1, GMP_RNDN);
10203 mpfr_clear_flags ();
10204 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10205 /* Remquo is independent of the rounding mode, so pass
10206 inexact=0 to do_mpfr_ckconv(). */
10207 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10208 mpfr_clears (m0, m1, NULL);
10209 if (result_rem)
10211 /* MPFR calculates quo in the host's long so it may
10212 return more bits in quo than the target int can hold
10213 if sizeof(host long) > sizeof(target int). This can
10214 happen even for native compilers in LP64 mode. In
10215 these cases, modulo the quo value with the largest
10216 number that the target int can hold while leaving one
10217 bit for the sign. */
10218 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10219 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10221 /* Dereference the quo pointer argument. */
10222 arg_quo = build_fold_indirect_ref (arg_quo);
10223 /* Proceed iff a valid pointer type was passed in. */
10224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10226 /* Set the value. */
10227 tree result_quo
10228 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10229 build_int_cst (TREE_TYPE (arg_quo),
10230 integer_quo));
10231 TREE_SIDE_EFFECTS (result_quo) = 1;
10232 /* Combine the quo assignment with the rem. */
10233 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10234 result_quo, result_rem));
10239 return result;
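/* Editorial example: with constant arguments such as

     int q;
     double r = remquo (5.0, 3.0, &q);

   the quotient 5.0/3.0 rounds to 2, so the call folds to a compound
   expression that stores 2 through the pointer and yields the
   remainder 5.0 - 2*3.0 == -1.0.  */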
10242 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10243 resulting value as a tree with type TYPE. The mpfr precision is
10244 set to the precision of TYPE. We assume that this mpfr function
10245 returns zero if the result could be calculated exactly within the
10246 requested precision. In addition, the integer pointer represented
10247 by ARG_SG will be dereferenced and set to the appropriate signgam
10248 (-1,1) value. */
10250 static tree
10251 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10253 tree result = NULL_TREE;
10255 STRIP_NOPS (arg);
10257 /* To proceed, MPFR must exactly represent the target floating point
10258 format, which only happens when the target base equals two. Also
10259 verify ARG is a constant and that ARG_SG is an int pointer. */
10260 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10261 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10262 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10263 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10265 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10267 /* In addition to NaN and Inf, the argument cannot be zero or a
10268 negative integer. */
10269 if (real_isfinite (ra)
10270 && ra->cl != rvc_zero
10271 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10273 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10274 const int prec = fmt->p;
10275 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10276 int inexact, sg;
10277 mpfr_t m;
10278 tree result_lg;
10280 mpfr_init2 (m, prec);
10281 mpfr_from_real (m, ra, GMP_RNDN);
10282 mpfr_clear_flags ();
10283 inexact = mpfr_lgamma (m, &sg, m, rnd);
10284 result_lg = do_mpfr_ckconv (m, type, inexact);
10285 mpfr_clear (m);
10286 if (result_lg)
10288 tree result_sg;
10290 /* Dereference the arg_sg pointer argument. */
10291 arg_sg = build_fold_indirect_ref (arg_sg);
10292 /* Assign the signgam value into *arg_sg. */
10293 result_sg = fold_build2 (MODIFY_EXPR,
10294 TREE_TYPE (arg_sg), arg_sg,
10295 build_int_cst (TREE_TYPE (arg_sg), sg));
10296 TREE_SIDE_EFFECTS (result_sg) = 1;
10297 /* Combine the signgam assignment with the lgamma result. */
10298 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10299 result_sg, result_lg));
10304 return result;
10307 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10308 mpc function FUNC on it and return the resulting value as a tree
10309 with type TYPE. The mpfr precision is set to the precision of
10310 TYPE. We assume that function FUNC returns zero if the result
10311 could be calculated exactly within the requested precision. If
10312 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10313 in the arguments and/or results. */
10315 tree
10316 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10317 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10319 tree result = NULL_TREE;
10321 STRIP_NOPS (arg0);
10322 STRIP_NOPS (arg1);
10324 /* To proceed, MPFR must exactly represent the target floating point
10325 format, which only happens when the target base equals two. */
10326 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10327 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10328 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10329 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10330 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10332 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10333 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10334 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10335 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10337 if (do_nonfinite
10338 || (real_isfinite (re0) && real_isfinite (im0)
10339 && real_isfinite (re1) && real_isfinite (im1)))
10341 const struct real_format *const fmt =
10342 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10343 const int prec = fmt->p;
10344 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10345 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10346 int inexact;
10347 mpc_t m0, m1;
10349 mpc_init2 (m0, prec);
10350 mpc_init2 (m1, prec);
10351 mpfr_from_real (mpc_realref (m0), re0, rnd);
10352 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10353 mpfr_from_real (mpc_realref (m1), re1, rnd);
10354 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10355 mpfr_clear_flags ();
10356 inexact = func (m0, m0, m1, crnd);
10357 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10358 mpc_clear (m0);
10359 mpc_clear (m1);
10363 return result;
10366 /* A wrapper function for builtin folding that prevents warnings for
10367 "statement without effect" and the like, caused by removing the
10368 call node earlier than the warning is generated. */
10370 tree
10371 fold_call_stmt (gcall *stmt, bool ignore)
10373 tree ret = NULL_TREE;
10374 tree fndecl = gimple_call_fndecl (stmt);
10375 location_t loc = gimple_location (stmt);
10376 if (fndecl
10377 && TREE_CODE (fndecl) == FUNCTION_DECL
10378 && DECL_BUILT_IN (fndecl)
10379 && !gimple_call_va_arg_pack_p (stmt))
10381 int nargs = gimple_call_num_args (stmt);
10382 tree *args = (nargs > 0
10383 ? gimple_call_arg_ptr (stmt, 0)
10384 : &error_mark_node);
10386 if (avoid_folding_inline_builtin (fndecl))
10387 return NULL_TREE;
10388 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10390 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10392 else
10394 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10395 if (ret)
10397 /* Propagate location information from original call to
10398 expansion of builtin. Otherwise things like
10399 maybe_emit_chk_warning, that operate on the expansion
10400 of a builtin, will use the wrong location information. */
10401 if (gimple_has_location (stmt))
10403 tree realret = ret;
10404 if (TREE_CODE (ret) == NOP_EXPR)
10405 realret = TREE_OPERAND (ret, 0);
10406 if (CAN_HAVE_LOCATION_P (realret)
10407 && !EXPR_HAS_LOCATION (realret))
10408 SET_EXPR_LOCATION (realret, loc);
10409 return realret;
10411 return ret;
10415 return NULL_TREE;
10418 /* Look up the function in builtin_decl that corresponds to DECL
10419 and set ASMSPEC as its user assembler name. DECL must be a
10420 function decl that declares a builtin. */
10422 void
10423 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10425 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10426 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10427 && asmspec != 0);
10429 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10430 set_user_assembler_name (builtin, asmspec);
10432 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10433 && INT_TYPE_SIZE < BITS_PER_WORD)
10435 set_user_assembler_libfunc ("ffs", asmspec);
10436 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10437 "ffs");
10441 /* Return true if DECL is a builtin that expands to a constant or similarly
10442 simple code. */
10443 bool
10444 is_simple_builtin (tree decl)
10446 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10447 switch (DECL_FUNCTION_CODE (decl))
10449 /* Builtins that expand to constants. */
10450 case BUILT_IN_CONSTANT_P:
10451 case BUILT_IN_EXPECT:
10452 case BUILT_IN_OBJECT_SIZE:
10453 case BUILT_IN_UNREACHABLE:
10454 /* Simple register moves or loads from stack. */
10455 case BUILT_IN_ASSUME_ALIGNED:
10456 case BUILT_IN_RETURN_ADDRESS:
10457 case BUILT_IN_EXTRACT_RETURN_ADDR:
10458 case BUILT_IN_FROB_RETURN_ADDR:
10459 case BUILT_IN_RETURN:
10460 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10461 case BUILT_IN_FRAME_ADDRESS:
10462 case BUILT_IN_VA_END:
10463 case BUILT_IN_STACK_SAVE:
10464 case BUILT_IN_STACK_RESTORE:
10465 /* Exception state returns or moves registers around. */
10466 case BUILT_IN_EH_FILTER:
10467 case BUILT_IN_EH_POINTER:
10468 case BUILT_IN_EH_COPY_VALUES:
10469 return true;
10471 default:
10472 return false;
10475 return false;
10478 /* Return true if DECL is a builtin that is not expensive, i.e. one that
10479 is most probably expanded inline into reasonably simple code. This is a
10480 superset of is_simple_builtin. */
10481 bool
10482 is_inexpensive_builtin (tree decl)
10484 if (!decl)
10485 return false;
10486 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10487 return true;
10488 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10489 switch (DECL_FUNCTION_CODE (decl))
10491 case BUILT_IN_ABS:
10492 case BUILT_IN_ALLOCA:
10493 case BUILT_IN_ALLOCA_WITH_ALIGN:
10494 case BUILT_IN_BSWAP16:
10495 case BUILT_IN_BSWAP32:
10496 case BUILT_IN_BSWAP64:
10497 case BUILT_IN_CLZ:
10498 case BUILT_IN_CLZIMAX:
10499 case BUILT_IN_CLZL:
10500 case BUILT_IN_CLZLL:
10501 case BUILT_IN_CTZ:
10502 case BUILT_IN_CTZIMAX:
10503 case BUILT_IN_CTZL:
10504 case BUILT_IN_CTZLL:
10505 case BUILT_IN_FFS:
10506 case BUILT_IN_FFSIMAX:
10507 case BUILT_IN_FFSL:
10508 case BUILT_IN_FFSLL:
10509 case BUILT_IN_IMAXABS:
10510 case BUILT_IN_FINITE:
10511 case BUILT_IN_FINITEF:
10512 case BUILT_IN_FINITEL:
10513 case BUILT_IN_FINITED32:
10514 case BUILT_IN_FINITED64:
10515 case BUILT_IN_FINITED128:
10516 case BUILT_IN_FPCLASSIFY:
10517 case BUILT_IN_ISFINITE:
10518 case BUILT_IN_ISINF_SIGN:
10519 case BUILT_IN_ISINF:
10520 case BUILT_IN_ISINFF:
10521 case BUILT_IN_ISINFL:
10522 case BUILT_IN_ISINFD32:
10523 case BUILT_IN_ISINFD64:
10524 case BUILT_IN_ISINFD128:
10525 case BUILT_IN_ISNAN:
10526 case BUILT_IN_ISNANF:
10527 case BUILT_IN_ISNANL:
10528 case BUILT_IN_ISNAND32:
10529 case BUILT_IN_ISNAND64:
10530 case BUILT_IN_ISNAND128:
10531 case BUILT_IN_ISNORMAL:
10532 case BUILT_IN_ISGREATER:
10533 case BUILT_IN_ISGREATEREQUAL:
10534 case BUILT_IN_ISLESS:
10535 case BUILT_IN_ISLESSEQUAL:
10536 case BUILT_IN_ISLESSGREATER:
10537 case BUILT_IN_ISUNORDERED:
10538 case BUILT_IN_VA_ARG_PACK:
10539 case BUILT_IN_VA_ARG_PACK_LEN:
10540 case BUILT_IN_VA_COPY:
10541 case BUILT_IN_TRAP:
10542 case BUILT_IN_SAVEREGS:
10543 case BUILT_IN_POPCOUNTL:
10544 case BUILT_IN_POPCOUNTLL:
10545 case BUILT_IN_POPCOUNTIMAX:
10546 case BUILT_IN_POPCOUNT:
10547 case BUILT_IN_PARITYL:
10548 case BUILT_IN_PARITYLL:
10549 case BUILT_IN_PARITYIMAX:
10550 case BUILT_IN_PARITY:
10551 case BUILT_IN_LABS:
10552 case BUILT_IN_LLABS:
10553 case BUILT_IN_PREFETCH:
10554 case BUILT_IN_ACC_ON_DEVICE:
10555 return true;
10557 default:
10558 return is_simple_builtin (decl);
10561 return false;
10564 /* Return true if T is a constant and the value cast to a target char
10565 can be represented by a host char.
10566 Store the cast char constant in *P if so. */
10568 bool
10569 target_char_cst_p (tree t, char *p)
10571 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10572 return false;
10574 *p = (char)tree_to_uhwi (t);
10575 return true;