/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or names a Cilk Plus runtime entry point when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
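
/* For example, given the checks above, is_builtin_name ("__builtin_memcpy")
   and is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  */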

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
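
/* Worked example: for an object known to sit 4 bytes past a 16-byte
   boundary, get_object_alignment_1 sets *ALIGNP to 128 and *BITPOSP to 32
   (both in bits).  get_object_alignment then returns
   least_bit_hwi (32) == 32 bits, i.e. a guaranteed 4-byte alignment.  */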

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
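
/* For example, string_length ("ab\0cd", 1, 5) returns 2.  With
   ELTSIZE == 2, the byte sequence "a\0b\0\0\0" viewed as three 2-byte
   elements also yields 2: elements are compared as whole ELTSIZE-byte
   units, so only an all-zero element terminates the scan.  */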

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
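
/* For example, for the expression "foobar" + 3, string_constant yields the
   STRING_CST and BYTEOFF 3, and c_strlen returns 3, the length of "bar".
   For an out-of-bounds constant offset such as "foobar" + 10, the warning
   above is emitted and NULL_TREE is returned, deferring to strlen at
   run time.  */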

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
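
/* For example, on a little-endian target c_readstr ("abcd", SImode)
   produces the constant 0x64636261: byte 'a' (0x61) ends up in the least
   significant byte, exactly as a 4-byte load from the string would read
   it.  Once a NUL byte has been seen, CH stays zero, so the remainder of
   the mode is zero-filled.  */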

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
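
/* For example, __builtin_return_address (0) reaches here with COUNT == 0
   and yields the current function's return address, while
   __builtin_frame_address (1) follows the dynamic chain once and returns
   the caller's frame address (possibly adjusted by FRAME_ADDR_RTX on
   targets such as SPARC).  */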

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
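
/* For example, expand_builtin_nonlocal_goto below checks its two pointer
   arguments with validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE), while expand_builtin_prefetch, whose trailing arguments are
   optional, uses validate_arglist (exp, POINTER_TYPE, 0).  */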

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
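
/* For example, __builtin_prefetch (p, 1, 3) requests a prefetch of *p for
   writing with maximum temporal locality, and plain __builtin_prefetch (p)
   is equivalent to __builtin_prefetch (p, 0, 3) after the defaults applied
   above.  */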

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1732 /* Perform an untyped return. */
1734 static void
1735 expand_builtin_return (rtx result)
1737 int size, align, regno;
1738 machine_mode mode;
1739 rtx reg;
1740 rtx_insn *call_fusage = 0;
1742 result = convert_memory_address (Pmode, result);
1744 apply_result_size ();
1745 result = gen_rtx_MEM (BLKmode, result);
1747 if (targetm.have_untyped_return ())
1749 rtx vector = result_vector (0, result);
1750 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1751 emit_barrier ();
1752 return;
1755 /* Restore the return value and note that each value is used. */
1756 size = 0;
1757 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1758 if ((mode = apply_result_mode[regno]) != VOIDmode)
1760 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1761 if (size % align != 0)
1762 size = CEIL (size, align) * align;
1763 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1764 emit_move_insn (reg, adjust_address (result, mode, size));
1766 push_to_sequence (call_fusage);
1767 emit_use (reg);
1768 call_fusage = get_insns ();
1769 end_sequence ();
1770 size += GET_MODE_SIZE (mode);
1773 /* Put the USE insns before the return. */
1774 emit_insn (call_fusage);
1776 /* Return whatever values were restored by jumping directly to the end
1777 of the function. */
1778 expand_naked_return ();
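/* A minimal usage sketch of the untyped call machinery expanded above
   (hypothetical user code; target_fn and the 128-byte argument block
   size are illustrative assumptions, not requirements):

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 128);
       __builtin_return (res);
     }  */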
1781 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1783 static enum type_class
1784 type_to_class (tree type)
1786 switch (TREE_CODE (type))
1788 case VOID_TYPE: return void_type_class;
1789 case INTEGER_TYPE: return integer_type_class;
1790 case ENUMERAL_TYPE: return enumeral_type_class;
1791 case BOOLEAN_TYPE: return boolean_type_class;
1792 case POINTER_TYPE: return pointer_type_class;
1793 case REFERENCE_TYPE: return reference_type_class;
1794 case OFFSET_TYPE: return offset_type_class;
1795 case REAL_TYPE: return real_type_class;
1796 case COMPLEX_TYPE: return complex_type_class;
1797 case FUNCTION_TYPE: return function_type_class;
1798 case METHOD_TYPE: return method_type_class;
1799 case RECORD_TYPE: return record_type_class;
1800 case UNION_TYPE:
1801 case QUAL_UNION_TYPE: return union_type_class;
1802 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1803 ? string_type_class : array_type_class);
1804 case LANG_TYPE: return lang_type_class;
1805 default: return no_type_class;
1809 /* Expand a call EXP to __builtin_classify_type. */
1811 static rtx
1812 expand_builtin_classify_type (tree exp)
1814 if (call_expr_nargs (exp))
1815 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1816 return GEN_INT (no_type_class);
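/* For example (hypothetical user code; the results are members of the
   type_class enum from typeclass.h):

     double d;
     int rc = __builtin_classify_type (d);     yields real_type_class
     int pc = __builtin_classify_type (&d);    yields pointer_type_class

   With no argument, no_type_class is returned, as above.  */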
1819 /* This helper macro, meant to be used in mathfn_built_in_2 below, determines
1820 which among a set of builtin math functions is appropriate for a given type
1821 mode. The `F' (float) and `L' (long double) are automatically generated
1822 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1823 types, there are additional types that are considered with 'F32', 'F64',
1824 'F128', etc. suffixes. */
1825 #define CASE_MATHFN(MATHFN) \
1826 CASE_CFN_##MATHFN: \
1827 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1828 fcodel = BUILT_IN_##MATHFN##L ; break;
1829 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1830 types. */
1831 #define CASE_MATHFN_FLOATN(MATHFN) \
1832 CASE_CFN_##MATHFN: \
1833 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1834 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1835 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1836 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1837 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1838 break;
1839 /* Similar to above, but appends _R after any F/L suffix. */
1840 #define CASE_MATHFN_REENT(MATHFN) \
1841 case CFN_BUILT_IN_##MATHFN##_R: \
1842 case CFN_BUILT_IN_##MATHFN##F_R: \
1843 case CFN_BUILT_IN_##MATHFN##L_R: \
1844 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1845 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1847 /* Return a function equivalent to FN but operating on floating-point
1848 values of type TYPE, or END_BUILTINS if no such function exists.
1849 This is purely an operation on function codes; it does not guarantee
1850 that the target actually has an implementation of the function. */
1852 static built_in_function
1853 mathfn_built_in_2 (tree type, combined_fn fn)
1855 tree mtype;
1856 built_in_function fcode, fcodef, fcodel;
1857 built_in_function fcodef16 = END_BUILTINS;
1858 built_in_function fcodef32 = END_BUILTINS;
1859 built_in_function fcodef64 = END_BUILTINS;
1860 built_in_function fcodef128 = END_BUILTINS;
1861 built_in_function fcodef32x = END_BUILTINS;
1862 built_in_function fcodef64x = END_BUILTINS;
1863 built_in_function fcodef128x = END_BUILTINS;
1865 switch (fn)
1867 CASE_MATHFN (ACOS)
1868 CASE_MATHFN (ACOSH)
1869 CASE_MATHFN (ASIN)
1870 CASE_MATHFN (ASINH)
1871 CASE_MATHFN (ATAN)
1872 CASE_MATHFN (ATAN2)
1873 CASE_MATHFN (ATANH)
1874 CASE_MATHFN (CBRT)
1875 CASE_MATHFN (CEIL)
1876 CASE_MATHFN (CEXPI)
1877 CASE_MATHFN_FLOATN (COPYSIGN)
1878 CASE_MATHFN (COS)
1879 CASE_MATHFN (COSH)
1880 CASE_MATHFN (DREM)
1881 CASE_MATHFN (ERF)
1882 CASE_MATHFN (ERFC)
1883 CASE_MATHFN (EXP)
1884 CASE_MATHFN (EXP10)
1885 CASE_MATHFN (EXP2)
1886 CASE_MATHFN (EXPM1)
1887 CASE_MATHFN (FABS)
1888 CASE_MATHFN (FDIM)
1889 CASE_MATHFN (FLOOR)
1890 CASE_MATHFN_FLOATN (FMA)
1891 CASE_MATHFN_FLOATN (FMAX)
1892 CASE_MATHFN_FLOATN (FMIN)
1893 CASE_MATHFN (FMOD)
1894 CASE_MATHFN (FREXP)
1895 CASE_MATHFN (GAMMA)
1896 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1897 CASE_MATHFN (HUGE_VAL)
1898 CASE_MATHFN (HYPOT)
1899 CASE_MATHFN (ILOGB)
1900 CASE_MATHFN (ICEIL)
1901 CASE_MATHFN (IFLOOR)
1902 CASE_MATHFN (INF)
1903 CASE_MATHFN (IRINT)
1904 CASE_MATHFN (IROUND)
1905 CASE_MATHFN (ISINF)
1906 CASE_MATHFN (J0)
1907 CASE_MATHFN (J1)
1908 CASE_MATHFN (JN)
1909 CASE_MATHFN (LCEIL)
1910 CASE_MATHFN (LDEXP)
1911 CASE_MATHFN (LFLOOR)
1912 CASE_MATHFN (LGAMMA)
1913 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1914 CASE_MATHFN (LLCEIL)
1915 CASE_MATHFN (LLFLOOR)
1916 CASE_MATHFN (LLRINT)
1917 CASE_MATHFN (LLROUND)
1918 CASE_MATHFN (LOG)
1919 CASE_MATHFN (LOG10)
1920 CASE_MATHFN (LOG1P)
1921 CASE_MATHFN (LOG2)
1922 CASE_MATHFN (LOGB)
1923 CASE_MATHFN (LRINT)
1924 CASE_MATHFN (LROUND)
1925 CASE_MATHFN (MODF)
1926 CASE_MATHFN (NAN)
1927 CASE_MATHFN (NANS)
1928 CASE_MATHFN (NEARBYINT)
1929 CASE_MATHFN (NEXTAFTER)
1930 CASE_MATHFN (NEXTTOWARD)
1931 CASE_MATHFN (POW)
1932 CASE_MATHFN (POWI)
1933 CASE_MATHFN (POW10)
1934 CASE_MATHFN (REMAINDER)
1935 CASE_MATHFN (REMQUO)
1936 CASE_MATHFN (RINT)
1937 CASE_MATHFN (ROUND)
1938 CASE_MATHFN (SCALB)
1939 CASE_MATHFN (SCALBLN)
1940 CASE_MATHFN (SCALBN)
1941 CASE_MATHFN (SIGNBIT)
1942 CASE_MATHFN (SIGNIFICAND)
1943 CASE_MATHFN (SIN)
1944 CASE_MATHFN (SINCOS)
1945 CASE_MATHFN (SINH)
1946 CASE_MATHFN_FLOATN (SQRT)
1947 CASE_MATHFN (TAN)
1948 CASE_MATHFN (TANH)
1949 CASE_MATHFN (TGAMMA)
1950 CASE_MATHFN (TRUNC)
1951 CASE_MATHFN (Y0)
1952 CASE_MATHFN (Y1)
1953 CASE_MATHFN (YN)
1955 default:
1956 return END_BUILTINS;
1959 mtype = TYPE_MAIN_VARIANT (type);
1960 if (mtype == double_type_node)
1961 return fcode;
1962 else if (mtype == float_type_node)
1963 return fcodef;
1964 else if (mtype == long_double_type_node)
1965 return fcodel;
1966 else if (mtype == float16_type_node)
1967 return fcodef16;
1968 else if (mtype == float32_type_node)
1969 return fcodef32;
1970 else if (mtype == float64_type_node)
1971 return fcodef64;
1972 else if (mtype == float128_type_node)
1973 return fcodef128;
1974 else if (mtype == float32x_type_node)
1975 return fcodef32x;
1976 else if (mtype == float64x_type_node)
1977 return fcodef64x;
1978 else if (mtype == float128x_type_node)
1979 return fcodef128x;
1980 else
1981 return END_BUILTINS;
1984 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1985 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1986 otherwise use the explicit declaration. If we can't do the conversion,
1987 return null. */
1989 static tree
1990 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1992 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1993 if (fcode2 == END_BUILTINS)
1994 return NULL_TREE;
1996 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1997 return NULL_TREE;
1999 return builtin_decl_explicit (fcode2);
2002 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2004 tree
2005 mathfn_built_in (tree type, combined_fn fn)
2007 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2010 /* Like mathfn_built_in_1, but take a built_in_function and
2011 always use the implicit builtin declarations. */
2013 tree
2014 mathfn_built_in (tree type, enum built_in_function fn)
2016 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
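/* A usage sketch (hypothetical caller): asking for the float variant
   of sin,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the decl for sinf when the builtin is implicitly available,
   or NULL_TREE otherwise.  */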
2019 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2020 return its code, otherwise return IFN_LAST. Note that this function
2021 only tests whether the function is defined in internal-fn.def, not whether
2022 it is actually available on the target. */
2024 internal_fn
2025 associated_internal_fn (tree fndecl)
2027 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2028 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2029 switch (DECL_FUNCTION_CODE (fndecl))
2031 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2032 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2033 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2034 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2035 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2036 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2038 #include "internal-fn.def"
2040 CASE_FLT_FN (BUILT_IN_POW10):
2041 return IFN_EXP10;
2043 CASE_FLT_FN (BUILT_IN_DREM):
2044 return IFN_REMAINDER;
2046 CASE_FLT_FN (BUILT_IN_SCALBN):
2047 CASE_FLT_FN (BUILT_IN_SCALBLN):
2048 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2049 return IFN_LDEXP;
2050 return IFN_LAST;
2052 default:
2053 return IFN_LAST;
2057 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2058 on the current target by a call to an internal function, return the
2059 code of that internal function, otherwise return IFN_LAST. The caller
2060 is responsible for ensuring that any side-effects of the built-in
2061 call are dealt with correctly. E.g. if CALL sets errno, the caller
2062 must decide that the errno result isn't needed or make it available
2063 in some other way. */
2065 internal_fn
2066 replacement_internal_fn (gcall *call)
2068 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2070 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2071 if (ifn != IFN_LAST)
2073 tree_pair types = direct_internal_fn_types (ifn, call);
2074 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2075 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2076 return ifn;
2079 return IFN_LAST;
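/* Usage sketch (hypothetical caller): for a GIMPLE call such as
   x = __builtin_sqrtf (y),

     internal_fn ifn = replacement_internal_fn (call);

   returns IFN_SQRT when the target supports sqrt for SFmode, and
   IFN_LAST otherwise, in which case the library call is kept.  */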
2082 /* Expand a call to the builtin trinary math functions (fma).
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2087 operands. */
2089 static rtx
2090 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2092 optab builtin_optab;
2093 rtx op0, op1, op2, result;
2094 rtx_insn *insns;
2095 tree fndecl = get_callee_fndecl (exp);
2096 tree arg0, arg1, arg2;
2097 machine_mode mode;
2099 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2100 return NULL_RTX;
2102 arg0 = CALL_EXPR_ARG (exp, 0);
2103 arg1 = CALL_EXPR_ARG (exp, 1);
2104 arg2 = CALL_EXPR_ARG (exp, 2);
2106 switch (DECL_FUNCTION_CODE (fndecl))
2108 CASE_FLT_FN (BUILT_IN_FMA):
2109 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2110 builtin_optab = fma_optab; break;
2111 default:
2112 gcc_unreachable ();
2115 /* Make a suitable register to place result in. */
2116 mode = TYPE_MODE (TREE_TYPE (exp));
2118 /* Before working hard, check whether the instruction is available. */
2119 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2120 return NULL_RTX;
2122 result = gen_reg_rtx (mode);
2124 /* Always stabilize the argument list. */
2125 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2126 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2127 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2129 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2130 op1 = expand_normal (arg1);
2131 op2 = expand_normal (arg2);
2133 start_sequence ();
2135 /* Compute into RESULT.
2136 Set RESULT to wherever the result comes back. */
2137 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2138 result, 0);
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call the library function
2142 with the stabilized argument list. */
2143 if (result == 0)
2145 end_sequence ();
2146 return expand_call (exp, target, target == const0_rtx);
2149 /* Output the entire sequence. */
2150 insns = get_insns ();
2151 end_sequence ();
2152 emit_insn (insns);
2154 return result;
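/* For example, on a target with an fma pattern for DFmode,

     double r = __builtin_fma (a, b, c);

   expands through fma_optab to a single fused multiply-add insn; when
   no pattern exists, NULL_RTX is returned above and a call to fma is
   emitted instead (a sketch of the behavior, not an extra guarantee).  */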
2157 /* Expand a call to the builtin sin and cos math functions.
2158 Return NULL_RTX if a normal call should be emitted rather than expanding the
2159 function in-line. EXP is the expression that is a call to the builtin
2160 function; if convenient, the result should be placed in TARGET.
2161 SUBTARGET may be used as the target for computing one of EXP's
2162 operands. */
2164 static rtx
2165 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2167 optab builtin_optab;
2168 rtx op0;
2169 rtx_insn *insns;
2170 tree fndecl = get_callee_fndecl (exp);
2171 machine_mode mode;
2172 tree arg;
2174 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2175 return NULL_RTX;
2177 arg = CALL_EXPR_ARG (exp, 0);
2179 switch (DECL_FUNCTION_CODE (fndecl))
2181 CASE_FLT_FN (BUILT_IN_SIN):
2182 CASE_FLT_FN (BUILT_IN_COS):
2183 builtin_optab = sincos_optab; break;
2184 default:
2185 gcc_unreachable ();
2188 /* Make a suitable register to place result in. */
2189 mode = TYPE_MODE (TREE_TYPE (exp));
2191 /* Check if sincos insn is available, otherwise fallback
2192 to sin or cos insn. */
2193 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2194 switch (DECL_FUNCTION_CODE (fndecl))
2196 CASE_FLT_FN (BUILT_IN_SIN):
2197 builtin_optab = sin_optab; break;
2198 CASE_FLT_FN (BUILT_IN_COS):
2199 builtin_optab = cos_optab; break;
2200 default:
2201 gcc_unreachable ();
2204 /* Before working hard, check whether the instruction is available. */
2205 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2207 rtx result = gen_reg_rtx (mode);
2209 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2210 need to expand the argument again. This way, we will not perform
2211 side-effects more than once. */
2212 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2214 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2216 start_sequence ();
2218 /* Compute into RESULT.
2219 Set RESULT to wherever the result comes back. */
2220 if (builtin_optab == sincos_optab)
2222 int ok;
2224 switch (DECL_FUNCTION_CODE (fndecl))
2226 CASE_FLT_FN (BUILT_IN_SIN):
2227 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2228 break;
2229 CASE_FLT_FN (BUILT_IN_COS):
2230 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2231 break;
2232 default:
2233 gcc_unreachable ();
2235 gcc_assert (ok);
2237 else
2238 result = expand_unop (mode, builtin_optab, op0, result, 0);
2240 if (result != 0)
2242 /* Output the entire sequence. */
2243 insns = get_insns ();
2244 end_sequence ();
2245 emit_insn (insns);
2246 return result;
2249 /* If we were unable to expand via the builtin, stop the sequence
2250 (without outputting the insns) and call the library function
2251 with the stabilized argument list. */
2252 end_sequence ();
2255 return expand_call (exp, target, target == const0_rtx);
2258 /* Given an interclass math builtin decl FNDECL and its argument ARG
2259 return an RTL instruction code that implements the functionality.
2260 If that isn't possible or available return CODE_FOR_nothing. */
2262 static enum insn_code
2263 interclass_mathfn_icode (tree arg, tree fndecl)
2265 bool errno_set = false;
2266 optab builtin_optab = unknown_optab;
2267 machine_mode mode;
2269 switch (DECL_FUNCTION_CODE (fndecl))
2271 CASE_FLT_FN (BUILT_IN_ILOGB):
2272 errno_set = true; builtin_optab = ilogb_optab; break;
2273 CASE_FLT_FN (BUILT_IN_ISINF):
2274 builtin_optab = isinf_optab; break;
2275 case BUILT_IN_ISNORMAL:
2276 case BUILT_IN_ISFINITE:
2277 CASE_FLT_FN (BUILT_IN_FINITE):
2278 case BUILT_IN_FINITED32:
2279 case BUILT_IN_FINITED64:
2280 case BUILT_IN_FINITED128:
2281 case BUILT_IN_ISINFD32:
2282 case BUILT_IN_ISINFD64:
2283 case BUILT_IN_ISINFD128:
2284 /* These builtins have no optabs (yet). */
2285 break;
2286 default:
2287 gcc_unreachable ();
2290 /* There's no easy way to detect the case we need to set EDOM. */
2291 if (flag_errno_math && errno_set)
2292 return CODE_FOR_nothing;
2294 /* Optab mode depends on the mode of the input argument. */
2295 mode = TYPE_MODE (TREE_TYPE (arg));
2297 if (builtin_optab)
2298 return optab_handler (builtin_optab, mode);
2299 return CODE_FOR_nothing;
2302 /* Expand a call to one of the builtin math functions that operate on
2303 a floating-point argument and output an integer result (ilogb, isinf,
2304 isnan, etc).
2305 Return 0 if a normal call should be emitted rather than expanding the
2306 function in-line. EXP is the expression that is a call to the builtin
2307 function; if convenient, the result should be placed in TARGET. */
2309 static rtx
2310 expand_builtin_interclass_mathfn (tree exp, rtx target)
2312 enum insn_code icode = CODE_FOR_nothing;
2313 rtx op0;
2314 tree fndecl = get_callee_fndecl (exp);
2315 machine_mode mode;
2316 tree arg;
2318 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2319 return NULL_RTX;
2321 arg = CALL_EXPR_ARG (exp, 0);
2322 icode = interclass_mathfn_icode (arg, fndecl);
2323 mode = TYPE_MODE (TREE_TYPE (arg));
2325 if (icode != CODE_FOR_nothing)
2327 struct expand_operand ops[1];
2328 rtx_insn *last = get_last_insn ();
2329 tree orig_arg = arg;
2331 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2332 need to expand the argument again. This way, we will not perform
2333 side-effects more than once. */
2334 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2336 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2338 if (mode != GET_MODE (op0))
2339 op0 = convert_to_mode (mode, op0, 0);
2341 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2342 if (maybe_legitimize_operands (icode, 0, 1, ops)
2343 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2344 return ops[0].value;
2346 delete_insns_since (last);
2347 CALL_EXPR_ARG (exp, 0) = orig_arg;
2350 return NULL_RTX;
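/* For example (sketching the code above),

     int k = __builtin_isinf (x);

   expands inline via isinf_optab when the target provides it, whereas
   __builtin_ilogb is expanded inline only under -fno-math-errno, since
   the expanded code cannot easily set errno to EDOM.  */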
2353 /* Expand a call to the builtin sincos math function.
2354 Return NULL_RTX if a normal call should be emitted rather than expanding the
2355 function in-line. EXP is the expression that is a call to the builtin
2356 function. */
2358 static rtx
2359 expand_builtin_sincos (tree exp)
2361 rtx op0, op1, op2, target1, target2;
2362 machine_mode mode;
2363 tree arg, sinp, cosp;
2364 int result;
2365 location_t loc = EXPR_LOCATION (exp);
2366 tree alias_type, alias_off;
2368 if (!validate_arglist (exp, REAL_TYPE,
2369 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2370 return NULL_RTX;
2372 arg = CALL_EXPR_ARG (exp, 0);
2373 sinp = CALL_EXPR_ARG (exp, 1);
2374 cosp = CALL_EXPR_ARG (exp, 2);
2376 /* Make a suitable register to place result in. */
2377 mode = TYPE_MODE (TREE_TYPE (arg));
2379 /* Check if sincos insn is available, otherwise emit the call. */
2380 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2381 return NULL_RTX;
2383 target1 = gen_reg_rtx (mode);
2384 target2 = gen_reg_rtx (mode);
2386 op0 = expand_normal (arg);
2387 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2388 alias_off = build_int_cst (alias_type, 0);
2389 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2390 sinp, alias_off));
2391 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2392 cosp, alias_off));
2394 /* Compute into target1 and target2.
2395 Set TARGET to wherever the result comes back. */
2396 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2397 gcc_assert (result);
2399 /* Move target1 and target2 to the memory locations indicated
2400 by op1 and op2. */
2401 emit_move_insn (op1, target1);
2402 emit_move_insn (op2, target2);
2404 return const0_rtx;
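/* A usage sketch (hypothetical user code):

     double s, c;
     sincos (x, &s, &c);

   expands to a single sincos_optab insn computing both values when the
   target has one; otherwise NULL_RTX is returned above and the ordinary
   libc call is emitted.  */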
2407 /* Expand a call to the internal cexpi builtin to the sincos math function.
2408 EXP is the expression that is a call to the builtin function; if convenient,
2409 the result should be placed in TARGET. */
2411 static rtx
2412 expand_builtin_cexpi (tree exp, rtx target)
2414 tree fndecl = get_callee_fndecl (exp);
2415 tree arg, type;
2416 machine_mode mode;
2417 rtx op0, op1, op2;
2418 location_t loc = EXPR_LOCATION (exp);
2420 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2421 return NULL_RTX;
2423 arg = CALL_EXPR_ARG (exp, 0);
2424 type = TREE_TYPE (arg);
2425 mode = TYPE_MODE (TREE_TYPE (arg));
2427 /* Try expanding via a sincos optab, fall back to emitting a libcall
2428 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2429 is only generated from sincos or cexp, or if either of them is available. */
2430 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2432 op1 = gen_reg_rtx (mode);
2433 op2 = gen_reg_rtx (mode);
2435 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2437 /* Compute into op1 and op2. */
2438 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2440 else if (targetm.libc_has_function (function_sincos))
2442 tree call, fn = NULL_TREE;
2443 tree top1, top2;
2444 rtx op1a, op2a;
2446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2447 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2449 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2451 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2452 else
2453 gcc_unreachable ();
2455 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2456 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2457 op1a = copy_addr_to_reg (XEXP (op1, 0));
2458 op2a = copy_addr_to_reg (XEXP (op2, 0));
2459 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2460 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2462 /* Make sure not to fold the sincos call again. */
2463 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2464 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2465 call, 3, arg, top1, top2));
2467 else
2469 tree call, fn = NULL_TREE, narg;
2470 tree ctype = build_complex_type (type);
2472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2473 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2474 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2475 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2477 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2478 else
2479 gcc_unreachable ();
2481 /* If we don't have a decl for cexp, create one. This is the
2482 friendliest fallback if the user calls __builtin_cexpi
2483 without full C99 function support on the target. */
2484 if (fn == NULL_TREE)
2486 tree fntype;
2487 const char *name = NULL;
2489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2490 name = "cexpf";
2491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2492 name = "cexp";
2493 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2494 name = "cexpl";
2496 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2497 fn = build_fn_decl (name, fntype);
2500 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2501 build_real (type, dconst0), arg);
2503 /* Make sure not to fold the cexp call again. */
2504 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2505 return expand_expr (build_call_nary (ctype, call, 1, narg),
2506 target, VOIDmode, EXPAND_NORMAL);
2509 /* Now build the proper return type. */
2510 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2511 make_tree (TREE_TYPE (arg), op2),
2512 make_tree (TREE_TYPE (arg), op1)),
2513 target, VOIDmode, EXPAND_NORMAL);
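/* In summary, cexpi (x), i.e. cos (x) + i*sin (x), is expanded above by
   one of three strategies: a sincos_optab insn computing both parts into
   registers, a libcall sincos (x, &s, &c) whose results are read back,
   or a libcall to cexp on the complex value 0.0 + x*i built via
   COMPLEX_EXPR.  */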
2516 /* Conveniently construct a function call expression. FNDECL names the
2517 function to be called, N is the number of arguments, and the "..."
2518 parameters are the argument expressions. Unlike build_call_expr
2519 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2521 static tree
2522 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2524 va_list ap;
2525 tree fntype = TREE_TYPE (fndecl);
2526 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2528 va_start (ap, n);
2529 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2530 va_end (ap);
2531 SET_EXPR_LOCATION (fn, loc);
2532 return fn;
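/* E.g. the rounding fallback below constructs an unfolded call to the
   floor/ceil variant:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   which is guaranteed to be a CALL_EXPR rather than a folded result.  */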
2535 /* Expand a call to one of the builtin rounding functions gcc defines
2536 as an extension (lfloor and lceil). As these are gcc extensions we
2537 do not need to worry about setting errno to EDOM.
2538 If expanding via optab fails, lower the expression to (int)(floor(x)).
2539 EXP is the expression that is a call to the builtin function;
2540 if convenient, the result should be placed in TARGET. */
2542 static rtx
2543 expand_builtin_int_roundingfn (tree exp, rtx target)
2545 convert_optab builtin_optab;
2546 rtx op0, tmp;
2547 rtx_insn *insns;
2548 tree fndecl = get_callee_fndecl (exp);
2549 enum built_in_function fallback_fn;
2550 tree fallback_fndecl;
2551 machine_mode mode;
2552 tree arg;
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2555 gcc_unreachable ();
2557 arg = CALL_EXPR_ARG (exp, 0);
2559 switch (DECL_FUNCTION_CODE (fndecl))
2561 CASE_FLT_FN (BUILT_IN_ICEIL):
2562 CASE_FLT_FN (BUILT_IN_LCEIL):
2563 CASE_FLT_FN (BUILT_IN_LLCEIL):
2564 builtin_optab = lceil_optab;
2565 fallback_fn = BUILT_IN_CEIL;
2566 break;
2568 CASE_FLT_FN (BUILT_IN_IFLOOR):
2569 CASE_FLT_FN (BUILT_IN_LFLOOR):
2570 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2571 builtin_optab = lfloor_optab;
2572 fallback_fn = BUILT_IN_FLOOR;
2573 break;
2575 default:
2576 gcc_unreachable ();
2579 /* Make a suitable register to place result in. */
2580 mode = TYPE_MODE (TREE_TYPE (exp));
2582 target = gen_reg_rtx (mode);
2584 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2585 need to expand the argument again. This way, we will not perform
2586 side-effects more than once. */
2587 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2589 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2591 start_sequence ();
2593 /* Compute into TARGET. */
2594 if (expand_sfix_optab (target, op0, builtin_optab))
2596 /* Output the entire sequence. */
2597 insns = get_insns ();
2598 end_sequence ();
2599 emit_insn (insns);
2600 return target;
2603 /* If we were unable to expand via the builtin, stop the sequence
2604 (without outputting the insns). */
2605 end_sequence ();
2607 /* Fall back to floating point rounding optab. */
2608 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2610 /* For non-C99 targets we may end up without a fallback fndecl here
2611 if the user called __builtin_lfloor directly. In this case emit
2612 a call to the floor/ceil variants nevertheless. This should result
2613 in the best user experience for targets without full C99 support. */
2614 if (fallback_fndecl == NULL_TREE)
2616 tree fntype;
2617 const char *name = NULL;
2619 switch (DECL_FUNCTION_CODE (fndecl))
2621 case BUILT_IN_ICEIL:
2622 case BUILT_IN_LCEIL:
2623 case BUILT_IN_LLCEIL:
2624 name = "ceil";
2625 break;
2626 case BUILT_IN_ICEILF:
2627 case BUILT_IN_LCEILF:
2628 case BUILT_IN_LLCEILF:
2629 name = "ceilf";
2630 break;
2631 case BUILT_IN_ICEILL:
2632 case BUILT_IN_LCEILL:
2633 case BUILT_IN_LLCEILL:
2634 name = "ceill";
2635 break;
2636 case BUILT_IN_IFLOOR:
2637 case BUILT_IN_LFLOOR:
2638 case BUILT_IN_LLFLOOR:
2639 name = "floor";
2640 break;
2641 case BUILT_IN_IFLOORF:
2642 case BUILT_IN_LFLOORF:
2643 case BUILT_IN_LLFLOORF:
2644 name = "floorf";
2645 break;
2646 case BUILT_IN_IFLOORL:
2647 case BUILT_IN_LFLOORL:
2648 case BUILT_IN_LLFLOORL:
2649 name = "floorl";
2650 break;
2651 default:
2652 gcc_unreachable ();
2655 fntype = build_function_type_list (TREE_TYPE (arg),
2656 TREE_TYPE (arg), NULL_TREE);
2657 fallback_fndecl = build_fn_decl (name, fntype);
2660 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2662 tmp = expand_normal (exp);
2663 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2665 /* Truncate the result of the floating point optab to an integer
2666 via expand_fix (). */
2667 target = gen_reg_rtx (mode);
2668 expand_fix (target, tmp, 0);
2670 return target;
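/* For example, on a target without an lfloor pattern,

     long l = __builtin_lfloor (x);

   is lowered roughly to

     long l = (long) floor (x);

   with the truncation done by expand_fix (a sketch of the fallback
   path above).  */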
2673 /* Expand a call to one of the builtin math functions doing integer
2674 conversion (lrint).
2675 Return 0 if a normal call should be emitted rather than expanding the
2676 function in-line. EXP is the expression that is a call to the builtin
2677 function; if convenient, the result should be placed in TARGET. */
2679 static rtx
2680 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2682 convert_optab builtin_optab;
2683 rtx op0;
2684 rtx_insn *insns;
2685 tree fndecl = get_callee_fndecl (exp);
2686 tree arg;
2687 machine_mode mode;
2688 enum built_in_function fallback_fn = BUILT_IN_NONE;
2690 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2691 gcc_unreachable ();
2693 arg = CALL_EXPR_ARG (exp, 0);
2695 switch (DECL_FUNCTION_CODE (fndecl))
2697 CASE_FLT_FN (BUILT_IN_IRINT):
2698 fallback_fn = BUILT_IN_LRINT;
2699 gcc_fallthrough ();
2700 CASE_FLT_FN (BUILT_IN_LRINT):
2701 CASE_FLT_FN (BUILT_IN_LLRINT):
2702 builtin_optab = lrint_optab;
2703 break;
2705 CASE_FLT_FN (BUILT_IN_IROUND):
2706 fallback_fn = BUILT_IN_LROUND;
2707 gcc_fallthrough ();
2708 CASE_FLT_FN (BUILT_IN_LROUND):
2709 CASE_FLT_FN (BUILT_IN_LLROUND):
2710 builtin_optab = lround_optab;
2711 break;
2713 default:
2714 gcc_unreachable ();
2717 /* There's no easy way to detect the case we need to set EDOM. */
2718 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2719 return NULL_RTX;
2721 /* Make a suitable register to place result in. */
2722 mode = TYPE_MODE (TREE_TYPE (exp));
2724 /* There's no easy way to detect the case we need to set EDOM, so expand inline only when errno isn't honored. */
2725 if (!flag_errno_math)
2727 rtx result = gen_reg_rtx (mode);
2729 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2730 need to expand the argument again. This way, we will not perform
2731 side-effects more than once. */
2732 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2734 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2736 start_sequence ();
2738 if (expand_sfix_optab (result, op0, builtin_optab))
2740 /* Output the entire sequence. */
2741 insns = get_insns ();
2742 end_sequence ();
2743 emit_insn (insns);
2744 return result;
2747 /* If we were unable to expand via the builtin, stop the sequence
2748 (without outputting the insns) and call to the library function
2749 with the stabilized argument list. */
2750 end_sequence ();
2753 if (fallback_fn != BUILT_IN_NONE)
2755 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2756 targets, (int) round (x) should never be transformed into
2757 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2758 a call to lround in the hope that the target provides at least some
2759 C99 functions. This should result in the best user experience for
2760 targets without full C99 support. */
2761 tree fallback_fndecl = mathfn_built_in_1
2762 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2764 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2765 fallback_fndecl, 1, arg);
2767 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2768 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2769 return convert_to_mode (mode, target, 0);
2772 return expand_call (exp, target, target == const0_rtx);
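/* For example, when the lround optab cannot be used,

     int i = __builtin_iround (x);

   falls back roughly to

     int i = (int) lround (x);

   in the hope that the target libm provides at least lround (a sketch
   of the fallback path above).  */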
2775 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2776 a normal call should be emitted rather than expanding the function
2777 in-line. EXP is the expression that is a call to the builtin
2778 function; if convenient, the result should be placed in TARGET. */
2780 static rtx
2781 expand_builtin_powi (tree exp, rtx target)
2783 tree arg0, arg1;
2784 rtx op0, op1;
2785 machine_mode mode;
2786 machine_mode mode2;
2788 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2789 return NULL_RTX;
2791 arg0 = CALL_EXPR_ARG (exp, 0);
2792 arg1 = CALL_EXPR_ARG (exp, 1);
2793 mode = TYPE_MODE (TREE_TYPE (exp));
2795 /* Emit a libcall to libgcc. */
2797 /* Mode of the 2nd argument must match that of an int. */
2798 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2800 if (target == NULL_RTX)
2801 target = gen_reg_rtx (mode);
2803 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2804 if (GET_MODE (op0) != mode)
2805 op0 = convert_to_mode (mode, op0, 0);
2806 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2807 if (GET_MODE (op1) != mode2)
2808 op1 = convert_to_mode (mode2, op1, 0);
2810 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2811 target, LCT_CONST, mode,
2812 op0, mode, op1, mode2);
2814 return target;
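/* For example,

     double r = __builtin_powi (x, n);

   is emitted here as a libcall to libgcc's __powidf2 (x, n); small
   constant exponents are typically expanded into multiplies earlier,
   in the tree passes, and never reach this point.  */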
2817 /* Expand expression EXP which is a call to the strlen builtin. Return
2818 NULL_RTX if we failed the caller should emit a normal call, otherwise
2819 try to get the result in TARGET, if convenient. */
2821 static rtx
2822 expand_builtin_strlen (tree exp, rtx target,
2823 machine_mode target_mode)
2825 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2826 return NULL_RTX;
2827 else
2829 struct expand_operand ops[4];
2830 rtx pat;
2831 tree len;
2832 tree src = CALL_EXPR_ARG (exp, 0);
2833 rtx src_reg;
2834 rtx_insn *before_strlen;
2835 machine_mode insn_mode;
2836 enum insn_code icode = CODE_FOR_nothing;
2837 unsigned int align;
2839 /* If the length can be computed at compile-time, return it. */
2840 len = c_strlen (src, 0);
2841 if (len)
2842 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2844 /* If the length can be computed at compile-time and is a constant
2845 integer, but there are side-effects in src, evaluate
2846 src for side-effects, then return len.
2847 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2848 can be optimized into: i++; x = 3; */
2849 len = c_strlen (src, 1);
2850 if (len && TREE_CODE (len) == INTEGER_CST)
2852 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2853 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2856 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2858 /* If SRC is not a pointer type, don't do this operation inline. */
2859 if (align == 0)
2860 return NULL_RTX;
2862 /* Bail out if we can't compute strlen in the right mode. */
2863 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2865 icode = optab_handler (strlen_optab, insn_mode);
2866 if (icode != CODE_FOR_nothing)
2867 break;
2869 if (insn_mode == VOIDmode)
2870 return NULL_RTX;
2872 /* Make a place to hold the source address. We will not expand
2873 the actual source until we are sure that the expansion will
2874 not fail -- there are trees that cannot be expanded twice. */
2875 src_reg = gen_reg_rtx (Pmode);
2877 /* Mark the beginning of the strlen sequence so we can emit the
2878 source operand later. */
2879 before_strlen = get_last_insn ();
2881 create_output_operand (&ops[0], target, insn_mode);
2882 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2883 create_integer_operand (&ops[2], 0);
2884 create_integer_operand (&ops[3], align);
2885 if (!maybe_expand_insn (icode, 4, ops))
2886 return NULL_RTX;
2888 /* Now that we are assured of success, expand the source. */
2889 start_sequence ();
2890 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2891 if (pat != src_reg)
2893 #ifdef POINTERS_EXTEND_UNSIGNED
2894 if (GET_MODE (pat) != Pmode)
2895 pat = convert_to_mode (Pmode, pat,
2896 POINTERS_EXTEND_UNSIGNED);
2897 #endif
2898 emit_move_insn (src_reg, pat);
2900 pat = get_insns ();
2901 end_sequence ();
2903 if (before_strlen)
2904 emit_insn_after (pat, before_strlen);
2905 else
2906 emit_insn_before (pat, get_insns ());
2908 /* Return the value in the proper mode for this function. */
2909 if (GET_MODE (ops[0].value) == target_mode)
2910 target = ops[0].value;
2911 else if (target != 0)
2912 convert_move (target, ops[0].value, 0);
2913 else
2914 target = convert_to_mode (target_mode, ops[0].value, 0);
2916 return target;
2920 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2921 bytes from constant string DATA + OFFSET and return them as a target
2922 constant. */
2924 static rtx
2925 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2926 scalar_int_mode mode)
2928 const char *str = (const char *) data;
2930 gcc_assert (offset >= 0
2931 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2932 <= strlen (str) + 1));
2934 return c_readstr (str + offset, mode);
2937 /* LEN specifies the length of the block of the memcpy/memset operation.
2938 Figure out its range and store it into MIN_SIZE/MAX_SIZE.
2939 In some cases we can make a very likely guess at the max size, which
2940 we then store into PROBABLE_MAX_SIZE. */
2942 static void
2943 determine_block_size (tree len, rtx len_rtx,
2944 unsigned HOST_WIDE_INT *min_size,
2945 unsigned HOST_WIDE_INT *max_size,
2946 unsigned HOST_WIDE_INT *probable_max_size)
2948 if (CONST_INT_P (len_rtx))
2950 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2951 return;
2953 else
2955 wide_int min, max;
2956 enum value_range_type range_type = VR_UNDEFINED;
2958 /* Determine bounds from the type. */
2959 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2960 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2961 else
2962 *min_size = 0;
2963 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2964 *probable_max_size = *max_size
2965 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2966 else
2967 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2969 if (TREE_CODE (len) == SSA_NAME)
2970 range_type = get_range_info (len, &min, &max);
2971 if (range_type == VR_RANGE)
2973 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2974 *min_size = min.to_uhwi ();
2975 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2976 *probable_max_size = *max_size = max.to_uhwi ();
2978 else if (range_type == VR_ANTI_RANGE)
2980 /* An anti-range 0...N lets us determine a minimal size of N+1. */
2981 if (min == 0)
2983 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2984 *min_size = max.to_uhwi () + 1;
2986 /* Code like
2988 int n;
2989 if (n < 100)
2990 memcpy (a, b, n)
2992 produces an anti-range allowing negative values of N. We can
2993 still use the information and guess that N is not negative. */
2995 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2996 *probable_max_size = min.to_uhwi () - 1;
2999 gcc_checking_assert (*max_size <=
3000 (unsigned HOST_WIDE_INT)
3001 GET_MODE_MASK (GET_MODE (len_rtx)));
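/* For example (a sketch, assuming value range info recorded N as
   [0, 99]):

     if (n < 100)
       memcpy (d, s, n);

   gives *MIN_SIZE == 0 and *MAX_SIZE == *PROBABLE_MAX_SIZE == 99 for
   the memcpy expansion.  */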
3004 /* Try to verify that the sizes and lengths of the arguments to a string
3005 manipulation function given by EXP are within valid bounds and that
3006 the operation does not lead to buffer overflow. Arguments other than
3007 EXP may be null. When non-null, the arguments have the following
3008 meaning:
3009 SIZE is the user-supplied size argument to the function (such as in
3010 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3011 number of bytes to write.
3012 MAXLEN is the user-supplied bound on the length of the source sequence
3013 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3014 of bytes to write.
3015 SRC is the source string (such as in strcpy(d, s)) when the expression
3016 EXP is a string function call (as opposed to a memory call like memcpy).
3017 As an exception, SRC can also be an integer denoting the precomputed
3018 size of the source string or object (for functions like memcpy).
3019 OBJSIZE is the size of the destination object specified by the last
3020 argument to the _chk builtins, typically resulting from the expansion
3021 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3022 OBJSIZE).
3024 When SIZE is null, MAXLEN is checked to verify that it doesn't exceed
3025 SIZE_MAX.
3027 If the call is successfully verified as safe from buffer overflow
3028 the function returns true, otherwise false. */
3030 static bool
3031 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3033 /* The size of the largest object is half the address space, or
3034 SSIZE_MAX. (This is way too permissive.) */
3035 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3037 tree slen = NULL_TREE;
3039 tree range[2] = { NULL_TREE, NULL_TREE };
3041 /* Set to true when the exact number of bytes written by a string
3042 function like strcpy is not known and the only thing that is
3043 known is that it must be at least one (for the terminating nul). */
3044 bool at_least_one = false;
3045 if (src)
3047 /* SRC is normally a pointer to a string but as a special case
3048 it can be an integer denoting the length of a string. */
3049 if (POINTER_TYPE_P (TREE_TYPE (src)))
3051 /* Try to determine the range of lengths the source string
3052 refers to. If it can be determined and is less than
3053 the upper bound given by MAXLEN add one to it for
3054 the terminating nul. Otherwise, set it to one for
3055 the same reason, or to MAXLEN as appropriate. */
3056 get_range_strlen (src, range);
3057 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3059 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3060 range[0] = range[1] = maxlen;
3061 else
3062 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3063 range[0], size_one_node);
3065 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3066 range[1] = maxlen;
3067 else if (!integer_all_onesp (range[1]))
3068 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3069 range[1], size_one_node);
3071 slen = range[0];
3073 else
3075 at_least_one = true;
3076 slen = size_one_node;
3079 else
3080 slen = src;
3083 if (!size && !maxlen)
3085 /* When the only available piece of data is the object size
3086 there is nothing to do. */
3087 if (!slen)
3088 return true;
3090 /* Otherwise, when the length of the source sequence is known
3091 (as with strlen), set SIZE to it. */
3092 if (!range[0])
3093 size = slen;
3096 if (!objsize)
3097 objsize = maxobjsize;
3099 /* The SIZE is exact if it's non-null, constant, and in range of
3100 unsigned HOST_WIDE_INT. */
3101 bool exactsize = size && tree_fits_uhwi_p (size);
3103 if (size)
3104 get_size_range (size, range);
3106 /* First check the number of bytes to be written against the maximum
3107 object size. */
3108 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3110 location_t loc = tree_nonartificial_location (exp);
3111 loc = expansion_point_location_if_in_system_header (loc);
3113 if (range[0] == range[1])
3114 warning_at (loc, opt,
3115 "%K%qD specified size %E "
3116 "exceeds maximum object size %E",
3117 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3118 else
3119 warning_at (loc, opt,
3120 "%K%qD specified size between %E and %E "
3121 "exceeds maximum object size %E",
3122 exp, get_callee_fndecl (exp),
3123 range[0], range[1], maxobjsize);
3124 return false;
3127 /* Next check the number of bytes to be written against the destination
3128 object size. */
3129 if (range[0] || !exactsize || integer_all_onesp (size))
3131 if (range[0]
3132 && ((tree_fits_uhwi_p (objsize)
3133 && tree_int_cst_lt (objsize, range[0]))
3134 || (tree_fits_uhwi_p (size)
3135 && tree_int_cst_lt (size, range[0]))))
3137 location_t loc = tree_nonartificial_location (exp);
3138 loc = expansion_point_location_if_in_system_header (loc);
3140 if (size == slen && at_least_one)
3142 /* This is a call to strcpy with a destination of 0 size
3143 and a source of unknown length. The call will write
3144 at least one byte past the end of the destination. */
3145 warning_at (loc, opt,
3146 "%K%qD writing %E or more bytes into a region "
3147 "of size %E overflows the destination",
3148 exp, get_callee_fndecl (exp), range[0], objsize);
3150 else if (tree_int_cst_equal (range[0], range[1]))
3151 warning_at (loc, opt,
3152 (integer_onep (range[0])
3153 ? G_("%K%qD writing %E byte into a region "
3154 "of size %E overflows the destination")
3155 : G_("%K%qD writing %E bytes into a region "
3156 "of size %E overflows the destination")),
3157 exp, get_callee_fndecl (exp), range[0], objsize);
3158 else if (tree_int_cst_sign_bit (range[1]))
3160 /* Avoid printing the upper bound if it's invalid. */
3161 warning_at (loc, opt,
3162 "%K%qD writing %E or more bytes into a region "
3163 "of size %E overflows the destination",
3164 exp, get_callee_fndecl (exp), range[0], objsize);
3166 else
3167 warning_at (loc, opt,
3168 "%K%qD writing between %E and %E bytes into "
3169 "a region of size %E overflows the destination",
3170 exp, get_callee_fndecl (exp), range[0], range[1],
3171 objsize);
3173 /* Return error when an overflow has been detected. */
3174 return false;
3178 /* Check the maximum length of the source sequence against the size
3179 of the destination object if known, or against the maximum size
3180 of an object. */
3181 if (maxlen)
3183 get_size_range (maxlen, range);
3185 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3187 location_t loc = tree_nonartificial_location (exp);
3188 loc = expansion_point_location_if_in_system_header (loc);
3190 if (tree_int_cst_lt (maxobjsize, range[0]))
3192 /* Warn about crazy big sizes first since that's more
3193 likely to be meaningful than saying that the bound
3194 is greater than the object size if both are big. */
3195 if (range[0] == range[1])
3196 warning_at (loc, opt,
3197 "%K%qD specified bound %E "
3198 "exceeds maximum object size %E",
3199 exp, get_callee_fndecl (exp),
3200 range[0], maxobjsize);
3201 else
3202 warning_at (loc, opt,
3203 "%K%qD specified bound between %E and %E "
3204 "exceeds maximum object size %E",
3205 exp, get_callee_fndecl (exp),
3206 range[0], range[1], maxobjsize);
3208 return false;
3211 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3213 if (tree_int_cst_equal (range[0], range[1]))
3214 warning_at (loc, opt,
3215 "%K%qD specified bound %E "
3216 "exceeds destination size %E",
3217 exp, get_callee_fndecl (exp),
3218 range[0], objsize);
3219 else
3220 warning_at (loc, opt,
3221 "%K%qD specified bound between %E and %E "
3222 "exceeds destination size %E",
3223 exp, get_callee_fndecl (exp),
3224 range[0], range[1], objsize);
3225 return false;
3230 if (slen
3231 && slen == src
3232 && size && range[0]
3233 && tree_int_cst_lt (slen, range[0]))
3235 location_t loc = tree_nonartificial_location (exp);
3237 if (tree_int_cst_equal (range[0], range[1]))
3238 warning_at (loc, opt,
3239 (tree_int_cst_equal (range[0], integer_one_node)
3240 ? G_("%K%qD reading %E byte from a region of size %E")
3241 : G_("%K%qD reading %E bytes from a region of size %E")),
3242 exp, get_callee_fndecl (exp), range[0], slen);
3243 else if (tree_int_cst_sign_bit (range[1]))
3245 /* Avoid printing the upper bound if it's invalid. */
3246 warning_at (loc, opt,
3247 "%K%qD reading %E or more bytes from a region "
3248 "of size %E",
3249 exp, get_callee_fndecl (exp), range[0], slen);
3251 else
3252 warning_at (loc, opt,
3253 "%K%qD reading between %E and %E bytes from a region "
3254 "of size %E",
3255 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3256 return false;
3259 return true;
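/* For example, with -Wstringop-overflow enabled, a call such as

     char d[4];
     memcpy (d, s, 8);

   is diagnosed above as writing 8 bytes into a region of size 4
   (wording per the warning_at calls), and check_sizes returns false
   so callers can suppress further handling.  */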
3262 /* Helper to compute the size of the object referenced by the DEST
3263 expression which must have pointer type, using Object Size type
3264 OSTYPE (only the least significant 2 bits are used). Return
3265 the size of the object if successful or NULL when the size cannot
3266 be determined. */
3268 tree
3269 compute_objsize (tree dest, int ostype)
3271 unsigned HOST_WIDE_INT size;
3273 /* Only the two least significant bits are meaningful. */
3274 ostype &= 3;
3276 if (compute_builtin_object_size (dest, ostype, &size))
3277 return build_int_cst (sizetype, size);
3279 /* Unless computing the largest size (for memcpy and other raw memory
3280 functions), try to determine the size of the object from its type. */
3281 if (!ostype)
3282 return NULL_TREE;
3284 if (TREE_CODE (dest) == SSA_NAME)
3286 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3287 if (!is_gimple_assign (stmt))
3288 return NULL_TREE;
3290 tree_code code = gimple_assign_rhs_code (stmt);
3291 if (code != ADDR_EXPR && code != POINTER_PLUS_EXPR)
3292 return NULL_TREE;
3294 dest = gimple_assign_rhs1 (stmt);
3297 if (TREE_CODE (dest) != ADDR_EXPR)
3298 return NULL_TREE;
3300 tree type = TREE_TYPE (dest);
3301 if (TREE_CODE (type) == POINTER_TYPE)
3302 type = TREE_TYPE (type);
3304 type = TYPE_MAIN_VARIANT (type);
3306 if (TREE_CODE (type) == ARRAY_TYPE
3307 && !array_at_struct_end_p (dest))
3309 /* Return the constant size unless it's zero (that's a zero-length
3310 array likely at the end of a struct). */
3311 tree size = TYPE_SIZE_UNIT (type);
3312 if (size && TREE_CODE (size) == INTEGER_CST
3313 && !integer_zerop (size))
3314 return size;
3317 return NULL_TREE;
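/* For example (a sketch; the result comes from
   compute_builtin_object_size):

     char a[16];
     ... compute_objsize (&a[4], 0) ...

   yields the 12 remaining bytes, while for a pointer whose target is
   unknown and OSTYPE is 0 the function returns NULL_TREE rather than
   guessing from the type.  */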
3320 /* Helper to determine and check the sizes of the source and the destination
3321 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3322 call expression, DEST is the destination argument, SRC is the source
3323 argument or null, and LEN is the number of bytes. Use Object Size type-0
3324 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3325 (no overflow or invalid sizes), false otherwise. */
3327 static bool
3328 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3330 if (!warn_stringop_overflow)
3331 return true;
3333 /* For functions like memset and memcpy that operate on raw memory
3334 try to determine the size of the largest source and destination
3335 object using type-0 Object Size regardless of the object size
3336 type specified by the option. */
3337 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3338 tree dstsize = compute_objsize (dest, 0);
3340 return check_sizes (OPT_Wstringop_overflow_, exp,
3341 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3344 /* Validate memchr arguments without performing any expansion.
3345 Return NULL_RTX. */
3347 static rtx
3348 expand_builtin_memchr (tree exp, rtx)
3350 if (!validate_arglist (exp,
3351 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3352 return NULL_RTX;
3354 tree arg1 = CALL_EXPR_ARG (exp, 0);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3357 /* Diagnose calls where the specified length exceeds the size
3358 of the object. */
3359 if (warn_stringop_overflow)
3361 tree size = compute_objsize (arg1, 0);
3362 check_sizes (OPT_Wstringop_overflow_,
3363 exp, len, /*maxlen=*/NULL_TREE,
3364 size, /*objsize=*/NULL_TREE);
3367 return NULL_RTX;
3370 /* Expand a call EXP to the memcpy builtin.
3371 Return NULL_RTX if we failed, the caller should emit a normal call,
3372 otherwise try to get the result in TARGET, if convenient (and in
3373 mode MODE if that's convenient). */
3375 static rtx
3376 expand_builtin_memcpy (tree exp, rtx target)
3378 if (!validate_arglist (exp,
3379 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3380 return NULL_RTX;
3382 tree dest = CALL_EXPR_ARG (exp, 0);
3383 tree src = CALL_EXPR_ARG (exp, 1);
3384 tree len = CALL_EXPR_ARG (exp, 2);
3386 check_memop_sizes (exp, dest, src, len);
3388 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3389 /*endp=*/ 0);
3392 /* Check a call EXP to the memmove built-in for validity.
3393 Return NULL_RTX on both success and failure. */
3395 static rtx
3396 expand_builtin_memmove (tree exp, rtx)
3398 if (!validate_arglist (exp,
3399 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3400 return NULL_RTX;
3402 tree dest = CALL_EXPR_ARG (exp, 0);
3403 tree src = CALL_EXPR_ARG (exp, 1);
3404 tree len = CALL_EXPR_ARG (exp, 2);
3406 check_memop_sizes (exp, dest, src, len);
3408 return NULL_RTX;
3411 /* Expand an instrumented call EXP to the memcpy builtin.
3412 Return NULL_RTX if we failed, the caller should emit a normal call,
3413 otherwise try to get the result in TARGET, if convenient (and in
3414 mode MODE if that's convenient). */
3416 static rtx
3417 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3419 if (!validate_arglist (exp,
3420 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3421 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3422 INTEGER_TYPE, VOID_TYPE))
3423 return NULL_RTX;
3424 else
3426 tree dest = CALL_EXPR_ARG (exp, 0);
3427 tree src = CALL_EXPR_ARG (exp, 2);
3428 tree len = CALL_EXPR_ARG (exp, 4);
3429 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3430 /*end_p=*/ 0);
3432 /* Return src bounds with the result. */
3433 if (res)
3435 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3436 expand_normal (CALL_EXPR_ARG (exp, 1)));
3437 res = chkp_join_splitted_slot (res, bnd);
3439 return res;
3443 /* Expand a call EXP to the mempcpy builtin.
3444 Return NULL_RTX if we failed; the caller should emit a normal call,
3445 otherwise try to get the result in TARGET, if convenient (and in
3446 mode MODE if that's convenient). If ENDP is 0 return the
3447 destination pointer, if ENDP is 1 return the end pointer ala
3448 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3449 stpcpy. */
3451 static rtx
3452 expand_builtin_mempcpy (tree exp, rtx target)
3454 if (!validate_arglist (exp,
3455 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3456 return NULL_RTX;
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 tree len = CALL_EXPR_ARG (exp, 2);
3462 /* Avoid expanding mempcpy into memcpy when the call is determined
3463 to overflow the buffer. This also prevents the same overflow
3464 from being diagnosed again when expanding memcpy. */
3465 if (!check_memop_sizes (exp, dest, src, len))
3466 return NULL_RTX;
3468 return expand_builtin_mempcpy_args (dest, src, len,
3469 target, exp, /*endp=*/ 1);
3472 /* Expand an instrumented call EXP to the mempcpy builtin.
3473 Return NULL_RTX if we failed, the caller should emit a normal call,
3474 otherwise try to get the result in TARGET, if convenient (and in
3475 mode MODE if that's convenient). */
3477 static rtx
3478 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3480 if (!validate_arglist (exp,
3481 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3482 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3483 INTEGER_TYPE, VOID_TYPE))
3484 return NULL_RTX;
3485 else
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 2);
3489 tree len = CALL_EXPR_ARG (exp, 4);
3490 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3491 exp, 1);
3493 /* Return the DEST bounds with the result. */
3494 if (res)
3496 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3497 expand_normal (CALL_EXPR_ARG (exp, 1)));
3498 res = chkp_join_splitted_slot (res, bnd);
3500 return res;
3504 /* Helper function to do the actual work for expanding the memory copy
3505 family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3506 LEN bytes of memory from SRC to DEST and assigns the result to TARGET
3507 if convenient. If ENDP is 0 return the
3508 destination pointer, if ENDP is 1 return the end pointer ala
3509 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3510 stpcpy. */
3512 static rtx
3513 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3514 rtx target, tree exp, int endp)
3516 const char *src_str;
3517 unsigned int src_align = get_pointer_alignment (src);
3518 unsigned int dest_align = get_pointer_alignment (dest);
3519 rtx dest_mem, src_mem, dest_addr, len_rtx;
3520 HOST_WIDE_INT expected_size = -1;
3521 unsigned int expected_align = 0;
3522 unsigned HOST_WIDE_INT min_size;
3523 unsigned HOST_WIDE_INT max_size;
3524 unsigned HOST_WIDE_INT probable_max_size;
3526 /* If DEST is not a pointer type, call the normal function. */
3527 if (dest_align == 0)
3528 return NULL_RTX;
3530 /* If SRC is not a pointer type, don't do this
3531 operation in-line. */
3532 if (src_align == 0)
3533 return NULL_RTX;
3535 if (currently_expanding_gimple_stmt)
3536 stringop_block_profile (currently_expanding_gimple_stmt,
3537 &expected_align, &expected_size);
3539 if (expected_align < dest_align)
3540 expected_align = dest_align;
3541 dest_mem = get_memory_rtx (dest, len);
3542 set_mem_align (dest_mem, dest_align);
3543 len_rtx = expand_normal (len);
3544 determine_block_size (len, len_rtx, &min_size, &max_size,
3545 &probable_max_size);
3546 src_str = c_getstr (src);
3548 /* If SRC is a string constant and the block move would be done
3549 by pieces, we can avoid loading the string from memory
3550 and only store the computed constants. */
3551 if (src_str
3552 && CONST_INT_P (len_rtx)
3553 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3554 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3555 CONST_CAST (char *, src_str),
3556 dest_align, false))
3558 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3559 builtin_memcpy_read_str,
3560 CONST_CAST (char *, src_str),
3561 dest_align, false, endp);
3562 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3567 src_mem = get_memory_rtx (src, len);
3568 set_mem_align (src_mem, src_align);
3570 /* Copy word part most expediently. */
3571 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3572 CALL_EXPR_TAILCALL (exp)
3573 && (endp == 0 || target == const0_rtx)
3574 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3575 expected_align, expected_size,
3576 min_size, max_size, probable_max_size);
3578 if (dest_addr == 0)
3580 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3581 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3584 if (endp && target != const0_rtx)
3586 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3587 /* stpcpy pointer to last byte. */
3588 if (endp == 2)
3589 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3592 return dest_addr;
3595 static rtx
3596 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3597 rtx target, tree orig_exp, int endp)
3599 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3600 endp);
3603 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3604 we failed; the caller should emit a normal call, otherwise try to
3605 get the result in TARGET, if convenient. If ENDP is 0 return the
3606 destination pointer, if ENDP is 1 return the end pointer ala
3607 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3608 stpcpy. */
3610 static rtx
3611 expand_movstr (tree dest, tree src, rtx target, int endp)
3613 struct expand_operand ops[3];
3614 rtx dest_mem;
3615 rtx src_mem;
3617 if (!targetm.have_movstr ())
3618 return NULL_RTX;
3620 dest_mem = get_memory_rtx (dest, NULL);
3621 src_mem = get_memory_rtx (src, NULL);
3622 if (!endp)
3624 target = force_reg (Pmode, XEXP (dest_mem, 0));
3625 dest_mem = replace_equiv_address (dest_mem, target);
3628 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3629 create_fixed_operand (&ops[1], dest_mem);
3630 create_fixed_operand (&ops[2], src_mem);
3631 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3632 return NULL_RTX;
3634 if (endp && target != const0_rtx)
3636 target = ops[0].value;
3637 /* movstr is supposed to set end to the address of the NUL
3638 terminator. If the caller requested a mempcpy-like return value,
3639 adjust it. */
3640 if (endp == 1)
3642 rtx tem = plus_constant (GET_MODE (target),
3643 gen_lowpart (GET_MODE (target), target), 1);
3644 emit_move_insn (target, force_operand (tem, NULL_RTX));
3647 return target;
3650 /* Do some very basic size validation of a call to the strcat builtin
3651 given by EXP. Return NULL_RTX to have the built-in expand to a call
3652 to the library function. */
3654 static rtx
3655 expand_builtin_strcat (tree exp, rtx)
3657 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3658 || !warn_stringop_overflow)
3659 return NULL_RTX;
3661 tree dest = CALL_EXPR_ARG (exp, 0);
3662 tree src = CALL_EXPR_ARG (exp, 1);
3664 /* There is no way here to determine the length of the string in
3665 the destination to which the SRC string is being appended, so
3666 just diagnose cases when the source string is longer than
3667 the destination object. */
3669 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3671 check_sizes (OPT_Wstringop_overflow_,
3672 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3674 return NULL_RTX;
3677 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3678 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3679 try to get the result in TARGET, if convenient (and in mode MODE if that's
3680 convenient). */
3682 static rtx
3683 expand_builtin_strcpy (tree exp, rtx target)
3685 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3686 return NULL_RTX;
3688 tree dest = CALL_EXPR_ARG (exp, 0);
3689 tree src = CALL_EXPR_ARG (exp, 1);
3691 if (warn_stringop_overflow)
3693 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3694 check_sizes (OPT_Wstringop_overflow_,
3695 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3698 return expand_builtin_strcpy_args (dest, src, target);
3701 /* Helper function to do the actual work for expand_builtin_strcpy. The
3702 arguments to the builtin_strcpy call DEST and SRC are broken out
3703 so that this can also be called without constructing an actual CALL_EXPR.
3704 The other arguments and return value are the same as for
3705 expand_builtin_strcpy. */
3707 static rtx
3708 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3710 return expand_movstr (dest, src, target, /*endp=*/0);
3713 /* Expand a call EXP to the stpcpy builtin.
3714 Return NULL_RTX if we failed; the caller should emit a normal call,
3715 otherwise try to get the result in TARGET, if convenient (and in
3716 mode MODE if that's convenient). */
3718 static rtx
3719 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3721 tree dst, src;
3722 location_t loc = EXPR_LOCATION (exp);
3724 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3725 return NULL_RTX;
3727 dst = CALL_EXPR_ARG (exp, 0);
3728 src = CALL_EXPR_ARG (exp, 1);
3730 if (warn_stringop_overflow)
3732 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3733 check_sizes (OPT_Wstringop_overflow_,
3734 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3737 /* If the return value is ignored, transform stpcpy into strcpy. */
3738 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3740 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3741 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3742 return expand_expr (result, target, mode, EXPAND_NORMAL);
3744 else
3746 tree len, lenp1;
3747 rtx ret;
3749 /* Ensure we get an actual string whose length can be evaluated at
3750 compile-time, not an expression containing a string. This is
3751 because the latter will potentially produce pessimized code
3752 when used to produce the return value. */
3753 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3754 return expand_movstr (dst, src, target, /*endp=*/2);
3756 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3757 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3758 target, exp, /*endp=*/2);
3760 if (ret)
3761 return ret;
3763 if (TREE_CODE (len) == INTEGER_CST)
3765 rtx len_rtx = expand_normal (len);
3767 if (CONST_INT_P (len_rtx))
3769 ret = expand_builtin_strcpy_args (dst, src, target);
3771 if (ret)
3773 if (! target)
3775 if (mode != VOIDmode)
3776 target = gen_reg_rtx (mode);
3777 else
3778 target = gen_reg_rtx (GET_MODE (ret));
3780 if (GET_MODE (target) != GET_MODE (ret))
3781 ret = gen_lowpart (GET_MODE (target), ret);
3783 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3784 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3785 gcc_assert (ret);
3787 return target;
3792 return expand_movstr (dst, src, target, /*endp=*/2);
3796 /* Check a call EXP to the stpncpy built-in for validity.
3797 Return NULL_RTX on both success and failure. */
3799 static rtx
3800 expand_builtin_stpncpy (tree exp, rtx)
3802 if (!validate_arglist (exp,
3803 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3804 || !warn_stringop_overflow)
3805 return NULL_RTX;
3807 /* The source and destination of the call. */
3808 tree dest = CALL_EXPR_ARG (exp, 0);
3809 tree src = CALL_EXPR_ARG (exp, 1);
3811 /* The exact number of bytes to write (not the maximum). */
3812 tree len = CALL_EXPR_ARG (exp, 2);
3814 /* The size of the destination object. */
3815 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3817 check_sizes (OPT_Wstringop_overflow_,
3818 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3820 return NULL_RTX;
3823 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3824 bytes from constant string DATA + OFFSET and return it as target
3825 constant. */
3827 static rtx
3828 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3829 scalar_int_mode mode)
3831 const char *str = (const char *) data;
3833 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3834 return const0_rtx;
3836 return c_readstr (str + offset, mode);
3839 /* Helper to check the sizes of sequences and the destination of calls
3840 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3841 success (no overflow or invalid sizes), false otherwise. */
3843 static bool
3844 check_strncat_sizes (tree exp, tree objsize)
3846 tree dest = CALL_EXPR_ARG (exp, 0);
3847 tree src = CALL_EXPR_ARG (exp, 1);
3848 tree maxlen = CALL_EXPR_ARG (exp, 2);
3850 /* Try to determine the range of lengths that the source expression
3851 refers to. */
3852 tree lenrange[2];
3853 get_range_strlen (src, lenrange);
3855 /* Try to verify that the destination is big enough for the shortest
3856 string. */
3858 if (!objsize && warn_stringop_overflow)
3860 /* If it hasn't been provided by __strncat_chk, try to determine
3861 the size of the destination object into which the source is
3862 being copied. */
3863 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3866 /* Add one for the terminating nul. */
3867 tree srclen = (lenrange[0]
3868 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3869 size_one_node)
3870 : NULL_TREE);
3872 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3873 nul, so the specified upper bound should never be equal to (or greater
3874 than) the size of the destination. */
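/* E.g. given char d[4] holding an empty string, strncat (d, s, sizeof d)
   may write up to 5 bytes (four copied characters plus the terminating
   nul), hence the warning issued below. */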
3875 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3876 && tree_int_cst_equal (objsize, maxlen))
3878 location_t loc = tree_nonartificial_location (exp);
3879 loc = expansion_point_location_if_in_system_header (loc);
3881 warning_at (loc, OPT_Wstringop_overflow_,
3882 "%K%qD specified bound %E equals destination size",
3883 exp, get_callee_fndecl (exp), maxlen);
3885 return false;
3888 if (!srclen
3889 || (maxlen && tree_fits_uhwi_p (maxlen)
3890 && tree_fits_uhwi_p (srclen)
3891 && tree_int_cst_lt (maxlen, srclen)))
3892 srclen = maxlen;
3894 /* The number of bytes to write is MAXLEN but check_sizes will also
3895 check SRCLEN if MAXLEN's value isn't known. */
3896 return check_sizes (OPT_Wstringop_overflow_,
3897 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3900 /* Similar to expand_builtin_strcat, do some very basic size validation
3901 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3902 the built-in expand to a call to the library function. */
3904 static rtx
3905 expand_builtin_strncat (tree exp, rtx)
3907 if (!validate_arglist (exp,
3908 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3909 || !warn_stringop_overflow)
3910 return NULL_RTX;
3912 tree dest = CALL_EXPR_ARG (exp, 0);
3913 tree src = CALL_EXPR_ARG (exp, 1);
3914 /* The upper bound on the number of bytes to write. */
3915 tree maxlen = CALL_EXPR_ARG (exp, 2);
3916 /* The length of the source sequence. */
3917 tree slen = c_strlen (src, 1);
3919 /* Try to determine the range of lengths that the source expression
3920 refers to. */
3921 tree lenrange[2];
3922 if (slen)
3923 lenrange[0] = lenrange[1] = slen;
3924 else
3925 get_range_strlen (src, lenrange);
3927 /* Try to verify that the destination is big enough for the shortest
3928 string. First try to determine the size of the destination object
3929 into which the source is being copied. */
3930 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3932 /* Add one for the terminating nul. */
3933 tree srclen = (lenrange[0]
3934 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3935 size_one_node)
3936 : NULL_TREE);
3938 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3939 nul, so the specified upper bound should never be equal to (or greater
3940 than) the size of the destination. */
3941 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3942 && tree_int_cst_equal (destsize, maxlen))
3944 location_t loc = tree_nonartificial_location (exp);
3945 loc = expansion_point_location_if_in_system_header (loc);
3947 warning_at (loc, OPT_Wstringop_overflow_,
3948 "%K%qD specified bound %E equals destination size",
3949 exp, get_callee_fndecl (exp), maxlen);
3951 return NULL_RTX;
3954 if (!srclen
3955 || (maxlen && tree_fits_uhwi_p (maxlen)
3956 && tree_fits_uhwi_p (srclen)
3957 && tree_int_cst_lt (maxlen, srclen)))
3958 srclen = maxlen;
3960 /* The number of bytes to write is MAXLEN but check_sizes will also
3961 check SRCLEN if MAXLEN's value isn't known. */
3962 check_sizes (OPT_Wstringop_overflow_,
3963 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3965 return NULL_RTX;
3968 /* Helper to check the sizes of sequences and the destination of calls
3969 to __builtin_strncpy (DST, SRC, CNT) and __builtin___strncpy_chk.
3970 Returns true on success (no overflow warning), false otherwise. */
3972 static bool
3973 check_strncpy_sizes (tree exp, tree dst, tree src, tree cnt)
3975 tree dstsize = compute_objsize (dst, warn_stringop_overflow - 1);
3977 if (!check_sizes (OPT_Wstringop_overflow_,
3978 exp, cnt, /*maxlen=*/NULL_TREE, src, dstsize))
3979 return false;
3981 return true;
3984 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3985 NULL_RTX if we failed; the caller should emit a normal call. */
3987 static rtx
3988 expand_builtin_strncpy (tree exp, rtx target)
3990 location_t loc = EXPR_LOCATION (exp);
3992 if (validate_arglist (exp,
3993 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3995 tree dest = CALL_EXPR_ARG (exp, 0);
3996 tree src = CALL_EXPR_ARG (exp, 1);
3997 /* The number of bytes to write (not the maximum). */
3998 tree len = CALL_EXPR_ARG (exp, 2);
3999 /* The length of the source sequence. */
4000 tree slen = c_strlen (src, 1);
4002 check_strncpy_sizes (exp, dest, src, len);
4004 /* We must be passed a constant len and src parameter. */
4005 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4006 return NULL_RTX;
4008 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4010 /* We're required to pad with trailing zeros if the requested
4011 len is greater than strlen(s2)+1. In that case try to
4012 use store_by_pieces; if it fails, punt. */
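/* E.g. strncpy (d, "ab", 5) must store 'a', 'b' and three trailing nul
   bytes; builtin_strncpy_read_str supplies the zero padding once OFFSET
   passes the end of the constant string. */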
4013 if (tree_int_cst_lt (slen, len))
4015 unsigned int dest_align = get_pointer_alignment (dest);
4016 const char *p = c_getstr (src);
4017 rtx dest_mem;
4019 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4020 || !can_store_by_pieces (tree_to_uhwi (len),
4021 builtin_strncpy_read_str,
4022 CONST_CAST (char *, p),
4023 dest_align, false))
4024 return NULL_RTX;
4026 dest_mem = get_memory_rtx (dest, len);
4027 store_by_pieces (dest_mem, tree_to_uhwi (len),
4028 builtin_strncpy_read_str,
4029 CONST_CAST (char *, p), dest_align, false, 0);
4030 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4031 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4032 return dest_mem;
4035 return NULL_RTX;
4038 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4039 bytes from constant string DATA + OFFSET and return it as target
4040 constant. */
4042 static rtx
4043 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4044 scalar_int_mode mode)
4046 const char *c = (const char *) data;
4047 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4049 memset (p, *c, GET_MODE_SIZE (mode));
4051 return c_readstr (p, mode);
4054 /* Callback routine for store_by_pieces. Return the RTL of a register
4055 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4056 char value given in the RTL register data. For example, if mode is
4057 4 bytes wide, return the RTL for 0x01010101*data. */
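/* E.g. if the register holds 0xab and MODE is 4 bytes wide, the result
   register ends up holding 0xab * 0x01010101, i.e. 0xabababab. */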
4059 static rtx
4060 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4061 scalar_int_mode mode)
4063 rtx target, coeff;
4064 size_t size;
4065 char *p;
4067 size = GET_MODE_SIZE (mode);
4068 if (size == 1)
4069 return (rtx) data;
4071 p = XALLOCAVEC (char, size);
4072 memset (p, 1, size);
4073 coeff = c_readstr (p, mode);
4075 target = convert_to_mode (mode, (rtx) data, 1);
4076 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4077 return force_reg (mode, target);
4080 /* Expand expression EXP, which is a call to the memset builtin. Return
4081 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4082 try to get the result in TARGET, if convenient (and in mode MODE if that's
4083 convenient). */
4085 static rtx
4086 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4088 if (!validate_arglist (exp,
4089 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4090 return NULL_RTX;
4092 tree dest = CALL_EXPR_ARG (exp, 0);
4093 tree val = CALL_EXPR_ARG (exp, 1);
4094 tree len = CALL_EXPR_ARG (exp, 2);
4096 check_memop_sizes (exp, dest, NULL_TREE, len);
4098 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4101 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4102 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4103 try to get the result in TARGET, if convenient (and in mode MODE if that's
4104 convenient). */
4106 static rtx
4107 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4109 if (!validate_arglist (exp,
4110 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4111 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4112 return NULL_RTX;
4113 else
4115 tree dest = CALL_EXPR_ARG (exp, 0);
4116 tree val = CALL_EXPR_ARG (exp, 2);
4117 tree len = CALL_EXPR_ARG (exp, 3);
4118 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4120 /* Return the DEST bounds with the result. */
4121 if (res)
4123 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4124 expand_normal (CALL_EXPR_ARG (exp, 1)));
4125 res = chkp_join_splitted_slot (res, bnd);
4127 return res;
4131 /* Helper function to do the actual work for expand_builtin_memset. The
4132 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4133 so that this can also be called without constructing an actual CALL_EXPR.
4134 The other arguments and return value are the same as for
4135 expand_builtin_memset. */
4137 static rtx
4138 expand_builtin_memset_args (tree dest, tree val, tree len,
4139 rtx target, machine_mode mode, tree orig_exp)
4141 tree fndecl, fn;
4142 enum built_in_function fcode;
4143 machine_mode val_mode;
4144 char c;
4145 unsigned int dest_align;
4146 rtx dest_mem, dest_addr, len_rtx;
4147 HOST_WIDE_INT expected_size = -1;
4148 unsigned int expected_align = 0;
4149 unsigned HOST_WIDE_INT min_size;
4150 unsigned HOST_WIDE_INT max_size;
4151 unsigned HOST_WIDE_INT probable_max_size;
4153 dest_align = get_pointer_alignment (dest);
4155 /* If DEST is not a pointer type, don't do this operation in-line. */
4156 if (dest_align == 0)
4157 return NULL_RTX;
4159 if (currently_expanding_gimple_stmt)
4160 stringop_block_profile (currently_expanding_gimple_stmt,
4161 &expected_align, &expected_size);
4163 if (expected_align < dest_align)
4164 expected_align = dest_align;
4166 /* If the LEN parameter is zero, return DEST. */
4167 if (integer_zerop (len))
4169 /* Evaluate and ignore VAL in case it has side-effects. */
4170 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4171 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4174 /* Stabilize the arguments in case we fail. */
4175 dest = builtin_save_expr (dest);
4176 val = builtin_save_expr (val);
4177 len = builtin_save_expr (len);
4179 len_rtx = expand_normal (len);
4180 determine_block_size (len, len_rtx, &min_size, &max_size,
4181 &probable_max_size);
4182 dest_mem = get_memory_rtx (dest, len);
4183 val_mode = TYPE_MODE (unsigned_char_type_node);
4185 if (TREE_CODE (val) != INTEGER_CST)
4187 rtx val_rtx;
4189 val_rtx = expand_normal (val);
4190 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4192 /* Assume that we can memset by pieces if we can store
4193 the coefficients by pieces (in the required modes).
4194 We can't pass builtin_memset_gen_str as that emits RTL. */
4195 c = 1;
4196 if (tree_fits_uhwi_p (len)
4197 && can_store_by_pieces (tree_to_uhwi (len),
4198 builtin_memset_read_str, &c, dest_align,
4199 true))
4201 val_rtx = force_reg (val_mode, val_rtx);
4202 store_by_pieces (dest_mem, tree_to_uhwi (len),
4203 builtin_memset_gen_str, val_rtx, dest_align,
4204 true, 0);
4206 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4207 dest_align, expected_align,
4208 expected_size, min_size, max_size,
4209 probable_max_size))
4210 goto do_libcall;
4212 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4213 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4214 return dest_mem;
4217 if (target_char_cast (val, &c))
4218 goto do_libcall;
4220 if (c)
4222 if (tree_fits_uhwi_p (len)
4223 && can_store_by_pieces (tree_to_uhwi (len),
4224 builtin_memset_read_str, &c, dest_align,
4225 true))
4226 store_by_pieces (dest_mem, tree_to_uhwi (len),
4227 builtin_memset_read_str, &c, dest_align, true, 0);
4228 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4229 gen_int_mode (c, val_mode),
4230 dest_align, expected_align,
4231 expected_size, min_size, max_size,
4232 probable_max_size))
4233 goto do_libcall;
4235 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4236 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4237 return dest_mem;
4240 set_mem_align (dest_mem, dest_align);
4241 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4242 CALL_EXPR_TAILCALL (orig_exp)
4243 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4244 expected_align, expected_size,
4245 min_size, max_size,
4246 probable_max_size);
4248 if (dest_addr == 0)
4250 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4251 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4254 return dest_addr;
4256 do_libcall:
4257 fndecl = get_callee_fndecl (orig_exp);
4258 fcode = DECL_FUNCTION_CODE (fndecl);
4259 if (fcode == BUILT_IN_MEMSET
4260 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4261 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4262 dest, val, len);
4263 else if (fcode == BUILT_IN_BZERO)
4264 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4265 dest, len);
4266 else
4267 gcc_unreachable ();
4268 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4269 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4270 return expand_call (fn, target, target == const0_rtx);
4273 /* Expand expression EXP, which is a call to the bzero builtin. Return
4274 NULL_RTX if we failed; the caller should emit a normal call. */
4276 static rtx
4277 expand_builtin_bzero (tree exp)
4279 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4280 return NULL_RTX;
4282 tree dest = CALL_EXPR_ARG (exp, 0);
4283 tree size = CALL_EXPR_ARG (exp, 1);
4285 check_memop_sizes (exp, dest, NULL_TREE, size);
4287 /* New argument list transforming bzero(ptr x, int y) to
4288 memset(ptr x, int 0, size_t y). This is done this way
4289 so that if it isn't expanded inline, we fall back to
4290 calling bzero instead of memset. */
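/* I.e. the rewrite performed here is, in effect:

     bzero (x, y)   =>   memset (x, 0, (size_t) y)  */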
4292 location_t loc = EXPR_LOCATION (exp);
4294 return expand_builtin_memset_args (dest, integer_zero_node,
4295 fold_convert_loc (loc,
4296 size_type_node, size),
4297 const0_rtx, VOIDmode, exp);
4300 /* Try to expand cmpstr operation ICODE with the given operands.
4301 Return the result rtx on success, otherwise return null. */
4303 static rtx
4304 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4305 HOST_WIDE_INT align)
4307 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4309 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4310 target = NULL_RTX;
4312 struct expand_operand ops[4];
4313 create_output_operand (&ops[0], target, insn_mode);
4314 create_fixed_operand (&ops[1], arg1_rtx);
4315 create_fixed_operand (&ops[2], arg2_rtx);
4316 create_integer_operand (&ops[3], align);
4317 if (maybe_expand_insn (icode, 4, ops))
4318 return ops[0].value;
4319 return NULL_RTX;
4322 /* Expand expression EXP, which is a call to the memcmp built-in function.
4323 Return NULL_RTX if we failed and the caller should emit a normal call,
4324 otherwise try to get the result in TARGET, if convenient.
4325 RESULT_EQ is true if we can relax the returned value to be either zero
4326 or nonzero, without caring about the sign. */
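/* E.g. for a use such as if (memcmp (a, b, n) == 0) RESULT_EQ is true
   and the expansion may return any nonzero value for unequal blocks
   instead of the sign of the first differing byte. */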
4328 static rtx
4329 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4331 if (!validate_arglist (exp,
4332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4333 return NULL_RTX;
4335 tree arg1 = CALL_EXPR_ARG (exp, 0);
4336 tree arg2 = CALL_EXPR_ARG (exp, 1);
4337 tree len = CALL_EXPR_ARG (exp, 2);
4339 /* Diagnose calls where the specified length exceeds the size of either
4340 object. */
4341 if (warn_stringop_overflow)
4343 tree size = compute_objsize (arg1, 0);
4344 if (check_sizes (OPT_Wstringop_overflow_,
4345 exp, len, /*maxlen=*/NULL_TREE,
4346 size, /*objsize=*/NULL_TREE))
4348 size = compute_objsize (arg2, 0);
4349 check_sizes (OPT_Wstringop_overflow_,
4350 exp, len, /*maxlen=*/NULL_TREE,
4351 size, /*objsize=*/NULL_TREE);
4355 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4356 location_t loc = EXPR_LOCATION (exp);
4358 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4359 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4361 /* If we don't have POINTER_TYPE, call the function. */
4362 if (arg1_align == 0 || arg2_align == 0)
4363 return NULL_RTX;
4365 rtx arg1_rtx = get_memory_rtx (arg1, len);
4366 rtx arg2_rtx = get_memory_rtx (arg2, len);
4367 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4369 /* Set MEM_SIZE as appropriate. */
4370 if (CONST_INT_P (len_rtx))
4372 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4373 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4376 by_pieces_constfn constfn = NULL;
4378 const char *src_str = c_getstr (arg2);
4379 if (result_eq && src_str == NULL)
4381 src_str = c_getstr (arg1);
4382 if (src_str != NULL)
4383 std::swap (arg1_rtx, arg2_rtx);
4386 /* If SRC is a string constant and the block comparison would be done
4387 by pieces, we can avoid loading the string from memory
4388 and only use the computed constants. */
4389 if (src_str
4390 && CONST_INT_P (len_rtx)
4391 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4392 constfn = builtin_memcpy_read_str;
4394 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4395 TREE_TYPE (len), target,
4396 result_eq, constfn,
4397 CONST_CAST (char *, src_str));
4399 if (result)
4401 /* Return the value in the proper mode for this function. */
4402 if (GET_MODE (result) == mode)
4403 return result;
4405 if (target != 0)
4407 convert_move (target, result, 0);
4408 return target;
4411 return convert_to_mode (mode, result, 0);
4414 return NULL_RTX;
4417 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4418 if we failed; the caller should emit a normal call, otherwise try to get
4419 the result in TARGET, if convenient. */
4421 static rtx
4422 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4424 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4425 return NULL_RTX;
4427 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4428 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4429 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4431 rtx arg1_rtx, arg2_rtx;
4432 tree fndecl, fn;
4433 tree arg1 = CALL_EXPR_ARG (exp, 0);
4434 tree arg2 = CALL_EXPR_ARG (exp, 1);
4435 rtx result = NULL_RTX;
4437 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4438 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4440 /* If we don't have POINTER_TYPE, call the function. */
4441 if (arg1_align == 0 || arg2_align == 0)
4442 return NULL_RTX;
4444 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4445 arg1 = builtin_save_expr (arg1);
4446 arg2 = builtin_save_expr (arg2);
4448 arg1_rtx = get_memory_rtx (arg1, NULL);
4449 arg2_rtx = get_memory_rtx (arg2, NULL);
4451 /* Try to call cmpstrsi. */
4452 if (cmpstr_icode != CODE_FOR_nothing)
4453 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4454 MIN (arg1_align, arg2_align));
4456 /* Try to determine at least one length and call cmpstrnsi. */
4457 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4459 tree len;
4460 rtx arg3_rtx;
4462 tree len1 = c_strlen (arg1, 1);
4463 tree len2 = c_strlen (arg2, 1);
4465 if (len1)
4466 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4467 if (len2)
4468 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4470 /* If we don't have a constant length for the first, use the length
4471 of the second, if we know it. We don't require a constant for
4472 this case; some cost analysis could be done if both are available
4473 but neither is constant. For now, assume they're equally cheap,
4474 unless one has side effects. If both strings have constant lengths,
4475 use the smaller. */
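/* E.g. when comparing against the literal "abcd" the known length is
   5 (including the nul); limiting cmpstrnsi to 5 bytes is safe because
   strcmp never inspects bytes past the first nul in either argument. */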
4477 if (!len1)
4478 len = len2;
4479 else if (!len2)
4480 len = len1;
4481 else if (TREE_SIDE_EFFECTS (len1))
4482 len = len2;
4483 else if (TREE_SIDE_EFFECTS (len2))
4484 len = len1;
4485 else if (TREE_CODE (len1) != INTEGER_CST)
4486 len = len2;
4487 else if (TREE_CODE (len2) != INTEGER_CST)
4488 len = len1;
4489 else if (tree_int_cst_lt (len1, len2))
4490 len = len1;
4491 else
4492 len = len2;
4494 /* If both arguments have side effects, we cannot optimize. */
4495 if (len && !TREE_SIDE_EFFECTS (len))
4497 arg3_rtx = expand_normal (len);
4498 result = expand_cmpstrn_or_cmpmem
4499 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4500 arg3_rtx, MIN (arg1_align, arg2_align));
4504 if (result)
4506 /* Return the value in the proper mode for this function. */
4507 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4508 if (GET_MODE (result) == mode)
4509 return result;
4510 if (target == 0)
4511 return convert_to_mode (mode, result, 0);
4512 convert_move (target, result, 0);
4513 return target;
4516 /* Expand the library call ourselves using a stabilized argument
4517 list to avoid evaluating the function's arguments twice. */
4518 fndecl = get_callee_fndecl (exp);
4519 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4520 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4521 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4522 return expand_call (fn, target, target == const0_rtx);
4524 return NULL_RTX;
4527 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4528 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4529 the result in TARGET, if convenient. */
4531 static rtx
4532 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4533 ATTRIBUTE_UNUSED machine_mode mode)
4535 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4537 if (!validate_arglist (exp,
4538 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4539 return NULL_RTX;
4541 /* If c_strlen can determine an expression for one of the string
4542 lengths, and it doesn't have side effects, then emit cmpstrnsi
4543 using length MIN(strlen(string)+1, arg3). */
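/* E.g. strncmp (s, "ab", 100) can be emitted as a cmpstrnsi of length
   MIN (3, 100) == 3, since bytes past the terminating nul of "ab"
   cannot affect the result. */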
4544 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4545 if (cmpstrn_icode != CODE_FOR_nothing)
4547 tree len, len1, len2, len3;
4548 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4549 rtx result;
4550 tree fndecl, fn;
4551 tree arg1 = CALL_EXPR_ARG (exp, 0);
4552 tree arg2 = CALL_EXPR_ARG (exp, 1);
4553 tree arg3 = CALL_EXPR_ARG (exp, 2);
4555 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4556 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4558 len1 = c_strlen (arg1, 1);
4559 len2 = c_strlen (arg2, 1);
4561 if (len1)
4562 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4563 if (len2)
4564 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4566 len3 = fold_convert_loc (loc, sizetype, arg3);
4568 /* If we don't have a constant length for the first, use the length
4569 of the second, if we know it. If neither string is constant length,
4570 use the given length argument. We don't require a constant for
4571 this case; some cost analysis could be done if both are available
4572 but neither is constant. For now, assume they're equally cheap,
4573 unless one has side effects. If both strings have constant lengths,
4574 use the smaller. */
4576 if (!len1 && !len2)
4577 len = len3;
4578 else if (!len1)
4579 len = len2;
4580 else if (!len2)
4581 len = len1;
4582 else if (TREE_SIDE_EFFECTS (len1))
4583 len = len2;
4584 else if (TREE_SIDE_EFFECTS (len2))
4585 len = len1;
4586 else if (TREE_CODE (len1) != INTEGER_CST)
4587 len = len2;
4588 else if (TREE_CODE (len2) != INTEGER_CST)
4589 len = len1;
4590 else if (tree_int_cst_lt (len1, len2))
4591 len = len1;
4592 else
4593 len = len2;
4595 /* If we are not using the given length, we must incorporate it here.
4596 The actual new length parameter will be MIN(len,arg3) in this case. */
4597 if (len != len3)
4598 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4599 arg1_rtx = get_memory_rtx (arg1, len);
4600 arg2_rtx = get_memory_rtx (arg2, len);
4601 arg3_rtx = expand_normal (len);
4602 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4603 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4604 MIN (arg1_align, arg2_align));
4605 if (result)
4607 /* Return the value in the proper mode for this function. */
4608 mode = TYPE_MODE (TREE_TYPE (exp));
4609 if (GET_MODE (result) == mode)
4610 return result;
4611 if (target == 0)
4612 return convert_to_mode (mode, result, 0);
4613 convert_move (target, result, 0);
4614 return target;
4617 /* Expand the library call ourselves using a stabilized argument
4618 list to avoid evaluating the function's arguments twice. */
4619 fndecl = get_callee_fndecl (exp);
4620 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4621 arg1, arg2, len);
4622 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4623 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4624 return expand_call (fn, target, target == const0_rtx);
4626 return NULL_RTX;
4629 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4630 if that's convenient. */
4632 rtx
4633 expand_builtin_saveregs (void)
4635 rtx val;
4636 rtx_insn *seq;
4638 /* Don't do __builtin_saveregs more than once in a function.
4639 Save the result of the first call and reuse it. */
4640 if (saveregs_value != 0)
4641 return saveregs_value;
4643 /* When this function is called, it means that registers must be
4644 saved on entry to this function. So we migrate the call to the
4645 first insn of this function. */
4647 start_sequence ();
4649 /* Do whatever the machine needs done in this case. */
4650 val = targetm.calls.expand_builtin_saveregs ();
4652 seq = get_insns ();
4653 end_sequence ();
4655 saveregs_value = val;
4657 /* Put the insns after the NOTE that starts the function. If this
4658 is inside a start_sequence, make the outer-level insn chain current, so
4659 the code is placed at the start of the function. */
4660 push_topmost_sequence ();
4661 emit_insn_after (seq, entry_of_function ());
4662 pop_topmost_sequence ();
4664 return val;
4667 /* Expand a call to __builtin_next_arg. */
4669 static rtx
4670 expand_builtin_next_arg (void)
4672 /* Checking arguments is already done in fold_builtin_next_arg,
4673 which must be called before this function. */
4674 return expand_binop (ptr_mode, add_optab,
4675 crtl->args.internal_arg_pointer,
4676 crtl->args.arg_offset_rtx,
4677 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4680 /* Make it easier for the backends by protecting the valist argument
4681 from multiple evaluations. */
4683 static tree
4684 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4686 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4688 /* The current way of determining the type of valist is completely
4689 bogus. We should have the information on the va builtin instead. */
4690 if (!vatype)
4691 vatype = targetm.fn_abi_va_list (cfun->decl);
4693 if (TREE_CODE (vatype) == ARRAY_TYPE)
4695 if (TREE_SIDE_EFFECTS (valist))
4696 valist = save_expr (valist);
4698 /* For this case, the backends will be expecting a pointer to
4699 vatype, but it's possible we've actually been given an array
4700 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4701 So fix it. */
4702 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4704 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4705 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4708 else
4710 tree pt = build_pointer_type (vatype);
4712 if (! needs_lvalue)
4714 if (! TREE_SIDE_EFFECTS (valist))
4715 return valist;
4717 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4718 TREE_SIDE_EFFECTS (valist) = 1;
4721 if (TREE_SIDE_EFFECTS (valist))
4722 valist = save_expr (valist);
4723 valist = fold_build2_loc (loc, MEM_REF,
4724 vatype, valist, build_int_cst (pt, 0));
4727 return valist;
4730 /* The "standard" definition of va_list is void*. */
4732 tree
4733 std_build_builtin_va_list (void)
4735 return ptr_type_node;
4738 /* The "standard" abi va_list is va_list_type_node. */
4740 tree
4741 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4743 return va_list_type_node;
4746 /* The "standard" type of va_list is va_list_type_node. */
4748 tree
4749 std_canonical_va_list_type (tree type)
4751 tree wtype, htype;
4753 wtype = va_list_type_node;
4754 htype = type;
4756 if (TREE_CODE (wtype) == ARRAY_TYPE)
4758 /* If va_list is an array type, the argument may have decayed
4759 to a pointer type, e.g. by being passed to another function.
4760 In that case, unwrap both types so that we can compare the
4761 underlying records. */
4762 if (TREE_CODE (htype) == ARRAY_TYPE
4763 || POINTER_TYPE_P (htype))
4765 wtype = TREE_TYPE (wtype);
4766 htype = TREE_TYPE (htype);
4769 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4770 return va_list_type_node;
4772 return NULL_TREE;
4775 /* The "standard" implementation of va_start: just assign `nextarg' to
4776 the variable. */
4778 void
4779 std_expand_builtin_va_start (tree valist, rtx nextarg)
4781 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4782 convert_move (va_r, nextarg, 0);
4784 /* We do not have any valid bounds for the pointer, so
4785 just store zero bounds for it. */
4786 if (chkp_function_instrumented_p (current_function_decl))
4787 chkp_expand_bounds_reset_for_mem (valist,
4788 make_tree (TREE_TYPE (valist),
4789 nextarg));
4792 /* Expand EXP, a call to __builtin_va_start. */
4794 static rtx
4795 expand_builtin_va_start (tree exp)
4797 rtx nextarg;
4798 tree valist;
4799 location_t loc = EXPR_LOCATION (exp);
4801 if (call_expr_nargs (exp) < 2)
4803 error_at (loc, "too few arguments to function %<va_start%>");
4804 return const0_rtx;
4807 if (fold_builtin_next_arg (exp, true))
4808 return const0_rtx;
4810 nextarg = expand_builtin_next_arg ();
4811 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4813 if (targetm.expand_builtin_va_start)
4814 targetm.expand_builtin_va_start (valist, nextarg);
4815 else
4816 std_expand_builtin_va_start (valist, nextarg);
4818 return const0_rtx;
4821 /* Expand EXP, a call to __builtin_va_end. */
4823 static rtx
4824 expand_builtin_va_end (tree exp)
4826 tree valist = CALL_EXPR_ARG (exp, 0);
4828 /* Evaluate for side effects, if needed. I hate macros that don't
4829 do that. */
4830 if (TREE_SIDE_EFFECTS (valist))
4831 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4833 return const0_rtx;
4836 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4837 builtin rather than just as an assignment in stdarg.h because of the
4838 nastiness of array-type va_list types. */
4840 static rtx
4841 expand_builtin_va_copy (tree exp)
4843 tree dst, src, t;
4844 location_t loc = EXPR_LOCATION (exp);
4846 dst = CALL_EXPR_ARG (exp, 0);
4847 src = CALL_EXPR_ARG (exp, 1);
4849 dst = stabilize_va_list_loc (loc, dst, 1);
4850 src = stabilize_va_list_loc (loc, src, 0);
4852 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4854 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4856 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4857 TREE_SIDE_EFFECTS (t) = 1;
4858 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4860 else
4862 rtx dstb, srcb, size;
4864 /* Evaluate to pointers. */
4865 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4866 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4867 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4868 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4870 dstb = convert_memory_address (Pmode, dstb);
4871 srcb = convert_memory_address (Pmode, srcb);
4873 /* "Dereference" to BLKmode memories. */
4874 dstb = gen_rtx_MEM (BLKmode, dstb);
4875 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4876 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4877 srcb = gen_rtx_MEM (BLKmode, srcb);
4878 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4879 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4881 /* Copy. */
4882 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4885 return const0_rtx;
4888 /* Expand a call to one of the builtin functions __builtin_frame_address or
4889 __builtin_return_address. */
4891 static rtx
4892 expand_builtin_frame_address (tree fndecl, tree exp)
4894 /* The argument must be a nonnegative integer constant.
4895 It counts the number of frames to scan up the stack.
4896 The value is either the frame pointer value or the return
4897 address saved in that frame. */
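/* E.g. __builtin_return_address (0) yields the return address of the
   current frame; any nonzero count walks further up the stack and is
   diagnosed below as potentially unsafe. */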
4898 if (call_expr_nargs (exp) == 0)
4899 /* Warning about missing arg was already issued. */
4900 return const0_rtx;
4901 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4903 error ("invalid argument to %qD", fndecl);
4904 return const0_rtx;
4906 else
4908 /* Number of frames to scan up the stack. */
4909 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4911 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4913 /* Some ports cannot access arbitrary stack frames. */
4914 if (tem == NULL)
4916 warning (0, "unsupported argument to %qD", fndecl);
4917 return const0_rtx;
4920 if (count)
4922 /* Warn since no effort is made to ensure that any frame
4923 beyond the current one exists or can be safely reached. */
4924 warning (OPT_Wframe_address, "calling %qD with "
4925 "a nonzero argument is unsafe", fndecl);
4928 /* For __builtin_frame_address, return what we've got. */
4929 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4930 return tem;
4932 if (!REG_P (tem)
4933 && ! CONSTANT_P (tem))
4934 tem = copy_addr_to_reg (tem);
4935 return tem;
4939 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4940 failed and the caller should emit a normal call. */
4942 static rtx
4943 expand_builtin_alloca (tree exp)
4945 rtx op0;
4946 rtx result;
4947 unsigned int align;
4948 tree fndecl = get_callee_fndecl (exp);
4949 HOST_WIDE_INT max_size;
4950 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4951 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4952 bool valid_arglist
4953 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4954 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4955 VOID_TYPE)
4956 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4957 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4958 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4960 if (!valid_arglist)
4961 return NULL_RTX;
4963 if ((alloca_for_var && !warn_vla_limit)
4964 || (!alloca_for_var && !warn_alloca_limit))
4966 /* -Walloca-larger-than and -Wvla-larger-than settings override
4967 the more general -Walloc-size-larger-than so unless either of
4968 the former options is specified, check the alloca arguments for
4969 overflow. */
4970 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4971 int idx[] = { 0, -1 };
4972 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4975 /* Compute the argument. */
4976 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4978 /* Compute the alignment. */
4979 align = (fcode == BUILT_IN_ALLOCA
4980 ? BIGGEST_ALIGNMENT
4981 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4983 /* Compute the maximum size. */
4984 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4985 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4986 : -1);
4988 /* Allocate the desired space. If the allocation stems from the declaration
4989 of a variable-sized object, it cannot accumulate. */
4990 result
4991 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4992 result = convert_memory_address (ptr_mode, result);
4994 return result;
4997 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second
4998 argument of the call with virtual_stack_dynamic_rtx, because the asan
4999 pass emits a dummy value for that parameter and relies on this function
5000 to perform the change. See the motivation for this in the comment to
5001 the handle_builtin_stack_restore function. */
5003 static rtx
5004 expand_asan_emit_allocas_unpoison (tree exp)
5006 tree arg0 = CALL_EXPR_ARG (exp, 0);
5007 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5008 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5009 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5010 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5011 top, ptr_mode, bot, ptr_mode);
5012 return ret;
5015 /* Expand a call to bswap builtin in EXP.
5016 Return NULL_RTX if a normal call should be emitted rather than expanding the
5017 function in-line. If convenient, the result should be placed in TARGET.
5018 SUBTARGET may be used as the target for computing one of EXP's operands. */
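/* E.g. __builtin_bswap32 (0x12345678) evaluates to 0x78563412; the
   expansion below simply funnels the operand through bswap_optab. */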
5020 static rtx
5021 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5022 rtx subtarget)
5024 tree arg;
5025 rtx op0;
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5030 arg = CALL_EXPR_ARG (exp, 0);
5031 op0 = expand_expr (arg,
5032 subtarget && GET_MODE (subtarget) == target_mode
5033 ? subtarget : NULL_RTX,
5034 target_mode, EXPAND_NORMAL);
5035 if (GET_MODE (op0) != target_mode)
5036 op0 = convert_to_mode (target_mode, op0, 1);
5038 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5040 gcc_assert (target);
5042 return convert_to_mode (target_mode, target, 1);
5045 /* Expand a call to a unary builtin in EXP.
5046 Return NULL_RTX if a normal call should be emitted rather than expanding the
5047 function in-line. If convenient, the result should be placed in TARGET.
5048 SUBTARGET may be used as the target for computing one of EXP's operands. */
5050 static rtx
5051 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5052 rtx subtarget, optab op_optab)
5054 rtx op0;
5056 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5057 return NULL_RTX;
5059 /* Compute the argument. */
5060 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5061 (subtarget
5062 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5063 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5064 VOIDmode, EXPAND_NORMAL);
5065 /* Compute op, into TARGET if possible.
5066 Set TARGET to wherever the result comes back. */
5067 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5068 op_optab, op0, target, op_optab != clrsb_optab);
5069 gcc_assert (target);
5071 return convert_to_mode (target_mode, target, 0);
5074 /* Expand a call to __builtin_expect. We just return our argument
5075 as the builtin_expect semantics should already have been applied by
5076 the tree branch prediction pass. */
5078 static rtx
5079 expand_builtin_expect (tree exp, rtx target)
5081 tree arg;
5083 if (call_expr_nargs (exp) < 2)
5084 return const0_rtx;
5085 arg = CALL_EXPR_ARG (exp, 0);
5087 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5088 /* When guessing was done, the hints should be already stripped away. */
5089 gcc_assert (!flag_guess_branch_prob
5090 || optimize == 0 || seen_error ());
5091 return target;
5094 /* Expand a call to __builtin_assume_aligned. We just return our first
5095 argument, as the builtin_assume_aligned semantics should already
5096 have been applied by CCP. */
5098 static rtx
5099 expand_builtin_assume_aligned (tree exp, rtx target)
5101 if (call_expr_nargs (exp) < 2)
5102 return const0_rtx;
5103 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5104 EXPAND_NORMAL);
5105 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5106 && (call_expr_nargs (exp) < 3
5107 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5108 return target;
5111 void
5112 expand_builtin_trap (void)
5114 if (targetm.have_trap ())
5116 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5117 /* For trap insns when not accumulating outgoing args force
5118 REG_ARGS_SIZE note to prevent crossjumping of calls with
5119 different args sizes. */
5120 if (!ACCUMULATE_OUTGOING_ARGS)
5121 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5123 else
5125 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5126 tree call_expr = build_call_expr (fn, 0);
5127 expand_call (call_expr, NULL_RTX, false);
5130 emit_barrier ();
5133 /* Expand a call to __builtin_unreachable. We do nothing except emit
5134 a barrier saying that control flow will not pass here.
5136 It is the responsibility of the program being compiled to ensure
5137 that control flow never reaches __builtin_unreachable. */
5138 static void
5139 expand_builtin_unreachable (void)
5141 emit_barrier ();
5144 /* Expand EXP, a call to fabs, fabsf or fabsl.
5145 Return NULL_RTX if a normal call should be emitted rather than expanding
5146 the function inline. If convenient, the result should be placed
5147 in TARGET. SUBTARGET may be used as the target for computing
5148 the operand. */
5150 static rtx
5151 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5153 machine_mode mode;
5154 tree arg;
5155 rtx op0;
5157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5158 return NULL_RTX;
5160 arg = CALL_EXPR_ARG (exp, 0);
5161 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5162 mode = TYPE_MODE (TREE_TYPE (arg));
5163 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5164 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5167 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5168 Return NULL if a normal call should be emitted rather than expanding the
5169 function inline. If convenient, the result should be placed in TARGET.
5170 SUBTARGET may be used as the target for computing the operand. */
5172 static rtx
5173 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5175 rtx op0, op1;
5176 tree arg;
5178 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5179 return NULL_RTX;
5181 arg = CALL_EXPR_ARG (exp, 0);
5182 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5184 arg = CALL_EXPR_ARG (exp, 1);
5185 op1 = expand_normal (arg);
5187 return expand_copysign (op0, op1, target);
5190 /* Expand a call to __builtin___clear_cache. */
5192 static rtx
5193 expand_builtin___clear_cache (tree exp)
5195 if (!targetm.code_for_clear_cache)
5197 #ifdef CLEAR_INSN_CACHE
5198 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5199 does something. Just do the default expansion to a call to
5200 __clear_cache(). */
5201 return NULL_RTX;
5202 #else
5203 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5204 does nothing. There is no need to call it. Do nothing. */
5205 return const0_rtx;
5206 #endif /* CLEAR_INSN_CACHE */
5209 /* We have a "clear_cache" insn, and it will handle everything. */
5210 tree begin, end;
5211 rtx begin_rtx, end_rtx;
5213 /* We must not expand to a library call. If we did, any
5214 fallback library function in libgcc that might contain a call to
5215 __builtin___clear_cache() would recurse infinitely. */
5216 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5218 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5219 return const0_rtx;
5222 if (targetm.have_clear_cache ())
5224 struct expand_operand ops[2];
5226 begin = CALL_EXPR_ARG (exp, 0);
5227 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5229 end = CALL_EXPR_ARG (exp, 1);
5230 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5232 create_address_operand (&ops[0], begin_rtx);
5233 create_address_operand (&ops[1], end_rtx);
5234 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5235 return const0_rtx;
5237 return const0_rtx;
5240 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5242 static rtx
5243 round_trampoline_addr (rtx tramp)
5245 rtx temp, addend, mask;
5247 /* If we don't need too much alignment, we'll have been guaranteed
5248 proper alignment by get_trampoline_type. */
5249 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5250 return tramp;
5252 /* Round address up to desired boundary. */
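  /* I.e. tramp = (tramp + align_in_bytes - 1) & -align_in_bytes; e.g.
     with a 16-byte boundary an address of 0x1003 rounds up to 0x1010. */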
5253 temp = gen_reg_rtx (Pmode);
5254 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5255 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5257 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5258 temp, 0, OPTAB_LIB_WIDEN);
5259 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5260 temp, 0, OPTAB_LIB_WIDEN);
5262 return tramp;
5265 static rtx
5266 expand_builtin_init_trampoline (tree exp, bool onstack)
5268 tree t_tramp, t_func, t_chain;
5269 rtx m_tramp, r_tramp, r_chain, tmp;
5271 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5272 POINTER_TYPE, VOID_TYPE))
5273 return NULL_RTX;
5275 t_tramp = CALL_EXPR_ARG (exp, 0);
5276 t_func = CALL_EXPR_ARG (exp, 1);
5277 t_chain = CALL_EXPR_ARG (exp, 2);
5279 r_tramp = expand_normal (t_tramp);
5280 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5281 MEM_NOTRAP_P (m_tramp) = 1;
5283 /* If ONSTACK, the TRAMP argument should be the address of a field
5284 within the local function's FRAME decl. Either way, let's see if
5285 we can fill in the MEM_ATTRs for this memory. */
5286 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5287 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5289 /* Creator of a heap trampoline is responsible for making sure the
5290 address is aligned to at least STACK_BOUNDARY. Normally malloc
5291 will ensure this anyhow. */
5292 tmp = round_trampoline_addr (r_tramp);
5293 if (tmp != r_tramp)
5295 m_tramp = change_address (m_tramp, BLKmode, tmp);
5296 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5297 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5300 /* The FUNC argument should be the address of the nested function.
5301 Extract the actual function decl to pass to the hook. */
5302 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5303 t_func = TREE_OPERAND (t_func, 0);
5304 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5306 r_chain = expand_normal (t_chain);
5308 /* Generate insns to initialize the trampoline. */
5309 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5311 if (onstack)
5313 trampolines_created = 1;
5315 if (targetm.calls.custom_function_descriptors != 0)
5316 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5317 "trampoline generated for nested function %qD", t_func);
5320 return const0_rtx;
5323 static rtx
5324 expand_builtin_adjust_trampoline (tree exp)
5326 rtx tramp;
5328 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5329 return NULL_RTX;
5331 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5332 tramp = round_trampoline_addr (tramp);
5333 if (targetm.calls.trampoline_adjust_address)
5334 tramp = targetm.calls.trampoline_adjust_address (tramp);
5336 return tramp;
5339 /* Expand a call to the builtin descriptor initialization routine.
5340 A descriptor is made up of a couple of pointers to the static
5341 chain and the code entry in this order. */
5343 static rtx
5344 expand_builtin_init_descriptor (tree exp)
5346 tree t_descr, t_func, t_chain;
5347 rtx m_descr, r_descr, r_func, r_chain;
5349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5350 VOID_TYPE))
5351 return NULL_RTX;
5353 t_descr = CALL_EXPR_ARG (exp, 0);
5354 t_func = CALL_EXPR_ARG (exp, 1);
5355 t_chain = CALL_EXPR_ARG (exp, 2);
5357 r_descr = expand_normal (t_descr);
5358 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5359 MEM_NOTRAP_P (m_descr) = 1;
5361 r_func = expand_normal (t_func);
5362 r_chain = expand_normal (t_chain);
5364 /* Generate insns to initialize the descriptor. */
5365 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5366 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5367 POINTER_SIZE / BITS_PER_UNIT), r_func);
5369 return const0_rtx;
5372 /* Expand a call to the builtin descriptor adjustment routine. */
5374 static rtx
5375 expand_builtin_adjust_descriptor (tree exp)
5377 rtx tramp;
5379 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5380 return NULL_RTX;
5382 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5384 /* Unalign the descriptor to allow runtime identification. */
5385 tramp = plus_constant (ptr_mode, tramp,
5386 targetm.calls.custom_function_descriptors);
5388 return force_operand (tramp, NULL_RTX);
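/* Illustrative layout (sketch, not from this file): a descriptor is two
   consecutive pointers,
     struct descriptor { void *chain; void *entry; };
   and the address produced above is deliberately left misaligned by
   targetm.calls.custom_function_descriptors bytes, so a call site can
   distinguish a descriptor from an ordinary, properly aligned function
   pointer at run time.  */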
5391 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5392 function. The function first checks whether the back end provides
5393 an insn to implement signbit for the respective mode. If not, it
5394 checks whether the floating point format of the value is such that
5395 the sign bit can be extracted; if not, signbit is expanded as ARG < 0.0.
5396 EXP is the expression that is a call to the builtin function; if
5397 convenient, the result should be placed in TARGET. */
5398 static rtx
5399 expand_builtin_signbit (tree exp, rtx target)
5401 const struct real_format *fmt;
5402 scalar_float_mode fmode;
5403 scalar_int_mode rmode, imode;
5404 tree arg;
5405 int word, bitpos;
5406 enum insn_code icode;
5407 rtx temp;
5408 location_t loc = EXPR_LOCATION (exp);
5410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5411 return NULL_RTX;
5413 arg = CALL_EXPR_ARG (exp, 0);
5414 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5415 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5416 fmt = REAL_MODE_FORMAT (fmode);
5418 arg = builtin_save_expr (arg);
5420 /* Expand the argument yielding an RTX expression. */
5421 temp = expand_normal (arg);
5423 /* Check if the back end provides an insn that handles signbit for the
5424 argument's mode. */
5425 icode = optab_handler (signbit_optab, fmode);
5426 if (icode != CODE_FOR_nothing)
5428 rtx_insn *last = get_last_insn ();
5429 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5430 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5431 return target;
5432 delete_insns_since (last);
5435 /* For floating point formats without a sign bit, implement signbit
5436 as "ARG < 0.0". */
5437 bitpos = fmt->signbit_ro;
5438 if (bitpos < 0)
5440 /* But we can't do this if the format supports signed zero. */
5441 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5443 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5444 build_real (TREE_TYPE (arg), dconst0));
5445 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5448 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5450 imode = int_mode_for_mode (fmode).require ();
5451 temp = gen_lowpart (imode, temp);
5453 else
5455 imode = word_mode;
5456 /* Handle targets with different FP word orders. */
5457 if (FLOAT_WORDS_BIG_ENDIAN)
5458 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5459 else
5460 word = bitpos / BITS_PER_WORD;
5461 temp = operand_subword_force (temp, word, fmode);
5462 bitpos = bitpos % BITS_PER_WORD;
5465 /* Force the intermediate word_mode (or narrower) result into a
5466 register. This avoids attempting to create paradoxical SUBREGs
5467 of floating point modes below. */
5468 temp = force_reg (imode, temp);
5470 /* If the bitpos is within the "result mode" lowpart, the operation
5471 can be implemented with a single bitwise AND. Otherwise, we need
5472 a right shift and an AND. */
5474 if (bitpos < GET_MODE_BITSIZE (rmode))
5476 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5478 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5479 temp = gen_lowpart (rmode, temp);
5480 temp = expand_binop (rmode, and_optab, temp,
5481 immed_wide_int_const (mask, rmode),
5482 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5484 else
5486 /* Perform a logical right shift to place the signbit in the least
5487 significant bit, then truncate the result to the desired mode
5488 and mask just this bit. */
5489 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5490 temp = gen_lowpart (rmode, temp);
5491 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5492 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5495 return temp;
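/* Worked example (assuming IEEE binary32 and a 32-bit result mode):
   signbit_ro is 31, which is below GET_MODE_BITSIZE (rmode), so a single
   mask suffices:
     result = bits & 0x80000000;
   For binary64 with a 32-bit result mode, bit 63 is out of range, so the
   second path computes (bits >> 63) & 1 instead.  */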
5498 /* Expand fork or exec calls. TARGET is the desired target of the
5499 call. EXP is the call. FN is the
5500 identifier of the actual function. IGNORE is nonzero if the
5501 value is to be ignored. */
5503 static rtx
5504 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5506 tree id, decl;
5507 tree call;
5509 /* If we are not profiling, just call the function. */
5510 if (!profile_arc_flag)
5511 return NULL_RTX;
5513 /* Otherwise call the wrapper. This should be equivalent for the rest of
5514 the compiler, so the code does not diverge, and the wrapper may run the
5515 code necessary for keeping the profiling sane. */
5517 switch (DECL_FUNCTION_CODE (fn))
5519 case BUILT_IN_FORK:
5520 id = get_identifier ("__gcov_fork");
5521 break;
5523 case BUILT_IN_EXECL:
5524 id = get_identifier ("__gcov_execl");
5525 break;
5527 case BUILT_IN_EXECV:
5528 id = get_identifier ("__gcov_execv");
5529 break;
5531 case BUILT_IN_EXECLP:
5532 id = get_identifier ("__gcov_execlp");
5533 break;
5535 case BUILT_IN_EXECLE:
5536 id = get_identifier ("__gcov_execle");
5537 break;
5539 case BUILT_IN_EXECVP:
5540 id = get_identifier ("__gcov_execvp");
5541 break;
5543 case BUILT_IN_EXECVE:
5544 id = get_identifier ("__gcov_execve");
5545 break;
5547 default:
5548 gcc_unreachable ();
5551 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5552 FUNCTION_DECL, id, TREE_TYPE (fn));
5553 DECL_EXTERNAL (decl) = 1;
5554 TREE_PUBLIC (decl) = 1;
5555 DECL_ARTIFICIAL (decl) = 1;
5556 TREE_NOTHROW (decl) = 1;
5557 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5558 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5559 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5560 return expand_call (call, target, ignore);
5565 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5566 the pointer in these functions is void*, the tree optimizers may remove
5567 casts. The mode computed in expand_builtin isn't reliable either, due
5568 to __sync_bool_compare_and_swap.
5570 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5571 group of builtins. This gives us log2 of the mode size. */
5573 static inline machine_mode
5574 get_builtin_sync_mode (int fcode_diff)
5576 /* The size is not negotiable, so ask not to get BLKmode in return
5577 if the target indicates that a smaller size would be better. */
5578 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
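/* Example of the FCODE_DIFF encoding (illustrative): the _4 variant of a
   group has fcode - base == 2, giving BITS_PER_UNIT << 2 == 32 bits,
   i.e. SImode on typical targets; likewise _1 -> QImode, _2 -> HImode,
   _8 -> DImode and _16 -> TImode.  */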
5581 /* Expand the memory expression LOC and return the appropriate memory operand
5582 for the builtin_sync operations. */
5584 static rtx
5585 get_builtin_sync_mem (tree loc, machine_mode mode)
5587 rtx addr, mem;
5589 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5590 addr = convert_memory_address (Pmode, addr);
5592 /* Note that we explicitly do not want any alias information for this
5593 memory, so that we kill all other live memories. Otherwise we don't
5594 satisfy the full barrier semantics of the intrinsic. */
5595 mem = validize_mem (gen_rtx_MEM (mode, addr));
5597 /* The alignment needs to be at least that of the mode. */
5598 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5599 get_pointer_alignment (loc)));
5600 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5601 MEM_VOLATILE_P (mem) = 1;
5603 return mem;
5606 /* Make sure an argument is in the right mode.
5607 EXP is the tree argument.
5608 MODE is the mode it should be in. */
5610 static rtx
5611 expand_expr_force_mode (tree exp, machine_mode mode)
5613 rtx val;
5614 machine_mode old_mode;
5616 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5617 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5618 of CONST_INTs, where we know the old_mode only from the call argument. */
5620 old_mode = GET_MODE (val);
5621 if (old_mode == VOIDmode)
5622 old_mode = TYPE_MODE (TREE_TYPE (exp));
5623 val = convert_modes (mode, old_mode, val, 1);
5624 return val;
5628 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5629 EXP is the CALL_EXPR. CODE is the rtx code
5630 that corresponds to the arithmetic or logical operation from the name;
5631 an exception here is that NOT actually means NAND. TARGET is an optional
5632 place for us to store the results; AFTER is true if this is the
5633 fetch_and_xxx form. */
5635 static rtx
5636 expand_builtin_sync_operation (machine_mode mode, tree exp,
5637 enum rtx_code code, bool after,
5638 rtx target)
5640 rtx val, mem;
5641 location_t loc = EXPR_LOCATION (exp);
5643 if (code == NOT && warn_sync_nand)
5645 tree fndecl = get_callee_fndecl (exp);
5646 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5648 static bool warned_f_a_n, warned_n_a_f;
5650 switch (fcode)
5652 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5653 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5654 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5655 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5656 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5657 if (warned_f_a_n)
5658 break;
5660 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5661 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5662 warned_f_a_n = true;
5663 break;
5665 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5666 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5667 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5668 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5669 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5670 if (warned_n_a_f)
5671 break;
5673 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5674 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5675 warned_n_a_f = true;
5676 break;
5678 default:
5679 gcc_unreachable ();
5683 /* Expand the operands. */
5684 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5685 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5687 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5688 after);
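/* The GCC 4.4 semantic change warned about above, in pseudo-C (per the
   GCC documentation): before 4.4,
     tmp = *ptr; *ptr = ~tmp & val;    // fetch_and_nand, old semantics
   from 4.4 onwards,
     tmp = *ptr; *ptr = ~(tmp & val);  // fetch_and_nand, NAND proper
   which is why the NAND variants draw the one-time note above.  */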
5691 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5692 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5693 true if this is the boolean form. TARGET is a place for us to store the
5694 results; this is NOT optional if IS_BOOL is true. */
5696 static rtx
5697 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5698 bool is_bool, rtx target)
5700 rtx old_val, new_val, mem;
5701 rtx *pbool, *poval;
5703 /* Expand the operands. */
5704 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5705 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5706 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5708 pbool = poval = NULL;
5709 if (target != const0_rtx)
5711 if (is_bool)
5712 pbool = &target;
5713 else
5714 poval = &target;
5716 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5717 false, MEMMODEL_SYNC_SEQ_CST,
5718 MEMMODEL_SYNC_SEQ_CST))
5719 return NULL_RTX;
5721 return target;
5724 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5725 general form is actually an atomic exchange, and some targets only
5726 support a reduced form with the second argument being a constant 1.
5727 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5728 the results. */
5730 static rtx
5731 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5732 rtx target)
5734 rtx val, mem;
5736 /* Expand the operands. */
5737 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5738 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5740 return expand_sync_lock_test_and_set (target, mem, val);
5743 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5745 static void
5746 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5748 rtx mem;
5750 /* Expand the operands. */
5751 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5753 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5756 /* Given an integer representing an ``enum memmodel'', verify its
5757 correctness and return the memory model enum. */
5759 static enum memmodel
5760 get_memmodel (tree exp)
5762 rtx op;
5763 unsigned HOST_WIDE_INT val;
5764 source_location loc
5765 = expansion_point_location_if_in_system_header (input_location);
5767 /* If the parameter is not a constant, it's a run time value so we'll just
5768 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5769 if (TREE_CODE (exp) != INTEGER_CST)
5770 return MEMMODEL_SEQ_CST;
5772 op = expand_normal (exp);
5774 val = INTVAL (op);
5775 if (targetm.memmodel_check)
5776 val = targetm.memmodel_check (val);
5777 else if (val & ~MEMMODEL_MASK)
5779 warning_at (loc, OPT_Winvalid_memory_model,
5780 "unknown architecture specifier in memory model to builtin");
5781 return MEMMODEL_SEQ_CST;
5784 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5785 if (memmodel_base (val) >= MEMMODEL_LAST)
5787 warning_at (loc, OPT_Winvalid_memory_model,
5788 "invalid memory model argument to builtin");
5789 return MEMMODEL_SEQ_CST;
5792 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5793 be conservative and promote consume to acquire. */
5794 if (val == MEMMODEL_CONSUME)
5795 val = MEMMODEL_ACQUIRE;
5797 return (enum memmodel) val;
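/* Illustrative normalizations performed above (hypothetical calls): a
   run-time model argument is simply treated as seq_cst; an out-of-range
   constant draws -Winvalid-memory-model and falls back to seq_cst; and
   consume is promoted to acquire:
     __atomic_load_n (p, m)                 // non-constant m: seq_cst
     __atomic_load_n (p, 99)                // warning, then seq_cst
     __atomic_load_n (p, __ATOMIC_CONSUME)  // acquire (PR 59448)  */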
5800 /* Expand the __atomic_exchange intrinsic:
5801 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5802 EXP is the CALL_EXPR.
5803 TARGET is an optional place for us to store the results. */
5805 static rtx
5806 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5808 rtx val, mem;
5809 enum memmodel model;
5811 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5813 if (!flag_inline_atomics)
5814 return NULL_RTX;
5816 /* Expand the operands. */
5817 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5818 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5820 return expand_atomic_exchange (target, mem, val, model);
5823 /* Expand the __atomic_compare_exchange intrinsic:
5824 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5825 TYPE desired, BOOL weak,
5826 enum memmodel success,
5827 enum memmodel failure)
5828 EXP is the CALL_EXPR.
5829 TARGET is an optional place for us to store the results. */
5831 static rtx
5832 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5833 rtx target)
5835 rtx expect, desired, mem, oldval;
5836 rtx_code_label *label;
5837 enum memmodel success, failure;
5838 tree weak;
5839 bool is_weak;
5840 source_location loc
5841 = expansion_point_location_if_in_system_header (input_location);
5843 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5844 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5846 if (failure > success)
5848 warning_at (loc, OPT_Winvalid_memory_model,
5849 "failure memory model cannot be stronger than success "
5850 "memory model for %<__atomic_compare_exchange%>");
5851 success = MEMMODEL_SEQ_CST;
5854 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5856 warning_at (loc, OPT_Winvalid_memory_model,
5857 "invalid failure memory model for "
5858 "%<__atomic_compare_exchange%>");
5859 failure = MEMMODEL_SEQ_CST;
5860 success = MEMMODEL_SEQ_CST;
5864 if (!flag_inline_atomics)
5865 return NULL_RTX;
5867 /* Expand the operands. */
5868 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5870 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5871 expect = convert_memory_address (Pmode, expect);
5872 expect = gen_rtx_MEM (mode, expect);
5873 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5875 weak = CALL_EXPR_ARG (exp, 3);
5876 is_weak = false;
5877 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5878 is_weak = true;
5880 if (target == const0_rtx)
5881 target = NULL;
5883 /* Lest the rtl backend create a race condition with an improper store
5884 to memory, always create a new pseudo for OLDVAL. */
5885 oldval = NULL;
5887 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5888 is_weak, success, failure))
5889 return NULL_RTX;
5891 /* Conditionally store back to EXPECT, lest we create a race condition
5892 with an improper store to memory. */
5893 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5894 the normal case where EXPECT is totally private, i.e. a register. At
5895 which point the store can be unconditional. */
5896 label = gen_label_rtx ();
5897 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5898 GET_MODE (target), 1, label);
5899 emit_move_insn (expect, oldval);
5900 emit_label (label);
5902 return target;
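/* The conditional store-back above corresponds to this pseudo-C sketch
   of __atomic_compare_exchange:
     ok = CAS (mem, *expect, desired);  // oldval receives the old *mem
     if (!ok)
       *expect = oldval;
     return ok;
   i.e. EXPECT is written only on failure, so no spurious store to it is
   visible on the success path.  */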
5905 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5906 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5907 call. The weak parameter must be dropped to match the expected parameter
5908 list and the expected argument changed from value to pointer to memory
5909 slot. */
5911 static void
5912 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5914 unsigned int z;
5915 vec<tree, va_gc> *vec;
5917 vec_alloc (vec, 5);
5918 vec->quick_push (gimple_call_arg (call, 0));
5919 tree expected = gimple_call_arg (call, 1);
5920 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5921 TREE_TYPE (expected));
5922 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5923 if (expd != x)
5924 emit_move_insn (x, expd);
5925 tree v = make_tree (TREE_TYPE (expected), x);
5926 vec->quick_push (build1 (ADDR_EXPR,
5927 build_pointer_type (TREE_TYPE (expected)), v));
5928 vec->quick_push (gimple_call_arg (call, 2));
5929 /* Skip the boolean weak parameter. */
5930 for (z = 4; z < 6; z++)
5931 vec->quick_push (gimple_call_arg (call, z));
5932 built_in_function fncode
5933 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5934 + exact_log2 (GET_MODE_SIZE (mode)));
5935 tree fndecl = builtin_decl_explicit (fncode);
5936 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5937 fndecl);
5938 tree exp = build_call_vec (boolean_type_node, fn, vec);
5939 tree lhs = gimple_call_lhs (call);
5940 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5941 if (lhs)
5943 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5944 if (GET_MODE (boolret) != mode)
5945 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5946 x = force_reg (mode, x);
5947 write_complex_part (target, boolret, true);
5948 write_complex_part (target, x, false);
5952 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5954 void
5955 expand_ifn_atomic_compare_exchange (gcall *call)
5957 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5958 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5959 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5960 rtx expect, desired, mem, oldval, boolret;
5961 enum memmodel success, failure;
5962 tree lhs;
5963 bool is_weak;
5964 source_location loc
5965 = expansion_point_location_if_in_system_header (gimple_location (call));
5967 success = get_memmodel (gimple_call_arg (call, 4));
5968 failure = get_memmodel (gimple_call_arg (call, 5));
5970 if (failure > success)
5972 warning_at (loc, OPT_Winvalid_memory_model,
5973 "failure memory model cannot be stronger than success "
5974 "memory model for %<__atomic_compare_exchange%>");
5975 success = MEMMODEL_SEQ_CST;
5978 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5980 warning_at (loc, OPT_Winvalid_memory_model,
5981 "invalid failure memory model for "
5982 "%<__atomic_compare_exchange%>");
5983 failure = MEMMODEL_SEQ_CST;
5984 success = MEMMODEL_SEQ_CST;
5987 if (!flag_inline_atomics)
5989 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5990 return;
5993 /* Expand the operands. */
5994 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5996 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5997 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5999 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6001 boolret = NULL;
6002 oldval = NULL;
6004 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6005 is_weak, success, failure))
6007 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6008 return;
6011 lhs = gimple_call_lhs (call);
6012 if (lhs)
6014 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6015 if (GET_MODE (boolret) != mode)
6016 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6017 write_complex_part (target, boolret, true);
6018 write_complex_part (target, oldval, false);
6022 /* Expand the __atomic_load intrinsic:
6023 TYPE __atomic_load (TYPE *object, enum memmodel)
6024 EXP is the CALL_EXPR.
6025 TARGET is an optional place for us to store the results. */
6027 static rtx
6028 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6030 rtx mem;
6031 enum memmodel model;
6033 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6034 if (is_mm_release (model) || is_mm_acq_rel (model))
6036 source_location loc
6037 = expansion_point_location_if_in_system_header (input_location);
6038 warning_at (loc, OPT_Winvalid_memory_model,
6039 "invalid memory model for %<__atomic_load%>");
6040 model = MEMMODEL_SEQ_CST;
6043 if (!flag_inline_atomics)
6044 return NULL_RTX;
6046 /* Expand the operand. */
6047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6049 return expand_atomic_load (target, mem, model);
6053 /* Expand the __atomic_store intrinsic:
6054 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6055 EXP is the CALL_EXPR. */
6058 static rtx
6059 expand_builtin_atomic_store (machine_mode mode, tree exp)
6061 rtx mem, val;
6062 enum memmodel model;
6064 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6065 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6066 || is_mm_release (model)))
6068 source_location loc
6069 = expansion_point_location_if_in_system_header (input_location);
6070 warning_at (loc, OPT_Winvalid_memory_model,
6071 "invalid memory model for %<__atomic_store%>");
6072 model = MEMMODEL_SEQ_CST;
6075 if (!flag_inline_atomics)
6076 return NULL_RTX;
6078 /* Expand the operands. */
6079 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6080 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6082 return expand_atomic_store (mem, val, model, false);
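/* Summary of the model checks in the two expanders above (following the
   C11 mapping): __atomic_load rejects release and acq_rel, while
   __atomic_store accepts only relaxed, release and seq_cst. An invalid
   model is diagnosed with -Winvalid-memory-model and replaced by
   seq_cst, e.g. (illustrative)
     __atomic_store_n (p, 1, __ATOMIC_ACQUIRE);  // warns, uses seq_cst  */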
6085 /* Expand the __atomic_fetch_XXX intrinsic:
6086 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6087 EXP is the CALL_EXPR.
6088 TARGET is an optional place for us to store the results.
6089 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6090 FETCH_AFTER is true if the result of the operation is returned,
6091 and false if the value before the operation is returned.
6092 IGNORE is true if the result is not used.
6093 EXT_CALL is the correct builtin for an external call if this cannot be
6094 resolved to an instruction sequence. */
6096 static rtx
6097 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6098 enum rtx_code code, bool fetch_after,
6099 bool ignore, enum built_in_function ext_call)
6101 rtx val, mem, ret;
6102 enum memmodel model;
6103 tree fndecl;
6104 tree addr;
6106 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6108 /* Expand the operands. */
6109 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6110 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6112 /* Only try generating instructions if inlining is turned on. */
6113 if (flag_inline_atomics)
6115 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6116 if (ret)
6117 return ret;
6120 /* If there is no alternate routine for the library call, give up. */
6121 if (ext_call == BUILT_IN_NONE)
6122 return NULL_RTX;
6124 /* Change the call to the specified function. */
6125 fndecl = get_callee_fndecl (exp);
6126 addr = CALL_EXPR_FN (exp);
6127 STRIP_NOPS (addr);
6129 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6130 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6132 /* If we will emit code after the call, the call cannot be a tail call.
6133 If it is emitted as a tail call, a barrier is emitted after it, and
6134 then all trailing code is removed. */
6135 if (!ignore)
6136 CALL_EXPR_TAILCALL (exp) = 0;
6138 /* Expand the call here so we can emit trailing code. */
6139 ret = expand_call (exp, target, ignore);
6141 /* Replace the original function just in case it matters. */
6142 TREE_OPERAND (addr, 0) = fndecl;
6144 /* Then issue the arithmetic correction to return the right result. */
6145 if (!ignore)
6147 if (code == NOT)
6149 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6150 OPTAB_LIB_WIDEN);
6151 ret = expand_simple_unop (mode, NOT, ret, target, true);
6153 else
6154 ret = expand_simple_binop (mode, code, ret, val, target, true,
6155 OPTAB_LIB_WIDEN);
6157 return ret;
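/* Example of the arithmetic correction above (sketch): if
   __atomic_add_fetch has to fall back to the __atomic_fetch_add library
   routine, the library returns the pre-operation value, so the expander
   re-applies the operation:
     ret = __atomic_fetch_add (ptr, val, model);
     ret = ret + val;                  // now the post-operation value
   For NOT (i.e. NAND) the fix-up is ret = ~(ret & val), matching the
   AND-then-NOT pair emitted above.  */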
6160 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6162 void
6163 expand_ifn_atomic_bit_test_and (gcall *call)
6165 tree ptr = gimple_call_arg (call, 0);
6166 tree bit = gimple_call_arg (call, 1);
6167 tree flag = gimple_call_arg (call, 2);
6168 tree lhs = gimple_call_lhs (call);
6169 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6170 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6171 enum rtx_code code;
6172 optab optab;
6173 struct expand_operand ops[5];
6175 gcc_assert (flag_inline_atomics);
6177 if (gimple_call_num_args (call) == 4)
6178 model = get_memmodel (gimple_call_arg (call, 3));
6180 rtx mem = get_builtin_sync_mem (ptr, mode);
6181 rtx val = expand_expr_force_mode (bit, mode);
6183 switch (gimple_call_internal_fn (call))
6185 case IFN_ATOMIC_BIT_TEST_AND_SET:
6186 code = IOR;
6187 optab = atomic_bit_test_and_set_optab;
6188 break;
6189 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6190 code = XOR;
6191 optab = atomic_bit_test_and_complement_optab;
6192 break;
6193 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6194 code = AND;
6195 optab = atomic_bit_test_and_reset_optab;
6196 break;
6197 default:
6198 gcc_unreachable ();
6201 if (lhs == NULL_TREE)
6203 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6204 val, NULL_RTX, true, OPTAB_DIRECT);
6205 if (code == AND)
6206 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6207 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6208 return;
6211 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6212 enum insn_code icode = direct_optab_handler (optab, mode);
6213 gcc_assert (icode != CODE_FOR_nothing);
6214 create_output_operand (&ops[0], target, mode);
6215 create_fixed_operand (&ops[1], mem);
6216 create_convert_operand_to (&ops[2], val, mode, true);
6217 create_integer_operand (&ops[3], model);
6218 create_integer_operand (&ops[4], integer_onep (flag));
6219 if (maybe_expand_insn (icode, 5, ops))
6220 return;
6222 rtx bitval = val;
6223 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6224 val, NULL_RTX, true, OPTAB_DIRECT);
6225 rtx maskval = val;
6226 if (code == AND)
6227 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6228 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6229 code, model, false);
6230 if (integer_onep (flag))
6232 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6233 NULL_RTX, true, OPTAB_DIRECT);
6234 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6235 true, OPTAB_DIRECT);
6237 else
6238 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6239 OPTAB_DIRECT);
6240 if (result != target)
6241 emit_move_insn (target, result);
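/* The fallback above, in pseudo-C (sketch): with mask = 1 << bit,
     old = atomic_fetch_<op> (mem, code == AND ? ~mask : mask);
   the tested bit is then recovered either as
     (old >> bit) & 1   // FLAG set: a 0/1 result
   or as
     old & mask         // FLAG clear: zero / non-zero result
   which is what the two expand_simple_binop chains compute.  */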
6244 /* Expand an atomic clear operation.
6245 void __atomic_clear (BOOL *obj, enum memmodel)
6246 EXP is the call expression. */
6248 static rtx
6249 expand_builtin_atomic_clear (tree exp)
6251 machine_mode mode;
6252 rtx mem, ret;
6253 enum memmodel model;
6255 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6256 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6257 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6259 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6261 source_location loc
6262 = expansion_point_location_if_in_system_header (input_location);
6263 warning_at (loc, OPT_Winvalid_memory_model,
6264 "invalid memory model for %<__atomic_clear%>");
6265 model = MEMMODEL_SEQ_CST;
6268 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6269 Failing that, a store is issued by __atomic_store. The only way this can
6270 fail is if the bool type is larger than a word size. Unlikely, but
6271 handle it anyway for completeness. Assume a single threaded model since
6272 there is no atomic support in this case, and no barriers are required. */
6273 ret = expand_atomic_store (mem, const0_rtx, model, true);
6274 if (!ret)
6275 emit_move_insn (mem, const0_rtx);
6276 return const0_rtx;
6279 /* Expand an atomic test_and_set operation.
6280 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6281 EXP is the call expression. */
6283 static rtx
6284 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6286 rtx mem;
6287 enum memmodel model;
6288 machine_mode mode;
6290 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6292 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6294 return expand_atomic_test_and_set (target, mem, model);
6298 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6299 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6301 static tree
6302 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6304 int size;
6305 machine_mode mode;
6306 unsigned int mode_align, type_align;
6308 if (TREE_CODE (arg0) != INTEGER_CST)
6309 return NULL_TREE;
6311 /* We need a corresponding integer mode for the access to be lock-free. */
6312 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6313 if (!int_mode_for_size (size, 0).exists (&mode))
6314 return boolean_false_node;
6316 mode_align = GET_MODE_ALIGNMENT (mode);
6318 if (TREE_CODE (arg1) == INTEGER_CST)
6320 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6322 /* Either this argument is null, or it's a fake pointer encoding
6323 the alignment of the object. */
6324 val = least_bit_hwi (val);
6325 val *= BITS_PER_UNIT;
6327 if (val == 0 || mode_align < val)
6328 type_align = mode_align;
6329 else
6330 type_align = val;
6332 else
6334 tree ttype = TREE_TYPE (arg1);
6336 /* This function is usually invoked and folded immediately by the front
6337 end before anything else has a chance to look at it. The pointer
6338 parameter at this point is usually cast to a void *, so check for that
6339 and look past the cast. */
6340 if (CONVERT_EXPR_P (arg1)
6341 && POINTER_TYPE_P (ttype)
6342 && VOID_TYPE_P (TREE_TYPE (ttype))
6343 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6344 arg1 = TREE_OPERAND (arg1, 0);
6346 ttype = TREE_TYPE (arg1);
6347 gcc_assert (POINTER_TYPE_P (ttype));
6349 /* Get the underlying type of the object. */
6350 ttype = TREE_TYPE (ttype);
6351 type_align = TYPE_ALIGN (ttype);
6354 /* If the object has smaller alignment, the lock free routines cannot
6355 be used. */
6356 if (type_align < mode_align)
6357 return boolean_false_node;
6359 /* Check if a compare_and_swap pattern exists for the mode which represents
6360 the required size. The pattern is not allowed to fail, so the existence
6361 of the pattern indicates support is present. Also require that an
6362 atomic load exists for the required size. */
6363 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6364 return boolean_true_node;
6365 else
6366 return boolean_false_node;
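/* Illustrative foldings (assuming a typical x86-64 target with SImode
   compare-and-swap and atomic load available):
     __atomic_always_lock_free (4, 0)           // true
     __atomic_always_lock_free (4, (char *) 2)  // false
   The second, fake-pointer argument encodes the object's alignment; two
   bytes is below SImode's alignment, so the lock-free routines cannot
   be used.  */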
6369 /* Return true if the parameters to call EXP represent an object which will
6370 always generate lock free instructions. The first argument represents the
6371 size of the object, and the second parameter is a pointer to the object
6372 itself. If NULL is passed for the object, then the result is based on
6373 typical alignment for an object of the specified size. Otherwise return
6374 false. */
6376 static rtx
6377 expand_builtin_atomic_always_lock_free (tree exp)
6379 tree size;
6380 tree arg0 = CALL_EXPR_ARG (exp, 0);
6381 tree arg1 = CALL_EXPR_ARG (exp, 1);
6383 if (TREE_CODE (arg0) != INTEGER_CST)
6385 error ("non-constant argument 1 to __atomic_always_lock_free");
6386 return const0_rtx;
6389 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6390 if (size == boolean_true_node)
6391 return const1_rtx;
6392 return const0_rtx;
6395 /* Return one or zero if it can be determined that object ARG1 of size
6396 ARG0 is lock free on this architecture. */
6398 static tree
6399 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6401 if (!flag_inline_atomics)
6402 return NULL_TREE;
6404 /* If it isn't always lock free, don't generate a result. */
6405 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6406 return boolean_true_node;
6408 return NULL_TREE;
6411 /* Return true if the parameters to call EXP represent an object which will
6412 always generate lock free instructions. The first argument represents the
6413 size of the object, and the second parameter is a pointer to the object
6414 itself. If NULL is passed for the object, then the result is based on
6415 typical alignment for an object of the specified size. Otherwise return
6416 NULL. */
6418 static rtx
6419 expand_builtin_atomic_is_lock_free (tree exp)
6421 tree size;
6422 tree arg0 = CALL_EXPR_ARG (exp, 0);
6423 tree arg1 = CALL_EXPR_ARG (exp, 1);
6425 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6427 error ("non-integer argument 1 to __atomic_is_lock_free");
6428 return NULL_RTX;
6431 if (!flag_inline_atomics)
6432 return NULL_RTX;
6434 /* If the value is known at compile time, return the RTX for it. */
6435 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6436 if (size == boolean_true_node)
6437 return const1_rtx;
6439 return NULL_RTX;
6442 /* Expand the __atomic_thread_fence intrinsic:
6443 void __atomic_thread_fence (enum memmodel)
6444 EXP is the CALL_EXPR. */
6446 static void
6447 expand_builtin_atomic_thread_fence (tree exp)
6449 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6450 expand_mem_thread_fence (model);
6453 /* Expand the __atomic_signal_fence intrinsic:
6454 void __atomic_signal_fence (enum memmodel)
6455 EXP is the CALL_EXPR. */
6457 static void
6458 expand_builtin_atomic_signal_fence (tree exp)
6460 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6461 expand_mem_signal_fence (model);
6464 /* Expand the __sync_synchronize intrinsic. */
6466 static void
6467 expand_builtin_sync_synchronize (void)
6469 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
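/* Expand a call to __builtin_thread_pointer: return the current thread
   pointer, if the target provides an insn for it.  */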
6472 static rtx
6473 expand_builtin_thread_pointer (tree exp, rtx target)
6475 enum insn_code icode;
6476 if (!validate_arglist (exp, VOID_TYPE))
6477 return const0_rtx;
6478 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6479 if (icode != CODE_FOR_nothing)
6481 struct expand_operand op;
6482 /* If the target is not suitable then create a new target. */
6483 if (target == NULL_RTX
6484 || !REG_P (target)
6485 || GET_MODE (target) != Pmode)
6486 target = gen_reg_rtx (Pmode);
6487 create_output_operand (&op, target, Pmode);
6488 expand_insn (icode, 1, &op);
6489 return target;
6491 error ("__builtin_thread_pointer is not supported on this target");
6492 return const0_rtx;
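/* Expand a call to __builtin_set_thread_pointer: set the thread pointer
   from the call's pointer argument, if the target provides an insn.  */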
6495 static void
6496 expand_builtin_set_thread_pointer (tree exp)
6498 enum insn_code icode;
6499 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6500 return;
6501 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6502 if (icode != CODE_FOR_nothing)
6504 struct expand_operand op;
6505 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6506 Pmode, EXPAND_NORMAL);
6507 create_input_operand (&op, val, Pmode);
6508 expand_insn (icode, 1, &op);
6509 return;
6511 error ("__builtin_set_thread_pointer is not supported on this target");
6515 /* Emit code to restore the current value of the stack. */
6517 static void
6518 expand_stack_restore (tree var)
6520 rtx_insn *prev;
6521 rtx sa = expand_normal (var);
6523 sa = convert_memory_address (Pmode, sa);
6525 prev = get_last_insn ();
6526 emit_stack_restore (SAVE_BLOCK, sa);
6528 record_new_stack_level ();
6530 fixup_args_size_notes (prev, get_last_insn (), 0);
6533 /* Emit code to save the current value of the stack. */
6535 static rtx
6536 expand_stack_save (void)
6538 rtx ret = NULL_RTX;
6540 emit_stack_save (SAVE_BLOCK, &ret);
6541 return ret;
6545 /* Expand an expression EXP that calls a built-in function,
6546 with result going to TARGET if that's convenient
6547 (and in mode MODE if that's convenient).
6548 SUBTARGET may be used as the target for computing one of EXP's operands.
6549 IGNORE is nonzero if the value is to be ignored. */
6551 rtx
6552 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6553 int ignore)
6555 tree fndecl = get_callee_fndecl (exp);
6556 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6557 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6558 int flags;
6560 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6561 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6563 /* When ASan is enabled, we don't want to expand some memory/string
6564 builtins and rely on libsanitizer's hooks. This allows us to avoid
6565 redundant checks and be sure, that possible overflow will be detected
6566 by ASan. */
6568 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6569 return expand_call (exp, target, ignore);
6571 /* When not optimizing, generate calls to library functions for a certain
6572 set of builtins. */
6573 if (!optimize
6574 && !called_as_built_in (fndecl)
6575 && fcode != BUILT_IN_FORK
6576 && fcode != BUILT_IN_EXECL
6577 && fcode != BUILT_IN_EXECV
6578 && fcode != BUILT_IN_EXECLP
6579 && fcode != BUILT_IN_EXECLE
6580 && fcode != BUILT_IN_EXECVP
6581 && fcode != BUILT_IN_EXECVE
6582 && !ALLOCA_FUNCTION_CODE_P (fcode)
6583 && fcode != BUILT_IN_FREE
6584 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6585 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6586 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6587 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6588 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6589 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6590 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6591 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6592 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6593 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6594 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6595 && fcode != BUILT_IN_CHKP_BNDRET)
6596 return expand_call (exp, target, ignore);
6598 /* The built-in function expanders test for target == const0_rtx
6599 to determine whether the function's result will be ignored. */
6600 if (ignore)
6601 target = const0_rtx;
6603 /* If the result of a pure or const built-in function is ignored, and
6604 none of its arguments are volatile, we can avoid expanding the
6605 built-in call and just evaluate the arguments for side-effects. */
6606 if (target == const0_rtx
6607 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6608 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6610 bool volatilep = false;
6611 tree arg;
6612 call_expr_arg_iterator iter;
6614 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6615 if (TREE_THIS_VOLATILE (arg))
6617 volatilep = true;
6618 break;
6621 if (! volatilep)
6623 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6624 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6625 return const0_rtx;
6629 /* expand_builtin_with_bounds is supposed to be used for
6630 instrumented builtin calls. */
6631 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6633 switch (fcode)
6635 CASE_FLT_FN (BUILT_IN_FABS):
6636 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6637 case BUILT_IN_FABSD32:
6638 case BUILT_IN_FABSD64:
6639 case BUILT_IN_FABSD128:
6640 target = expand_builtin_fabs (exp, target, subtarget);
6641 if (target)
6642 return target;
6643 break;
6645 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6646 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6647 target = expand_builtin_copysign (exp, target, subtarget);
6648 if (target)
6649 return target;
6650 break;
6652 /* Just do a normal library call if we were unable to fold
6653 the values. */
6654 CASE_FLT_FN (BUILT_IN_CABS):
6655 break;
6657 CASE_FLT_FN (BUILT_IN_FMA):
6658 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6659 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6660 if (target)
6661 return target;
6662 break;
6664 CASE_FLT_FN (BUILT_IN_ILOGB):
6665 if (! flag_unsafe_math_optimizations)
6666 break;
6667 gcc_fallthrough ();
6668 CASE_FLT_FN (BUILT_IN_ISINF):
6669 CASE_FLT_FN (BUILT_IN_FINITE):
6670 case BUILT_IN_ISFINITE:
6671 case BUILT_IN_ISNORMAL:
6672 target = expand_builtin_interclass_mathfn (exp, target);
6673 if (target)
6674 return target;
6675 break;
6677 CASE_FLT_FN (BUILT_IN_ICEIL):
6678 CASE_FLT_FN (BUILT_IN_LCEIL):
6679 CASE_FLT_FN (BUILT_IN_LLCEIL):
6680 CASE_FLT_FN (BUILT_IN_LFLOOR):
6681 CASE_FLT_FN (BUILT_IN_IFLOOR):
6682 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6683 target = expand_builtin_int_roundingfn (exp, target);
6684 if (target)
6685 return target;
6686 break;
6688 CASE_FLT_FN (BUILT_IN_IRINT):
6689 CASE_FLT_FN (BUILT_IN_LRINT):
6690 CASE_FLT_FN (BUILT_IN_LLRINT):
6691 CASE_FLT_FN (BUILT_IN_IROUND):
6692 CASE_FLT_FN (BUILT_IN_LROUND):
6693 CASE_FLT_FN (BUILT_IN_LLROUND):
6694 target = expand_builtin_int_roundingfn_2 (exp, target);
6695 if (target)
6696 return target;
6697 break;
6699 CASE_FLT_FN (BUILT_IN_POWI):
6700 target = expand_builtin_powi (exp, target);
6701 if (target)
6702 return target;
6703 break;
6705 CASE_FLT_FN (BUILT_IN_CEXPI):
6706 target = expand_builtin_cexpi (exp, target);
6707 gcc_assert (target);
6708 return target;
6710 CASE_FLT_FN (BUILT_IN_SIN):
6711 CASE_FLT_FN (BUILT_IN_COS):
6712 if (! flag_unsafe_math_optimizations)
6713 break;
6714 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6715 if (target)
6716 return target;
6717 break;
6719 CASE_FLT_FN (BUILT_IN_SINCOS):
6720 if (! flag_unsafe_math_optimizations)
6721 break;
6722 target = expand_builtin_sincos (exp);
6723 if (target)
6724 return target;
6725 break;
6727 case BUILT_IN_APPLY_ARGS:
6728 return expand_builtin_apply_args ();
6730 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6731 FUNCTION with a copy of the parameters described by
6732 ARGUMENTS, and ARGSIZE. It returns a block of memory
6733 allocated on the stack into which is stored all the registers
6734 that might possibly be used for returning the result of a
6735 function. ARGUMENTS is the value returned by
6736 __builtin_apply_args. ARGSIZE is the number of bytes of
6737 arguments that must be copied. ??? How should this value be
6738 computed? We'll also need a safe worst case value for varargs
6739 functions. */
6740 case BUILT_IN_APPLY:
6741 if (!validate_arglist (exp, POINTER_TYPE,
6742 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6743 && !validate_arglist (exp, REFERENCE_TYPE,
6744 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6745 return const0_rtx;
6746 else
6748 rtx ops[3];
6750 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6751 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6752 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6754 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6757 /* __builtin_return (RESULT) causes the function to return the
6758 value described by RESULT. RESULT is address of the block of
6759 memory returned by __builtin_apply. */
6760 case BUILT_IN_RETURN:
6761 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6762 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6763 return const0_rtx;
6765 case BUILT_IN_SAVEREGS:
6766 return expand_builtin_saveregs ();
6768 case BUILT_IN_VA_ARG_PACK:
6769 /* All valid uses of __builtin_va_arg_pack () are removed during
6770 inlining. */
6771 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6772 return const0_rtx;
6774 case BUILT_IN_VA_ARG_PACK_LEN:
6775 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6776 inlining. */
6777 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6778 return const0_rtx;
6780 /* Return the address of the first anonymous stack arg. */
6781 case BUILT_IN_NEXT_ARG:
6782 if (fold_builtin_next_arg (exp, false))
6783 return const0_rtx;
6784 return expand_builtin_next_arg ();
6786 case BUILT_IN_CLEAR_CACHE:
6787 target = expand_builtin___clear_cache (exp);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_CLASSIFY_TYPE:
6793 return expand_builtin_classify_type (exp);
6795 case BUILT_IN_CONSTANT_P:
6796 return const0_rtx;
6798 case BUILT_IN_FRAME_ADDRESS:
6799 case BUILT_IN_RETURN_ADDRESS:
6800 return expand_builtin_frame_address (fndecl, exp);
6802 /* Returns the address of the area where the structure is returned.
6803 0 otherwise. */
6804 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6805 if (call_expr_nargs (exp) != 0
6806 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6807 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6808 return const0_rtx;
6809 else
6810 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6812 CASE_BUILT_IN_ALLOCA:
6813 target = expand_builtin_alloca (exp);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6819 return expand_asan_emit_allocas_unpoison (exp);
6821 case BUILT_IN_STACK_SAVE:
6822 return expand_stack_save ();
6824 case BUILT_IN_STACK_RESTORE:
6825 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6826 return const0_rtx;
6828 case BUILT_IN_BSWAP16:
6829 case BUILT_IN_BSWAP32:
6830 case BUILT_IN_BSWAP64:
6831 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6832 if (target)
6833 return target;
6834 break;
6836 CASE_INT_FN (BUILT_IN_FFS):
6837 target = expand_builtin_unop (target_mode, exp, target,
6838 subtarget, ffs_optab);
6839 if (target)
6840 return target;
6841 break;
6843 CASE_INT_FN (BUILT_IN_CLZ):
6844 target = expand_builtin_unop (target_mode, exp, target,
6845 subtarget, clz_optab);
6846 if (target)
6847 return target;
6848 break;
6850 CASE_INT_FN (BUILT_IN_CTZ):
6851 target = expand_builtin_unop (target_mode, exp, target,
6852 subtarget, ctz_optab);
6853 if (target)
6854 return target;
6855 break;
6857 CASE_INT_FN (BUILT_IN_CLRSB):
6858 target = expand_builtin_unop (target_mode, exp, target,
6859 subtarget, clrsb_optab);
6860 if (target)
6861 return target;
6862 break;
6864 CASE_INT_FN (BUILT_IN_POPCOUNT):
6865 target = expand_builtin_unop (target_mode, exp, target,
6866 subtarget, popcount_optab);
6867 if (target)
6868 return target;
6869 break;
6871 CASE_INT_FN (BUILT_IN_PARITY):
6872 target = expand_builtin_unop (target_mode, exp, target,
6873 subtarget, parity_optab);
6874 if (target)
6875 return target;
6876 break;
6878 case BUILT_IN_STRLEN:
6879 target = expand_builtin_strlen (exp, target, target_mode);
6880 if (target)
6881 return target;
6882 break;
6884 case BUILT_IN_STRCAT:
6885 target = expand_builtin_strcat (exp, target);
6886 if (target)
6887 return target;
6888 break;
6890 case BUILT_IN_STRCPY:
6891 target = expand_builtin_strcpy (exp, target);
6892 if (target)
6893 return target;
6894 break;
6896 case BUILT_IN_STRNCAT:
6897 target = expand_builtin_strncat (exp, target);
6898 if (target)
6899 return target;
6900 break;
6902 case BUILT_IN_STRNCPY:
6903 target = expand_builtin_strncpy (exp, target);
6904 if (target)
6905 return target;
6906 break;
6908 case BUILT_IN_STPCPY:
6909 target = expand_builtin_stpcpy (exp, target, mode);
6910 if (target)
6911 return target;
6912 break;
6914 case BUILT_IN_STPNCPY:
6915 target = expand_builtin_stpncpy (exp, target);
6916 if (target)
6917 return target;
6918 break;
6920 case BUILT_IN_MEMCHR:
6921 target = expand_builtin_memchr (exp, target);
6922 if (target)
6923 return target;
6924 break;
6926 case BUILT_IN_MEMCPY:
6927 target = expand_builtin_memcpy (exp, target);
6928 if (target)
6929 return target;
6930 break;
6932 case BUILT_IN_MEMMOVE:
6933 target = expand_builtin_memmove (exp, target);
6934 if (target)
6935 return target;
6936 break;
6938 case BUILT_IN_MEMPCPY:
6939 target = expand_builtin_mempcpy (exp, target);
6940 if (target)
6941 return target;
6942 break;
6944 case BUILT_IN_MEMSET:
6945 target = expand_builtin_memset (exp, target, mode);
6946 if (target)
6947 return target;
6948 break;
6950 case BUILT_IN_BZERO:
6951 target = expand_builtin_bzero (exp);
6952 if (target)
6953 return target;
6954 break;
6956 case BUILT_IN_STRCMP:
6957 target = expand_builtin_strcmp (exp, target);
6958 if (target)
6959 return target;
6960 break;
6962 case BUILT_IN_STRNCMP:
6963 target = expand_builtin_strncmp (exp, target, mode);
6964 if (target)
6965 return target;
6966 break;
6968 case BUILT_IN_BCMP:
6969 case BUILT_IN_MEMCMP:
6970 case BUILT_IN_MEMCMP_EQ:
6971 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6972 if (target)
6973 return target;
6974 if (fcode == BUILT_IN_MEMCMP_EQ)
6976 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6977 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6979 break;
6981 case BUILT_IN_SETJMP:
6982 /* This should have been lowered to the builtins below. */
6983 gcc_unreachable ();
6985 case BUILT_IN_SETJMP_SETUP:
6986 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6987 and the receiver label. */
6988 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6990 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6991 VOIDmode, EXPAND_NORMAL);
6992 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6993 rtx_insn *label_r = label_rtx (label);
6995 /* This is copied from the handling of non-local gotos. */
6996 expand_builtin_setjmp_setup (buf_addr, label_r);
6997 nonlocal_goto_handler_labels
6998 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6999 nonlocal_goto_handler_labels);
7000 /* ??? Do not let expand_label treat us as such since we would
7001 not want to be both on the list of non-local labels and on
7002 the list of forced labels. */
7003 FORCED_LABEL (label) = 0;
7004 return const0_rtx;
7006 break;
7008 case BUILT_IN_SETJMP_RECEIVER:
7009 /* __builtin_setjmp_receiver is passed the receiver label. */
7010 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7012 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7013 rtx_insn *label_r = label_rtx (label);
7015 expand_builtin_setjmp_receiver (label_r);
7016 return const0_rtx;
7018 break;
7020 /* __builtin_longjmp is passed a pointer to an array of five words.
7021 It's similar to the C library longjmp function but works with
7022 __builtin_setjmp above. */
7023 case BUILT_IN_LONGJMP:
7024 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7026 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7027 VOIDmode, EXPAND_NORMAL);
7028 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7030 if (value != const1_rtx)
7032 error ("%<__builtin_longjmp%> second argument must be 1");
7033 return const0_rtx;
7036 expand_builtin_longjmp (buf_addr, value);
7037 return const0_rtx;
7039 break;
7041 case BUILT_IN_NONLOCAL_GOTO:
7042 target = expand_builtin_nonlocal_goto (exp);
7043 if (target)
7044 return target;
7045 break;
7047 /* This updates the setjmp buffer that is its argument with the value
7048 of the current stack pointer. */
7049 case BUILT_IN_UPDATE_SETJMP_BUF:
7050 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7052 rtx buf_addr
7053 = expand_normal (CALL_EXPR_ARG (exp, 0));
7055 expand_builtin_update_setjmp_buf (buf_addr);
7056 return const0_rtx;
7058 break;
7060 case BUILT_IN_TRAP:
7061 expand_builtin_trap ();
7062 return const0_rtx;
7064 case BUILT_IN_UNREACHABLE:
7065 expand_builtin_unreachable ();
7066 return const0_rtx;
7068 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7069 case BUILT_IN_SIGNBITD32:
7070 case BUILT_IN_SIGNBITD64:
7071 case BUILT_IN_SIGNBITD128:
7072 target = expand_builtin_signbit (exp, target);
7073 if (target)
7074 return target;
7075 break;
7077 /* Various hooks for the DWARF 2 __throw routine. */
7078 case BUILT_IN_UNWIND_INIT:
7079 expand_builtin_unwind_init ();
7080 return const0_rtx;
7081 case BUILT_IN_DWARF_CFA:
7082 return virtual_cfa_rtx;
7083 #ifdef DWARF2_UNWIND_INFO
7084 case BUILT_IN_DWARF_SP_COLUMN:
7085 return expand_builtin_dwarf_sp_column ();
7086 case BUILT_IN_INIT_DWARF_REG_SIZES:
7087 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7088 return const0_rtx;
7089 #endif
7090 case BUILT_IN_FROB_RETURN_ADDR:
7091 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7092 case BUILT_IN_EXTRACT_RETURN_ADDR:
7093 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7094 case BUILT_IN_EH_RETURN:
7095 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7096 CALL_EXPR_ARG (exp, 1));
7097 return const0_rtx;
7098 case BUILT_IN_EH_RETURN_DATA_REGNO:
7099 return expand_builtin_eh_return_data_regno (exp);
7100 case BUILT_IN_EXTEND_POINTER:
7101 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7102 case BUILT_IN_EH_POINTER:
7103 return expand_builtin_eh_pointer (exp);
7104 case BUILT_IN_EH_FILTER:
7105 return expand_builtin_eh_filter (exp);
7106 case BUILT_IN_EH_COPY_VALUES:
7107 return expand_builtin_eh_copy_values (exp);
7109 case BUILT_IN_VA_START:
7110 return expand_builtin_va_start (exp);
7111 case BUILT_IN_VA_END:
7112 return expand_builtin_va_end (exp);
7113 case BUILT_IN_VA_COPY:
7114 return expand_builtin_va_copy (exp);
7115 case BUILT_IN_EXPECT:
7116 return expand_builtin_expect (exp, target);
7117 case BUILT_IN_ASSUME_ALIGNED:
7118 return expand_builtin_assume_aligned (exp, target);
7119 case BUILT_IN_PREFETCH:
7120 expand_builtin_prefetch (exp);
7121 return const0_rtx;
7123 case BUILT_IN_INIT_TRAMPOLINE:
7124 return expand_builtin_init_trampoline (exp, true);
7125 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7126 return expand_builtin_init_trampoline (exp, false);
7127 case BUILT_IN_ADJUST_TRAMPOLINE:
7128 return expand_builtin_adjust_trampoline (exp);
7130 case BUILT_IN_INIT_DESCRIPTOR:
7131 return expand_builtin_init_descriptor (exp);
7132 case BUILT_IN_ADJUST_DESCRIPTOR:
7133 return expand_builtin_adjust_descriptor (exp);
7135 case BUILT_IN_FORK:
7136 case BUILT_IN_EXECL:
7137 case BUILT_IN_EXECV:
7138 case BUILT_IN_EXECLP:
7139 case BUILT_IN_EXECLE:
7140 case BUILT_IN_EXECVP:
7141 case BUILT_IN_EXECVE:
7142 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7143 if (target)
7144 return target;
7145 break;
7147 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7148 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7149 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7150 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7151 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7152 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7153 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7154 if (target)
7155 return target;
7156 break;
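/* Illustrative note (not part of the GCC sources): the _1/_2/_4/_8/_16
   suffix encodes the operand size in bytes, and because the function
   codes are consecutive, "fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1" is
   exactly the index get_builtin_sync_mode needs.  At the source level
   the fetch-and-op forms return the old value:

       int counter;
       int old = __sync_fetch_and_add (&counter, 5);

   Here old is the value counter held before the atomic addition; the
   op-and-fetch forms handled below return the new value instead.  */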
7158 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7159 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7160 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7161 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7162 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7163 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7164 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7165 if (target)
7166 return target;
7167 break;
7169 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7170 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7171 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7172 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7173 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7174 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7175 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7176 if (target)
7177 return target;
7178 break;
7180 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7181 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7182 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7183 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7184 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7185 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7186 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7187 if (target)
7188 return target;
7189 break;
7191 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7192 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7193 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7194 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7195 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7196 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7197 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7198 if (target)
7199 return target;
7200 break;
7202 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7203 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7204 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7205 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7206 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7207 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7208 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7214 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7215 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7216 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7217 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7218 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7219 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7220 if (target)
7221 return target;
7222 break;
7224 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7225 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7226 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7227 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7228 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7229 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7230 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7231 if (target)
7232 return target;
7233 break;
7235 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7236 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7237 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7238 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7239 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7240 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7241 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7242 if (target)
7243 return target;
7244 break;
7246 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7247 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7248 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7249 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7250 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7252 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7253 if (target)
7254 return target;
7255 break;
7257 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7258 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7259 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7260 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7261 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7262 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7263 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7264 if (target)
7265 return target;
7266 break;
7268 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7269 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7270 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7271 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7272 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7273 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7274 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7275 if (target)
7276 return target;
7277 break;
7279 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7280 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7281 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7282 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7283 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7284 if (mode == VOIDmode)
7285 mode = TYPE_MODE (boolean_type_node);
7286 if (!target || !register_operand (target, mode))
7287 target = gen_reg_rtx (mode);
7289 mode = get_builtin_sync_mode
7290 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7291 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7292 if (target)
7293 return target;
7294 break;
7296 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7297 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7298 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7299 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7300 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7301 mode = get_builtin_sync_mode
7302 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7303 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7304 if (target)
7305 return target;
7306 break;
7308 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7309 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7310 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7311 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7312 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7313 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7314 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7315 if (target)
7316 return target;
7317 break;
7319 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7320 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7321 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7322 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7323 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7324 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7325 expand_builtin_sync_lock_release (mode, exp);
7326 return const0_rtx;
7328 case BUILT_IN_SYNC_SYNCHRONIZE:
7329 expand_builtin_sync_synchronize ();
7330 return const0_rtx;
7332 case BUILT_IN_ATOMIC_EXCHANGE_1:
7333 case BUILT_IN_ATOMIC_EXCHANGE_2:
7334 case BUILT_IN_ATOMIC_EXCHANGE_4:
7335 case BUILT_IN_ATOMIC_EXCHANGE_8:
7336 case BUILT_IN_ATOMIC_EXCHANGE_16:
7337 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7338 target = expand_builtin_atomic_exchange (mode, exp, target);
7339 if (target)
7340 return target;
7341 break;
7343 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7344 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7345 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7346 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7347 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7349 unsigned int nargs, z;
7350 vec<tree, va_gc> *vec;
7352 mode =
7353 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7354 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7355 if (target)
7356 return target;
7358 /* If this is turned into an external library call, the weak parameter
7359 must be dropped to match the expected parameter list. */
7360 nargs = call_expr_nargs (exp);
7361 vec_alloc (vec, nargs - 1);
7362 for (z = 0; z < 3; z++)
7363 vec->quick_push (CALL_EXPR_ARG (exp, z));
7364 /* Skip the boolean weak parameter. */
7365 for (z = 4; z < 6; z++)
7366 vec->quick_push (CALL_EXPR_ARG (exp, z));
7367 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7368 break;
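/* Illustrative example (not part of the GCC sources): the six-operand
   builtin handled above is typically called as

       int v = 0, expected = 0, desired = 1;
       _Bool ok = __atomic_compare_exchange_n (&v, &expected, desired,
                                               0, __ATOMIC_SEQ_CST,
                                               __ATOMIC_SEQ_CST);

   where the fourth operand is the boolean weak flag.  The rebuild above
   drops exactly that operand because the external
   __atomic_compare_exchange_N library routines take only the remaining
   five parameters.  */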
7371 case BUILT_IN_ATOMIC_LOAD_1:
7372 case BUILT_IN_ATOMIC_LOAD_2:
7373 case BUILT_IN_ATOMIC_LOAD_4:
7374 case BUILT_IN_ATOMIC_LOAD_8:
7375 case BUILT_IN_ATOMIC_LOAD_16:
7376 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7377 target = expand_builtin_atomic_load (mode, exp, target);
7378 if (target)
7379 return target;
7380 break;
7382 case BUILT_IN_ATOMIC_STORE_1:
7383 case BUILT_IN_ATOMIC_STORE_2:
7384 case BUILT_IN_ATOMIC_STORE_4:
7385 case BUILT_IN_ATOMIC_STORE_8:
7386 case BUILT_IN_ATOMIC_STORE_16:
7387 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7388 target = expand_builtin_atomic_store (mode, exp);
7389 if (target)
7390 return const0_rtx;
7391 break;
7393 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7394 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7395 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7396 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7397 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7399 enum built_in_function lib;
7400 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7401 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7402 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7403 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7404 ignore, lib);
7405 if (target)
7406 return target;
7407 break;
7409 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7410 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7411 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7412 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7413 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7415 enum built_in_function lib;
7416 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7417 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7418 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7419 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7420 ignore, lib);
7421 if (target)
7422 return target;
7423 break;
7425 case BUILT_IN_ATOMIC_AND_FETCH_1:
7426 case BUILT_IN_ATOMIC_AND_FETCH_2:
7427 case BUILT_IN_ATOMIC_AND_FETCH_4:
7428 case BUILT_IN_ATOMIC_AND_FETCH_8:
7429 case BUILT_IN_ATOMIC_AND_FETCH_16:
7431 enum built_in_function lib;
7432 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7433 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7434 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7435 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7436 ignore, lib);
7437 if (target)
7438 return target;
7439 break;
7441 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7442 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7443 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7444 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7445 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7447 enum built_in_function lib;
7448 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7449 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7450 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7451 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7452 ignore, lib);
7453 if (target)
7454 return target;
7455 break;
7457 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7458 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7459 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7460 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7461 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7463 enum built_in_function lib;
7464 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7465 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7466 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7467 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7468 ignore, lib);
7469 if (target)
7470 return target;
7471 break;
7473 case BUILT_IN_ATOMIC_OR_FETCH_1:
7474 case BUILT_IN_ATOMIC_OR_FETCH_2:
7475 case BUILT_IN_ATOMIC_OR_FETCH_4:
7476 case BUILT_IN_ATOMIC_OR_FETCH_8:
7477 case BUILT_IN_ATOMIC_OR_FETCH_16:
7479 enum built_in_function lib;
7480 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7481 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7482 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7483 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7484 ignore, lib);
7485 if (target)
7486 return target;
7487 break;
7489 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7490 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7491 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7492 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7493 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7494 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7495 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7496 ignore, BUILT_IN_NONE);
7497 if (target)
7498 return target;
7499 break;
7501 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7502 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7503 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7504 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7505 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7506 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7507 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7508 ignore, BUILT_IN_NONE);
7509 if (target)
7510 return target;
7511 break;
7513 case BUILT_IN_ATOMIC_FETCH_AND_1:
7514 case BUILT_IN_ATOMIC_FETCH_AND_2:
7515 case BUILT_IN_ATOMIC_FETCH_AND_4:
7516 case BUILT_IN_ATOMIC_FETCH_AND_8:
7517 case BUILT_IN_ATOMIC_FETCH_AND_16:
7518 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7519 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7520 ignore, BUILT_IN_NONE);
7521 if (target)
7522 return target;
7523 break;
7525 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7526 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7527 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7528 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7529 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7530 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7531 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7532 ignore, BUILT_IN_NONE);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7538 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7539 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7540 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7541 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7542 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7543 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7544 ignore, BUILT_IN_NONE);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_ATOMIC_FETCH_OR_1:
7550 case BUILT_IN_ATOMIC_FETCH_OR_2:
7551 case BUILT_IN_ATOMIC_FETCH_OR_4:
7552 case BUILT_IN_ATOMIC_FETCH_OR_8:
7553 case BUILT_IN_ATOMIC_FETCH_OR_16:
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7555 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7556 ignore, BUILT_IN_NONE);
7557 if (target)
7558 return target;
7559 break;
7561 case BUILT_IN_ATOMIC_TEST_AND_SET:
7562 return expand_builtin_atomic_test_and_set (exp, target);
7564 case BUILT_IN_ATOMIC_CLEAR:
7565 return expand_builtin_atomic_clear (exp);
7567 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7568 return expand_builtin_atomic_always_lock_free (exp);
7570 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7571 target = expand_builtin_atomic_is_lock_free (exp);
7572 if (target)
7573 return target;
7574 break;
7576 case BUILT_IN_ATOMIC_THREAD_FENCE:
7577 expand_builtin_atomic_thread_fence (exp);
7578 return const0_rtx;
7580 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7581 expand_builtin_atomic_signal_fence (exp);
7582 return const0_rtx;
7584 case BUILT_IN_OBJECT_SIZE:
7585 return expand_builtin_object_size (exp);
7587 case BUILT_IN_MEMCPY_CHK:
7588 case BUILT_IN_MEMPCPY_CHK:
7589 case BUILT_IN_MEMMOVE_CHK:
7590 case BUILT_IN_MEMSET_CHK:
7591 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7592 if (target)
7593 return target;
7594 break;
7596 case BUILT_IN_STRCPY_CHK:
7597 case BUILT_IN_STPCPY_CHK:
7598 case BUILT_IN_STRNCPY_CHK:
7599 case BUILT_IN_STPNCPY_CHK:
7600 case BUILT_IN_STRCAT_CHK:
7601 case BUILT_IN_STRNCAT_CHK:
7602 case BUILT_IN_SNPRINTF_CHK:
7603 case BUILT_IN_VSNPRINTF_CHK:
7604 maybe_emit_chk_warning (exp, fcode);
7605 break;
7607 case BUILT_IN_SPRINTF_CHK:
7608 case BUILT_IN_VSPRINTF_CHK:
7609 maybe_emit_sprintf_chk_warning (exp, fcode);
7610 break;
7612 case BUILT_IN_FREE:
7613 if (warn_free_nonheap_object)
7614 maybe_emit_free_warning (exp);
7615 break;
7617 case BUILT_IN_THREAD_POINTER:
7618 return expand_builtin_thread_pointer (exp, target);
7620 case BUILT_IN_SET_THREAD_POINTER:
7621 expand_builtin_set_thread_pointer (exp);
7622 return const0_rtx;
7624 case BUILT_IN_CILK_DETACH:
7625 expand_builtin_cilk_detach (exp);
7626 return const0_rtx;
7628 case BUILT_IN_CILK_POP_FRAME:
7629 expand_builtin_cilk_pop_frame (exp);
7630 return const0_rtx;
7632 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7633 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7634 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7635 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7636 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7637 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7638 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7639 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7640 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7641 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7642 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7643 /* We allow user CHKP builtins if Pointer Bounds
7644 Checker is off. */
7645 if (!chkp_function_instrumented_p (current_function_decl))
7647 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7648 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7649 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7650 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7651 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7652 return expand_normal (CALL_EXPR_ARG (exp, 0));
7653 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7654 return expand_normal (size_zero_node);
7655 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7656 return expand_normal (size_int (-1));
7657 else
7658 return const0_rtx;
7660 /* FALLTHROUGH */
7662 case BUILT_IN_CHKP_BNDMK:
7663 case BUILT_IN_CHKP_BNDSTX:
7664 case BUILT_IN_CHKP_BNDCL:
7665 case BUILT_IN_CHKP_BNDCU:
7666 case BUILT_IN_CHKP_BNDLDX:
7667 case BUILT_IN_CHKP_BNDRET:
7668 case BUILT_IN_CHKP_INTERSECT:
7669 case BUILT_IN_CHKP_NARROW:
7670 case BUILT_IN_CHKP_EXTRACT_LOWER:
7671 case BUILT_IN_CHKP_EXTRACT_UPPER:
7672 /* A software implementation of Pointer Bounds Checker is not yet
7673 available; target support is required. */
7674 error ("Your target platform does not support -fcheck-pointer-bounds");
7675 break;
7677 case BUILT_IN_ACC_ON_DEVICE:
7678 /* Do library call, if we failed to expand the builtin when
7679 folding. */
7680 break;
7682 default: /* just do library call, if unknown builtin */
7683 break;
7686 /* The switch statement above can drop through to cause the function
7687 to be called normally. */
7688 return expand_call (exp, target, ignore);
7691 /* Similar to expand_builtin but is used for instrumented calls. */
7693 rtx
7694 expand_builtin_with_bounds (tree exp, rtx target,
7695 rtx subtarget ATTRIBUTE_UNUSED,
7696 machine_mode mode, int ignore)
7698 tree fndecl = get_callee_fndecl (exp);
7699 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7701 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7703 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7704 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7706 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7707 && fcode < END_CHKP_BUILTINS);
7709 switch (fcode)
7711 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7712 target = expand_builtin_memcpy_with_bounds (exp, target);
7713 if (target)
7714 return target;
7715 break;
7717 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7718 target = expand_builtin_mempcpy_with_bounds (exp, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7724 target = expand_builtin_memset_with_bounds (exp, target, mode);
7725 if (target)
7726 return target;
7727 break;
7729 default:
7730 break;
7733 /* The switch statement above can drop through to cause the function
7734 to be called normally. */
7735 return expand_call (exp, target, ignore);
7738 /* Determine whether a tree node represents a call to a built-in
7739 function. If the tree T is a call to a built-in function with
7740 the right number of arguments of the appropriate types, return
7741 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7742 Otherwise the return value is END_BUILTINS. */
7744 enum built_in_function
7745 builtin_mathfn_code (const_tree t)
7747 const_tree fndecl, arg, parmlist;
7748 const_tree argtype, parmtype;
7749 const_call_expr_arg_iterator iter;
7751 if (TREE_CODE (t) != CALL_EXPR
7752 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7753 return END_BUILTINS;
7755 fndecl = get_callee_fndecl (t);
7756 if (fndecl == NULL_TREE
7757 || TREE_CODE (fndecl) != FUNCTION_DECL
7758 || ! DECL_BUILT_IN (fndecl)
7759 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7760 return END_BUILTINS;
7762 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7763 init_const_call_expr_arg_iterator (t, &iter);
7764 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7766 /* If a function doesn't take a variable number of arguments,
7767 the last element in the list will have type `void'. */
7768 parmtype = TREE_VALUE (parmlist);
7769 if (VOID_TYPE_P (parmtype))
7771 if (more_const_call_expr_args_p (&iter))
7772 return END_BUILTINS;
7773 return DECL_FUNCTION_CODE (fndecl);
7776 if (! more_const_call_expr_args_p (&iter))
7777 return END_BUILTINS;
7779 arg = next_const_call_expr_arg (&iter);
7780 argtype = TREE_TYPE (arg);
7782 if (SCALAR_FLOAT_TYPE_P (parmtype))
7784 if (! SCALAR_FLOAT_TYPE_P (argtype))
7785 return END_BUILTINS;
7787 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7789 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7790 return END_BUILTINS;
7792 else if (POINTER_TYPE_P (parmtype))
7794 if (! POINTER_TYPE_P (argtype))
7795 return END_BUILTINS;
7797 else if (INTEGRAL_TYPE_P (parmtype))
7799 if (! INTEGRAL_TYPE_P (argtype))
7800 return END_BUILTINS;
7802 else
7803 return END_BUILTINS;
7806 /* Variable-length argument list. */
7807 return DECL_FUNCTION_CODE (fndecl);
7810 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7811 evaluate to a constant. */
7813 static tree
7814 fold_builtin_constant_p (tree arg)
7816 /* We return 1 for a numeric type that's known to be a constant
7817 value at compile-time or for an aggregate type that's a
7818 literal constant. */
7819 STRIP_NOPS (arg);
7821 /* If we know this is a constant, emit the constant of one. */
7822 if (CONSTANT_CLASS_P (arg)
7823 || (TREE_CODE (arg) == CONSTRUCTOR
7824 && TREE_CONSTANT (arg)))
7825 return integer_one_node;
7826 if (TREE_CODE (arg) == ADDR_EXPR)
7828 tree op = TREE_OPERAND (arg, 0);
7829 if (TREE_CODE (op) == STRING_CST
7830 || (TREE_CODE (op) == ARRAY_REF
7831 && integer_zerop (TREE_OPERAND (op, 1))
7832 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7833 return integer_one_node;
7836 /* If this expression has side effects, show we don't know it to be a
7837 constant. Likewise if it's a pointer or aggregate type since in
7838 those cases we only want literals, since those are only optimized
7839 when generating RTL, not later.
7840 And finally, if we are compiling an initializer, not code, we
7841 need to return a definite result now; there's not going to be any
7842 more optimization done. */
7843 if (TREE_SIDE_EFFECTS (arg)
7844 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7845 || POINTER_TYPE_P (TREE_TYPE (arg))
7846 || cfun == 0
7847 || folding_initializer
7848 || force_folding_builtin_constant_p)
7849 return integer_zero_node;
7851 return NULL_TREE;
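/* Illustrative examples (not part of the GCC sources) of the folding
   above:

       __builtin_constant_p (42)      folds to 1 (constant class)
       __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
       __builtin_constant_p (x++)     folds to 0 (side effects)

   Anything else yields NULL_TREE so that later passes may still prove
   the argument constant.  */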
7854 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7855 return it as a truthvalue. */
7857 static tree
7858 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7859 tree predictor)
7861 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7863 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7864 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7865 ret_type = TREE_TYPE (TREE_TYPE (fn));
7866 pred_type = TREE_VALUE (arg_types);
7867 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7869 pred = fold_convert_loc (loc, pred_type, pred);
7870 expected = fold_convert_loc (loc, expected_type, expected);
7871 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7872 predictor);
7874 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7875 build_int_cst (ret_type, 0));
7878 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7879 NULL_TREE if no simplification is possible. */
7881 tree
7882 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7884 tree inner, fndecl, inner_arg0;
7885 enum tree_code code;
7887 /* Distribute the expected value over short-circuiting operators.
7888 See through the cast from truthvalue_type_node to long. */
7889 inner_arg0 = arg0;
7890 while (CONVERT_EXPR_P (inner_arg0)
7891 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7892 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7893 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7895 /* If this is a builtin_expect within a builtin_expect, keep the
7896 inner one. See through a comparison against a constant. It
7897 might have been added to create a truthvalue. */
7898 inner = inner_arg0;
7900 if (COMPARISON_CLASS_P (inner)
7901 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7902 inner = TREE_OPERAND (inner, 0);
7904 if (TREE_CODE (inner) == CALL_EXPR
7905 && (fndecl = get_callee_fndecl (inner))
7906 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7907 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7908 return arg0;
7910 inner = inner_arg0;
7911 code = TREE_CODE (inner);
7912 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7914 tree op0 = TREE_OPERAND (inner, 0);
7915 tree op1 = TREE_OPERAND (inner, 1);
7917 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7918 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7919 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7921 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7924 /* If the argument isn't invariant then there's nothing else we can do. */
7925 if (!TREE_CONSTANT (inner_arg0))
7926 return NULL_TREE;
7928 /* If we expect that a comparison against the argument will fold to
7929 a constant return the constant. In practice, this means a true
7930 constant or the address of a non-weak symbol. */
7931 inner = inner_arg0;
7932 STRIP_NOPS (inner);
7933 if (TREE_CODE (inner) == ADDR_EXPR)
7935 do
7937 inner = TREE_OPERAND (inner, 0);
7939 while (TREE_CODE (inner) == COMPONENT_REF
7940 || TREE_CODE (inner) == ARRAY_REF);
7941 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7942 return NULL_TREE;
7945 /* Otherwise, ARG0 already has the proper type for the return value. */
7946 return arg0;
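/* Illustrative example (not part of the GCC sources): the distribution
   over short-circuit operators performed above rewrites

       __builtin_expect (a && b, 1)

   into, roughly,

       __builtin_expect (a, 1) != 0 && __builtin_expect (b, 1) != 0

   so that each branch of the condition carries its own hint.  */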
7949 /* Fold a call to __builtin_classify_type with argument ARG. */
7951 static tree
7952 fold_builtin_classify_type (tree arg)
7954 if (arg == 0)
7955 return build_int_cst (integer_type_node, no_type_class);
7957 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7960 /* Fold a call to __builtin_strlen with argument ARG. */
7962 static tree
7963 fold_builtin_strlen (location_t loc, tree type, tree arg)
7965 if (!validate_arg (arg, POINTER_TYPE))
7966 return NULL_TREE;
7967 else
7969 tree len = c_strlen (arg, 0);
7971 if (len)
7972 return fold_convert_loc (loc, type, len);
7974 return NULL_TREE;
7978 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7980 static tree
7981 fold_builtin_inf (location_t loc, tree type, int warn)
7983 REAL_VALUE_TYPE real;
7985 /* __builtin_inff is intended to be usable to define INFINITY on all
7986 targets. If an infinity is not available, INFINITY expands "to a
7987 positive constant of type float that overflows at translation
7988 time", footnote "In this case, using INFINITY will violate the
7989 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7990 Thus we pedwarn to ensure this constraint violation is
7991 diagnosed. */
7992 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7993 pedwarn (loc, 0, "target format does not support infinity");
7995 real_inf (&real);
7996 return build_real (type, real);
7999 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8000 NULL_TREE if no simplification can be made. */
8002 static tree
8003 fold_builtin_sincos (location_t loc,
8004 tree arg0, tree arg1, tree arg2)
8006 tree type;
8007 tree fndecl, call = NULL_TREE;
8009 if (!validate_arg (arg0, REAL_TYPE)
8010 || !validate_arg (arg1, POINTER_TYPE)
8011 || !validate_arg (arg2, POINTER_TYPE))
8012 return NULL_TREE;
8014 type = TREE_TYPE (arg0);
8016 /* Canonicalize sincos to cexpi. */
8017 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8018 if (fn == END_BUILTINS)
8019 return NULL_TREE;
8021 /* Calculate the result when the argument is a constant. */
8022 if (TREE_CODE (arg0) == REAL_CST)
8024 tree complex_type = build_complex_type (type);
8025 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8027 if (!call)
8029 if (!targetm.libc_has_function (function_c99_math_complex)
8030 || !builtin_decl_implicit_p (fn))
8031 return NULL_TREE;
8032 fndecl = builtin_decl_explicit (fn);
8033 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8034 call = builtin_save_expr (call);
8037 return build2 (COMPOUND_EXPR, void_type_node,
8038 build2 (MODIFY_EXPR, void_type_node,
8039 build_fold_indirect_ref_loc (loc, arg1),
8040 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8041 build2 (MODIFY_EXPR, void_type_node,
8042 build_fold_indirect_ref_loc (loc, arg2),
8043 fold_build1_loc (loc, REALPART_EXPR, type, call)));
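/* Illustrative sketch (not part of the GCC sources) of the rewrite
   built above: a call

       sincos (x, &s, &c);

   becomes, in effect,

       __complex__ double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   since cexpi (x) computes cos (x) + i*sin (x); later passes can then
   share one cexpi call among several sin/cos uses of x.  */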
8046 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8047 Return NULL_TREE if no simplification can be made. */
8049 static tree
8050 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8052 if (!validate_arg (arg1, POINTER_TYPE)
8053 || !validate_arg (arg2, POINTER_TYPE)
8054 || !validate_arg (len, INTEGER_TYPE))
8055 return NULL_TREE;
8057 /* If the LEN parameter is zero, return zero. */
8058 if (integer_zerop (len))
8059 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8060 arg1, arg2);
8062 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8063 if (operand_equal_p (arg1, arg2, 0))
8064 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8066 /* If len parameter is one, return an expression corresponding to
8067 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8068 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8070 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8071 tree cst_uchar_ptr_node
8072 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8074 tree ind1
8075 = fold_convert_loc (loc, integer_type_node,
8076 build1 (INDIRECT_REF, cst_uchar_node,
8077 fold_convert_loc (loc,
8078 cst_uchar_ptr_node,
8079 arg1)));
8080 tree ind2
8081 = fold_convert_loc (loc, integer_type_node,
8082 build1 (INDIRECT_REF, cst_uchar_node,
8083 fold_convert_loc (loc,
8084 cst_uchar_ptr_node,
8085 arg2)));
8086 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8089 return NULL_TREE;
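/* Illustrative examples (not part of the GCC sources) of the memcmp
   folds above:

       memcmp (p, q, 0)   ->   0
       memcmp (p, p, n)   ->   0
       memcmp (p, q, 1)   ->   *(const unsigned char *) p
                               - *(const unsigned char *) q  */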
8092 /* Fold a call to builtin isascii with argument ARG. */
8094 static tree
8095 fold_builtin_isascii (location_t loc, tree arg)
8097 if (!validate_arg (arg, INTEGER_TYPE))
8098 return NULL_TREE;
8099 else
8101 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8102 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8103 build_int_cst (integer_type_node,
8104 ~ (unsigned HOST_WIDE_INT) 0x7f));
8105 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8106 arg, integer_zero_node);
8110 /* Fold a call to builtin toascii with argument ARG. */
8112 static tree
8113 fold_builtin_toascii (location_t loc, tree arg)
8115 if (!validate_arg (arg, INTEGER_TYPE))
8116 return NULL_TREE;
8118 /* Transform toascii(c) -> (c & 0x7f). */
8119 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8120 build_int_cst (integer_type_node, 0x7f));
8123 /* Fold a call to builtin isdigit with argument ARG. */
8125 static tree
8126 fold_builtin_isdigit (location_t loc, tree arg)
8128 if (!validate_arg (arg, INTEGER_TYPE))
8129 return NULL_TREE;
8130 else
8132 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8133 /* According to the C standard, isdigit is unaffected by locale.
8134 However, it definitely is affected by the target character set. */
8135 unsigned HOST_WIDE_INT target_digit0
8136 = lang_hooks.to_target_charset ('0');
8138 if (target_digit0 == 0)
8139 return NULL_TREE;
8141 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8142 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8143 build_int_cst (unsigned_type_node, target_digit0));
8144 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8145 build_int_cst (unsigned_type_node, 9));
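/* Illustrative summary (not part of the GCC sources) of the ctype
   folds above:

       isascii (c)   ->   (c & ~0x7f) == 0
       toascii (c)   ->   c & 0x7f
       isdigit (c)   ->   (unsigned) c - '0' <= 9

   where '0' is really the target character set's digit zero, and the
   isdigit form relies on the digits being contiguous, which the C
   standard guarantees.  */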
8149 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8151 static tree
8152 fold_builtin_fabs (location_t loc, tree arg, tree type)
8154 if (!validate_arg (arg, REAL_TYPE))
8155 return NULL_TREE;
8157 arg = fold_convert_loc (loc, type, arg);
8158 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8161 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8163 static tree
8164 fold_builtin_abs (location_t loc, tree arg, tree type)
8166 if (!validate_arg (arg, INTEGER_TYPE))
8167 return NULL_TREE;
8169 arg = fold_convert_loc (loc, type, arg);
8170 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8173 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8175 static tree
8176 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8178 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8179 if (validate_arg (arg0, REAL_TYPE)
8180 && validate_arg (arg1, REAL_TYPE)
8181 && validate_arg (arg2, REAL_TYPE)
8182 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8183 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8185 return NULL_TREE;
8188 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8190 static tree
8191 fold_builtin_carg (location_t loc, tree arg, tree type)
8193 if (validate_arg (arg, COMPLEX_TYPE)
8194 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8196 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8198 if (atan2_fn)
8200 tree new_arg = builtin_save_expr (arg);
8201 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8202 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8203 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8207 return NULL_TREE;
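/* Illustrative note (not part of the GCC sources): carg returns the
   phase angle of a complex value, so for z = a + b*i the fold above
   produces atan2 (b, a); e.g. carg of the imaginary unit becomes
   atan2 (1.0, 0.0), which is pi/2.  */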
8210 /* Fold a call to builtin frexp; we can assume the base is 2. */
8212 static tree
8213 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8215 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8216 return NULL_TREE;
8218 STRIP_NOPS (arg0);
8220 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8221 return NULL_TREE;
8223 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8225 /* Proceed if a valid pointer type was passed in. */
8226 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8228 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8229 tree frac, exp;
8231 switch (value->cl)
8233 case rvc_zero:
8234 /* For +-0, return (*exp = 0, +-0). */
8235 exp = integer_zero_node;
8236 frac = arg0;
8237 break;
8238 case rvc_nan:
8239 case rvc_inf:
8240 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8241 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8242 case rvc_normal:
8244 /* Since the frexp function always expects base 2, and in
8245 GCC normalized significands are already in the range
8246 [0.5, 1.0), we have exactly what frexp wants. */
8247 REAL_VALUE_TYPE frac_rvt = *value;
8248 SET_REAL_EXP (&frac_rvt, 0);
8249 frac = build_real (rettype, frac_rvt);
8250 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8252 break;
8253 default:
8254 gcc_unreachable ();
8257 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8258 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8259 TREE_SIDE_EFFECTS (arg1) = 1;
8260 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8263 return NULL_TREE;
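/* Illustrative worked example (not part of the GCC sources):

       int e;
       double f = frexp (8.0, &e);

   folds to f == 0.5 and e == 4, since 8.0 == 0.5 * 2^4 and the
   normalized significand already lies in [0.5, 1.0).  */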
8266 /* Fold a call to builtin modf. */
8268 static tree
8269 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8271 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8272 return NULL_TREE;
8274 STRIP_NOPS (arg0);
8276 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8277 return NULL_TREE;
8279 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8281 /* Proceed if a valid pointer type was passed in. */
8282 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8284 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8285 REAL_VALUE_TYPE trunc, frac;
8287 switch (value->cl)
8289 case rvc_nan:
8290 case rvc_zero:
8291 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8292 trunc = frac = *value;
8293 break;
8294 case rvc_inf:
8295 /* For +-Inf, return (*arg1 = arg0, +-0). */
8296 frac = dconst0;
8297 frac.sign = value->sign;
8298 trunc = *value;
8299 break;
8300 case rvc_normal:
8301 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8302 real_trunc (&trunc, VOIDmode, value);
8303 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8304 /* If the original number was negative and already
8305 integral, then the fractional part is -0.0. */
8306 if (value->sign && frac.cl == rvc_zero)
8307 frac.sign = value->sign;
8308 break;
8311 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8312 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8313 build_real (rettype, trunc));
8314 TREE_SIDE_EFFECTS (arg1) = 1;
8315 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8316 build_real (rettype, frac));
8319 return NULL_TREE;
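/* Illustrative worked example (not part of the GCC sources):

       double ip;
       double f = modf (-3.5, &ip);

   folds to ip == -3.0 and f == -0.5; for an already-integral negative
   input such as -2.0 the fractional part is -0.0, as the rvc_normal
   case above arranges.  */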
8322 /* Given a location LOC, an interclass builtin function decl FNDECL
8323 and its single argument ARG, return a folded expression computing
8324 the same, or NULL_TREE if we either couldn't or didn't want to fold
8325 (the latter happens if there's an RTL instruction available). */
8327 static tree
8328 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8330 machine_mode mode;
8332 if (!validate_arg (arg, REAL_TYPE))
8333 return NULL_TREE;
8335 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8336 return NULL_TREE;
8338 mode = TYPE_MODE (TREE_TYPE (arg));
8340 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8342 /* If there is no optab, try generic code. */
8343 switch (DECL_FUNCTION_CODE (fndecl))
8345 tree result;
8347 CASE_FLT_FN (BUILT_IN_ISINF):
8349 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8350 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8351 tree type = TREE_TYPE (arg);
8352 REAL_VALUE_TYPE r;
8353 char buf[128];
8355 if (is_ibm_extended)
8357 /* NaN and Inf are encoded in the high-order double value
8358 only. The low-order value is not significant. */
8359 type = double_type_node;
8360 mode = DFmode;
8361 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8363 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8364 real_from_string (&r, buf);
8365 result = build_call_expr (isgr_fn, 2,
8366 fold_build1_loc (loc, ABS_EXPR, type, arg),
8367 build_real (type, r));
8368 return result;
8370 CASE_FLT_FN (BUILT_IN_FINITE):
8371 case BUILT_IN_ISFINITE:
8373 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8374 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8375 tree type = TREE_TYPE (arg);
8376 REAL_VALUE_TYPE r;
8377 char buf[128];
8379 if (is_ibm_extended)
8381 /* NaN and Inf are encoded in the high-order double value
8382 only. The low-order value is not significant. */
8383 type = double_type_node;
8384 mode = DFmode;
8385 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8387 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8388 real_from_string (&r, buf);
8389 result = build_call_expr (isle_fn, 2,
8390 fold_build1_loc (loc, ABS_EXPR, type, arg),
8391 build_real (type, r));
8392 /*result = fold_build2_loc (loc, UNGT_EXPR,
8393 TREE_TYPE (TREE_TYPE (fndecl)),
8394 fold_build1_loc (loc, ABS_EXPR, type, arg),
8395 build_real (type, r));
8396 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8397 TREE_TYPE (TREE_TYPE (fndecl)),
8398 result);*/
8399 return result;
8401 case BUILT_IN_ISNORMAL:
8403 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8404 islessequal(fabs(x),DBL_MAX). */
8405 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8406 tree type = TREE_TYPE (arg);
8407 tree orig_arg, max_exp, min_exp;
8408 machine_mode orig_mode = mode;
8409 REAL_VALUE_TYPE rmax, rmin;
8410 char buf[128];
8412 orig_arg = arg = builtin_save_expr (arg);
8413 if (is_ibm_extended)
8415 /* Use double to test the normal range of IBM extended
8416 precision. Emin for IBM extended precision is
8417 different to emin for IEEE double, being 53 higher
8418 since the low double exponent is at least 53 lower
8419 than the high double exponent. */
8420 type = double_type_node;
8421 mode = DFmode;
8422 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8424 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8426 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8427 real_from_string (&rmax, buf);
8428 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8429 real_from_string (&rmin, buf);
8430 max_exp = build_real (type, rmax);
8431 min_exp = build_real (type, rmin);
8433 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8434 if (is_ibm_extended)
8436 /* Testing the high end of the range is done just using
8437 the high double, using the same test as isfinite().
8438 For the subnormal end of the range we first test the
8439 high double, then if its magnitude is equal to the
8440 limit of 0x1p-969, we test whether the low double is
8441 non-zero and opposite sign to the high double. */
8442 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8443 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8444 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8445 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8446 arg, min_exp);
8447 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8448 complex_double_type_node, orig_arg);
8449 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8450 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8451 tree zero = build_real (type, dconst0);
8452 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8453 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8454 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8455 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8456 fold_build3 (COND_EXPR,
8457 integer_type_node,
8458 hilt, logt, lolt));
8459 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8460 eq_min, ok_lo);
8461 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8462 gt_min, eq_min);
8464 else
8466 tree const isge_fn
8467 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8468 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8470 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8471 max_exp, min_exp);
8472 return result;
8474 default:
8475 break;
8478 return NULL_TREE;
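/* Illustrative summary (not part of the GCC sources) of the generic
   rewrites built above, with DBL_MAX/DBL_MIN standing for the largest
   finite and smallest normal values of the argument's type:

       isinf (x)      ->   isgreater (fabs (x), DBL_MAX)
       isfinite (x)   ->   islessequal (fabs (x), DBL_MAX)
       isnormal (x)   ->   isgreaterequal (fabs (x), DBL_MIN)
                           & islessequal (fabs (x), DBL_MAX)  */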
8481 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8482 ARG is the argument for the call. */
8484 static tree
8485 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8487 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8489 if (!validate_arg (arg, REAL_TYPE))
8490 return NULL_TREE;
8492 switch (builtin_index)
8494 case BUILT_IN_ISINF:
8495 if (!HONOR_INFINITIES (arg))
8496 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8498 return NULL_TREE;
8500 case BUILT_IN_ISINF_SIGN:
8502 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8503 /* In a boolean context, GCC will fold the inner COND_EXPR to
8504 1. So e.g. "if (isinf_sign(x))" would be folded to just
8505 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8506 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8507 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8508 tree tmp = NULL_TREE;
8510 arg = builtin_save_expr (arg);
8512 if (signbit_fn && isinf_fn)
8514 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8515 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8517 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8518 signbit_call, integer_zero_node);
8519 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8520 isinf_call, integer_zero_node);
8522 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8523 integer_minus_one_node, integer_one_node);
8524 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8525 isinf_call, tmp,
8526 integer_zero_node);
8529 return tmp;
8532 case BUILT_IN_ISFINITE:
8533 if (!HONOR_NANS (arg)
8534 && !HONOR_INFINITIES (arg))
8535 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8537 return NULL_TREE;
8539 case BUILT_IN_ISNAN:
8540 if (!HONOR_NANS (arg))
8541 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8544 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8545 if (is_ibm_extended)
8547 /* NaN and Inf are encoded in the high-order double value
8548 only. The low-order value is not significant. */
8549 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8552 arg = builtin_save_expr (arg);
8553 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8555 default:
8556 gcc_unreachable ();
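/* Illustrative note (not part of the GCC sources): when NaNs are
   honored, the BUILT_IN_ISNAN case above reduces to a self-comparison,

       isnan (x)   ->   __builtin_isunordered (x, x)

   i.e. an UNORDERED_EXPR, which is true exactly when x is a NaN.  */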
8560 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8561 This builtin will generate code to return the appropriate floating
8562 point classification depending on the value of the floating point
8563 number passed in. The possible return values must be supplied as
8564 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8565 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8566 one floating point argument which is "type generic". */
8568 static tree
8569 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8571 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8572 arg, type, res, tmp;
8573 machine_mode mode;
8574 REAL_VALUE_TYPE r;
8575 char buf[128];
8577 /* Verify the required arguments in the original call. */
8578 if (nargs != 6
8579 || !validate_arg (args[0], INTEGER_TYPE)
8580 || !validate_arg (args[1], INTEGER_TYPE)
8581 || !validate_arg (args[2], INTEGER_TYPE)
8582 || !validate_arg (args[3], INTEGER_TYPE)
8583 || !validate_arg (args[4], INTEGER_TYPE)
8584 || !validate_arg (args[5], REAL_TYPE))
8585 return NULL_TREE;
8587 fp_nan = args[0];
8588 fp_infinite = args[1];
8589 fp_normal = args[2];
8590 fp_subnormal = args[3];
8591 fp_zero = args[4];
8592 arg = args[5];
8593 type = TREE_TYPE (arg);
8594 mode = TYPE_MODE (type);
8595 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8597 /* fpclassify(x) ->
8598 isnan(x) ? FP_NAN :
8599 (fabs(x) == Inf ? FP_INFINITE :
8600 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8601 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8603 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8604 build_real (type, dconst0));
8605 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8606 tmp, fp_zero, fp_subnormal);
8608 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8609 real_from_string (&r, buf);
8610 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8611 arg, build_real (type, r));
8612 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8614 if (HONOR_INFINITIES (mode))
8616 real_inf (&r);
8617 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8618 build_real (type, r));
8619 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8620 fp_infinite, res);
8623 if (HONOR_NANS (mode))
8625 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8626 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8629 return res;
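/* Illustrative example (not part of the GCC sources): a C library's
   <math.h> typically defines

       #define fpclassify(x) \
         __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                               FP_SUBNORMAL, FP_ZERO, (x))

   supplying the five classification values in the order the folder
   above expects, followed by the one type-generic argument.  */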
8632 /* Fold a call to an unordered comparison function such as
8633 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8634 being called and ARG0 and ARG1 are the arguments for the call.
8635 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8636 the opposite of the desired result. UNORDERED_CODE is used
8637 for modes that can hold NaNs and ORDERED_CODE is used for
8638 the rest. */
8640 static tree
8641 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8642 enum tree_code unordered_code,
8643 enum tree_code ordered_code)
8645 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8646 enum tree_code code;
8647 tree type0, type1;
8648 enum tree_code code0, code1;
8649 tree cmp_type = NULL_TREE;
8651 type0 = TREE_TYPE (arg0);
8652 type1 = TREE_TYPE (arg1);
8654 code0 = TREE_CODE (type0);
8655 code1 = TREE_CODE (type1);
8657 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8658 /* Choose the wider of two real types. */
8659 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8660 ? type0 : type1;
8661 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8662 cmp_type = type0;
8663 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8664 cmp_type = type1;
8666 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8667 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8669 if (unordered_code == UNORDERED_EXPR)
8671 if (!HONOR_NANS (arg0))
8672 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8673 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8676 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8677 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8678 fold_build2_loc (loc, code, type, arg0, arg1));
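/* Illustrative example (not part of the GCC sources): for a type that
   honors NaNs,

       isgreater (x, y)

   folds to the negation of the unordered-or-less-or-equal comparison,
   !(x UNLE y), which, unlike a plain x > y, raises no invalid-operand
   exception on quiet NaNs.  */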
8681 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8682 arithmetic if it can never overflow, or into internal functions that
8683 return both the result of the arithmetic and a boolean overflow flag in
8684 a complex integer result, or into some other check for overflow.
8685 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the
8686 overflow-checking part of that. */
8688 static tree
8689 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8690 tree arg0, tree arg1, tree arg2)
8692 enum internal_fn ifn = IFN_LAST;
8693 /* The code of the expression corresponding to the type-generic
8694 built-in, or ERROR_MARK for the type-specific ones. */
8695 enum tree_code opcode = ERROR_MARK;
8696 bool ovf_only = false;
8698 switch (fcode)
8700 case BUILT_IN_ADD_OVERFLOW_P:
8701 ovf_only = true;
8702 /* FALLTHRU */
8703 case BUILT_IN_ADD_OVERFLOW:
8704 opcode = PLUS_EXPR;
8705 /* FALLTHRU */
8706 case BUILT_IN_SADD_OVERFLOW:
8707 case BUILT_IN_SADDL_OVERFLOW:
8708 case BUILT_IN_SADDLL_OVERFLOW:
8709 case BUILT_IN_UADD_OVERFLOW:
8710 case BUILT_IN_UADDL_OVERFLOW:
8711 case BUILT_IN_UADDLL_OVERFLOW:
8712 ifn = IFN_ADD_OVERFLOW;
8713 break;
8714 case BUILT_IN_SUB_OVERFLOW_P:
8715 ovf_only = true;
8716 /* FALLTHRU */
8717 case BUILT_IN_SUB_OVERFLOW:
8718 opcode = MINUS_EXPR;
8719 /* FALLTHRU */
8720 case BUILT_IN_SSUB_OVERFLOW:
8721 case BUILT_IN_SSUBL_OVERFLOW:
8722 case BUILT_IN_SSUBLL_OVERFLOW:
8723 case BUILT_IN_USUB_OVERFLOW:
8724 case BUILT_IN_USUBL_OVERFLOW:
8725 case BUILT_IN_USUBLL_OVERFLOW:
8726 ifn = IFN_SUB_OVERFLOW;
8727 break;
8728 case BUILT_IN_MUL_OVERFLOW_P:
8729 ovf_only = true;
8730 /* FALLTHRU */
8731 case BUILT_IN_MUL_OVERFLOW:
8732 opcode = MULT_EXPR;
8733 /* FALLTHRU */
8734 case BUILT_IN_SMUL_OVERFLOW:
8735 case BUILT_IN_SMULL_OVERFLOW:
8736 case BUILT_IN_SMULLL_OVERFLOW:
8737 case BUILT_IN_UMUL_OVERFLOW:
8738 case BUILT_IN_UMULL_OVERFLOW:
8739 case BUILT_IN_UMULLL_OVERFLOW:
8740 ifn = IFN_MUL_OVERFLOW;
8741 break;
8742 default:
8743 gcc_unreachable ();
8746 /* For the "generic" overloads, the first two arguments can have different
8747 types and the last argument determines the target type to use to check
8748 for overflow. The arguments of the other overloads all have the same
8749 type. */
8750 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8752 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8753 arguments are constant, attempt to fold the built-in call into a constant
8754 expression indicating whether or not it detected an overflow. */
8755 if (ovf_only
8756 && TREE_CODE (arg0) == INTEGER_CST
8757 && TREE_CODE (arg1) == INTEGER_CST)
8758 /* Perform the computation in the target type and check for overflow. */
8759 return omit_one_operand_loc (loc, boolean_type_node,
8760 arith_overflowed_p (opcode, type, arg0, arg1)
8761 ? boolean_true_node : boolean_false_node,
8762 arg2);
8764 tree ctype = build_complex_type (type);
8765 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8766 2, arg0, arg1);
8767 tree tgt = save_expr (call);
8768 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8769 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8770 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8772 if (ovf_only)
8773 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8775 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8776 tree store
8777 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8778 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
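/* Illustrative examples (not part of the GCC sources):

       int r;
       _Bool ovf = __builtin_add_overflow (a, b, &r);

   becomes a REALPART_EXPR/IMAGPART_EXPR pair over a single
   IFN_ADD_OVERFLOW internal call, while a predicate with constant
   operands such as

       __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0)

   folds directly to true.  */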
8781 /* Fold a call to __builtin_FILE to a constant string. */
8783 static inline tree
8784 fold_builtin_FILE (location_t loc)
8786 if (const char *fname = LOCATION_FILE (loc))
8787 return build_string_literal (strlen (fname) + 1, fname);
8789 return build_string_literal (1, "");
8792 /* Fold a call to __builtin_FUNCTION to a constant string. */
8794 static inline tree
8795 fold_builtin_FUNCTION ()
8797 const char *name = "";
8799 if (current_function_decl)
8800 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8802 return build_string_literal (strlen (name) + 1, name);
8805 /* Fold a call to __builtin_LINE to an integer constant. */
8807 static inline tree
8808 fold_builtin_LINE (location_t loc, tree type)
8810 return build_int_cst (type, LOCATION_LINE (loc));
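/* Illustrative example (not part of the GCC sources): in a file foo.c,
   with the calls appearing on line 10,

       const char *f = __builtin_FILE ();
       int l = __builtin_LINE ();

   fold at compile time to the string "foo.c" and the integer 10,
   using the location of the call itself.  */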
8813 /* Fold a call to built-in function FNDECL with 0 arguments.
8814 This function returns NULL_TREE if no simplification was possible. */
8816 static tree
8817 fold_builtin_0 (location_t loc, tree fndecl)
8819 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8820 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8821 switch (fcode)
8823 case BUILT_IN_FILE:
8824 return fold_builtin_FILE (loc);
8826 case BUILT_IN_FUNCTION:
8827 return fold_builtin_FUNCTION ();
8829 case BUILT_IN_LINE:
8830 return fold_builtin_LINE (loc, type);
8832 CASE_FLT_FN (BUILT_IN_INF):
8833 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8834 case BUILT_IN_INFD32:
8835 case BUILT_IN_INFD64:
8836 case BUILT_IN_INFD128:
8837 return fold_builtin_inf (loc, type, true);
8839 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8840 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8841 return fold_builtin_inf (loc, type, false);
8843 case BUILT_IN_CLASSIFY_TYPE:
8844 return fold_builtin_classify_type (NULL_TREE);
8846 default:
8847 break;
8849 return NULL_TREE;
8852 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8853 This function returns NULL_TREE if no simplification was possible. */
8855 static tree
8856 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8858 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8859 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8861 if (TREE_CODE (arg0) == ERROR_MARK)
8862 return NULL_TREE;
8864 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8865 return ret;
8867 switch (fcode)
8869 case BUILT_IN_CONSTANT_P:
8871 tree val = fold_builtin_constant_p (arg0);
8873 /* Gimplification will pull the CALL_EXPR for the builtin out of
8874 an if condition. When not optimizing, we'll not CSE it back.
8875 To avoid regressions that manifest as link errors, return false now. */
8876 if (!val && !optimize)
8877 val = integer_zero_node;
8879 return val;
8882 case BUILT_IN_CLASSIFY_TYPE:
8883 return fold_builtin_classify_type (arg0);
8885 case BUILT_IN_STRLEN:
8886 return fold_builtin_strlen (loc, type, arg0);
8888 CASE_FLT_FN (BUILT_IN_FABS):
8889 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8890 case BUILT_IN_FABSD32:
8891 case BUILT_IN_FABSD64:
8892 case BUILT_IN_FABSD128:
8893 return fold_builtin_fabs (loc, arg0, type);
8895 case BUILT_IN_ABS:
8896 case BUILT_IN_LABS:
8897 case BUILT_IN_LLABS:
8898 case BUILT_IN_IMAXABS:
8899 return fold_builtin_abs (loc, arg0, type);
8901 CASE_FLT_FN (BUILT_IN_CONJ):
8902 if (validate_arg (arg0, COMPLEX_TYPE)
8903 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8904 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8905 break;
8907 CASE_FLT_FN (BUILT_IN_CREAL):
8908 if (validate_arg (arg0, COMPLEX_TYPE)
8909 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8910 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8911 break;
8913 CASE_FLT_FN (BUILT_IN_CIMAG):
8914 if (validate_arg (arg0, COMPLEX_TYPE)
8915 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8916 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8917 break;
8919 CASE_FLT_FN (BUILT_IN_CARG):
8920 return fold_builtin_carg (loc, arg0, type);
8922 case BUILT_IN_ISASCII:
8923 return fold_builtin_isascii (loc, arg0);
8925 case BUILT_IN_TOASCII:
8926 return fold_builtin_toascii (loc, arg0);
8928 case BUILT_IN_ISDIGIT:
8929 return fold_builtin_isdigit (loc, arg0);
8931 CASE_FLT_FN (BUILT_IN_FINITE):
8932 case BUILT_IN_FINITED32:
8933 case BUILT_IN_FINITED64:
8934 case BUILT_IN_FINITED128:
8935 case BUILT_IN_ISFINITE:
8937 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8938 if (ret)
8939 return ret;
8940 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8943 CASE_FLT_FN (BUILT_IN_ISINF):
8944 case BUILT_IN_ISINFD32:
8945 case BUILT_IN_ISINFD64:
8946 case BUILT_IN_ISINFD128:
8948 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8949 if (ret)
8950 return ret;
8951 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8954 case BUILT_IN_ISNORMAL:
8955 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8957 case BUILT_IN_ISINF_SIGN:
8958 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8960 CASE_FLT_FN (BUILT_IN_ISNAN):
8961 case BUILT_IN_ISNAND32:
8962 case BUILT_IN_ISNAND64:
8963 case BUILT_IN_ISNAND128:
8964 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8966 case BUILT_IN_FREE:
8967 if (integer_zerop (arg0))
8968 return build_empty_stmt (loc);
8969 break;
8971 default:
8972 break;
8975 return NULL_TREE;
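
/* Illustrative one-argument folds performed above (sketch):

     __builtin_fabs (-3.0)        ->  3.0  (via fold_const_call)
     __builtin_creal (z)          ->  REALPART_EXPR <z>
     __builtin_free ((void *) 0)  ->  an empty statement

   Cases that cannot be simplified fall through and return NULL_TREE
   so the call is emitted normally.  */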

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
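
/* Illustrative two-argument fold (sketch): isgreater (x, y) is a
   quiet x > y, so it is folded through the unordered compare passed
   above,

     isgreater (x, y)  ->  !(x UNLE y)

   with the plain ordered codes (LE_EXPR, LT_EXPR, ...) used instead
   when the operand types cannot produce NaNs.  */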

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
                                  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it until after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
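
/* Illustrative scenario (a sketch loosely modeled on fortified libc
   headers; not part of GCC): with -D_FORTIFY_SOURCE, strcpy may be
   declared as an always_inline wrapper such as

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding a call to strcpy as a plain builtin before that wrapper is
   inlined would bypass the object-size check, which is exactly what
   the test above prevents.  */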

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead the last argument is __builtin_va_arg_pack ().  Defer
         folding even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If the last argument is __builtin_va_arg_pack (), arguments to
         this function are not finalized yet.  Defer folding until they
         are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
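
/* Illustrative use (sketch): a builtin taking a pointer and an integer
   would be checked with

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return NULL_TREE;

   where VOID_TYPE terminates the specifier list and a trailing 0 would
   instead accept any number of further arguments.  */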

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Return true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
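
/* Summary of the strpbrk folds above (illustrative):

     strpbrk (s, "")       ->  (char *) 0, still evaluating S
     strpbrk (s, "c")      ->  strchr (s, 'c')
     strpbrk ("oak", "k")  ->  a constant offset of 2 into the string.  */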

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", the result is zero.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", the result is zero.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
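
/* Summary of the strspn/strcspn folds above (illustrative):

     strspn ("", s2)   ->  0  (both arguments still evaluated)
     strspn (s1, "")   ->  0
     strcspn ("", s2)  ->  0  (S2 still evaluated)
     strcspn (s1, "")  ->  strlen (s1).  */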

/* Fold the next_arg or va_start call EXP.  Return true if an error
   was produced, false otherwise.  This is done so that we don't output
   the error or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes the tree optimizers hand us something other
             than the last argument even though the user wrote the last
             argument.  We just warn and continue, which can result in
             wrong code.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behavior when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
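
/* Illustrative example (sketch) of what the check above accepts and
   rewrites:

     void foo (int i, ...)
     {
       va_list ap;
       va_start (ap, i);   (OK: I is the last named parameter)
       va_end (ap);
     }

   After verification the second argument is replaced by 0, so later
   passes see __builtin_va_start (ap, 0) and keep no stale reference
   to I.  */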

/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
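
/* Illustrative semantics of the fallback above (sketch): when the size
   is still unknown at expansion time,

     __builtin_object_size (p, 0)  ->  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (types 2 and 3)

   i.e. the "maximum" variants answer "unbounded" and the "minimum"
   variants answer "possibly empty".  */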

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
                               exp, len, /*maxlen=*/NULL_TREE,
                               /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
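
/* Illustrative expansion (sketch): when the copied length is a known
   constant that fits in the destination object,

     __builtin___memcpy_chk (d, s, 16, 32)   ->   memcpy (d, s, 16)

   whereas a length larger than the recorded object size keeps the
   checking call so the runtime check can still abort, after the
   compile-time -Wstringop-overflow diagnostics above have had their
   chance to fire.  */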

/* Emit a warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxlen = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxlen = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxlen)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_sizes (OPT_Wstringop_overflow_, exp,
               /*size=*/NULL_TREE, maxlen, srcstr, objsize);
}

/* Emit a warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_sizes (OPT_Wstringop_overflow_,
               exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
}

/* Emit a warning if free is called with the address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
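
/* Illustrative corner case (sketch): since the argument is never
   evaluated,

     __builtin_object_size (p++, 0)

   folds straight to (size_t) -1 and P is left unmodified, exactly as
   the side-effects comment above describes.  */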

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of its NARGS arguments.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
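
/* Worked example (illustrative): for constant arguments,
   remquo (7.0, 3.0, &q) folds to the remainder 1.0 (7 - 2*3) with *q
   set to 2, matching what mpfr_remquo computes at the target
   precision.  */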

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
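
/* Worked example (illustrative): lgamma_r (6.0, &sg) folds to
   log (120.0), since gamma (6) == 5! == 120, and *sg is set to 1
   because gamma (6) is positive.  */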

/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }
  return result;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e. one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
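
/* Illustrative use (sketch): for a tree constant built as
   build_int_cst (char_type_node, 'a'), target_char_cst_p stores 'a'
   into the output char and returns true, provided target and host
   chars have the same width.  */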