/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's
   decl is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or (when Cilk Plus is enabled) names a Cilk runtime entry point.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

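/* Illustrative examples of the predicate above (editorial sketch, not
   part of the original source):

     is_builtin_name ("__builtin_memcpy")          -> true
     is_builtin_name ("__sync_fetch_and_add_4")    -> true
     is_builtin_name ("__atomic_load_8")           -> true
     is_builtin_name ("memcpy")                    -> false  */
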
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

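/* Illustrative reading of the contract above (editorial sketch): if
   get_object_alignment_2 stores 64 in *ALIGNP and 16 in *BITPOSP, the
   bit address of EXP satisfies

     (addr_in_bits - 16) % 64 == 0,  i.e.  (addr_in_bits & 63) == 16,

   so the object sits two bytes past a 64-bit (8-byte) boundary.  */
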
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

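/* Worked example (editorial sketch): for a little-endian wide string
   L"ab" with ELTSIZE == 4, the bytes are

     61 00 00 00  62 00 00 00  00 00 00 00

   and string_length returns 2: the first two 4-byte elements compare
   unequal to "\0\0\0\0" and the third terminates the scan.  */
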
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

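/* Usage sketch (editorial, not part of the original source): a caller
   folding strlen of a literal might do

     tree len = c_strlen (src, 1);
     if (len && tree_fits_uhwi_p (len))
       ... use tree_to_uhwi (len) ...

   For SRC == "abc" this yields ssize_int (3); for i++ ? "foo" : "bar"
   it yields 3 only because ONLY_VALUE is nonzero, since expanding the
   folded length would lose the side effect of i++.  */
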
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

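/* Worked example (editorial sketch): on a little-endian target with
   32-bit SImode and no word swapping, c_readstr ("abcd", SImode)
   places 'a' (0x61) in bits 0..7, giving the constant 0x64636261 --
   exactly what a 4-byte load from the string would produce.  */
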
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

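/* Buffer layout established above (editorial summary): word 0 holds
   the frame value, word 1 the receiver label, and words 2.. the
   SAVE_NONLOCAL stack save area.  expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf below index the buffer with the
   same offsets.  */
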
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if more arguments remain in the iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

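/* Usage sketch (editorial): the expanders below call this as, e.g.,

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			    INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   for a memcpy-like signature.  VOID_TYPE terminates the list; a
   trailing 0 would instead accept arbitrary further arguments.  */
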
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

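/* Source-level view (editorial): __builtin_prefetch (p, 1, 3) asks to
   prefetch *p for writing with maximal temporal locality; as enforced
   above, the second and third arguments must be integer constants in
   {0,1} and {0,1,2,3} respectively.  */
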
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

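/* Worked example of the rounding above (editorial): with SIZE == 12
   and a register mode whose alignment is 8 bytes, CEIL (12, 8) * 8
   == 16, so the register's slot starts at offset 16 and SIZE then
   advances by GET_MODE_SIZE (mode).  */
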
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1570 /* Perform an untyped call and save the state required to perform an
1571 untyped return of whatever value was returned by the given function. */
1573 static rtx
1574 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1576 int size, align, regno;
1577 machine_mode mode;
1578 rtx incoming_args, result, reg, dest, src;
1579 rtx_call_insn *call_insn;
1580 rtx old_stack_level = 0;
1581 rtx call_fusage = 0;
1582 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1584 arguments = convert_memory_address (Pmode, arguments);
1586 /* Create a block where the return registers can be saved. */
1587 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1589 /* Fetch the arg pointer from the ARGUMENTS block. */
1590 incoming_args = gen_reg_rtx (Pmode);
1591 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1592 if (!STACK_GROWS_DOWNWARD)
1593 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1594 incoming_args, 0, OPTAB_LIB_WIDEN);
1596 /* Push a new argument block and copy the arguments. Do not allow
1597 the (potential) memcpy call below to interfere with our stack
1598 manipulations. */
1599 do_pending_stack_adjust ();
1600 NO_DEFER_POP;
1602 /* Save the stack with nonlocal if available. */
1603 if (targetm.have_save_stack_nonlocal ())
1604 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1605 else
1606 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1608 /* Allocate a block of memory onto the stack and copy the memory
1609 arguments to the outgoing arguments address. We can pass TRUE
1610 as the 4th argument because we just saved the stack pointer
1611 and will restore it right after the call. */
1612 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1614 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1615 may have already set current_function_calls_alloca to true.
1616 current_function_calls_alloca won't be set if argsize is zero,
1617 so we have to guarantee need_drap is true here. */
1618 if (SUPPORTS_STACK_ALIGNMENT)
1619 crtl->need_drap = true;
1621 dest = virtual_outgoing_args_rtx;
1622 if (!STACK_GROWS_DOWNWARD)
1624 if (CONST_INT_P (argsize))
1625 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1626 else
1627 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1629 dest = gen_rtx_MEM (BLKmode, dest);
1630 set_mem_align (dest, PARM_BOUNDARY);
1631 src = gen_rtx_MEM (BLKmode, incoming_args);
1632 set_mem_align (src, PARM_BOUNDARY);
1633 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1635 /* Refer to the argument block. */
1636 apply_args_size ();
1637 arguments = gen_rtx_MEM (BLKmode, arguments);
1638 set_mem_align (arguments, PARM_BOUNDARY);
1640 /* Walk past the arg-pointer and structure value address. */
1641 size = GET_MODE_SIZE (Pmode);
1642 if (struct_value)
1643 size += GET_MODE_SIZE (Pmode);
1645 /* Restore each of the registers previously saved. Make USE insns
1646 for each of these registers for use in making the call. */
1647 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1648 if ((mode = apply_args_mode[regno]) != VOIDmode)
1650 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1651 if (size % align != 0)
1652 size = CEIL (size, align) * align;
1653 reg = gen_rtx_REG (mode, regno);
1654 emit_move_insn (reg, adjust_address (arguments, mode, size));
1655 use_reg (&call_fusage, reg);
1656 size += GET_MODE_SIZE (mode);
1659 /* Restore the structure value address unless this is passed as an
1660 "invisible" first argument. */
1661 size = GET_MODE_SIZE (Pmode);
1662 if (struct_value)
1664 rtx value = gen_reg_rtx (Pmode);
1665 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1666 emit_move_insn (struct_value, value);
1667 if (REG_P (struct_value))
1668 use_reg (&call_fusage, struct_value);
1669 size += GET_MODE_SIZE (Pmode);
1672 /* All arguments and registers used for the call are set up by now! */
1673 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1675 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1676 and we don't want to load it into a register as an optimization,
1677 because prepare_call_address already did it if it should be done. */
1678 if (GET_CODE (function) != SYMBOL_REF)
1679 function = memory_address (FUNCTION_MODE, function);
1681 /* Generate the actual call instruction and save the return value. */
1682 if (targetm.have_untyped_call ())
1684 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1685 emit_call_insn (targetm.gen_untyped_call (mem, result,
1686 result_vector (1, result)));
1688 else if (targetm.have_call_value ())
1690 rtx valreg = 0;
1692 /* Locate the unique return register. It is not possible to
1693 express a call that sets more than one return register using
1694 call_value; use untyped_call for that. In fact, untyped_call
1695 only needs to save the return registers in the given block. */
1696 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1697 if ((mode = apply_result_mode[regno]) != VOIDmode)
1699 gcc_assert (!valreg); /* have_untyped_call required. */
1701 valreg = gen_rtx_REG (mode, regno);
1704 emit_insn (targetm.gen_call_value (valreg,
1705 gen_rtx_MEM (FUNCTION_MODE, function),
1706 const0_rtx, NULL_RTX, const0_rtx));
1708 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1710 else
1711 gcc_unreachable ();
1713 /* Find the CALL insn we just emitted, and attach the register usage
1714 information. */
1715 call_insn = last_call_insn ();
1716 add_function_usage_to (call_insn, call_fusage);
1718 /* Restore the stack. */
1719 if (targetm.have_save_stack_nonlocal ())
1720 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1721 else
1722 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1723 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1725 OK_DEFER_POP;
1727 /* Return the address of the result block. */
1728 result = copy_addr_to_reg (XEXP (result, 0));
1729 return convert_memory_address (ptr_mode, result);
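/* As an illustration of what the expander above implements, a
   forwarding stub can be written as

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where TARGET_FN is a hypothetical callee and 64 a caller-chosen
   upper bound on the size of the stack arguments to copy. */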
1732 /* Perform an untyped return. */
1734 static void
1735 expand_builtin_return (rtx result)
1737 int size, align, regno;
1738 machine_mode mode;
1739 rtx reg;
1740 rtx_insn *call_fusage = 0;
1742 result = convert_memory_address (Pmode, result);
1744 apply_result_size ();
1745 result = gen_rtx_MEM (BLKmode, result);
1747 if (targetm.have_untyped_return ())
1749 rtx vector = result_vector (0, result);
1750 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1751 emit_barrier ();
1752 return;
1755 /* Restore the return value and note that each value is used. */
1756 size = 0;
1757 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1758 if ((mode = apply_result_mode[regno]) != VOIDmode)
1760 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1761 if (size % align != 0)
1762 size = CEIL (size, align) * align;
1763 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1764 emit_move_insn (reg, adjust_address (result, mode, size));
1766 push_to_sequence (call_fusage);
1767 emit_use (reg);
1768 call_fusage = get_insns ();
1769 end_sequence ();
1770 size += GET_MODE_SIZE (mode);
1773 /* Put the USE insns before the return. */
1774 emit_insn (call_fusage);
1776 /* Return whatever values were restored by jumping directly to the end
1777 of the function. */
1778 expand_naked_return ();
1781 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1783 static enum type_class
1784 type_to_class (tree type)
1786 switch (TREE_CODE (type))
1788 case VOID_TYPE: return void_type_class;
1789 case INTEGER_TYPE: return integer_type_class;
1790 case ENUMERAL_TYPE: return enumeral_type_class;
1791 case BOOLEAN_TYPE: return boolean_type_class;
1792 case POINTER_TYPE: return pointer_type_class;
1793 case REFERENCE_TYPE: return reference_type_class;
1794 case OFFSET_TYPE: return offset_type_class;
1795 case REAL_TYPE: return real_type_class;
1796 case COMPLEX_TYPE: return complex_type_class;
1797 case FUNCTION_TYPE: return function_type_class;
1798 case METHOD_TYPE: return method_type_class;
1799 case RECORD_TYPE: return record_type_class;
1800 case UNION_TYPE:
1801 case QUAL_UNION_TYPE: return union_type_class;
1802 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1803 ? string_type_class : array_type_class);
1804 case LANG_TYPE: return lang_type_class;
1805 default: return no_type_class;
1809 /* Expand a call EXP to __builtin_classify_type. */
1811 static rtx
1812 expand_builtin_classify_type (tree exp)
1814 if (call_expr_nargs (exp))
1815 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1816 return GEN_INT (no_type_class);
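/* E.g. __builtin_classify_type (1.0) expands to the integer constant
   real_type_class, and a call with no arguments to no_type_class. */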
1819 /* This helper macro, meant to be used in mathfn_built_in below, determines
1820 which among a set of builtin math functions is appropriate for a given type
1821 mode. The `F' (float) and `L' (long double) are automatically generated
1822 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1823 types, there are additional types that are considered with 'F32', 'F64',
1824 'F128', etc. suffixes. */
1825 #define CASE_MATHFN(MATHFN) \
1826 CASE_CFN_##MATHFN: \
1827 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1828 fcodel = BUILT_IN_##MATHFN##L ; break;
1829 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1830 types. */
1831 #define CASE_MATHFN_FLOATN(MATHFN) \
1832 CASE_CFN_##MATHFN: \
1833 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1834 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1835 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1836 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1837 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1838 break;
1839 /* Similar to above, but appends _R after any F/L suffix. */
1840 #define CASE_MATHFN_REENT(MATHFN) \
1841 case CFN_BUILT_IN_##MATHFN##_R: \
1842 case CFN_BUILT_IN_##MATHFN##F_R: \
1843 case CFN_BUILT_IN_##MATHFN##L_R: \
1844 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1845 fcodel = BUILT_IN_##MATHFN##L_R ; break;
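/* For instance, CASE_MATHFN (SQRT) below expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT (from case-cfn-macros.h) matches both the
   built-in code CFN_BUILT_IN_SQRT and the internal function code
   CFN_SQRT. */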
1847 /* Return a function equivalent to FN but operating on floating-point
1848 values of type TYPE, or END_BUILTINS if no such function exists.
1849 This is purely an operation on function codes; it does not guarantee
1850 that the target actually has an implementation of the function. */
1852 static built_in_function
1853 mathfn_built_in_2 (tree type, combined_fn fn)
1855 tree mtype;
1856 built_in_function fcode, fcodef, fcodel;
1857 built_in_function fcodef16 = END_BUILTINS;
1858 built_in_function fcodef32 = END_BUILTINS;
1859 built_in_function fcodef64 = END_BUILTINS;
1860 built_in_function fcodef128 = END_BUILTINS;
1861 built_in_function fcodef32x = END_BUILTINS;
1862 built_in_function fcodef64x = END_BUILTINS;
1863 built_in_function fcodef128x = END_BUILTINS;
1865 switch (fn)
1867 CASE_MATHFN (ACOS)
1868 CASE_MATHFN (ACOSH)
1869 CASE_MATHFN (ASIN)
1870 CASE_MATHFN (ASINH)
1871 CASE_MATHFN (ATAN)
1872 CASE_MATHFN (ATAN2)
1873 CASE_MATHFN (ATANH)
1874 CASE_MATHFN (CBRT)
1875 CASE_MATHFN (CEIL)
1876 CASE_MATHFN (CEXPI)
1877 CASE_MATHFN_FLOATN (COPYSIGN)
1878 CASE_MATHFN (COS)
1879 CASE_MATHFN (COSH)
1880 CASE_MATHFN (DREM)
1881 CASE_MATHFN (ERF)
1882 CASE_MATHFN (ERFC)
1883 CASE_MATHFN (EXP)
1884 CASE_MATHFN (EXP10)
1885 CASE_MATHFN (EXP2)
1886 CASE_MATHFN (EXPM1)
1887 CASE_MATHFN (FABS)
1888 CASE_MATHFN (FDIM)
1889 CASE_MATHFN (FLOOR)
1890 CASE_MATHFN_FLOATN (FMA)
1891 CASE_MATHFN_FLOATN (FMAX)
1892 CASE_MATHFN_FLOATN (FMIN)
1893 CASE_MATHFN (FMOD)
1894 CASE_MATHFN (FREXP)
1895 CASE_MATHFN (GAMMA)
1896 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1897 CASE_MATHFN (HUGE_VAL)
1898 CASE_MATHFN (HYPOT)
1899 CASE_MATHFN (ILOGB)
1900 CASE_MATHFN (ICEIL)
1901 CASE_MATHFN (IFLOOR)
1902 CASE_MATHFN (INF)
1903 CASE_MATHFN (IRINT)
1904 CASE_MATHFN (IROUND)
1905 CASE_MATHFN (ISINF)
1906 CASE_MATHFN (J0)
1907 CASE_MATHFN (J1)
1908 CASE_MATHFN (JN)
1909 CASE_MATHFN (LCEIL)
1910 CASE_MATHFN (LDEXP)
1911 CASE_MATHFN (LFLOOR)
1912 CASE_MATHFN (LGAMMA)
1913 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1914 CASE_MATHFN (LLCEIL)
1915 CASE_MATHFN (LLFLOOR)
1916 CASE_MATHFN (LLRINT)
1917 CASE_MATHFN (LLROUND)
1918 CASE_MATHFN (LOG)
1919 CASE_MATHFN (LOG10)
1920 CASE_MATHFN (LOG1P)
1921 CASE_MATHFN (LOG2)
1922 CASE_MATHFN (LOGB)
1923 CASE_MATHFN (LRINT)
1924 CASE_MATHFN (LROUND)
1925 CASE_MATHFN (MODF)
1926 CASE_MATHFN (NAN)
1927 CASE_MATHFN (NANS)
1928 CASE_MATHFN (NEARBYINT)
1929 CASE_MATHFN (NEXTAFTER)
1930 CASE_MATHFN (NEXTTOWARD)
1931 CASE_MATHFN (POW)
1932 CASE_MATHFN (POWI)
1933 CASE_MATHFN (POW10)
1934 CASE_MATHFN (REMAINDER)
1935 CASE_MATHFN (REMQUO)
1936 CASE_MATHFN (RINT)
1937 CASE_MATHFN (ROUND)
1938 CASE_MATHFN (SCALB)
1939 CASE_MATHFN (SCALBLN)
1940 CASE_MATHFN (SCALBN)
1941 CASE_MATHFN (SIGNBIT)
1942 CASE_MATHFN (SIGNIFICAND)
1943 CASE_MATHFN (SIN)
1944 CASE_MATHFN (SINCOS)
1945 CASE_MATHFN (SINH)
1946 CASE_MATHFN_FLOATN (SQRT)
1947 CASE_MATHFN (TAN)
1948 CASE_MATHFN (TANH)
1949 CASE_MATHFN (TGAMMA)
1950 CASE_MATHFN (TRUNC)
1951 CASE_MATHFN (Y0)
1952 CASE_MATHFN (Y1)
1953 CASE_MATHFN (YN)
1955 default:
1956 return END_BUILTINS;
1959 mtype = TYPE_MAIN_VARIANT (type);
1960 if (mtype == double_type_node)
1961 return fcode;
1962 else if (mtype == float_type_node)
1963 return fcodef;
1964 else if (mtype == long_double_type_node)
1965 return fcodel;
1966 else if (mtype == float16_type_node)
1967 return fcodef16;
1968 else if (mtype == float32_type_node)
1969 return fcodef32;
1970 else if (mtype == float64_type_node)
1971 return fcodef64;
1972 else if (mtype == float128_type_node)
1973 return fcodef128;
1974 else if (mtype == float32x_type_node)
1975 return fcodef32x;
1976 else if (mtype == float64x_type_node)
1977 return fcodef64x;
1978 else if (mtype == float128x_type_node)
1979 return fcodef128x;
1980 else
1981 return END_BUILTINS;
1984 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1985 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1986 otherwise use the explicit declaration. If we can't do the conversion,
1987 return null. */
1989 static tree
1990 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1992 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1993 if (fcode2 == END_BUILTINS)
1994 return NULL_TREE;
1996 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1997 return NULL_TREE;
1999 return builtin_decl_explicit (fcode2);
2002 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2004 tree
2005 mathfn_built_in (tree type, combined_fn fn)
2007 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2010 /* Like mathfn_built_in_1, but take a built_in_function and
2011 always use the implicit builtin declarations. */
2013 tree
2014 mathfn_built_in (tree type, enum built_in_function fn)
2016 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
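/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SQRT) maps
   BUILT_IN_SQRT to BUILT_IN_SQRTF and returns the sqrtf decl, or
   NULL_TREE if sqrtf is not implicitly available. */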
2019 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2020 return its code, otherwise return IFN_LAST. Note that this function
2021 only tests whether the function is defined in internals.def, not whether
2022 it is actually available on the target. */
2024 internal_fn
2025 associated_internal_fn (tree fndecl)
2027 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2028 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2029 switch (DECL_FUNCTION_CODE (fndecl))
2031 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2032 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2033 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2034 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2035 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2036 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2038 #include "internal-fn.def"
2040 CASE_FLT_FN (BUILT_IN_POW10):
2041 return IFN_EXP10;
2043 CASE_FLT_FN (BUILT_IN_DREM):
2044 return IFN_REMAINDER;
2046 CASE_FLT_FN (BUILT_IN_SCALBN):
2047 CASE_FLT_FN (BUILT_IN_SCALBLN):
2048 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2049 return IFN_LDEXP;
2050 return IFN_LAST;
2052 default:
2053 return IFN_LAST;
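/* E.g. this maps BUILT_IN_SQRTF to IFN_SQRT, and, per the special
   cases above, BUILT_IN_POW10 to IFN_EXP10 and BUILT_IN_SCALBN to
   IFN_LDEXP for base-2 formats. */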
2057 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2058 on the current target by a call to an internal function, return the
2059 code of that internal function, otherwise return IFN_LAST. The caller
2060 is responsible for ensuring that any side-effects of the built-in
2061 call are dealt with correctly. E.g. if CALL sets errno, the caller
2062 must decide that the errno result isn't needed or make it available
2063 in some other way. */
2065 internal_fn
2066 replacement_internal_fn (gcall *call)
2068 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2070 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2071 if (ifn != IFN_LAST)
2073 tree_pair types = direct_internal_fn_types (ifn, call);
2074 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2075 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2076 return ifn;
2079 return IFN_LAST;
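/* E.g. for a GIMPLE call to sqrtf this returns IFN_SQRT when the
   target's sqrt optab supports SFmode; as noted above, the caller
   must still prove that the errno side-effect is dead (for instance
   under -fno-math-errno) before rewriting the call. */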
2082 /* Expand a call to the builtin ternary math functions (fma).
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2087 operands. */
2089 static rtx
2090 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2092 optab builtin_optab;
2093 rtx op0, op1, op2, result;
2094 rtx_insn *insns;
2095 tree fndecl = get_callee_fndecl (exp);
2096 tree arg0, arg1, arg2;
2097 machine_mode mode;
2099 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2100 return NULL_RTX;
2102 arg0 = CALL_EXPR_ARG (exp, 0);
2103 arg1 = CALL_EXPR_ARG (exp, 1);
2104 arg2 = CALL_EXPR_ARG (exp, 2);
2106 switch (DECL_FUNCTION_CODE (fndecl))
2108 CASE_FLT_FN (BUILT_IN_FMA):
2109 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2110 builtin_optab = fma_optab; break;
2111 default:
2112 gcc_unreachable ();
2115 /* Make a suitable register to place result in. */
2116 mode = TYPE_MODE (TREE_TYPE (exp));
2118 /* Before working hard, check whether the instruction is available. */
2119 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2120 return NULL_RTX;
2122 result = gen_reg_rtx (mode);
2124 /* Always stabilize the argument list. */
2125 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2126 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2127 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2129 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2130 op1 = expand_normal (arg1);
2131 op2 = expand_normal (arg2);
2133 start_sequence ();
2135 /* Compute into RESULT.
2136 Set RESULT to wherever the result comes back. */
2137 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2138 result, 0);
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call the library function
2142 with the stabilized argument list. */
2143 if (result == 0)
2145 end_sequence ();
2146 return expand_call (exp, target, target == const0_rtx);
2149 /* Output the entire sequence. */
2150 insns = get_insns ();
2151 end_sequence ();
2152 emit_insn (insns);
2154 return result;
2157 /* Expand a call to the builtin sin and cos math functions.
2158 Return NULL_RTX if a normal call should be emitted rather than expanding the
2159 function in-line. EXP is the expression that is a call to the builtin
2160 function; if convenient, the result should be placed in TARGET.
2161 SUBTARGET may be used as the target for computing one of EXP's
2162 operands. */
2164 static rtx
2165 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2167 optab builtin_optab;
2168 rtx op0;
2169 rtx_insn *insns;
2170 tree fndecl = get_callee_fndecl (exp);
2171 machine_mode mode;
2172 tree arg;
2174 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2175 return NULL_RTX;
2177 arg = CALL_EXPR_ARG (exp, 0);
2179 switch (DECL_FUNCTION_CODE (fndecl))
2181 CASE_FLT_FN (BUILT_IN_SIN):
2182 CASE_FLT_FN (BUILT_IN_COS):
2183 builtin_optab = sincos_optab; break;
2184 default:
2185 gcc_unreachable ();
2188 /* Make a suitable register to place result in. */
2189 mode = TYPE_MODE (TREE_TYPE (exp));
2191 /* Check if the sincos insn is available, otherwise fall back
2192 to the sin or cos insn. */
2193 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2194 switch (DECL_FUNCTION_CODE (fndecl))
2196 CASE_FLT_FN (BUILT_IN_SIN):
2197 builtin_optab = sin_optab; break;
2198 CASE_FLT_FN (BUILT_IN_COS):
2199 builtin_optab = cos_optab; break;
2200 default:
2201 gcc_unreachable ();
2204 /* Before working hard, check whether the instruction is available. */
2205 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2207 rtx result = gen_reg_rtx (mode);
2209 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2210 need to expand the argument again. This way, we will not perform
2211 side-effects more than once. */
2212 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2214 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2216 start_sequence ();
2218 /* Compute into RESULT.
2219 Set RESULT to wherever the result comes back. */
2220 if (builtin_optab == sincos_optab)
2222 int ok;
2224 switch (DECL_FUNCTION_CODE (fndecl))
2226 CASE_FLT_FN (BUILT_IN_SIN):
2227 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2228 break;
2229 CASE_FLT_FN (BUILT_IN_COS):
2230 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2231 break;
2232 default:
2233 gcc_unreachable ();
2235 gcc_assert (ok);
2237 else
2238 result = expand_unop (mode, builtin_optab, op0, result, 0);
2240 if (result != 0)
2242 /* Output the entire sequence. */
2243 insns = get_insns ();
2244 end_sequence ();
2245 emit_insn (insns);
2246 return result;
2249 /* If we were unable to expand via the builtin, stop the sequence
2250 (without outputting the insns) and call the library function
2251 with the stabilized argument list. */
2252 end_sequence ();
2255 return expand_call (exp, target, target == const0_rtx);
2258 /* Given an interclass math builtin decl FNDECL and its argument ARG
2259 return an RTL instruction code that implements the functionality.
2260 If that isn't possible or available return CODE_FOR_nothing. */
2262 static enum insn_code
2263 interclass_mathfn_icode (tree arg, tree fndecl)
2265 bool errno_set = false;
2266 optab builtin_optab = unknown_optab;
2267 machine_mode mode;
2269 switch (DECL_FUNCTION_CODE (fndecl))
2271 CASE_FLT_FN (BUILT_IN_ILOGB):
2272 errno_set = true; builtin_optab = ilogb_optab; break;
2273 CASE_FLT_FN (BUILT_IN_ISINF):
2274 builtin_optab = isinf_optab; break;
2275 case BUILT_IN_ISNORMAL:
2276 case BUILT_IN_ISFINITE:
2277 CASE_FLT_FN (BUILT_IN_FINITE):
2278 case BUILT_IN_FINITED32:
2279 case BUILT_IN_FINITED64:
2280 case BUILT_IN_FINITED128:
2281 case BUILT_IN_ISINFD32:
2282 case BUILT_IN_ISINFD64:
2283 case BUILT_IN_ISINFD128:
2284 /* These builtins have no optabs (yet). */
2285 break;
2286 default:
2287 gcc_unreachable ();
2290 /* There's no easy way to detect the case we need to set EDOM. */
2291 if (flag_errno_math && errno_set)
2292 return CODE_FOR_nothing;
2294 /* Optab mode depends on the mode of the input argument. */
2295 mode = TYPE_MODE (TREE_TYPE (arg));
2297 if (builtin_optab)
2298 return optab_handler (builtin_optab, mode);
2299 return CODE_FOR_nothing;
2302 /* Expand a call to one of the builtin math functions that operate on
2303 a floating-point argument and output an integer result (ilogb, isinf,
2304 isnan, etc.).
2305 Return 0 if a normal call should be emitted rather than expanding the
2306 function in-line. EXP is the expression that is a call to the builtin
2307 function; if convenient, the result should be placed in TARGET. */
2309 static rtx
2310 expand_builtin_interclass_mathfn (tree exp, rtx target)
2312 enum insn_code icode = CODE_FOR_nothing;
2313 rtx op0;
2314 tree fndecl = get_callee_fndecl (exp);
2315 machine_mode mode;
2316 tree arg;
2318 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2319 return NULL_RTX;
2321 arg = CALL_EXPR_ARG (exp, 0);
2322 icode = interclass_mathfn_icode (arg, fndecl);
2323 mode = TYPE_MODE (TREE_TYPE (arg));
2325 if (icode != CODE_FOR_nothing)
2327 struct expand_operand ops[1];
2328 rtx_insn *last = get_last_insn ();
2329 tree orig_arg = arg;
2331 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2332 need to expand the argument again. This way, we will not perform
2333 side-effects more than once. */
2334 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2336 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2338 if (mode != GET_MODE (op0))
2339 op0 = convert_to_mode (mode, op0, 0);
2341 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2342 if (maybe_legitimize_operands (icode, 0, 1, ops)
2343 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2344 return ops[0].value;
2346 delete_insns_since (last);
2347 CALL_EXPR_ARG (exp, 0) = orig_arg;
2350 return NULL_RTX;
2353 /* Expand a call to the builtin sincos math function.
2354 Return NULL_RTX if a normal call should be emitted rather than expanding the
2355 function in-line. EXP is the expression that is a call to the builtin
2356 function. */
2358 static rtx
2359 expand_builtin_sincos (tree exp)
2361 rtx op0, op1, op2, target1, target2;
2362 machine_mode mode;
2363 tree arg, sinp, cosp;
2364 int result;
2365 location_t loc = EXPR_LOCATION (exp);
2366 tree alias_type, alias_off;
2368 if (!validate_arglist (exp, REAL_TYPE,
2369 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2370 return NULL_RTX;
2372 arg = CALL_EXPR_ARG (exp, 0);
2373 sinp = CALL_EXPR_ARG (exp, 1);
2374 cosp = CALL_EXPR_ARG (exp, 2);
2376 /* Make a suitable register to place result in. */
2377 mode = TYPE_MODE (TREE_TYPE (arg));
2379 /* Check if the sincos insn is available, otherwise emit the call. */
2380 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2381 return NULL_RTX;
2383 target1 = gen_reg_rtx (mode);
2384 target2 = gen_reg_rtx (mode);
2386 op0 = expand_normal (arg);
2387 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2388 alias_off = build_int_cst (alias_type, 0);
2389 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2390 sinp, alias_off));
2391 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2392 cosp, alias_off));
2394 /* Compute into target1 and target2.
2395 Set TARGET to wherever the result comes back. */
2396 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2397 gcc_assert (result);
2399 /* Move target1 and target2 to the memory locations indicated
2400 by op1 and op2. */
2401 emit_move_insn (op1, target1);
2402 emit_move_insn (op2, target2);
2404 return const0_rtx;
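/* E.g. sincos (x, &s, &c) on a target providing the sincos insn
   expands to a single two-value insn computing both results via
   expand_twoval_unop, followed by the two stores emitted above. */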
2407 /* Expand a call to the internal cexpi builtin to the sincos math function.
2408 EXP is the expression that is a call to the builtin function; if convenient,
2409 the result should be placed in TARGET. */
2411 static rtx
2412 expand_builtin_cexpi (tree exp, rtx target)
2414 tree fndecl = get_callee_fndecl (exp);
2415 tree arg, type;
2416 machine_mode mode;
2417 rtx op0, op1, op2;
2418 location_t loc = EXPR_LOCATION (exp);
2420 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2421 return NULL_RTX;
2423 arg = CALL_EXPR_ARG (exp, 0);
2424 type = TREE_TYPE (arg);
2425 mode = TYPE_MODE (TREE_TYPE (arg));
2427 /* Try expanding via a sincos optab, fall back to emitting a libcall
2428 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2429 is only generated from sincos or cexp, or when either of them is available. */
2430 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2432 op1 = gen_reg_rtx (mode);
2433 op2 = gen_reg_rtx (mode);
2435 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2437 /* Compute into op1 and op2. */
2438 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2440 else if (targetm.libc_has_function (function_sincos))
2442 tree call, fn = NULL_TREE;
2443 tree top1, top2;
2444 rtx op1a, op2a;
2446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2447 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2449 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2451 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2452 else
2453 gcc_unreachable ();
2455 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2456 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2457 op1a = copy_addr_to_reg (XEXP (op1, 0));
2458 op2a = copy_addr_to_reg (XEXP (op2, 0));
2459 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2460 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2462 /* Make sure not to fold the sincos call again. */
2463 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2464 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2465 call, 3, arg, top1, top2));
2467 else
2469 tree call, fn = NULL_TREE, narg;
2470 tree ctype = build_complex_type (type);
2472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2473 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2474 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2475 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2477 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2478 else
2479 gcc_unreachable ();
2481 /* If we don't have a decl for cexp create one. This is the
2482 friendliest fallback if the user calls __builtin_cexpi
2483 without full target C99 function support. */
2484 if (fn == NULL_TREE)
2486 tree fntype;
2487 const char *name = NULL;
2489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2490 name = "cexpf";
2491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2492 name = "cexp";
2493 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2494 name = "cexpl";
2496 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2497 fn = build_fn_decl (name, fntype);
2500 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2501 build_real (type, dconst0), arg);
2503 /* Make sure not to fold the cexp call again. */
2504 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2505 return expand_expr (build_call_nary (ctype, call, 1, narg),
2506 target, VOIDmode, EXPAND_NORMAL);
2509 /* Now build the proper return type. */
2510 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2511 make_tree (TREE_TYPE (arg), op2),
2512 make_tree (TREE_TYPE (arg), op1)),
2513 target, VOIDmode, EXPAND_NORMAL);
2516 /* Conveniently construct a function call expression. FNDECL names the
2517 function to be called, N is the number of arguments, and the "..."
2518 parameters are the argument expressions. Unlike build_call_expr
2519 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2521 static tree
2522 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2524 va_list ap;
2525 tree fntype = TREE_TYPE (fndecl);
2526 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2528 va_start (ap, n);
2529 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2530 va_end (ap);
2531 SET_EXPR_LOCATION (fn, loc);
2532 return fn;
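/* E.g. build_call_nofold_loc (loc, fndecl, 1, arg) yields an unfolded
   CALL_EXPR calling FNDECL with the single argument ARG; it is used
   below to emit fallback calls such as "floor" or "lround" without
   re-triggering builtin folding. */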
2535 /* Expand a call to one of the builtin rounding functions gcc defines
2536 as an extension (lfloor and lceil). As these are gcc extensions we
2537 do not need to worry about setting errno to EDOM.
2538 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2539 EXP is the expression that is a call to the builtin function;
2540 if convenient, the result should be placed in TARGET. */
2542 static rtx
2543 expand_builtin_int_roundingfn (tree exp, rtx target)
2545 convert_optab builtin_optab;
2546 rtx op0, tmp;
2547 rtx_insn *insns;
2548 tree fndecl = get_callee_fndecl (exp);
2549 enum built_in_function fallback_fn;
2550 tree fallback_fndecl;
2551 machine_mode mode;
2552 tree arg;
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2555 gcc_unreachable ();
2557 arg = CALL_EXPR_ARG (exp, 0);
2559 switch (DECL_FUNCTION_CODE (fndecl))
2561 CASE_FLT_FN (BUILT_IN_ICEIL):
2562 CASE_FLT_FN (BUILT_IN_LCEIL):
2563 CASE_FLT_FN (BUILT_IN_LLCEIL):
2564 builtin_optab = lceil_optab;
2565 fallback_fn = BUILT_IN_CEIL;
2566 break;
2568 CASE_FLT_FN (BUILT_IN_IFLOOR):
2569 CASE_FLT_FN (BUILT_IN_LFLOOR):
2570 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2571 builtin_optab = lfloor_optab;
2572 fallback_fn = BUILT_IN_FLOOR;
2573 break;
2575 default:
2576 gcc_unreachable ();
2579 /* Make a suitable register to place result in. */
2580 mode = TYPE_MODE (TREE_TYPE (exp));
2582 target = gen_reg_rtx (mode);
2584 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2585 need to expand the argument again. This way, we will not perform
2586 side-effects more than once. */
2587 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2589 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2591 start_sequence ();
2593 /* Compute into TARGET. */
2594 if (expand_sfix_optab (target, op0, builtin_optab))
2596 /* Output the entire sequence. */
2597 insns = get_insns ();
2598 end_sequence ();
2599 emit_insn (insns);
2600 return target;
2603 /* If we were unable to expand via the builtin, stop the sequence
2604 (without outputting the insns). */
2605 end_sequence ();
2607 /* Fall back to floating point rounding optab. */
2608 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2610 /* For non-C99 targets we may end up without a fallback fndecl here
2611 if the user called __builtin_lfloor directly. In this case emit
2612 a call to the floor/ceil variants nevertheless. This should result
2613 in the best user experience for targets without full C99 support. */
2614 if (fallback_fndecl == NULL_TREE)
2616 tree fntype;
2617 const char *name = NULL;
2619 switch (DECL_FUNCTION_CODE (fndecl))
2621 case BUILT_IN_ICEIL:
2622 case BUILT_IN_LCEIL:
2623 case BUILT_IN_LLCEIL:
2624 name = "ceil";
2625 break;
2626 case BUILT_IN_ICEILF:
2627 case BUILT_IN_LCEILF:
2628 case BUILT_IN_LLCEILF:
2629 name = "ceilf";
2630 break;
2631 case BUILT_IN_ICEILL:
2632 case BUILT_IN_LCEILL:
2633 case BUILT_IN_LLCEILL:
2634 name = "ceill";
2635 break;
2636 case BUILT_IN_IFLOOR:
2637 case BUILT_IN_LFLOOR:
2638 case BUILT_IN_LLFLOOR:
2639 name = "floor";
2640 break;
2641 case BUILT_IN_IFLOORF:
2642 case BUILT_IN_LFLOORF:
2643 case BUILT_IN_LLFLOORF:
2644 name = "floorf";
2645 break;
2646 case BUILT_IN_IFLOORL:
2647 case BUILT_IN_LFLOORL:
2648 case BUILT_IN_LLFLOORL:
2649 name = "floorl";
2650 break;
2651 default:
2652 gcc_unreachable ();
2655 fntype = build_function_type_list (TREE_TYPE (arg),
2656 TREE_TYPE (arg), NULL_TREE);
2657 fallback_fndecl = build_fn_decl (name, fntype);
2660 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2662 tmp = expand_normal (exp);
2663 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2665 /* Truncate the result of the floating-point optab to integer
2666 via expand_fix (). */
2667 target = gen_reg_rtx (mode);
2668 expand_fix (target, tmp, 0);
2670 return target;
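/* E.g. __builtin_lfloor (x) expands through lfloor_optab when the
   target provides it, and otherwise lowers to (long) floor (x) via
   the fallback path above. */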
2673 /* Expand a call to one of the builtin math functions doing integer
2674 conversion (lrint).
2675 Return 0 if a normal call should be emitted rather than expanding the
2676 function in-line. EXP is the expression that is a call to the builtin
2677 function; if convenient, the result should be placed in TARGET. */
2679 static rtx
2680 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2682 convert_optab builtin_optab;
2683 rtx op0;
2684 rtx_insn *insns;
2685 tree fndecl = get_callee_fndecl (exp);
2686 tree arg;
2687 machine_mode mode;
2688 enum built_in_function fallback_fn = BUILT_IN_NONE;
2690 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2691 gcc_unreachable ();
2693 arg = CALL_EXPR_ARG (exp, 0);
2695 switch (DECL_FUNCTION_CODE (fndecl))
2697 CASE_FLT_FN (BUILT_IN_IRINT):
2698 fallback_fn = BUILT_IN_LRINT;
2699 gcc_fallthrough ();
2700 CASE_FLT_FN (BUILT_IN_LRINT):
2701 CASE_FLT_FN (BUILT_IN_LLRINT):
2702 builtin_optab = lrint_optab;
2703 break;
2705 CASE_FLT_FN (BUILT_IN_IROUND):
2706 fallback_fn = BUILT_IN_LROUND;
2707 gcc_fallthrough ();
2708 CASE_FLT_FN (BUILT_IN_LROUND):
2709 CASE_FLT_FN (BUILT_IN_LLROUND):
2710 builtin_optab = lround_optab;
2711 break;
2713 default:
2714 gcc_unreachable ();
2717 /* There's no easy way to detect the case we need to set EDOM. */
2718 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2719 return NULL_RTX;
2721 /* Make a suitable register to place result in. */
2722 mode = TYPE_MODE (TREE_TYPE (exp));
2724 /* There's no easy way to detect the case we need to set EDOM. */
2725 if (!flag_errno_math)
2727 rtx result = gen_reg_rtx (mode);
2729 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2730 need to expand the argument again. This way, we will not perform
2731 side-effects more than once. */
2732 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2734 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2736 start_sequence ();
2738 if (expand_sfix_optab (result, op0, builtin_optab))
2740 /* Output the entire sequence. */
2741 insns = get_insns ();
2742 end_sequence ();
2743 emit_insn (insns);
2744 return result;
2747 /* If we were unable to expand via the builtin, stop the sequence
2748 (without outputting the insns) and call the library function
2749 with the stabilized argument list. */
2750 end_sequence ();
2753 if (fallback_fn != BUILT_IN_NONE)
2755 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2756 targets, (int) round (x) should never be transformed into
2757 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2758 a call to lround in the hope that the target provides at least some
2759 C99 functions. This should result in the best user experience for
2760 targets without full C99 support. */
2761 tree fallback_fndecl = mathfn_built_in_1
2762 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2764 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2765 fallback_fndecl, 1, arg);
2767 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2768 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2769 return convert_to_mode (mode, target, 0);
2772 return expand_call (exp, target, target == const0_rtx);
2775 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2776 a normal call should be emitted rather than expanding the function
2777 in-line. EXP is the expression that is a call to the builtin
2778 function; if convenient, the result should be placed in TARGET. */
2780 static rtx
2781 expand_builtin_powi (tree exp, rtx target)
2783 tree arg0, arg1;
2784 rtx op0, op1;
2785 machine_mode mode;
2786 machine_mode mode2;
2788 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2789 return NULL_RTX;
2791 arg0 = CALL_EXPR_ARG (exp, 0);
2792 arg1 = CALL_EXPR_ARG (exp, 1);
2793 mode = TYPE_MODE (TREE_TYPE (exp));
2795 /* Emit a libcall to libgcc. */
2797 /* Mode of the 2nd argument must match that of an int. */
2798 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2800 if (target == NULL_RTX)
2801 target = gen_reg_rtx (mode);
2803 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2804 if (GET_MODE (op0) != mode)
2805 op0 = convert_to_mode (mode, op0, 0);
2806 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2807 if (GET_MODE (op1) != mode2)
2808 op1 = convert_to_mode (mode2, op1, 0);
2810 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2811 target, LCT_CONST, mode,
2812 op0, mode, op1, mode2);
2814 return target;
2817 /* Expand expression EXP which is a call to the strlen builtin. Return
2818 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2819 try to get the result in TARGET, if convenient. */
2821 static rtx
2822 expand_builtin_strlen (tree exp, rtx target,
2823 machine_mode target_mode)
2825 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2826 return NULL_RTX;
2827 else
2829 struct expand_operand ops[4];
2830 rtx pat;
2831 tree len;
2832 tree src = CALL_EXPR_ARG (exp, 0);
2833 rtx src_reg;
2834 rtx_insn *before_strlen;
2835 machine_mode insn_mode;
2836 enum insn_code icode = CODE_FOR_nothing;
2837 unsigned int align;
2839 /* If the length can be computed at compile-time, return it. */
2840 len = c_strlen (src, 0);
2841 if (len)
2842 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2844 /* If the length can be computed at compile-time and is a constant
2845 integer, but there are side-effects in src, evaluate
2846 src for side-effects, then return len.
2847 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2848 can be optimized into: i++; x = 3; */
2849 len = c_strlen (src, 1);
2850 if (len && TREE_CODE (len) == INTEGER_CST)
2852 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2853 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2856 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2858 /* If SRC is not a pointer type, don't do this operation inline. */
2859 if (align == 0)
2860 return NULL_RTX;
2862 /* Bail out if we can't compute strlen in the right mode. */
2863 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2865 icode = optab_handler (strlen_optab, insn_mode);
2866 if (icode != CODE_FOR_nothing)
2867 break;
2869 if (insn_mode == VOIDmode)
2870 return NULL_RTX;
2872 /* Make a place to hold the source address. We will not expand
2873 the actual source until we are sure that the expansion will
2874 not fail -- there are trees that cannot be expanded twice. */
2875 src_reg = gen_reg_rtx (Pmode);
2877 /* Mark the beginning of the strlen sequence so we can emit the
2878 source operand later. */
2879 before_strlen = get_last_insn ();
2881 create_output_operand (&ops[0], target, insn_mode);
2882 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2883 create_integer_operand (&ops[2], 0);
2884 create_integer_operand (&ops[3], align);
2885 if (!maybe_expand_insn (icode, 4, ops))
2886 return NULL_RTX;
2888 /* Check to see if the argument was declared attribute nonstring
2889 and if so, issue a warning since at this point it's not known
2890 to be nul-terminated. */
2891 maybe_warn_nonstring_arg (TREE_OPERAND (CALL_EXPR_FN (exp), 0), exp);
2893 /* Now that we are assured of success, expand the source. */
2894 start_sequence ();
2895 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2896 if (pat != src_reg)
2898 #ifdef POINTERS_EXTEND_UNSIGNED
2899 if (GET_MODE (pat) != Pmode)
2900 pat = convert_to_mode (Pmode, pat,
2901 POINTERS_EXTEND_UNSIGNED);
2902 #endif
2903 emit_move_insn (src_reg, pat);
2905 pat = get_insns ();
2906 end_sequence ();
2908 if (before_strlen)
2909 emit_insn_after (pat, before_strlen);
2910 else
2911 emit_insn_before (pat, get_insns ());
2913 /* Return the value in the proper mode for this function. */
2914 if (GET_MODE (ops[0].value) == target_mode)
2915 target = ops[0].value;
2916 else if (target != 0)
2917 convert_move (target, ops[0].value, 0);
2918 else
2919 target = convert_to_mode (target_mode, ops[0].value, 0);
2921 return target;
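/* E.g. x = strlen ("foobar") is computed at compile time by c_strlen
   above and expands directly to the constant 6. */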
2925 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2926 bytes from constant string DATA + OFFSET and return it as target
2927 constant. */
2929 static rtx
2930 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2931 scalar_int_mode mode)
2933 const char *str = (const char *) data;
2935 gcc_assert (offset >= 0
2936 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2937 <= strlen (str) + 1));
2939 return c_readstr (str + offset, mode);
2942 /* LEN specifies the length of the block for a memcpy/memset operation.
2943 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2944 In some cases we can make a very likely guess about the max size, which
2945 we then store in PROBABLE_MAX_SIZE. */
2947 static void
2948 determine_block_size (tree len, rtx len_rtx,
2949 unsigned HOST_WIDE_INT *min_size,
2950 unsigned HOST_WIDE_INT *max_size,
2951 unsigned HOST_WIDE_INT *probable_max_size)
2953 if (CONST_INT_P (len_rtx))
2955 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2956 return;
2958 else
2960 wide_int min, max;
2961 enum value_range_type range_type = VR_UNDEFINED;
2963 /* Determine bounds from the type. */
2964 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2965 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2966 else
2967 *min_size = 0;
2968 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2969 *probable_max_size = *max_size
2970 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2971 else
2972 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2974 if (TREE_CODE (len) == SSA_NAME)
2975 range_type = get_range_info (len, &min, &max);
2976 if (range_type == VR_RANGE)
2978 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2979 *min_size = min.to_uhwi ();
2980 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2981 *probable_max_size = *max_size = max.to_uhwi ();
2983 else if (range_type == VR_ANTI_RANGE)
2985 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2986 if (min == 0)
2988 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2989 *min_size = max.to_uhwi () + 1;
2991 /* Code like
2993 int n;
2994 if (n < 100)
2995 memcpy (a, b, n)
2997 produces an anti-range allowing negative values of N. We can
2998 still use that information and guess that N is not negative.
3000 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3001 *probable_max_size = min.to_uhwi () - 1;
3004 gcc_checking_assert (*max_size <=
3005 (unsigned HOST_WIDE_INT)
3006 GET_MODE_MASK (GET_MODE (len_rtx)));
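/* E.g. for memcpy (a, b, n) where value ranges prove n is in [16, 64],
   MIN_SIZE becomes 16 and MAX_SIZE and PROBABLE_MAX_SIZE both 64;
   the anti-range ~[0, 0] only raises MIN_SIZE, to 1. */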
3009 /* Try to verify that the sizes and lengths of the arguments to a string
3010 manipulation function given by EXP are within valid bounds and that
3011 the operation does not lead to buffer overflow. Arguments other than
3012 EXP may be null. When non-null, the arguments have the following
3013 meaning:
3014 SIZE is the user-supplied size argument to the function (such as in
3015 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3016 number of bytes to write.
3017 MAXLEN is the user-supplied bound on the length of the source sequence
3018 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3019 of bytes to write.
3020 SRC is the source string (such as in strcpy(d, s)) when the expression
3021 EXP is a string function call (as opposed to a memory call like memcpy).
3022 As an exception, SRC can also be an integer denoting the precomputed
3023 size of the source string or object (for functions like memcpy).
3024 OBJSIZE is the size of the destination object specified by the last
3025 argument to the _chk builtins, typically resulting from the expansion
3026 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3027 OBJSIZE).
3029 When SIZE is null, MAXLEN is checked to verify that it doesn't exceed
3030 SIZE_MAX.
3032 If the call is successfully verified as safe from buffer overflow
3033 the function returns true, otherwise false. */
3035 static bool
3036 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3038 /* The size of the largest object is half the address space, or
3039 SSIZE_MAX. (This is way too permissive.) */
3040 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3042 tree slen = NULL_TREE;
3044 tree range[2] = { NULL_TREE, NULL_TREE };
3046 /* Set to true when the exact number of bytes written by a string
3047 function like strcpy is not known and the only thing that is
3048 known is that it must be at least one (for the terminating nul). */
3049 bool at_least_one = false;
3050 if (src)
3052 /* SRC is normally a pointer to string but as a special case
3053 it can be an integer denoting the length of a string. */
3054 if (POINTER_TYPE_P (TREE_TYPE (src)))
3056 /* Try to determine the range of lengths the source string
3057 refers to. If it can be determined and is less than
3058 the upper bound given by MAXLEN add one to it for
3059 the terminating nul. Otherwise, set it to one for
3060 the same reason, or to MAXLEN as appropriate. */
3061 get_range_strlen (src, range);
3062 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3064 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3065 range[0] = range[1] = maxlen;
3066 else
3067 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3068 range[0], size_one_node);
3070 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3071 range[1] = maxlen;
3072 else if (!integer_all_onesp (range[1]))
3073 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3074 range[1], size_one_node);
3076 slen = range[0];
3078 else
3080 at_least_one = true;
3081 slen = size_one_node;
3084 else
3085 slen = src;
3088 if (!size && !maxlen)
3090 /* When the only available piece of data is the object size
3091 there is nothing to do. */
3092 if (!slen)
3093 return true;
3095 /* Otherwise, when the length of the source sequence is known
3096 (as with strlen), set SIZE to it. */
3097 if (!range[0])
3098 size = slen;
3101 if (!objsize)
3102 objsize = maxobjsize;
3104 /* The SIZE is exact if it's non-null, constant, and in range of
3105 unsigned HOST_WIDE_INT. */
3106 bool exactsize = size && tree_fits_uhwi_p (size);
3108 if (size)
3109 get_size_range (size, range);
3111 /* First check the number of bytes to be written against the maximum
3112 object size. */
3113 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3115 location_t loc = tree_nonartificial_location (exp);
3116 loc = expansion_point_location_if_in_system_header (loc);
3118 if (range[0] == range[1])
3119 warning_at (loc, opt,
3120 "%K%qD specified size %E "
3121 "exceeds maximum object size %E",
3122 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3123 else
3124 warning_at (loc, opt,
3125 "%K%qD specified size between %E and %E "
3126 "exceeds maximum object size %E",
3127 exp, get_callee_fndecl (exp),
3128 range[0], range[1], maxobjsize);
3129 return false;
3132 /* Next check the number of bytes to be written against the destination
3133 object size. */
3134 if (range[0] || !exactsize || integer_all_onesp (size))
3136 if (range[0]
3137 && ((tree_fits_uhwi_p (objsize)
3138 && tree_int_cst_lt (objsize, range[0]))
3139 || (tree_fits_uhwi_p (size)
3140 && tree_int_cst_lt (size, range[0]))))
3142 location_t loc = tree_nonartificial_location (exp);
3143 loc = expansion_point_location_if_in_system_header (loc);
3145 if (size == slen && at_least_one)
3147 /* This is a call to strcpy with a destination of 0 size
3148 and a source of unknown length. The call will write
3149 at least one byte past the end of the destination. */
3150 warning_at (loc, opt,
3151 "%K%qD writing %E or more bytes into a region "
3152 "of size %E overflows the destination",
3153 exp, get_callee_fndecl (exp), range[0], objsize);
3155 else if (tree_int_cst_equal (range[0], range[1]))
3156 warning_at (loc, opt,
3157 (integer_onep (range[0])
3158 ? G_("%K%qD writing %E byte into a region "
3159 "of size %E overflows the destination")
3160 : G_("%K%qD writing %E bytes into a region "
3161 "of size %E overflows the destination")),
3162 exp, get_callee_fndecl (exp), range[0], objsize);
3163 else if (tree_int_cst_sign_bit (range[1]))
3165 /* Avoid printing the upper bound if it's invalid. */
3166 warning_at (loc, opt,
3167 "%K%qD writing %E or more bytes into a region "
3168 "of size %E overflows the destination",
3169 exp, get_callee_fndecl (exp), range[0], objsize);
3171 else
3172 warning_at (loc, opt,
3173 "%K%qD writing between %E and %E bytes into "
3174 "a region of size %E overflows the destination",
3175 exp, get_callee_fndecl (exp), range[0], range[1],
3176 objsize);
3178 /* Return error when an overflow has been detected. */
3179 return false;
3183 /* Check the maximum length of the source sequence against the size
3184 of the destination object if known, or against the maximum size
3185 of an object. */
3186 if (maxlen)
3188 get_size_range (maxlen, range);
3190 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3192 location_t loc = tree_nonartificial_location (exp);
3193 loc = expansion_point_location_if_in_system_header (loc);
3195 if (tree_int_cst_lt (maxobjsize, range[0]))
3197 /* Warn about crazy big sizes first since that's more
3198 likely to be meaningful than saying that the bound
3199 is greater than the object size if both are big. */
3200 if (range[0] == range[1])
3201 warning_at (loc, opt,
3202 "%K%qD specified bound %E "
3203 "exceeds maximum object size %E",
3204 exp, get_callee_fndecl (exp),
3205 range[0], maxobjsize);
3206 else
3207 warning_at (loc, opt,
3208 "%K%qD specified bound between %E and %E "
3209 "exceeds maximum object size %E",
3210 exp, get_callee_fndecl (exp),
3211 range[0], range[1], maxobjsize);
3213 return false;
3216 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3218 if (tree_int_cst_equal (range[0], range[1]))
3219 warning_at (loc, opt,
3220 "%K%qD specified bound %E "
3221 "exceeds destination size %E",
3222 exp, get_callee_fndecl (exp),
3223 range[0], objsize);
3224 else
3225 warning_at (loc, opt,
3226 "%K%qD specified bound between %E and %E "
3227 "exceeds destination size %E",
3228 exp, get_callee_fndecl (exp),
3229 range[0], range[1], objsize);
3230 return false;
3235 if (slen
3236 && slen == src
3237 && size && range[0]
3238 && tree_int_cst_lt (slen, range[0]))
3240 location_t loc = tree_nonartificial_location (exp);
3242 if (tree_int_cst_equal (range[0], range[1]))
3243 warning_at (loc, opt,
3244 (tree_int_cst_equal (range[0], integer_one_node)
3245 ? G_("%K%qD reading %E byte from a region of size %E")
3246 : G_("%K%qD reading %E bytes from a region of size %E")),
3247 exp, get_callee_fndecl (exp), range[0], slen);
3248 else if (tree_int_cst_sign_bit (range[1]))
3250 /* Avoid printing the upper bound if it's invalid. */
3251 warning_at (loc, opt,
3252 "%K%qD reading %E or more bytes from a region "
3253 "of size %E",
3254 exp, get_callee_fndecl (exp), range[0], slen);
3256 else
3257 warning_at (loc, opt,
3258 "%K%qD reading between %E and %E bytes from a region "
3259 "of size %E",
3260 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3261 return false;
3264 return true;
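/* E.g. for char d[4] a call memcpy (d, s, 8) makes the checks above
   emit "writing 8 bytes into a region of size 4 overflows the
   destination" and return false. */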
3267 /* Helper to compute the size of the object referenced by the DEST
3268 expression which must have pointer type, using Object Size type
3269 OSTYPE (only the least significant 2 bits are used). Return
3270 the size of the object if successful or NULL when the size cannot
3271 be determined. */
3273 tree
3274 compute_objsize (tree dest, int ostype)
3276 unsigned HOST_WIDE_INT size;
3278 /* Only the two least significant bits are meaningful. */
3279 ostype &= 3;
3281 if (compute_builtin_object_size (dest, ostype, &size))
3282 return build_int_cst (sizetype, size);
3284 /* Unless computing the largest size (for memcpy and other raw memory
3285 functions), try to determine the size of the object from its type. */
3286 if (!ostype)
3287 return NULL_TREE;
3289 if (TREE_CODE (dest) == SSA_NAME)
3291 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3292 if (!is_gimple_assign (stmt))
3293 return NULL_TREE;
3295 tree_code code = gimple_assign_rhs_code (stmt);
3296 if (code != ADDR_EXPR && code != POINTER_PLUS_EXPR)
3297 return NULL_TREE;
3299 dest = gimple_assign_rhs1 (stmt);
3302 if (TREE_CODE (dest) != ADDR_EXPR)
3303 return NULL_TREE;
3305 tree type = TREE_TYPE (dest);
3306 if (TREE_CODE (type) == POINTER_TYPE)
3307 type = TREE_TYPE (type);
3309 type = TYPE_MAIN_VARIANT (type);
3311 if (TREE_CODE (type) == ARRAY_TYPE
3312 && !array_at_struct_end_p (dest))
3314 /* Return the constant size unless it's zero (that's a zero-length
3315 array likely at the end of a struct). */
3316 tree size = TYPE_SIZE_UNIT (type);
3317 if (size && TREE_CODE (size) == INTEGER_CST
3318 && !integer_zerop (size))
3319 return size;
3322 return NULL_TREE;
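/* E.g. for char buf[8] and DEST of &buf, compute_objsize (dest, 0)
   returns an 8-byte sizetype constant via compute_builtin_object_size;
   the type-based analysis above is only attempted for nonzero OSTYPE. */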
3325 /* Helper to determine and check the sizes of the source and the destination
3326 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3327 call expression, DEST is the destination argument, SRC is the source
3328 argument or null, and LEN is the number of bytes. Use Object Size type-0
3329 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3330 (no overflow or invalid sizes), false otherwise. */
3332 static bool
3333 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3335 if (!warn_stringop_overflow)
3336 return true;
3338 /* For functions like memset and memcpy that operate on raw memory
3339 try to determine the size of the largest source and destination
3340 object using type-0 Object Size regardless of the object size
3341 type specified by the option. */
3342 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3343 tree dstsize = compute_objsize (dest, 0);
3345 return check_sizes (OPT_Wstringop_overflow_, exp,
3346 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3349 /* Validate memchr arguments without performing any expansion.
3350 Return NULL_RTX. */
3352 static rtx
3353 expand_builtin_memchr (tree exp, rtx)
3355 if (!validate_arglist (exp,
3356 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3357 return NULL_RTX;
3359 tree arg1 = CALL_EXPR_ARG (exp, 0);
3360 tree len = CALL_EXPR_ARG (exp, 2);
3362 /* Diagnose calls where the specified length exceeds the size
3363 of the object. */
3364 if (warn_stringop_overflow)
3366 tree size = compute_objsize (arg1, 0);
3367 check_sizes (OPT_Wstringop_overflow_,
3368 exp, len, /*maxlen=*/NULL_TREE,
3369 size, /*objsize=*/NULL_TREE);
3372 return NULL_RTX;
3375 /* Expand a call EXP to the memcpy builtin.
3376 Return NULL_RTX if we failed; the caller should emit a normal call,
3377 otherwise try to get the result in TARGET, if convenient (and in
3378 mode MODE if that's convenient). */
3380 static rtx
3381 expand_builtin_memcpy (tree exp, rtx target)
3383 if (!validate_arglist (exp,
3384 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3385 return NULL_RTX;
3387 tree dest = CALL_EXPR_ARG (exp, 0);
3388 tree src = CALL_EXPR_ARG (exp, 1);
3389 tree len = CALL_EXPR_ARG (exp, 2);
3391 check_memop_sizes (exp, dest, src, len);
3393 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3394 /*endp=*/ 0);
3397 /* Check a call EXP to the memmove built-in for validity.
3398 Return NULL_RTX on both success and failure. */
3400 static rtx
3401 expand_builtin_memmove (tree exp, rtx)
3403 if (!validate_arglist (exp,
3404 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3405 return NULL_RTX;
3407 tree dest = CALL_EXPR_ARG (exp, 0);
3408 tree src = CALL_EXPR_ARG (exp, 1);
3409 tree len = CALL_EXPR_ARG (exp, 2);
3411 check_memop_sizes (exp, dest, src, len);
3413 return NULL_RTX;
3416 /* Expand an instrumented call EXP to the memcpy builtin.
3417 Return NULL_RTX if we failed; the caller should emit a normal call,
3418 otherwise try to get the result in TARGET, if convenient (and in
3419 mode MODE if that's convenient). */
3421 static rtx
3422 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3424 if (!validate_arglist (exp,
3425 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3426 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3427 INTEGER_TYPE, VOID_TYPE))
3428 return NULL_RTX;
3429 else
3431 tree dest = CALL_EXPR_ARG (exp, 0);
3432 tree src = CALL_EXPR_ARG (exp, 2);
3433 tree len = CALL_EXPR_ARG (exp, 4);
3434 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3435 /*end_p=*/ 0);
3437 /* Return src bounds with the result. */
3438 if (res)
3440 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3441 expand_normal (CALL_EXPR_ARG (exp, 1)));
3442 res = chkp_join_splitted_slot (res, bnd);
3444 return res;
3448 /* Expand a call EXP to the mempcpy builtin.
3449 Return NULL_RTX if we failed; the caller should emit a normal call,
3450 otherwise try to get the result in TARGET, if convenient (and in
3451 mode MODE if that's convenient). If ENDP is 0 return the
3452 destination pointer, if ENDP is 1 return the end pointer ala
3453 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3454 stpcpy. */
3456 static rtx
3457 expand_builtin_mempcpy (tree exp, rtx target)
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3463 tree dest = CALL_EXPR_ARG (exp, 0);
3464 tree src = CALL_EXPR_ARG (exp, 1);
3465 tree len = CALL_EXPR_ARG (exp, 2);
3467 /* Avoid expanding mempcpy into memcpy when the call is determined
3468 to overflow the buffer. This also prevents the same overflow
3469 from being diagnosed again when expanding memcpy. */
3470 if (!check_memop_sizes (exp, dest, src, len))
3471 return NULL_RTX;
3473 return expand_builtin_mempcpy_args (dest, src, len,
3474 target, exp, /*endp=*/ 1);
3477 /* Expand an instrumented call EXP to the mempcpy builtin.
3478 Return NULL_RTX if we failed; the caller should emit a normal call,
3479 otherwise try to get the result in TARGET, if convenient (and in
3480 mode MODE if that's convenient). */
3482 static rtx
3483 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3485 if (!validate_arglist (exp,
3486 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3487 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3488 INTEGER_TYPE, VOID_TYPE))
3489 return NULL_RTX;
3490 else
3492 tree dest = CALL_EXPR_ARG (exp, 0);
3493 tree src = CALL_EXPR_ARG (exp, 2);
3494 tree len = CALL_EXPR_ARG (exp, 4);
3495 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3496 exp, 1);
3498 /* Return DEST bounds with the result. */
3499 if (res)
3501 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3502 expand_normal (CALL_EXPR_ARG (exp, 1)));
3503 res = chkp_join_splitted_slot (res, bnd);
3505 return res;
3509 /* Helper function to do the actual work of expanding the memory copy family
3510 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3511 of memory from SRC to DEST and assign the result to TARGET if convenient.
3512 If ENDP is 0 return the
3513 destination pointer, if ENDP is 1 return the end pointer ala
3514 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3515 stpcpy. */
3517 static rtx
3518 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3519 rtx target, tree exp, int endp)
3521 const char *src_str;
3522 unsigned int src_align = get_pointer_alignment (src);
3523 unsigned int dest_align = get_pointer_alignment (dest);
3524 rtx dest_mem, src_mem, dest_addr, len_rtx;
3525 HOST_WIDE_INT expected_size = -1;
3526 unsigned int expected_align = 0;
3527 unsigned HOST_WIDE_INT min_size;
3528 unsigned HOST_WIDE_INT max_size;
3529 unsigned HOST_WIDE_INT probable_max_size;
3531 /* If DEST is not a pointer type, call the normal function. */
3532 if (dest_align == 0)
3533 return NULL_RTX;
3535 /* Likewise, if SRC is not a pointer type, don't do this
3536 operation in-line. */
3537 if (src_align == 0)
3538 return NULL_RTX;
3540 if (currently_expanding_gimple_stmt)
3541 stringop_block_profile (currently_expanding_gimple_stmt,
3542 &expected_align, &expected_size);
3544 if (expected_align < dest_align)
3545 expected_align = dest_align;
3546 dest_mem = get_memory_rtx (dest, len);
3547 set_mem_align (dest_mem, dest_align);
3548 len_rtx = expand_normal (len);
3549 determine_block_size (len, len_rtx, &min_size, &max_size,
3550 &probable_max_size);
3551 src_str = c_getstr (src);
3553 /* If SRC is a string constant and block move would be done
3554 by pieces, we can avoid loading the string from memory
3555 and only store the computed constants. */
3556 if (src_str
3557 && CONST_INT_P (len_rtx)
3558 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3559 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3560 CONST_CAST (char *, src_str),
3561 dest_align, false))
3563 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3564 builtin_memcpy_read_str,
3565 CONST_CAST (char *, src_str),
3566 dest_align, false, endp);
3567 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3568 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3569 return dest_mem;
3572 src_mem = get_memory_rtx (src, len);
3573 set_mem_align (src_mem, src_align);
3575 /* Copy word part most expediently. */
3576 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3577 CALL_EXPR_TAILCALL (exp)
3578 && (endp == 0 || target == const0_rtx)
3579 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3580 expected_align, expected_size,
3581 min_size, max_size, probable_max_size);
3583 if (dest_addr == 0)
3585 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3586 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3589 if (endp && target != const0_rtx)
3591 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3592 /* stpcpy wants a pointer to the last byte written (the NUL). */
3593 if (endp == 2)
3594 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3597 return dest_addr;
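/* A worked example of the ENDP adjustment above (illustrative values):
   after copying LEN == 4 bytes ("abc" plus its NUL), DEST_ADDR is the
   destination pointer; for ENDP != 0 we add LEN_RTX, giving DEST + 4,
   and for ENDP == 2 we subtract one, giving DEST + 3 -- the address of
   the terminating NUL, as stpcpy requires.  */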
3600 static rtx
3601 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3602 rtx target, tree orig_exp, int endp)
3604 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3605 endp);
3608 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3609 we failed, the caller should emit a normal call, otherwise try to
3610 get the result in TARGET, if convenient. If ENDP is 0 return the
3611 destination pointer, if ENDP is 1 return the end pointer ala
3612 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3613 stpcpy. */
3615 static rtx
3616 expand_movstr (tree dest, tree src, rtx target, int endp)
3618 struct expand_operand ops[3];
3619 rtx dest_mem;
3620 rtx src_mem;
3622 if (!targetm.have_movstr ())
3623 return NULL_RTX;
3625 dest_mem = get_memory_rtx (dest, NULL);
3626 src_mem = get_memory_rtx (src, NULL);
3627 if (!endp)
3629 target = force_reg (Pmode, XEXP (dest_mem, 0));
3630 dest_mem = replace_equiv_address (dest_mem, target);
3633 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3634 create_fixed_operand (&ops[1], dest_mem);
3635 create_fixed_operand (&ops[2], src_mem);
3636 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3637 return NULL_RTX;
3639 if (endp && target != const0_rtx)
3641 target = ops[0].value;
3642 /* movstr is supposed to set end to the address of the NUL
3643 terminator. If the caller requested a mempcpy-like return value,
3644 adjust it. */
3645 if (endp == 1)
3647 rtx tem = plus_constant (GET_MODE (target),
3648 gen_lowpart (GET_MODE (target), target), 1);
3649 emit_move_insn (target, force_operand (tem, NULL_RTX));
3652 return target;
3655 /* Do some very basic size validation of a call to the strcat builtin
3656 given by EXP. Return NULL_RTX to have the built-in expand to a call
3657 to the library function. */
3659 static rtx
3660 expand_builtin_strcat (tree exp, rtx)
3662 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3663 || !warn_stringop_overflow)
3664 return NULL_RTX;
3666 tree dest = CALL_EXPR_ARG (exp, 0);
3667 tree src = CALL_EXPR_ARG (exp, 1);
3669 /* There is no way here to determine the length of the string in
3670 the destination to which the SRC string is being appended so
3671 just diagnose cases when the source string is longer than
3672 the destination object. */
3674 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3676 check_sizes (OPT_Wstringop_overflow_,
3677 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3679 return NULL_RTX;
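/* A hypothetical call the check above is meant to diagnose (names and
   sizes are illustrative only):

     char d[4];
     strcat (d, "abcdefg");

   compute_objsize determines the 4-byte destination and check_sizes
   reports that the 8-byte source (including its NUL) cannot fit, under
   -Wstringop-overflow.  */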
3682 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3683 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3684 try to get the result in TARGET, if convenient (and in mode MODE if that's
3685 convenient). */
3687 static rtx
3688 expand_builtin_strcpy (tree exp, rtx target)
3690 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3691 return NULL_RTX;
3693 tree dest = CALL_EXPR_ARG (exp, 0);
3694 tree src = CALL_EXPR_ARG (exp, 1);
3696 if (warn_stringop_overflow)
3698 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3699 check_sizes (OPT_Wstringop_overflow_,
3700 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3703 return expand_builtin_strcpy_args (dest, src, target);
3706 /* Helper function to do the actual work for expand_builtin_strcpy. The
3707 arguments to the builtin_strcpy call DEST and SRC are broken out
3708 so that this can also be called without constructing an actual CALL_EXPR.
3709 The other arguments and return value are the same as for
3710 expand_builtin_strcpy. */
3712 static rtx
3713 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3715 return expand_movstr (dest, src, target, /*endp=*/0);
3718 /* Expand a call EXP to the stpcpy builtin.
3719 Return NULL_RTX if we failed; the caller should emit a normal call,
3720 otherwise try to get the result in TARGET, if convenient (and in
3721 mode MODE if that's convenient). */
3723 static rtx
3724 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3726 tree dst, src;
3727 location_t loc = EXPR_LOCATION (exp);
3729 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3730 return NULL_RTX;
3732 dst = CALL_EXPR_ARG (exp, 0);
3733 src = CALL_EXPR_ARG (exp, 1);
3735 if (warn_stringop_overflow)
3737 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3738 check_sizes (OPT_Wstringop_overflow_,
3739 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3742 /* If return value is ignored, transform stpcpy into strcpy. */
3743 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3745 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3746 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3747 return expand_expr (result, target, mode, EXPAND_NORMAL);
3749 else
3751 tree len, lenp1;
3752 rtx ret;
3754 /* Ensure we get an actual string whose length can be evaluated at
3755 compile-time, not an expression containing a string. This is
3756 because the latter will potentially produce pessimized code
3757 when used to produce the return value. */
3758 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3759 return expand_movstr (dst, src, target, /*endp=*/2);
3761 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3762 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3763 target, exp, /*endp=*/2);
3765 if (ret)
3766 return ret;
3768 if (TREE_CODE (len) == INTEGER_CST)
3770 rtx len_rtx = expand_normal (len);
3772 if (CONST_INT_P (len_rtx))
3774 ret = expand_builtin_strcpy_args (dst, src, target);
3776 if (ret)
3778 if (! target)
3780 if (mode != VOIDmode)
3781 target = gen_reg_rtx (mode);
3782 else
3783 target = gen_reg_rtx (GET_MODE (ret));
3785 if (GET_MODE (target) != GET_MODE (ret))
3786 ret = gen_lowpart (GET_MODE (target), ret);
3788 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3789 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3790 gcc_assert (ret);
3792 return target;
3797 return expand_movstr (dst, src, target, /*endp=*/2);
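/* Sketch of the constant-source fast path above (illustrative
   arguments): for stpcpy (dst, "hi"), c_strlen gives LEN == 2, so the
   call is expanded as a mempcpy of LENP1 == 3 bytes with ENDP == 2,
   and the value returned is dst + 3 - 1 == dst + 2, the address of
   the copied NUL.  */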
3801 /* Check a call EXP to the stpncpy built-in for validity.
3802 Return NULL_RTX on both success and failure. */
3804 static rtx
3805 expand_builtin_stpncpy (tree exp, rtx)
3807 if (!validate_arglist (exp,
3808 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3809 || !warn_stringop_overflow)
3810 return NULL_RTX;
3812 /* The destination and source of the call. */
3813 tree dest = CALL_EXPR_ARG (exp, 0);
3814 tree src = CALL_EXPR_ARG (exp, 1);
3816 /* The exact number of bytes to write (not the maximum). */
3817 tree len = CALL_EXPR_ARG (exp, 2);
3819 /* The size of the destination object. */
3820 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3822 check_sizes (OPT_Wstringop_overflow_,
3823 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3825 return NULL_RTX;
3828 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3829 bytes from constant string DATA + OFFSET and return it as target
3830 constant. */
3832 rtx
3833 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3834 scalar_int_mode mode)
3836 const char *str = (const char *) data;
3838 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3839 return const0_rtx;
3841 return c_readstr (str + offset, mode);
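/* Example of the callback's behavior (hypothetical input): with
   DATA == "ab" and a 4-byte MODE, OFFSET 0 yields the bytes 'a', 'b',
   0, 0 via c_readstr, while any OFFSET past strlen ("ab") == 2 yields
   const0_rtx outright -- supplying the NUL padding that strncpy
   requires.  */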
3844 /* Helper to check the sizes of sequences and the destination of calls
3845 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3846 success (no overflow or invalid sizes), false otherwise. */
3848 static bool
3849 check_strncat_sizes (tree exp, tree objsize)
3851 tree dest = CALL_EXPR_ARG (exp, 0);
3852 tree src = CALL_EXPR_ARG (exp, 1);
3853 tree maxlen = CALL_EXPR_ARG (exp, 2);
3855 /* Try to determine the range of lengths that the source expression
3856 refers to. */
3857 tree lenrange[2];
3858 get_range_strlen (src, lenrange);
3860 /* Try to verify that the destination is big enough for the shortest
3861 string. */
3863 if (!objsize && warn_stringop_overflow)
3865 /* If it hasn't been provided by __strncat_chk, try to determine
3866 the size of the destination object into which the source is
3867 being copied. */
3868 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3871 /* Add one for the terminating nul. */
3872 tree srclen = (lenrange[0]
3873 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3874 size_one_node)
3875 : NULL_TREE);
3877 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3878 nul so the specified upper bound should never be equal to (or greater
3879 than) the size of the destination. */
3880 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3881 && tree_int_cst_equal (objsize, maxlen))
3883 location_t loc = tree_nonartificial_location (exp);
3884 loc = expansion_point_location_if_in_system_header (loc);
3886 warning_at (loc, OPT_Wstringop_overflow_,
3887 "%K%qD specified bound %E equals destination size",
3888 exp, get_callee_fndecl (exp), maxlen);
3890 return false;
3893 if (!srclen
3894 || (maxlen && tree_fits_uhwi_p (maxlen)
3895 && tree_fits_uhwi_p (srclen)
3896 && tree_int_cst_lt (maxlen, srclen)))
3897 srclen = maxlen;
3899 /* The number of bytes to write is LEN but check_sizes will also
3900 check SRCLEN if LEN's value isn't known. */
3901 return check_sizes (OPT_Wstringop_overflow_,
3902 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
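/* Example of the "bound equals destination size" case above
   (hypothetical buffer):

     char d[8];
     strncat (d, s, sizeof d);

   Because strncat always appends a NUL after the copied bytes, a bound
   equal to the destination size can overflow by at least one byte,
   hence the warning.  */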
3905 /* Similar to expand_builtin_strcat, do some very basic size validation
3906 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3907 the built-in expand to a call to the library function. */
3909 static rtx
3910 expand_builtin_strncat (tree exp, rtx)
3912 if (!validate_arglist (exp,
3913 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3914 || !warn_stringop_overflow)
3915 return NULL_RTX;
3917 tree dest = CALL_EXPR_ARG (exp, 0);
3918 tree src = CALL_EXPR_ARG (exp, 1);
3919 /* The upper bound on the number of bytes to write. */
3920 tree maxlen = CALL_EXPR_ARG (exp, 2);
3921 /* The length of the source sequence. */
3922 tree slen = c_strlen (src, 1);
3924 /* Try to determine the range of lengths that the source expression
3925 refers to. */
3926 tree lenrange[2];
3927 if (slen)
3928 lenrange[0] = lenrange[1] = slen;
3929 else
3930 get_range_strlen (src, lenrange);
3932 /* Try to verify that the destination is big enough for the shortest
3933 string. First try to determine the size of the destination object
3934 into which the source is being copied. */
3935 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3937 /* Add one for the terminating nul. */
3938 tree srclen = (lenrange[0]
3939 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3940 size_one_node)
3941 : NULL_TREE);
3943 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3944 nul so the specified upper bound should never be equal to (or greater
3945 than) the size of the destination. */
3946 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3947 && tree_int_cst_equal (destsize, maxlen))
3949 location_t loc = tree_nonartificial_location (exp);
3950 loc = expansion_point_location_if_in_system_header (loc);
3952 warning_at (loc, OPT_Wstringop_overflow_,
3953 "%K%qD specified bound %E equals destination size",
3954 exp, get_callee_fndecl (exp), maxlen);
3956 return NULL_RTX;
3959 if (!srclen
3960 || (maxlen && tree_fits_uhwi_p (maxlen)
3961 && tree_fits_uhwi_p (srclen)
3962 && tree_int_cst_lt (maxlen, srclen)))
3963 srclen = maxlen;
3965 /* The number of bytes to write is LEN but check_sizes will also
3966 check SRCLEN if LEN's value isn't known. */
3967 check_sizes (OPT_Wstringop_overflow_,
3968 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3970 return NULL_RTX;
3973 /* Helper to check the sizes of sequences and the destination of calls
3974 to __builtin_strncpy (DST, SRC, CNT) and __builtin___strncpy_chk.
3975 Returns true on success (no overflow warning), false otherwise. */
3977 static bool
3978 check_strncpy_sizes (tree exp, tree dst, tree src, tree cnt)
3980 tree dstsize = compute_objsize (dst, warn_stringop_overflow - 1);
3982 if (!check_sizes (OPT_Wstringop_overflow_,
3983 exp, cnt, /*maxlen=*/NULL_TREE, src, dstsize))
3984 return false;
3986 return true;
3989 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3990 NULL_RTX if we failed; the caller should emit a normal call. */
3992 static rtx
3993 expand_builtin_strncpy (tree exp, rtx target)
3995 location_t loc = EXPR_LOCATION (exp);
3997 if (validate_arglist (exp,
3998 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4000 tree dest = CALL_EXPR_ARG (exp, 0);
4001 tree src = CALL_EXPR_ARG (exp, 1);
4002 /* The number of bytes to write (not the maximum). */
4003 tree len = CALL_EXPR_ARG (exp, 2);
4004 /* The length of the source sequence. */
4005 tree slen = c_strlen (src, 1);
4007 check_strncpy_sizes (exp, dest, src, len);
4009 /* We must be passed a constant len and src parameter. */
4010 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4011 return NULL_RTX;
4013 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4015 /* We're required to pad with trailing zeros if the requested
4016 len is greater than strlen(s2)+1. In that case try to
4017 use store_by_pieces; if that fails, punt. */
4018 if (tree_int_cst_lt (slen, len))
4020 unsigned int dest_align = get_pointer_alignment (dest);
4021 const char *p = c_getstr (src);
4022 rtx dest_mem;
4024 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4025 || !can_store_by_pieces (tree_to_uhwi (len),
4026 builtin_strncpy_read_str,
4027 CONST_CAST (char *, p),
4028 dest_align, false))
4029 return NULL_RTX;
4031 dest_mem = get_memory_rtx (dest, len);
4032 store_by_pieces (dest_mem, tree_to_uhwi (len),
4033 builtin_strncpy_read_str,
4034 CONST_CAST (char *, p), dest_align, false, 0);
4035 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4036 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4037 return dest_mem;
4040 return NULL_RTX;
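/* Sketch of the padding path above (illustrative values): for
   strncpy (d, "ab", 5), SLEN + 1 == 3 is less than LEN == 5, so the
   expansion must write 'a', 'b' and three NULs; store_by_pieces emits
   those five bytes directly through builtin_strncpy_read_str rather
   than calling the library function.  */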
4043 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4044 bytes from constant string DATA + OFFSET and return it as target
4045 constant. */
4047 rtx
4048 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4049 scalar_int_mode mode)
4051 const char *c = (const char *) data;
4052 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4054 memset (p, *c, GET_MODE_SIZE (mode));
4056 return c_readstr (p, mode);
4059 /* Callback routine for store_by_pieces. Return the RTL of a register
4060 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4061 char value given in the RTL register data. For example, if mode is
4062 4 bytes wide, return the RTL for 0x01010101*data. */
4064 static rtx
4065 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4066 scalar_int_mode mode)
4068 rtx target, coeff;
4069 size_t size;
4070 char *p;
4072 size = GET_MODE_SIZE (mode);
4073 if (size == 1)
4074 return (rtx) data;
4076 p = XALLOCAVEC (char, size);
4077 memset (p, 1, size);
4078 coeff = c_readstr (p, mode);
4080 target = convert_to_mode (mode, (rtx) data, 1);
4081 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4082 return force_reg (mode, target);
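/* A worked example of the multiplication trick above: in a 4-byte MODE
   with DATA holding the byte value 0x2A, COEFF is read from the 0x01
   bytes as 0x01010101, and 0x2A * 0x01010101 == 0x2A2A2A2A -- four
   copies of the byte, as memset requires.  */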
4085 /* Expand expression EXP, which is a call to the memset builtin. Return
4086 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4087 try to get the result in TARGET, if convenient (and in mode MODE if that's
4088 convenient). */
4090 static rtx
4091 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4093 if (!validate_arglist (exp,
4094 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4095 return NULL_RTX;
4097 tree dest = CALL_EXPR_ARG (exp, 0);
4098 tree val = CALL_EXPR_ARG (exp, 1);
4099 tree len = CALL_EXPR_ARG (exp, 2);
4101 check_memop_sizes (exp, dest, NULL_TREE, len);
4103 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4106 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4107 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4108 try to get the result in TARGET, if convenient (and in mode MODE if that's
4109 convenient). */
4111 static rtx
4112 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4114 if (!validate_arglist (exp,
4115 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4116 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4117 return NULL_RTX;
4118 else
4120 tree dest = CALL_EXPR_ARG (exp, 0);
4121 tree val = CALL_EXPR_ARG (exp, 2);
4122 tree len = CALL_EXPR_ARG (exp, 3);
4123 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4125 /* Return DEST bounds with the result. */
4126 if (res)
4128 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4129 expand_normal (CALL_EXPR_ARG (exp, 1)));
4130 res = chkp_join_splitted_slot (res, bnd);
4132 return res;
4136 /* Helper function to do the actual work for expand_builtin_memset. The
4137 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4138 so that this can also be called without constructing an actual CALL_EXPR.
4139 The other arguments and return value are the same as for
4140 expand_builtin_memset. */
4142 static rtx
4143 expand_builtin_memset_args (tree dest, tree val, tree len,
4144 rtx target, machine_mode mode, tree orig_exp)
4146 tree fndecl, fn;
4147 enum built_in_function fcode;
4148 machine_mode val_mode;
4149 char c;
4150 unsigned int dest_align;
4151 rtx dest_mem, dest_addr, len_rtx;
4152 HOST_WIDE_INT expected_size = -1;
4153 unsigned int expected_align = 0;
4154 unsigned HOST_WIDE_INT min_size;
4155 unsigned HOST_WIDE_INT max_size;
4156 unsigned HOST_WIDE_INT probable_max_size;
4158 dest_align = get_pointer_alignment (dest);
4160 /* If DEST is not a pointer type, don't do this operation in-line. */
4161 if (dest_align == 0)
4162 return NULL_RTX;
4164 if (currently_expanding_gimple_stmt)
4165 stringop_block_profile (currently_expanding_gimple_stmt,
4166 &expected_align, &expected_size);
4168 if (expected_align < dest_align)
4169 expected_align = dest_align;
4171 /* If the LEN parameter is zero, return DEST. */
4172 if (integer_zerop (len))
4174 /* Evaluate and ignore VAL in case it has side-effects. */
4175 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4176 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4179 /* Stabilize the arguments in case we fail. */
4180 dest = builtin_save_expr (dest);
4181 val = builtin_save_expr (val);
4182 len = builtin_save_expr (len);
4184 len_rtx = expand_normal (len);
4185 determine_block_size (len, len_rtx, &min_size, &max_size,
4186 &probable_max_size);
4187 dest_mem = get_memory_rtx (dest, len);
4188 val_mode = TYPE_MODE (unsigned_char_type_node);
4190 if (TREE_CODE (val) != INTEGER_CST)
4192 rtx val_rtx;
4194 val_rtx = expand_normal (val);
4195 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4197 /* Assume that we can memset by pieces if we can store
4198 the coefficients by pieces (in the required modes).
4199 We can't pass builtin_memset_gen_str as that emits RTL. */
4200 c = 1;
4201 if (tree_fits_uhwi_p (len)
4202 && can_store_by_pieces (tree_to_uhwi (len),
4203 builtin_memset_read_str, &c, dest_align,
4204 true))
4206 val_rtx = force_reg (val_mode, val_rtx);
4207 store_by_pieces (dest_mem, tree_to_uhwi (len),
4208 builtin_memset_gen_str, val_rtx, dest_align,
4209 true, 0);
4211 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4212 dest_align, expected_align,
4213 expected_size, min_size, max_size,
4214 probable_max_size))
4215 goto do_libcall;
4217 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4218 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4219 return dest_mem;
4222 if (target_char_cast (val, &c))
4223 goto do_libcall;
4225 if (c)
4227 if (tree_fits_uhwi_p (len)
4228 && can_store_by_pieces (tree_to_uhwi (len),
4229 builtin_memset_read_str, &c, dest_align,
4230 true))
4231 store_by_pieces (dest_mem, tree_to_uhwi (len),
4232 builtin_memset_read_str, &c, dest_align, true, 0);
4233 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4234 gen_int_mode (c, val_mode),
4235 dest_align, expected_align,
4236 expected_size, min_size, max_size,
4237 probable_max_size))
4238 goto do_libcall;
4240 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4241 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4242 return dest_mem;
4245 set_mem_align (dest_mem, dest_align);
4246 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4247 CALL_EXPR_TAILCALL (orig_exp)
4248 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4249 expected_align, expected_size,
4250 min_size, max_size,
4251 probable_max_size);
4253 if (dest_addr == 0)
4255 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4256 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4259 return dest_addr;
4261 do_libcall:
4262 fndecl = get_callee_fndecl (orig_exp);
4263 fcode = DECL_FUNCTION_CODE (fndecl);
4264 if (fcode == BUILT_IN_MEMSET
4265 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4266 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4267 dest, val, len);
4268 else if (fcode == BUILT_IN_BZERO)
4269 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4270 dest, len);
4271 else
4272 gcc_unreachable ();
4273 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4274 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4275 return expand_call (fn, target, target == const0_rtx);
4278 /* Expand expression EXP, which is a call to the bzero builtin. Return
4279 NULL_RTX if we failed; the caller should emit a normal call. */
4281 static rtx
4282 expand_builtin_bzero (tree exp)
4284 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4285 return NULL_RTX;
4287 tree dest = CALL_EXPR_ARG (exp, 0);
4288 tree size = CALL_EXPR_ARG (exp, 1);
4290 check_memop_sizes (exp, dest, NULL_TREE, size);
4292 /* New argument list transforming bzero(ptr x, int y) to
4293 memset(ptr x, int 0, size_t y). This is done this way
4294 so that if it isn't expanded inline, we fall back to
4295 calling bzero instead of memset. */
4297 location_t loc = EXPR_LOCATION (exp);
4299 return expand_builtin_memset_args (dest, integer_zero_node,
4300 fold_convert_loc (loc,
4301 size_type_node, size),
4302 const0_rtx, VOIDmode, exp);
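/* In other words, a call such as bzero (p, n) (hypothetical arguments)
   is expanded exactly like memset (p, 0, (size_t) n), except that
   ORIG_EXP still names bzero, so the do_libcall fallback in
   expand_builtin_memset_args re-emits a bzero call rather than a
   memset.  */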
4305 /* Try to expand cmpstr operation ICODE with the given operands.
4306 Return the result rtx on success, otherwise return null. */
4308 static rtx
4309 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4310 HOST_WIDE_INT align)
4312 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4314 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4315 target = NULL_RTX;
4317 struct expand_operand ops[4];
4318 create_output_operand (&ops[0], target, insn_mode);
4319 create_fixed_operand (&ops[1], arg1_rtx);
4320 create_fixed_operand (&ops[2], arg2_rtx);
4321 create_integer_operand (&ops[3], align);
4322 if (maybe_expand_insn (icode, 4, ops))
4323 return ops[0].value;
4324 return NULL_RTX;
4327 /* Expand expression EXP, which is a call to the memcmp built-in function.
4328 Return NULL_RTX if we failed and the caller should emit a normal call,
4329 otherwise try to get the result in TARGET, if convenient.
4330 RESULT_EQ is true if we can relax the returned value to be either zero
4331 or nonzero, without caring about the sign. */
4333 static rtx
4334 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4336 if (!validate_arglist (exp,
4337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4338 return NULL_RTX;
4340 tree arg1 = CALL_EXPR_ARG (exp, 0);
4341 tree arg2 = CALL_EXPR_ARG (exp, 1);
4342 tree len = CALL_EXPR_ARG (exp, 2);
4344 /* Diagnose calls where the specified length exceeds the size of either
4345 object. */
4346 if (warn_stringop_overflow)
4348 tree size = compute_objsize (arg1, 0);
4349 if (check_sizes (OPT_Wstringop_overflow_,
4350 exp, len, /*maxlen=*/NULL_TREE,
4351 size, /*objsize=*/NULL_TREE))
4353 size = compute_objsize (arg2, 0);
4354 check_sizes (OPT_Wstringop_overflow_,
4355 exp, len, /*maxlen=*/NULL_TREE,
4356 size, /*objsize=*/NULL_TREE);
4360 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4361 location_t loc = EXPR_LOCATION (exp);
4363 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4364 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4366 /* If we don't have POINTER_TYPE, call the function. */
4367 if (arg1_align == 0 || arg2_align == 0)
4368 return NULL_RTX;
4370 rtx arg1_rtx = get_memory_rtx (arg1, len);
4371 rtx arg2_rtx = get_memory_rtx (arg2, len);
4372 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4374 /* Set MEM_SIZE as appropriate. */
4375 if (CONST_INT_P (len_rtx))
4377 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4378 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4381 by_pieces_constfn constfn = NULL;
4383 const char *src_str = c_getstr (arg2);
4384 if (result_eq && src_str == NULL)
4386 src_str = c_getstr (arg1);
4387 if (src_str != NULL)
4388 std::swap (arg1_rtx, arg2_rtx);
4391 /* If SRC is a string constant and the block comparison would be done
4392 by pieces, we can avoid loading the string from memory
4393 and only store the computed constants. */
4394 if (src_str
4395 && CONST_INT_P (len_rtx)
4396 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4397 constfn = builtin_memcpy_read_str;
4399 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4400 TREE_TYPE (len), target,
4401 result_eq, constfn,
4402 CONST_CAST (char *, src_str));
4404 if (result)
4406 /* Return the value in the proper mode for this function. */
4407 if (GET_MODE (result) == mode)
4408 return result;
4410 if (target != 0)
4412 convert_move (target, result, 0);
4413 return target;
4416 return convert_to_mode (mode, result, 0);
4419 return NULL_RTX;
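/* RESULT_EQ corresponds to callers that only test equality, e.g.
   "if (memcmp (a, b, n) == 0)" (a hypothetical use): the expansion may
   then return any nonzero value on a mismatch instead of a value whose
   sign reflects the ordering, enabling cheaper block-compare
   sequences.  */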
4422 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4423 if we failed; the caller should emit a normal call, otherwise try to get
4424 the result in TARGET, if convenient. */
4426 static rtx
4427 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4429 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4430 return NULL_RTX;
4432 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4433 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4434 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4436 rtx arg1_rtx, arg2_rtx;
4437 tree fndecl, fn;
4438 tree arg1 = CALL_EXPR_ARG (exp, 0);
4439 tree arg2 = CALL_EXPR_ARG (exp, 1);
4440 rtx result = NULL_RTX;
4442 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4443 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4445 /* If we don't have POINTER_TYPE, call the function. */
4446 if (arg1_align == 0 || arg2_align == 0)
4447 return NULL_RTX;
4449 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4450 arg1 = builtin_save_expr (arg1);
4451 arg2 = builtin_save_expr (arg2);
4453 arg1_rtx = get_memory_rtx (arg1, NULL);
4454 arg2_rtx = get_memory_rtx (arg2, NULL);
4456 /* Try to call cmpstrsi. */
4457 if (cmpstr_icode != CODE_FOR_nothing)
4458 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4459 MIN (arg1_align, arg2_align));
4461 /* Try to determine at least one length and call cmpstrnsi. */
4462 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4464 tree len;
4465 rtx arg3_rtx;
4467 tree len1 = c_strlen (arg1, 1);
4468 tree len2 = c_strlen (arg2, 1);
4470 if (len1)
4471 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4472 if (len2)
4473 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4475 /* If we don't have a constant length for the first, use the length
4476 of the second, if we know it. We don't require a constant for
4477 this case; some cost analysis could be done if both are available
4478 but neither is constant. For now, assume they're equally cheap,
4479 unless one has side effects. If both strings have constant lengths,
4480 use the smaller. */
4482 if (!len1)
4483 len = len2;
4484 else if (!len2)
4485 len = len1;
4486 else if (TREE_SIDE_EFFECTS (len1))
4487 len = len2;
4488 else if (TREE_SIDE_EFFECTS (len2))
4489 len = len1;
4490 else if (TREE_CODE (len1) != INTEGER_CST)
4491 len = len2;
4492 else if (TREE_CODE (len2) != INTEGER_CST)
4493 len = len1;
4494 else if (tree_int_cst_lt (len1, len2))
4495 len = len1;
4496 else
4497 len = len2;
4499 /* If both arguments have side effects, we cannot optimize. */
4500 if (len && !TREE_SIDE_EFFECTS (len))
4502 arg3_rtx = expand_normal (len);
4503 result = expand_cmpstrn_or_cmpmem
4504 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4505 arg3_rtx, MIN (arg1_align, arg2_align));
4509 if (result)
4511 /* Return the value in the proper mode for this function. */
4512 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4513 if (GET_MODE (result) == mode)
4514 return result;
4515 if (target == 0)
4516 return convert_to_mode (mode, result, 0);
4517 convert_move (target, result, 0);
4518 return target;
4521 /* Expand the library call ourselves using a stabilized argument
4522 list to avoid re-evaluating the function's arguments twice. */
4523 fndecl = get_callee_fndecl (exp);
4524 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4525 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4526 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4527 return expand_call (fn, target, target == const0_rtx);
4529 return NULL_RTX;
4532 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4533 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4534 the result in TARGET, if convenient. */
4536 static rtx
4537 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4538 ATTRIBUTE_UNUSED machine_mode mode)
4540 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4542 if (!validate_arglist (exp,
4543 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4544 return NULL_RTX;
4546 /* If c_strlen can determine an expression for one of the string
4547 lengths, and it doesn't have side effects, then emit cmpstrnsi
4548 using length MIN(strlen(string)+1, arg3). */
4549 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4550 if (cmpstrn_icode != CODE_FOR_nothing)
4552 tree len, len1, len2, len3;
4553 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4554 rtx result;
4555 tree fndecl, fn;
4556 tree arg1 = CALL_EXPR_ARG (exp, 0);
4557 tree arg2 = CALL_EXPR_ARG (exp, 1);
4558 tree arg3 = CALL_EXPR_ARG (exp, 2);
4560 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4561 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4563 len1 = c_strlen (arg1, 1);
4564 len2 = c_strlen (arg2, 1);
4566 if (len1)
4567 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4568 if (len2)
4569 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4571 len3 = fold_convert_loc (loc, sizetype, arg3);
4573 /* If we don't have a constant length for the first, use the length
4574 of the second, if we know it. If neither string is constant length,
4575 use the given length argument. We don't require a constant for
4576 this case; some cost analysis could be done if both are available
4577 but neither is constant. For now, assume they're equally cheap,
4578 unless one has side effects. If both strings have constant lengths,
4579 use the smaller. */
4581 if (!len1 && !len2)
4582 len = len3;
4583 else if (!len1)
4584 len = len2;
4585 else if (!len2)
4586 len = len1;
4587 else if (TREE_SIDE_EFFECTS (len1))
4588 len = len2;
4589 else if (TREE_SIDE_EFFECTS (len2))
4590 len = len1;
4591 else if (TREE_CODE (len1) != INTEGER_CST)
4592 len = len2;
4593 else if (TREE_CODE (len2) != INTEGER_CST)
4594 len = len1;
4595 else if (tree_int_cst_lt (len1, len2))
4596 len = len1;
4597 else
4598 len = len2;
4600 /* If we are not using the given length, we must incorporate it here.
4601 The actual new length parameter will be MIN(len,arg3) in this case. */
4602 if (len != len3)
4603 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4604 arg1_rtx = get_memory_rtx (arg1, len);
4605 arg2_rtx = get_memory_rtx (arg2, len);
4606 arg3_rtx = expand_normal (len);
4607 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4608 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4609 MIN (arg1_align, arg2_align));
4610 if (result)
4612 /* Return the value in the proper mode for this function. */
4613 mode = TYPE_MODE (TREE_TYPE (exp));
4614 if (GET_MODE (result) == mode)
4615 return result;
4616 if (target == 0)
4617 return convert_to_mode (mode, result, 0);
4618 convert_move (target, result, 0);
4619 return target;
4622 /* Expand the library call ourselves using a stabilized argument
4623 list to avoid re-evaluating the function's arguments twice. */
4624 fndecl = get_callee_fndecl (exp);
4625 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4626 arg1, arg2, len);
4627 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4628 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4629 return expand_call (fn, target, target == const0_rtx);
4631 return NULL_RTX;
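/* Example of the length selection above (hypothetical call): for
   strncmp (s1, "ab", n) with s1 of unknown length, LEN2 == 3 is the
   only known constant, so the comparison is emitted with length
   MIN (3, n) -- the MIN_EXPR built just before expansion.  */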
4634 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4635 if that's convenient. */
4637 rtx
4638 expand_builtin_saveregs (void)
4640 rtx val;
4641 rtx_insn *seq;
4643 /* Don't do __builtin_saveregs more than once in a function.
4644 Save the result of the first call and reuse it. */
4645 if (saveregs_value != 0)
4646 return saveregs_value;
4648 /* When this function is called, it means that registers must be
4649 saved on entry to this function. So we migrate the call to the
4650 first insn of this function. */
4652 start_sequence ();
4654 /* Do whatever the machine needs done in this case. */
4655 val = targetm.calls.expand_builtin_saveregs ();
4657 seq = get_insns ();
4658 end_sequence ();
4660 saveregs_value = val;
4662 /* Put the insns after the NOTE that starts the function. If this
4663 is inside a start_sequence, make the outer-level insn chain current, so
4664 the code is placed at the start of the function. */
4665 push_topmost_sequence ();
4666 emit_insn_after (seq, entry_of_function ());
4667 pop_topmost_sequence ();
4669 return val;
4672 /* Expand a call to __builtin_next_arg. */
4674 static rtx
4675 expand_builtin_next_arg (void)
4677 /* Checking arguments is already done in fold_builtin_next_arg,
4678 which must be called before this function. */
4679 return expand_binop (ptr_mode, add_optab,
4680 crtl->args.internal_arg_pointer,
4681 crtl->args.arg_offset_rtx,
4682 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4685 /* Make it easier for the backends by protecting the valist argument
4686 from multiple evaluations. */
4688 static tree
4689 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4691 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4693 /* The current way of determining the type of valist is completely
4694 bogus. We should have the information on the va builtin instead. */
4695 if (!vatype)
4696 vatype = targetm.fn_abi_va_list (cfun->decl);
4698 if (TREE_CODE (vatype) == ARRAY_TYPE)
4700 if (TREE_SIDE_EFFECTS (valist))
4701 valist = save_expr (valist);
4703 /* For this case, the backends will be expecting a pointer to
4704 vatype, but it's possible we've actually been given an array
4705 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4706 So fix it. */
4707 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4709 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4710 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4713 else
4715 tree pt = build_pointer_type (vatype);
4717 if (! needs_lvalue)
4719 if (! TREE_SIDE_EFFECTS (valist))
4720 return valist;
4722 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4723 TREE_SIDE_EFFECTS (valist) = 1;
4726 if (TREE_SIDE_EFFECTS (valist))
4727 valist = save_expr (valist);
4728 valist = fold_build2_loc (loc, MEM_REF,
4729 vatype, valist, build_int_cst (pt, 0));
4732 return valist;
4735 /* The "standard" definition of va_list is void*. */
4737 tree
4738 std_build_builtin_va_list (void)
4740 return ptr_type_node;
4743 /* The "standard" abi va_list is va_list_type_node. */
4745 tree
4746 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4748 return va_list_type_node;
4751 /* The "standard" type of va_list is va_list_type_node. */
4753 tree
4754 std_canonical_va_list_type (tree type)
4756 tree wtype, htype;
4758 wtype = va_list_type_node;
4759 htype = type;
4761 if (TREE_CODE (wtype) == ARRAY_TYPE)
4763 /* If va_list is an array type, the argument may have decayed
4764 to a pointer type, e.g. by being passed to another function.
4765 In that case, unwrap both types so that we can compare the
4766 underlying records. */
4767 if (TREE_CODE (htype) == ARRAY_TYPE
4768 || POINTER_TYPE_P (htype))
4770 wtype = TREE_TYPE (wtype);
4771 htype = TREE_TYPE (htype);
4774 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4775 return va_list_type_node;
4777 return NULL_TREE;
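/* Concretely (using the x86-64 ABI purely as an illustration): va_list
   there is "struct __va_list_tag [1]", so a va_list function parameter
   decays to "struct __va_list_tag *"; unwrapping one level from both
   the canonical array type and the decayed pointer type leaves the
   same record, and the main variants match.  */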
4780 /* The "standard" implementation of va_start: just assign `nextarg' to
4781 the variable. */
4783 void
4784 std_expand_builtin_va_start (tree valist, rtx nextarg)
4786 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4787 convert_move (va_r, nextarg, 0);
4789 /* We do not have any valid bounds for the pointer, so
4790 just store zero bounds for it. */
4791 if (chkp_function_instrumented_p (current_function_decl))
4792 chkp_expand_bounds_reset_for_mem (valist,
4793 make_tree (TREE_TYPE (valist),
4794 nextarg));
4797 /* Expand EXP, a call to __builtin_va_start. */
4799 static rtx
4800 expand_builtin_va_start (tree exp)
4802 rtx nextarg;
4803 tree valist;
4804 location_t loc = EXPR_LOCATION (exp);
4806 if (call_expr_nargs (exp) < 2)
4808 error_at (loc, "too few arguments to function %<va_start%>");
4809 return const0_rtx;
4812 if (fold_builtin_next_arg (exp, true))
4813 return const0_rtx;
4815 nextarg = expand_builtin_next_arg ();
4816 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4818 if (targetm.expand_builtin_va_start)
4819 targetm.expand_builtin_va_start (valist, nextarg);
4820 else
4821 std_expand_builtin_va_start (valist, nextarg);
4823 return const0_rtx;
4826 /* Expand EXP, a call to __builtin_va_end. */
4828 static rtx
4829 expand_builtin_va_end (tree exp)
4831 tree valist = CALL_EXPR_ARG (exp, 0);
4833 /* Evaluate for side effects, if needed. I hate macros that don't
4834 do that. */
4835 if (TREE_SIDE_EFFECTS (valist))
4836 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4838 return const0_rtx;
4841 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4842 builtin rather than just as an assignment in stdarg.h because of the
4843 nastiness of array-type va_list types. */
4845 static rtx
4846 expand_builtin_va_copy (tree exp)
4848 tree dst, src, t;
4849 location_t loc = EXPR_LOCATION (exp);
4851 dst = CALL_EXPR_ARG (exp, 0);
4852 src = CALL_EXPR_ARG (exp, 1);
4854 dst = stabilize_va_list_loc (loc, dst, 1);
4855 src = stabilize_va_list_loc (loc, src, 0);
4857 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4859 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4861 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4862 TREE_SIDE_EFFECTS (t) = 1;
4863 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4865 else
4867 rtx dstb, srcb, size;
4869 /* Evaluate to pointers. */
4870 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4871 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4872 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4873 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4875 dstb = convert_memory_address (Pmode, dstb);
4876 srcb = convert_memory_address (Pmode, srcb);
4878 /* "Dereference" to BLKmode memories. */
4879 dstb = gen_rtx_MEM (BLKmode, dstb);
4880 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4881 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4882 srcb = gen_rtx_MEM (BLKmode, srcb);
4883 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4884 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4886 /* Copy. */
4887 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4890 return const0_rtx;
4893 /* Expand a call to one of the builtin functions __builtin_frame_address or
4894 __builtin_return_address. */
4896 static rtx
4897 expand_builtin_frame_address (tree fndecl, tree exp)
4899 /* The argument must be a nonnegative integer constant.
4900 It counts the number of frames to scan up the stack.
4901 The value is either the frame pointer value or the return
4902 address saved in that frame. */
4903 if (call_expr_nargs (exp) == 0)
4904 /* Warning about missing arg was already issued. */
4905 return const0_rtx;
4906 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4908 error ("invalid argument to %qD", fndecl);
4909 return const0_rtx;
4911 else
4913 /* Number of frames to scan up the stack. */
4914 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4916 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4918 /* Some ports cannot access arbitrary stack frames. */
4919 if (tem == NULL)
4921 warning (0, "unsupported argument to %qD", fndecl);
4922 return const0_rtx;
4925 if (count)
4927 /* Warn since no effort is made to ensure that any frame
4928 beyond the current one exists or can be safely reached. */
4929 warning (OPT_Wframe_address, "calling %qD with "
4930 "a nonzero argument is unsafe", fndecl);
4933 /* For __builtin_frame_address, return what we've got. */
4934 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4935 return tem;
4937 if (!REG_P (tem)
4938 && ! CONSTANT_P (tem))
4939 tem = copy_addr_to_reg (tem);
4940 return tem;
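/* For instance, __builtin_return_address (0) (a hypothetical use)
   expands here with COUNT == 0 and draws no warning, whereas
   __builtin_frame_address (2) walks two frames up and triggers the
   -Wframe-address warning above, since nothing guarantees that such a
   frame exists or is reachable.  */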
4944 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4945 failed and the caller should emit a normal call. */
4947 static rtx
4948 expand_builtin_alloca (tree exp)
4950 rtx op0;
4951 rtx result;
4952 unsigned int align;
4953 tree fndecl = get_callee_fndecl (exp);
4954 HOST_WIDE_INT max_size;
4955 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4956 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4957 bool valid_arglist
4958 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4959 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4960 VOID_TYPE)
4961 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4962 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4963 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4965 if (!valid_arglist)
4966 return NULL_RTX;
4968 if ((alloca_for_var && !warn_vla_limit)
4969 || (!alloca_for_var && !warn_alloca_limit))
4971 /* The -Walloca-larger-than and -Wvla-larger-than settings override
4972 the more general -Walloc-size-larger-than, so unless either of
4973 the former options is specified, check the alloca arguments for
4974 overflow. */
4975 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4976 int idx[] = { 0, -1 };
4977 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4980 /* Compute the argument. */
4981 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4983 /* Compute the alignment. */
4984 align = (fcode == BUILT_IN_ALLOCA
4985 ? BIGGEST_ALIGNMENT
4986 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4988 /* Compute the maximum size. */
4989 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4990 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4991 : -1);
4993 /* Allocate the desired space. If the allocation stems from the declaration
4994 of a variable-sized object, it cannot accumulate. */
4995 result
4996 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4997 result = convert_memory_address (ptr_mode, result);
4999 return result;
5002 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second
5003 argument of the call with virtual_stack_dynamic_rtx, because the asan
5004 pass emits a dummy value for that parameter, relying on this function
5005 to perform the replacement. See the motivation in the comment on
5006 handle_builtin_stack_restore. */
5008 static rtx
5009 expand_asan_emit_allocas_unpoison (tree exp)
5011 tree arg0 = CALL_EXPR_ARG (exp, 0);
5012 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5013 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5014 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5015 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5016 top, ptr_mode, bot, ptr_mode);
5017 return ret;
5020 /* Expand a call to bswap builtin in EXP.
5021 Return NULL_RTX if a normal call should be emitted rather than expanding the
5022 function in-line. If convenient, the result should be placed in TARGET.
5023 SUBTARGET may be used as the target for computing one of EXP's operands. */
5025 static rtx
5026 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5027 rtx subtarget)
5029 tree arg;
5030 rtx op0;
5032 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5033 return NULL_RTX;
5035 arg = CALL_EXPR_ARG (exp, 0);
5036 op0 = expand_expr (arg,
5037 subtarget && GET_MODE (subtarget) == target_mode
5038 ? subtarget : NULL_RTX,
5039 target_mode, EXPAND_NORMAL);
5040 if (GET_MODE (op0) != target_mode)
5041 op0 = convert_to_mode (target_mode, op0, 1);
5043 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5045 gcc_assert (target);
5047 return convert_to_mode (target_mode, target, 1);
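/* Example: __builtin_bswap32 (0x12345678) evaluates to 0x78563412; the
   code above simply extends the operand to TARGET_MODE, funnels it
   through bswap_optab, and converts the result back.  */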
5050 /* Expand a call to a unary builtin in EXP.
5051 Return NULL_RTX if a normal call should be emitted rather than expanding the
5052 function in-line. If convenient, the result should be placed in TARGET.
5053 SUBTARGET may be used as the target for computing one of EXP's operands. */
5055 static rtx
5056 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5057 rtx subtarget, optab op_optab)
5059 rtx op0;
5061 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5062 return NULL_RTX;
5064 /* Compute the argument. */
5065 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5066 (subtarget
5067 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5068 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5069 VOIDmode, EXPAND_NORMAL);
5070 /* Compute op, into TARGET if possible.
5071 Set TARGET to wherever the result comes back. */
5072 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5073 op_optab, op0, target, op_optab != clrsb_optab);
5074 gcc_assert (target);
5076 return convert_to_mode (target_mode, target, 0);
5079 /* Expand a call to __builtin_expect. We just return our argument
5080 as the builtin_expect semantics should already have been handled by
5081 the tree branch prediction pass. */
5083 static rtx
5084 expand_builtin_expect (tree exp, rtx target)
5086 tree arg;
5088 if (call_expr_nargs (exp) < 2)
5089 return const0_rtx;
5090 arg = CALL_EXPR_ARG (exp, 0);
5092 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5093 /* When guessing was done, the hints should be already stripped away. */
5094 gcc_assert (!flag_guess_branch_prob
5095 || optimize == 0 || seen_error ());
5096 return target;
5099 /* Expand a call to __builtin_assume_aligned. We just return our first
5100 argument, as the builtin_assume_aligned semantics should already have
5101 been handled by CCP. */
5103 static rtx
5104 expand_builtin_assume_aligned (tree exp, rtx target)
5106 if (call_expr_nargs (exp) < 2)
5107 return const0_rtx;
5108 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5109 EXPAND_NORMAL);
5110 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5111 && (call_expr_nargs (exp) < 3
5112 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5113 return target;
5116 void
5117 expand_builtin_trap (void)
5119 if (targetm.have_trap ())
5121 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5122 /* For trap insns, when not accumulating outgoing args, force a
5123 REG_ARGS_SIZE note to prevent crossjumping of calls with
5124 different arg sizes. */
5125 if (!ACCUMULATE_OUTGOING_ARGS)
5126 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5128 else
5130 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5131 tree call_expr = build_call_expr (fn, 0);
5132 expand_call (call_expr, NULL_RTX, false);
5135 emit_barrier ();
5138 /* Expand a call to __builtin_unreachable. We do nothing except emit
5139 a barrier saying that control flow will not pass here.
5141 It is the responsibility of the program being compiled to ensure
5142 that control flow never reaches __builtin_unreachable. */
5143 static void
5144 expand_builtin_unreachable (void)
5146 emit_barrier ();
5149 /* Expand EXP, a call to fabs, fabsf or fabsl.
5150 Return NULL_RTX if a normal call should be emitted rather than expanding
5151 the function inline. If convenient, the result should be placed
5152 in TARGET. SUBTARGET may be used as the target for computing
5153 the operand. */
5155 static rtx
5156 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5158 machine_mode mode;
5159 tree arg;
5160 rtx op0;
5162 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5163 return NULL_RTX;
5165 arg = CALL_EXPR_ARG (exp, 0);
5166 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5167 mode = TYPE_MODE (TREE_TYPE (arg));
5168 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5169 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5172 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5173 Return NULL_RTX if a normal call should be emitted rather than expanding the
5174 function inline. If convenient, the result should be placed in TARGET.
5175 SUBTARGET may be used as the target for computing the operand. */
5177 static rtx
5178 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5180 rtx op0, op1;
5181 tree arg;
5183 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5184 return NULL_RTX;
5186 arg = CALL_EXPR_ARG (exp, 0);
5187 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5189 arg = CALL_EXPR_ARG (exp, 1);
5190 op1 = expand_normal (arg);
5192 return expand_copysign (op0, op1, target);
5195 /* Expand a call to __builtin___clear_cache. */
5197 static rtx
5198 expand_builtin___clear_cache (tree exp)
5200 if (!targetm.code_for_clear_cache)
5202 #ifdef CLEAR_INSN_CACHE
5203 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5204 does something. Just do the default expansion to a call to
5205 __clear_cache(). */
5206 return NULL_RTX;
5207 #else
5208 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5209 does nothing. There is no need to call it. Do nothing. */
5210 return const0_rtx;
5211 #endif /* CLEAR_INSN_CACHE */
5214 /* We have a "clear_cache" insn, and it will handle everything. */
5215 tree begin, end;
5216 rtx begin_rtx, end_rtx;
5218 /* We must not expand to a library call. If we did, any
5219 fallback library function in libgcc that might contain a call to
5220 __builtin___clear_cache() would recurse infinitely. */
5221 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5223 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5224 return const0_rtx;
5227 if (targetm.have_clear_cache ())
5229 struct expand_operand ops[2];
5231 begin = CALL_EXPR_ARG (exp, 0);
5232 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5234 end = CALL_EXPR_ARG (exp, 1);
5235 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5237 create_address_operand (&ops[0], begin_rtx);
5238 create_address_operand (&ops[1], end_rtx);
5239 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5240 return const0_rtx;
5242 return const0_rtx;
5245 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5247 static rtx
5248 round_trampoline_addr (rtx tramp)
5250 rtx temp, addend, mask;
5252 /* If we don't need too much alignment, we'll have been guaranteed
5253 proper alignment by get_trampoline_type. */
5254 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5255 return tramp;
5257 /* Round address up to desired boundary. */
5258 temp = gen_reg_rtx (Pmode);
5259 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5260 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5262 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5263 temp, 0, OPTAB_LIB_WIDEN);
5264 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5265 temp, 0, OPTAB_LIB_WIDEN);
5267 return tramp;
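/* A worked example of the rounding above (illustrative numbers): with
   TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes), ADDEND == 15 and
   MASK == -16, so TRAMP == 0x1003 becomes (0x1003 + 15) & -16
   == 0x1010, the next 16-byte boundary.  */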
5270 static rtx
5271 expand_builtin_init_trampoline (tree exp, bool onstack)
5273 tree t_tramp, t_func, t_chain;
5274 rtx m_tramp, r_tramp, r_chain, tmp;
5276 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5277 POINTER_TYPE, VOID_TYPE))
5278 return NULL_RTX;
5280 t_tramp = CALL_EXPR_ARG (exp, 0);
5281 t_func = CALL_EXPR_ARG (exp, 1);
5282 t_chain = CALL_EXPR_ARG (exp, 2);
5284 r_tramp = expand_normal (t_tramp);
5285 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5286 MEM_NOTRAP_P (m_tramp) = 1;
5288 /* If ONSTACK, the TRAMP argument should be the address of a field
5289 within the local function's FRAME decl. Either way, let's see if
5290 we can fill in the MEM_ATTRs for this memory. */
5291 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5292 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5294 /* Creator of a heap trampoline is responsible for making sure the
5295 address is aligned to at least STACK_BOUNDARY. Normally malloc
5296 will ensure this anyhow. */
5297 tmp = round_trampoline_addr (r_tramp);
5298 if (tmp != r_tramp)
5300 m_tramp = change_address (m_tramp, BLKmode, tmp);
5301 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5302 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5305 /* The FUNC argument should be the address of the nested function.
5306 Extract the actual function decl to pass to the hook. */
5307 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5308 t_func = TREE_OPERAND (t_func, 0);
5309 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5311 r_chain = expand_normal (t_chain);
5313 /* Generate insns to initialize the trampoline. */
5314 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5316 if (onstack)
5318 trampolines_created = 1;
5320 if (targetm.calls.custom_function_descriptors != 0)
5321 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5322 "trampoline generated for nested function %qD", t_func);
5325 return const0_rtx;
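/* Illustrative sketch (not part of GCC): taking the address of a GNU C
   nested function that refers to its enclosing frame is what leads the
   front end to this expander, e.g.

     int
     outer (int x)
     {
       int inner (int y) { return x + y; }   // needs the static chain
       int (*fp) (int) = inner;              // address taken: trampoline
       return fp (1);
     }

   and -Wtrampolines reports the generated trampoline via the warning
   above. */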
5328 static rtx
5329 expand_builtin_adjust_trampoline (tree exp)
5331 rtx tramp;
5333 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5334 return NULL_RTX;
5336 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5337 tramp = round_trampoline_addr (tramp);
5338 if (targetm.calls.trampoline_adjust_address)
5339 tramp = targetm.calls.trampoline_adjust_address (tramp);
5341 return tramp;
5344 /* Expand a call to the builtin descriptor initialization routine.
5345 A descriptor is made up of a couple of pointers to the static
5346 chain and the code entry in this order. */
5348 static rtx
5349 expand_builtin_init_descriptor (tree exp)
5351 tree t_descr, t_func, t_chain;
5352 rtx m_descr, r_descr, r_func, r_chain;
5354 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5355 VOID_TYPE))
5356 return NULL_RTX;
5358 t_descr = CALL_EXPR_ARG (exp, 0);
5359 t_func = CALL_EXPR_ARG (exp, 1);
5360 t_chain = CALL_EXPR_ARG (exp, 2);
5362 r_descr = expand_normal (t_descr);
5363 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5364 MEM_NOTRAP_P (m_descr) = 1;
5366 r_func = expand_normal (t_func);
5367 r_chain = expand_normal (t_chain);
5369 /* Generate insns to initialize the descriptor. */
5370 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5371 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5372 POINTER_SIZE / BITS_PER_UNIT), r_func);
5374 return const0_rtx;
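/* Illustrative layout note (assuming a byte-addressed target): the two
   moves above initialize a descriptor equivalent to

     struct descriptor
     {
       void *chain;   // static chain pointer, at offset 0
       void *entry;   // code entry point, at POINTER_SIZE / BITS_PER_UNIT
     };

   The "function pointer" handed out for such a descriptor is deliberately
   misaligned by expand_builtin_adjust_descriptor below, so calls can tell
   it apart from a plain code address at run time. */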
5377 /* Expand a call to the builtin descriptor adjustment routine. */
5379 static rtx
5380 expand_builtin_adjust_descriptor (tree exp)
5382 rtx tramp;
5384 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5385 return NULL_RTX;
5387 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5389 /* Unalign the descriptor to allow runtime identification. */
5390 tramp = plus_constant (ptr_mode, tramp,
5391 targetm.calls.custom_function_descriptors);
5393 return force_operand (tramp, NULL_RTX);
5396 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5397 function. The function first checks whether the back end provides
5398 an insn to implement signbit for the respective mode. If not, it
5399 checks whether the floating point format of the value is such that
5400 the sign bit can be extracted; formats without one fall back to ARG < 0.0.
5401 EXP is the expression that is a call to the builtin function; if
5402 convenient, the result should be placed in TARGET. */
5403 static rtx
5404 expand_builtin_signbit (tree exp, rtx target)
5406 const struct real_format *fmt;
5407 scalar_float_mode fmode;
5408 scalar_int_mode rmode, imode;
5409 tree arg;
5410 int word, bitpos;
5411 enum insn_code icode;
5412 rtx temp;
5413 location_t loc = EXPR_LOCATION (exp);
5415 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5416 return NULL_RTX;
5418 arg = CALL_EXPR_ARG (exp, 0);
5419 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5420 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5421 fmt = REAL_MODE_FORMAT (fmode);
5423 arg = builtin_save_expr (arg);
5425 /* Expand the argument yielding a RTX expression. */
5426 temp = expand_normal (arg);
5428 /* Check if the back end provides an insn that handles signbit for the
5429 argument's mode. */
5430 icode = optab_handler (signbit_optab, fmode);
5431 if (icode != CODE_FOR_nothing)
5433 rtx_insn *last = get_last_insn ();
5434 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5435 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5436 return target;
5437 delete_insns_since (last);
5440 /* For floating point formats without a sign bit, implement signbit
5441 as "ARG < 0.0". */
5442 bitpos = fmt->signbit_ro;
5443 if (bitpos < 0)
5445 /* But we can't do this if the format supports signed zero. */
5446 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5448 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5449 build_real (TREE_TYPE (arg), dconst0));
5450 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5453 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5455 imode = int_mode_for_mode (fmode).require ();
5456 temp = gen_lowpart (imode, temp);
5458 else
5460 imode = word_mode;
5461 /* Handle targets with different FP word orders. */
5462 if (FLOAT_WORDS_BIG_ENDIAN)
5463 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5464 else
5465 word = bitpos / BITS_PER_WORD;
5466 temp = operand_subword_force (temp, word, fmode);
5467 bitpos = bitpos % BITS_PER_WORD;
5470 /* Force the intermediate word_mode (or narrower) result into a
5471 register. This avoids attempting to create paradoxical SUBREGs
5472 of floating point modes below. */
5473 temp = force_reg (imode, temp);
5475 /* If the bitpos is within the "result mode" lowpart, the operation
5476 can be implemented with a single bitwise AND. Otherwise, we need
5477 a right shift and an AND. */
5479 if (bitpos < GET_MODE_BITSIZE (rmode))
5481 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5483 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5484 temp = gen_lowpart (rmode, temp);
5485 temp = expand_binop (rmode, and_optab, temp,
5486 immed_wide_int_const (mask, rmode),
5487 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5489 else
5491 /* Perform a logical right shift to place the signbit in the least
5492 significant bit, then truncate the result to the desired mode
5493 and mask just this bit. */
5494 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5495 temp = gen_lowpart (rmode, temp);
5496 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5497 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5500 return temp;
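/* Illustrative sketch of the bit-extraction path above (not part of
   GCC): for IEEE binary32, signbit_ro is 31, so with a 32-bit result
   mode the expansion behaves like

     unsigned int u;
     memcpy (&u, &f, sizeof u);    // view the float's bit pattern
     return u & 0x80000000u;       // mask the sign bit in place

   while a sign bit outside the result mode's lowpart is instead shifted
   down to bit 0 and masked with 1. */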
5503 /* Expand fork or exec calls. TARGET is the desired target of the
5504 call. EXP is the call. FN is the
5505 identifier of the actual function. IGNORE is nonzero if the
5506 value is to be ignored. */
5508 static rtx
5509 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5511 tree id, decl;
5512 tree call;
5514 /* If we are not profiling, just call the function. */
5515 if (!profile_arc_flag)
5516 return NULL_RTX;
5518 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5519 compiler, so the code does not diverge, and the wrapper may run whatever
5520 code is necessary to keep the profiling sane. */
5522 switch (DECL_FUNCTION_CODE (fn))
5524 case BUILT_IN_FORK:
5525 id = get_identifier ("__gcov_fork");
5526 break;
5528 case BUILT_IN_EXECL:
5529 id = get_identifier ("__gcov_execl");
5530 break;
5532 case BUILT_IN_EXECV:
5533 id = get_identifier ("__gcov_execv");
5534 break;
5536 case BUILT_IN_EXECLP:
5537 id = get_identifier ("__gcov_execlp");
5538 break;
5540 case BUILT_IN_EXECLE:
5541 id = get_identifier ("__gcov_execle");
5542 break;
5544 case BUILT_IN_EXECVP:
5545 id = get_identifier ("__gcov_execvp");
5546 break;
5548 case BUILT_IN_EXECVE:
5549 id = get_identifier ("__gcov_execve");
5550 break;
5552 default:
5553 gcc_unreachable ();
5556 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5557 FUNCTION_DECL, id, TREE_TYPE (fn));
5558 DECL_EXTERNAL (decl) = 1;
5559 TREE_PUBLIC (decl) = 1;
5560 DECL_ARTIFICIAL (decl) = 1;
5561 TREE_NOTHROW (decl) = 1;
5562 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5563 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5564 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5565 return expand_call (call, target, ignore);
5570 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5571 the pointer in these functions is void*, the tree optimizers may remove
5572 casts. The mode computed in expand_builtin isn't reliable either, due
5573 to __sync_bool_compare_and_swap.
5575 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5576 group of builtins. This gives us log2 of the mode size. */
5578 static inline machine_mode
5579 get_builtin_sync_mode (int fcode_diff)
5581 /* The size is not negotiable, so ask not to get BLKmode in return
5582 if the target indicates that a smaller size would be better. */
5583 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
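/* Worked example (illustrative): for __sync_fetch_and_add_4,
   FCODE_DIFF is fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, giving
   int_mode_for_size (8 << 2, 0) with BITS_PER_UNIT == 8, i.e. the
   32-bit integer mode, independent of any casts the optimizers applied
   to the void * argument. */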
5586 /* Expand the memory expression LOC and return the appropriate memory operand
5587 for the builtin_sync operations. */
5589 static rtx
5590 get_builtin_sync_mem (tree loc, machine_mode mode)
5592 rtx addr, mem;
5594 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5595 addr = convert_memory_address (Pmode, addr);
5597 /* Note that we explicitly do not want any alias information for this
5598 memory, so that we kill all other live memories. Otherwise we don't
5599 satisfy the full barrier semantics of the intrinsic. */
5600 mem = validize_mem (gen_rtx_MEM (mode, addr));
5602 /* The alignment needs to be at least that of the mode. */
5603 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5604 get_pointer_alignment (loc)));
5605 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5606 MEM_VOLATILE_P (mem) = 1;
5608 return mem;
5611 /* Make sure an argument is in the right mode.
5612 EXP is the tree argument.
5613 MODE is the mode it should be in. */
5615 static rtx
5616 expand_expr_force_mode (tree exp, machine_mode mode)
5618 rtx val;
5619 machine_mode old_mode;
5621 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5622 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5623 of CONST_INTs, where we know the old_mode only from the call argument. */
5625 old_mode = GET_MODE (val);
5626 if (old_mode == VOIDmode)
5627 old_mode = TYPE_MODE (TREE_TYPE (exp));
5628 val = convert_modes (mode, old_mode, val, 1);
5629 return val;
5633 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5634 EXP is the CALL_EXPR. CODE is the rtx code
5635 that corresponds to the arithmetic or logical operation from the name;
5636 an exception here is that NOT actually means NAND. TARGET is an optional
5637 place for us to store the results; AFTER is true if this is the
5638 fetch_and_xxx form. */
5640 static rtx
5641 expand_builtin_sync_operation (machine_mode mode, tree exp,
5642 enum rtx_code code, bool after,
5643 rtx target)
5645 rtx val, mem;
5646 location_t loc = EXPR_LOCATION (exp);
5648 if (code == NOT && warn_sync_nand)
5650 tree fndecl = get_callee_fndecl (exp);
5651 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5653 static bool warned_f_a_n, warned_n_a_f;
5655 switch (fcode)
5657 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5658 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5659 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5660 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5661 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5662 if (warned_f_a_n)
5663 break;
5665 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5666 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5667 warned_f_a_n = true;
5668 break;
5670 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5671 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5672 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5673 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5674 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5675 if (warned_n_a_f)
5676 break;
5678 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5679 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5680 warned_n_a_f = true;
5681 break;
5683 default:
5684 gcc_unreachable ();
5688 /* Expand the operands. */
5689 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5690 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5692 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5693 after);
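/* Historical note behind the warning above (illustrative): before GCC
   4.4 these NAND builtins computed ~old & val; from GCC 4.4 on they
   compute the documented ~(old & val), so

     old = __sync_fetch_and_nand (&x, m);   // now: x = ~(x & m)

   can change behavior for code written against the old semantics,
   which is what -Wsync-nand flags. */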
5696 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5697 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5698 true if this is the boolean form. TARGET is a place for us to store the
5699 results; this is NOT optional if IS_BOOL is true. */
5701 static rtx
5702 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5703 bool is_bool, rtx target)
5705 rtx old_val, new_val, mem;
5706 rtx *pbool, *poval;
5708 /* Expand the operands. */
5709 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5710 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5711 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5713 pbool = poval = NULL;
5714 if (target != const0_rtx)
5716 if (is_bool)
5717 pbool = &target;
5718 else
5719 poval = &target;
5721 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5722 false, MEMMODEL_SYNC_SEQ_CST,
5723 MEMMODEL_SYNC_SEQ_CST))
5724 return NULL_RTX;
5726 return target;
5729 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5730 general form is actually an atomic exchange, and some targets only
5731 support a reduced form with the second argument being a constant 1.
5732 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5733 the results. */
5735 static rtx
5736 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5737 rtx target)
5739 rtx val, mem;
5741 /* Expand the operands. */
5742 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5743 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5745 return expand_sync_lock_test_and_set (target, mem, val);
5748 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5750 static void
5751 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5753 rtx mem;
5755 /* Expand the operands. */
5756 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5758 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5761 /* Given an integer representing an ``enum memmodel'', verify its
5762 correctness and return the memory model enum. */
5764 static enum memmodel
5765 get_memmodel (tree exp)
5767 rtx op;
5768 unsigned HOST_WIDE_INT val;
5769 source_location loc
5770 = expansion_point_location_if_in_system_header (input_location);
5772 /* If the parameter is not a constant, it's a run time value so we'll just
5773 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5774 if (TREE_CODE (exp) != INTEGER_CST)
5775 return MEMMODEL_SEQ_CST;
5777 op = expand_normal (exp);
5779 val = INTVAL (op);
5780 if (targetm.memmodel_check)
5781 val = targetm.memmodel_check (val);
5782 else if (val & ~MEMMODEL_MASK)
5784 warning_at (loc, OPT_Winvalid_memory_model,
5785 "unknown architecture specifier in memory model to builtin");
5786 return MEMMODEL_SEQ_CST;
5789 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5790 if (memmodel_base (val) >= MEMMODEL_LAST)
5792 warning_at (loc, OPT_Winvalid_memory_model,
5793 "invalid memory model argument to builtin");
5794 return MEMMODEL_SEQ_CST;
5797 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5798 be conservative and promote consume to acquire. */
5799 if (val == MEMMODEL_CONSUME)
5800 val = MEMMODEL_ACQUIRE;
5802 return (enum memmodel) val;
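/* Illustrative examples (not part of GCC) of what reaches this check:

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);   // constant, validated here
     __atomic_load_n (&flag, order);                  // run-time order: silently
                                                      //   treated as SEQ_CST

   Non-constant memory orders are promoted rather than diagnosed, and
   MEMMODEL_CONSUME is strengthened to MEMMODEL_ACQUIRE per Bugzilla
   59448, as the comments above note. */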
5805 /* Expand the __atomic_exchange intrinsic:
5806 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5807 EXP is the CALL_EXPR.
5808 TARGET is an optional place for us to store the results. */
5810 static rtx
5811 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5813 rtx val, mem;
5814 enum memmodel model;
5816 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5818 if (!flag_inline_atomics)
5819 return NULL_RTX;
5821 /* Expand the operands. */
5822 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5823 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5825 return expand_atomic_exchange (target, mem, val, model);
5828 /* Expand the __atomic_compare_exchange intrinsic:
5829 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5830 TYPE desired, BOOL weak,
5831 enum memmodel success,
5832 enum memmodel failure)
5833 EXP is the CALL_EXPR.
5834 TARGET is an optional place for us to store the results. */
5836 static rtx
5837 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5838 rtx target)
5840 rtx expect, desired, mem, oldval;
5841 rtx_code_label *label;
5842 enum memmodel success, failure;
5843 tree weak;
5844 bool is_weak;
5845 source_location loc
5846 = expansion_point_location_if_in_system_header (input_location);
5848 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5849 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5851 if (failure > success)
5853 warning_at (loc, OPT_Winvalid_memory_model,
5854 "failure memory model cannot be stronger than success "
5855 "memory model for %<__atomic_compare_exchange%>");
5856 success = MEMMODEL_SEQ_CST;
5859 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5861 warning_at (loc, OPT_Winvalid_memory_model,
5862 "invalid failure memory model for "
5863 "%<__atomic_compare_exchange%>");
5864 failure = MEMMODEL_SEQ_CST;
5865 success = MEMMODEL_SEQ_CST;
5869 if (!flag_inline_atomics)
5870 return NULL_RTX;
5872 /* Expand the operands. */
5873 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5875 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5876 expect = convert_memory_address (Pmode, expect);
5877 expect = gen_rtx_MEM (mode, expect);
5878 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5880 weak = CALL_EXPR_ARG (exp, 3);
5881 is_weak = false;
5882 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5883 is_weak = true;
5885 if (target == const0_rtx)
5886 target = NULL;
5888 /* Lest the rtl backend create a race condition with an improper store
5889 to memory, always create a new pseudo for OLDVAL. */
5890 oldval = NULL;
5892 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5893 is_weak, success, failure))
5894 return NULL_RTX;
5896 /* Conditionally store back to EXPECT, lest we create a race condition
5897 with an improper store to memory. */
5898 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5899 the normal case where EXPECT is totally private, i.e. a register. At
5900 which point the store can be unconditional. */
5901 label = gen_label_rtx ();
5902 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5903 GET_MODE (target), 1, label);
5904 emit_move_insn (expect, oldval);
5905 emit_label (label);
5907 return target;
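/* Illustrative sketch (not part of GCC): the conditional store-back
   above matches the user-level contract that EXPECT is rewritten only
   on failure, as in the usual CAS loop:

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, f (expected), 0,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       ;   // on failure, expected now holds the observed value

   An unconditional store to EXPECT could race with other accesses to
   that location, hence the branch around emit_move_insn. */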
5910 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5911 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5912 call. The weak parameter must be dropped to match the expected parameter
5913 list, and the expected argument changed from a value to a pointer to a
5914 memory slot. */
5916 static void
5917 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5919 unsigned int z;
5920 vec<tree, va_gc> *vec;
5922 vec_alloc (vec, 5);
5923 vec->quick_push (gimple_call_arg (call, 0));
5924 tree expected = gimple_call_arg (call, 1);
5925 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5926 TREE_TYPE (expected));
5927 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5928 if (expd != x)
5929 emit_move_insn (x, expd);
5930 tree v = make_tree (TREE_TYPE (expected), x);
5931 vec->quick_push (build1 (ADDR_EXPR,
5932 build_pointer_type (TREE_TYPE (expected)), v));
5933 vec->quick_push (gimple_call_arg (call, 2));
5934 /* Skip the boolean weak parameter. */
5935 for (z = 4; z < 6; z++)
5936 vec->quick_push (gimple_call_arg (call, z));
5937 built_in_function fncode
5938 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5939 + exact_log2 (GET_MODE_SIZE (mode)));
5940 tree fndecl = builtin_decl_explicit (fncode);
5941 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5942 fndecl);
5943 tree exp = build_call_vec (boolean_type_node, fn, vec);
5944 tree lhs = gimple_call_lhs (call);
5945 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5946 if (lhs)
5948 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5949 if (GET_MODE (boolret) != mode)
5950 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5951 x = force_reg (mode, x);
5952 write_complex_part (target, boolret, true);
5953 write_complex_part (target, x, false);
5957 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5959 void
5960 expand_ifn_atomic_compare_exchange (gcall *call)
5962 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5963 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5964 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5965 rtx expect, desired, mem, oldval, boolret;
5966 enum memmodel success, failure;
5967 tree lhs;
5968 bool is_weak;
5969 source_location loc
5970 = expansion_point_location_if_in_system_header (gimple_location (call));
5972 success = get_memmodel (gimple_call_arg (call, 4));
5973 failure = get_memmodel (gimple_call_arg (call, 5));
5975 if (failure > success)
5977 warning_at (loc, OPT_Winvalid_memory_model,
5978 "failure memory model cannot be stronger than success "
5979 "memory model for %<__atomic_compare_exchange%>");
5980 success = MEMMODEL_SEQ_CST;
5983 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5985 warning_at (loc, OPT_Winvalid_memory_model,
5986 "invalid failure memory model for "
5987 "%<__atomic_compare_exchange%>");
5988 failure = MEMMODEL_SEQ_CST;
5989 success = MEMMODEL_SEQ_CST;
5992 if (!flag_inline_atomics)
5994 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5995 return;
5998 /* Expand the operands. */
5999 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6001 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6002 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6004 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6006 boolret = NULL;
6007 oldval = NULL;
6009 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6010 is_weak, success, failure))
6012 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6013 return;
6016 lhs = gimple_call_lhs (call);
6017 if (lhs)
6019 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6020 if (GET_MODE (boolret) != mode)
6021 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6022 write_complex_part (target, boolret, true);
6023 write_complex_part (target, oldval, false);
6027 /* Expand the __atomic_load intrinsic:
6028 TYPE __atomic_load (TYPE *object, enum memmodel)
6029 EXP is the CALL_EXPR.
6030 TARGET is an optional place for us to store the results. */
6032 static rtx
6033 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6035 rtx mem;
6036 enum memmodel model;
6038 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6039 if (is_mm_release (model) || is_mm_acq_rel (model))
6041 source_location loc
6042 = expansion_point_location_if_in_system_header (input_location);
6043 warning_at (loc, OPT_Winvalid_memory_model,
6044 "invalid memory model for %<__atomic_load%>");
6045 model = MEMMODEL_SEQ_CST;
6048 if (!flag_inline_atomics)
6049 return NULL_RTX;
6051 /* Expand the operand. */
6052 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6054 return expand_atomic_load (target, mem, model);
6058 /* Expand the __atomic_store intrinsic:
6059 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6060 EXP is the CALL_EXPR.
6061 TARGET is an optional place for us to store the results. */
6063 static rtx
6064 expand_builtin_atomic_store (machine_mode mode, tree exp)
6066 rtx mem, val;
6067 enum memmodel model;
6069 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6070 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6071 || is_mm_release (model)))
6073 source_location loc
6074 = expansion_point_location_if_in_system_header (input_location);
6075 warning_at (loc, OPT_Winvalid_memory_model,
6076 "invalid memory model for %<__atomic_store%>");
6077 model = MEMMODEL_SEQ_CST;
6080 if (!flag_inline_atomics)
6081 return NULL_RTX;
6083 /* Expand the operands. */
6084 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6085 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6087 return expand_atomic_store (mem, val, model, false);
6090 /* Expand the __atomic_fetch_XXX intrinsic:
6091 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6092 EXP is the CALL_EXPR.
6093 TARGET is an optional place for us to store the results.
6094 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6095 FETCH_AFTER is true if returning the result of the operation, and
6096 false if returning the value before the operation.
6097 IGNORE is true if the result is not used.
6098 EXT_CALL is the correct builtin for an external call if this cannot be
6099 resolved to an instruction sequence. */
6101 static rtx
6102 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6103 enum rtx_code code, bool fetch_after,
6104 bool ignore, enum built_in_function ext_call)
6106 rtx val, mem, ret;
6107 enum memmodel model;
6108 tree fndecl;
6109 tree addr;
6111 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6113 /* Expand the operands. */
6114 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6115 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6117 /* Only try generating instructions if inlining is turned on. */
6118 if (flag_inline_atomics)
6120 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6121 if (ret)
6122 return ret;
6125 /* Return if a different routine isn't needed for the library call. */
6126 if (ext_call == BUILT_IN_NONE)
6127 return NULL_RTX;
6129 /* Change the call to the specified function. */
6130 fndecl = get_callee_fndecl (exp);
6131 addr = CALL_EXPR_FN (exp);
6132 STRIP_NOPS (addr);
6134 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6135 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6137 /* If we will emit code after the call, the call cannot be a tail call.
6138 If it is emitted as a tail call, a barrier is emitted after it, and
6139 then all trailing code is removed. */
6140 if (!ignore)
6141 CALL_EXPR_TAILCALL (exp) = 0;
6143 /* Expand the call here so we can emit trailing code. */
6144 ret = expand_call (exp, target, ignore);
6146 /* Replace the original function just in case it matters. */
6147 TREE_OPERAND (addr, 0) = fndecl;
6149 /* Then issue the arithmetic correction to return the right result. */
6150 if (!ignore)
6152 if (code == NOT)
6154 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6155 OPTAB_LIB_WIDEN);
6156 ret = expand_simple_unop (mode, NOT, ret, target, true);
6158 else
6159 ret = expand_simple_binop (mode, code, ret, val, target, true,
6160 OPTAB_LIB_WIDEN);
6162 return ret;
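/* Worked example of the correction above (illustrative): if
   __atomic_add_fetch_4 cannot be expanded inline, the call is rewritten
   to its EXT_CALL counterpart __atomic_fetch_add_4, whose library
   version returns the value from before the addition; the trailing

     ret = ret + val;

   reconstructs the post-operation result the original builtin promised,
   and for the NAND builtins the fix-up is ret = ~(ret & val). */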
6165 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6167 void
6168 expand_ifn_atomic_bit_test_and (gcall *call)
6170 tree ptr = gimple_call_arg (call, 0);
6171 tree bit = gimple_call_arg (call, 1);
6172 tree flag = gimple_call_arg (call, 2);
6173 tree lhs = gimple_call_lhs (call);
6174 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6175 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6176 enum rtx_code code;
6177 optab optab;
6178 struct expand_operand ops[5];
6180 gcc_assert (flag_inline_atomics);
6182 if (gimple_call_num_args (call) == 4)
6183 model = get_memmodel (gimple_call_arg (call, 3));
6185 rtx mem = get_builtin_sync_mem (ptr, mode);
6186 rtx val = expand_expr_force_mode (bit, mode);
6188 switch (gimple_call_internal_fn (call))
6190 case IFN_ATOMIC_BIT_TEST_AND_SET:
6191 code = IOR;
6192 optab = atomic_bit_test_and_set_optab;
6193 break;
6194 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6195 code = XOR;
6196 optab = atomic_bit_test_and_complement_optab;
6197 break;
6198 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6199 code = AND;
6200 optab = atomic_bit_test_and_reset_optab;
6201 break;
6202 default:
6203 gcc_unreachable ();
6206 if (lhs == NULL_TREE)
6208 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6209 val, NULL_RTX, true, OPTAB_DIRECT);
6210 if (code == AND)
6211 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6212 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6213 return;
6216 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6217 enum insn_code icode = direct_optab_handler (optab, mode);
6218 gcc_assert (icode != CODE_FOR_nothing);
6219 create_output_operand (&ops[0], target, mode);
6220 create_fixed_operand (&ops[1], mem);
6221 create_convert_operand_to (&ops[2], val, mode, true);
6222 create_integer_operand (&ops[3], model);
6223 create_integer_operand (&ops[4], integer_onep (flag));
6224 if (maybe_expand_insn (icode, 5, ops))
6225 return;
6227 rtx bitval = val;
6228 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6229 val, NULL_RTX, true, OPTAB_DIRECT);
6230 rtx maskval = val;
6231 if (code == AND)
6232 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6233 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6234 code, model, false);
6235 if (integer_onep (flag))
6237 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6238 NULL_RTX, true, OPTAB_DIRECT);
6239 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6240 true, OPTAB_DIRECT);
6242 else
6243 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6244 OPTAB_DIRECT);
6245 if (result != target)
6246 emit_move_insn (target, result);
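/* Illustrative sketch (not part of GCC): this internal function is
   created by the GIMPLE passes when the fetched value is only used to
   test the very bit that was changed, e.g.

     if (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
         & (1u << bit))
       ...   // becomes IFN_ATOMIC_BIT_TEST_AND_SET

   letting targets with a native bit-test-and-set instruction (x86
   "lock bts", for instance) avoid materializing the whole old value. */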
6249 /* Expand an atomic clear operation.
6250 void __atomic_clear (BOOL *obj, enum memmodel)
6251 EXP is the call expression. */
6253 static rtx
6254 expand_builtin_atomic_clear (tree exp)
6256 machine_mode mode;
6257 rtx mem, ret;
6258 enum memmodel model;
6260 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6261 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6262 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6264 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6266 source_location loc
6267 = expansion_point_location_if_in_system_header (input_location);
6268 warning_at (loc, OPT_Winvalid_memory_model,
6269 "invalid memory model for %<__atomic_store%>");
6270 model = MEMMODEL_SEQ_CST;
6273 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6274 Failing that, a store is issued by __atomic_store. The only way this can
6275 fail is if the bool type is larger than a word size. Unlikely, but
6276 handle it anyway for completeness. Assume a single threaded model since
6277 there is no atomic support in this case, and no barriers are required. */
6278 ret = expand_atomic_store (mem, const0_rtx, model, true);
6279 if (!ret)
6280 emit_move_insn (mem, const0_rtx);
6281 return const0_rtx;
6284 /* Expand an atomic test_and_set operation.
6285 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6286 EXP is the call expression. */
6288 static rtx
6289 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6291 rtx mem;
6292 enum memmodel model;
6293 machine_mode mode;
6295 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6296 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6297 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6299 return expand_atomic_test_and_set (target, mem, model);
6303 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6304 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6306 static tree
6307 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6309 int size;
6310 machine_mode mode;
6311 unsigned int mode_align, type_align;
6313 if (TREE_CODE (arg0) != INTEGER_CST)
6314 return NULL_TREE;
6316 /* We need a corresponding integer mode for the access to be lock-free. */
6317 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6318 if (!int_mode_for_size (size, 0).exists (&mode))
6319 return boolean_false_node;
6321 mode_align = GET_MODE_ALIGNMENT (mode);
6323 if (TREE_CODE (arg1) == INTEGER_CST)
6325 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6327 /* Either this argument is null, or it's a fake pointer encoding
6328 the alignment of the object. */
6329 val = least_bit_hwi (val);
6330 val *= BITS_PER_UNIT;
6332 if (val == 0 || mode_align < val)
6333 type_align = mode_align;
6334 else
6335 type_align = val;
6337 else
6339 tree ttype = TREE_TYPE (arg1);
6341 /* This function is usually invoked and folded immediately by the front
6342 end before anything else has a chance to look at it. The pointer
6343 parameter at this point is usually cast to a void *, so check for that
6344 and look past the cast. */
6345 if (CONVERT_EXPR_P (arg1)
6346 && POINTER_TYPE_P (ttype)
6347 && VOID_TYPE_P (TREE_TYPE (ttype))
6348 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6349 arg1 = TREE_OPERAND (arg1, 0);
6351 ttype = TREE_TYPE (arg1);
6352 gcc_assert (POINTER_TYPE_P (ttype));
6354 /* Get the underlying type of the object. */
6355 ttype = TREE_TYPE (ttype);
6356 type_align = TYPE_ALIGN (ttype);
6359 /* If the object has smaller alignment, the lock free routines cannot
6360 be used. */
6361 if (type_align < mode_align)
6362 return boolean_false_node;
6364 /* Check if a compare_and_swap pattern exists for the mode which represents
6365 the required size. The pattern is not allowed to fail, so the existence
6366 of the pattern indicates support is present. Also require that an
6367 atomic load exists for the required size. */
6368 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6369 return boolean_true_node;
6370 else
6371 return boolean_false_node;
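/* Illustrative examples (not part of GCC): on a typical 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)    // folds to true
     __atomic_always_lock_free (16, 0)              // false without a
                                                    //   16-byte CAS pattern

   and a constant ARG1 such as (void *) 2 is the front end's encoding of
   a 2-byte-aligned object, which can turn an otherwise lock-free size
   into boolean_false_node via the alignment check above. */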
6374 /* Return true if the parameters to call EXP represent an object which will
6375 always generate lock free instructions. The first argument represents the
6376 size of the object, and the second parameter is a pointer to the object
6377 itself. If NULL is passed for the object, then the result is based on
6378 typical alignment for an object of the specified size. Otherwise return
6379 false. */
6381 static rtx
6382 expand_builtin_atomic_always_lock_free (tree exp)
6384 tree size;
6385 tree arg0 = CALL_EXPR_ARG (exp, 0);
6386 tree arg1 = CALL_EXPR_ARG (exp, 1);
6388 if (TREE_CODE (arg0) != INTEGER_CST)
6390 error ("non-constant argument 1 to __atomic_always_lock_free");
6391 return const0_rtx;
6394 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6395 if (size == boolean_true_node)
6396 return const1_rtx;
6397 return const0_rtx;
6400 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6401 is lock free on this architecture. */
6403 static tree
6404 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6406 if (!flag_inline_atomics)
6407 return NULL_TREE;
6409 /* If it isn't always lock free, don't generate a result. */
6410 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6411 return boolean_true_node;
6413 return NULL_TREE;
6416 /* Return true if the parameters to call EXP represent an object which will
6417 always generate lock free instructions. The first argument represents the
6418 size of the object, and the second parameter is a pointer to the object
6419 itself. If NULL is passed for the object, then the result is based on
6420 typical alignment for an object of the specified size. Otherwise return
6421 NULL. */
6423 static rtx
6424 expand_builtin_atomic_is_lock_free (tree exp)
6426 tree size;
6427 tree arg0 = CALL_EXPR_ARG (exp, 0);
6428 tree arg1 = CALL_EXPR_ARG (exp, 1);
6430 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6432 error ("non-integer argument 1 to __atomic_is_lock_free");
6433 return NULL_RTX;
6436 if (!flag_inline_atomics)
6437 return NULL_RTX;
6439 /* If the value is known at compile time, return the RTX for it. */
6440 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6441 if (size == boolean_true_node)
6442 return const1_rtx;
6444 return NULL_RTX;
6447 /* Expand the __atomic_thread_fence intrinsic:
6448 void __atomic_thread_fence (enum memmodel)
6449 EXP is the CALL_EXPR. */
6451 static void
6452 expand_builtin_atomic_thread_fence (tree exp)
6454 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6455 expand_mem_thread_fence (model);
6458 /* Expand the __atomic_signal_fence intrinsic:
6459 void __atomic_signal_fence (enum memmodel)
6460 EXP is the CALL_EXPR. */
6462 static void
6463 expand_builtin_atomic_signal_fence (tree exp)
6465 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6466 expand_mem_signal_fence (model);
6469 /* Expand the __sync_synchronize intrinsic. */
6471 static void
6472 expand_builtin_sync_synchronize (void)
6474 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6477 static rtx
6478 expand_builtin_thread_pointer (tree exp, rtx target)
6480 enum insn_code icode;
6481 if (!validate_arglist (exp, VOID_TYPE))
6482 return const0_rtx;
6483 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6484 if (icode != CODE_FOR_nothing)
6486 struct expand_operand op;
6487 /* If the target is not suitable then create a new target. */
6488 if (target == NULL_RTX
6489 || !REG_P (target)
6490 || GET_MODE (target) != Pmode)
6491 target = gen_reg_rtx (Pmode);
6492 create_output_operand (&op, target, Pmode);
6493 expand_insn (icode, 1, &op);
6494 return target;
6496 error ("__builtin_thread_pointer is not supported on this target");
6497 return const0_rtx;
6500 static void
6501 expand_builtin_set_thread_pointer (tree exp)
6503 enum insn_code icode;
6504 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6505 return;
6506 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6507 if (icode != CODE_FOR_nothing)
6509 struct expand_operand op;
6510 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6511 Pmode, EXPAND_NORMAL);
6512 create_input_operand (&op, val, Pmode);
6513 expand_insn (icode, 1, &op);
6514 return;
6516 error ("__builtin_set_thread_pointer is not supported on this target");
6520 /* Emit code to restore the current value of the stack. */
6522 static void
6523 expand_stack_restore (tree var)
6525 rtx_insn *prev;
6526 rtx sa = expand_normal (var);
6528 sa = convert_memory_address (Pmode, sa);
6530 prev = get_last_insn ();
6531 emit_stack_restore (SAVE_BLOCK, sa);
6533 record_new_stack_level ();
6535 fixup_args_size_notes (prev, get_last_insn (), 0);
6538 /* Emit code to save the current value of the stack. */
6540 static rtx
6541 expand_stack_save (void)
6543 rtx ret = NULL_RTX;
6545 emit_stack_save (SAVE_BLOCK, &ret);
6546 return ret;
6550 /* Expand an expression EXP that calls a built-in function,
6551 with result going to TARGET if that's convenient
6552 (and in mode MODE if that's convenient).
6553 SUBTARGET may be used as the target for computing one of EXP's operands.
6554 IGNORE is nonzero if the value is to be ignored. */
6557 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6558 int ignore)
6560 tree fndecl = get_callee_fndecl (exp);
6561 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6562 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6563 int flags;
6565 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6566 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6568 /* When ASan is enabled, we don't want to expand some memory/string
6569 builtins and rely on libsanitizer's hooks. This allows us to avoid
6570 redundant checks and be sure that a possible overflow will be detected
6571 by ASan. */
6573 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6574 return expand_call (exp, target, ignore);
6576 /* When not optimizing, generate calls to library functions for a certain
6577 set of builtins. */
6578 if (!optimize
6579 && !called_as_built_in (fndecl)
6580 && fcode != BUILT_IN_FORK
6581 && fcode != BUILT_IN_EXECL
6582 && fcode != BUILT_IN_EXECV
6583 && fcode != BUILT_IN_EXECLP
6584 && fcode != BUILT_IN_EXECLE
6585 && fcode != BUILT_IN_EXECVP
6586 && fcode != BUILT_IN_EXECVE
6587 && !ALLOCA_FUNCTION_CODE_P (fcode)
6588 && fcode != BUILT_IN_FREE
6589 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6590 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6591 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6592 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6593 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6594 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6595 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6596 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6597 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6598 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6599 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6600 && fcode != BUILT_IN_CHKP_BNDRET)
6601 return expand_call (exp, target, ignore);
6603 /* The built-in function expanders test for target == const0_rtx
6604 to determine whether the function's result will be ignored. */
6605 if (ignore)
6606 target = const0_rtx;
6608 /* If the result of a pure or const built-in function is ignored, and
6609 none of its arguments are volatile, we can avoid expanding the
6610 built-in call and just evaluate the arguments for side-effects. */
6611 if (target == const0_rtx
6612 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6613 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6615 bool volatilep = false;
6616 tree arg;
6617 call_expr_arg_iterator iter;
6619 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6620 if (TREE_THIS_VOLATILE (arg))
6622 volatilep = true;
6623 break;
6626 if (! volatilep)
6628 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6629 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6630 return const0_rtx;
6634 /* expand_builtin_with_bounds is supposed to be used for
6635 instrumented builtin calls. */
6636 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6638 switch (fcode)
6640 CASE_FLT_FN (BUILT_IN_FABS):
6641 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6642 case BUILT_IN_FABSD32:
6643 case BUILT_IN_FABSD64:
6644 case BUILT_IN_FABSD128:
6645 target = expand_builtin_fabs (exp, target, subtarget);
6646 if (target)
6647 return target;
6648 break;
6650 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6651 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6652 target = expand_builtin_copysign (exp, target, subtarget);
6653 if (target)
6654 return target;
6655 break;
6657 /* Just do a normal library call if we were unable to fold
6658 the values. */
6659 CASE_FLT_FN (BUILT_IN_CABS):
6660 break;
6662 CASE_FLT_FN (BUILT_IN_FMA):
6663 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6664 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6665 if (target)
6666 return target;
6667 break;
6669 CASE_FLT_FN (BUILT_IN_ILOGB):
6670 if (! flag_unsafe_math_optimizations)
6671 break;
6672 gcc_fallthrough ();
6673 CASE_FLT_FN (BUILT_IN_ISINF):
6674 CASE_FLT_FN (BUILT_IN_FINITE):
6675 case BUILT_IN_ISFINITE:
6676 case BUILT_IN_ISNORMAL:
6677 target = expand_builtin_interclass_mathfn (exp, target);
6678 if (target)
6679 return target;
6680 break;
6682 CASE_FLT_FN (BUILT_IN_ICEIL):
6683 CASE_FLT_FN (BUILT_IN_LCEIL):
6684 CASE_FLT_FN (BUILT_IN_LLCEIL):
6685 CASE_FLT_FN (BUILT_IN_LFLOOR):
6686 CASE_FLT_FN (BUILT_IN_IFLOOR):
6687 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6688 target = expand_builtin_int_roundingfn (exp, target);
6689 if (target)
6690 return target;
6691 break;
6693 CASE_FLT_FN (BUILT_IN_IRINT):
6694 CASE_FLT_FN (BUILT_IN_LRINT):
6695 CASE_FLT_FN (BUILT_IN_LLRINT):
6696 CASE_FLT_FN (BUILT_IN_IROUND):
6697 CASE_FLT_FN (BUILT_IN_LROUND):
6698 CASE_FLT_FN (BUILT_IN_LLROUND):
6699 target = expand_builtin_int_roundingfn_2 (exp, target);
6700 if (target)
6701 return target;
6702 break;
6704 CASE_FLT_FN (BUILT_IN_POWI):
6705 target = expand_builtin_powi (exp, target);
6706 if (target)
6707 return target;
6708 break;
6710 CASE_FLT_FN (BUILT_IN_CEXPI):
6711 target = expand_builtin_cexpi (exp, target);
6712 gcc_assert (target);
6713 return target;
6715 CASE_FLT_FN (BUILT_IN_SIN):
6716 CASE_FLT_FN (BUILT_IN_COS):
6717 if (! flag_unsafe_math_optimizations)
6718 break;
6719 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6720 if (target)
6721 return target;
6722 break;
6724 CASE_FLT_FN (BUILT_IN_SINCOS):
6725 if (! flag_unsafe_math_optimizations)
6726 break;
6727 target = expand_builtin_sincos (exp);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_APPLY_ARGS:
6733 return expand_builtin_apply_args ();
6735 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6736 FUNCTION with a copy of the parameters described by
6737 ARGUMENTS, and ARGSIZE. It returns a block of memory
6738 allocated on the stack into which is stored all the registers
6739 that might possibly be used for returning the result of a
6740 function. ARGUMENTS is the value returned by
6741 __builtin_apply_args. ARGSIZE is the number of bytes of
6742 arguments that must be copied. ??? How should this value be
6743 computed? We'll also need a safe worst case value for varargs
6744 functions. */
6745 case BUILT_IN_APPLY:
6746 if (!validate_arglist (exp, POINTER_TYPE,
6747 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6748 && !validate_arglist (exp, REFERENCE_TYPE,
6749 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6750 return const0_rtx;
6751 else
6753 rtx ops[3];
6755 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6756 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6757 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6759 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6762 /* __builtin_return (RESULT) causes the function to return the
6763 value described by RESULT. RESULT is address of the block of
6764 memory returned by __builtin_apply. */
6765 case BUILT_IN_RETURN:
6766 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6767 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6768 return const0_rtx;
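/* Illustrative sketch (not part of GCC): the classic use of the three
   builtins above is argument forwarding in a wrapper function:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where target_fn is whatever function is being wrapped and 64 is a
   hand-picked worst-case argument-block size; the "??? How should this
   value be computed" note above records that no better answer exists. */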
6770 case BUILT_IN_SAVEREGS:
6771 return expand_builtin_saveregs ();
6773 case BUILT_IN_VA_ARG_PACK:
6774 /* All valid uses of __builtin_va_arg_pack () are removed during
6775 inlining. */
6776 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6777 return const0_rtx;
6779 case BUILT_IN_VA_ARG_PACK_LEN:
6780 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6781 inlining. */
6782 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6783 return const0_rtx;
6785 /* Return the address of the first anonymous stack arg. */
6786 case BUILT_IN_NEXT_ARG:
6787 if (fold_builtin_next_arg (exp, false))
6788 return const0_rtx;
6789 return expand_builtin_next_arg ();
6791 case BUILT_IN_CLEAR_CACHE:
6792 target = expand_builtin___clear_cache (exp);
6793 if (target)
6794 return target;
6795 break;
6797 case BUILT_IN_CLASSIFY_TYPE:
6798 return expand_builtin_classify_type (exp);
6800 case BUILT_IN_CONSTANT_P:
6801 return const0_rtx;
6803 case BUILT_IN_FRAME_ADDRESS:
6804 case BUILT_IN_RETURN_ADDRESS:
6805 return expand_builtin_frame_address (fndecl, exp);
6807 /* Returns the address of the area where the structure is returned,
6808 or 0 otherwise. */
6809 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6810 if (call_expr_nargs (exp) != 0
6811 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6812 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6813 return const0_rtx;
6814 else
6815 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6817 CASE_BUILT_IN_ALLOCA:
6818 target = expand_builtin_alloca (exp);
6819 if (target)
6820 return target;
6821 break;
6823 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6824 return expand_asan_emit_allocas_unpoison (exp);
6826 case BUILT_IN_STACK_SAVE:
6827 return expand_stack_save ();
6829 case BUILT_IN_STACK_RESTORE:
6830 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6831 return const0_rtx;
6833 case BUILT_IN_BSWAP16:
6834 case BUILT_IN_BSWAP32:
6835 case BUILT_IN_BSWAP64:
6836 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6837 if (target)
6838 return target;
6839 break;
6841 CASE_INT_FN (BUILT_IN_FFS):
6842 target = expand_builtin_unop (target_mode, exp, target,
6843 subtarget, ffs_optab);
6844 if (target)
6845 return target;
6846 break;
6848 CASE_INT_FN (BUILT_IN_CLZ):
6849 target = expand_builtin_unop (target_mode, exp, target,
6850 subtarget, clz_optab);
6851 if (target)
6852 return target;
6853 break;
6855 CASE_INT_FN (BUILT_IN_CTZ):
6856 target = expand_builtin_unop (target_mode, exp, target,
6857 subtarget, ctz_optab);
6858 if (target)
6859 return target;
6860 break;
6862 CASE_INT_FN (BUILT_IN_CLRSB):
6863 target = expand_builtin_unop (target_mode, exp, target,
6864 subtarget, clrsb_optab);
6865 if (target)
6866 return target;
6867 break;
6869 CASE_INT_FN (BUILT_IN_POPCOUNT):
6870 target = expand_builtin_unop (target_mode, exp, target,
6871 subtarget, popcount_optab);
6872 if (target)
6873 return target;
6874 break;
6876 CASE_INT_FN (BUILT_IN_PARITY):
6877 target = expand_builtin_unop (target_mode, exp, target,
6878 subtarget, parity_optab);
6879 if (target)
6880 return target;
6881 break;
6883 case BUILT_IN_STRLEN:
6884 target = expand_builtin_strlen (exp, target, target_mode);
6885 if (target)
6886 return target;
6887 break;
6889 case BUILT_IN_STRCAT:
6890 target = expand_builtin_strcat (exp, target);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_STRCPY:
6896 target = expand_builtin_strcpy (exp, target);
6897 if (target)
6898 return target;
6899 break;
6901 case BUILT_IN_STRNCAT:
6902 target = expand_builtin_strncat (exp, target);
6903 if (target)
6904 return target;
6905 break;
6907 case BUILT_IN_STRNCPY:
6908 target = expand_builtin_strncpy (exp, target);
6909 if (target)
6910 return target;
6911 break;
6913 case BUILT_IN_STPCPY:
6914 target = expand_builtin_stpcpy (exp, target, mode);
6915 if (target)
6916 return target;
6917 break;
6919 case BUILT_IN_STPNCPY:
6920 target = expand_builtin_stpncpy (exp, target);
6921 if (target)
6922 return target;
6923 break;
6925 case BUILT_IN_MEMCHR:
6926 target = expand_builtin_memchr (exp, target);
6927 if (target)
6928 return target;
6929 break;
6931 case BUILT_IN_MEMCPY:
6932 target = expand_builtin_memcpy (exp, target);
6933 if (target)
6934 return target;
6935 break;
6937 case BUILT_IN_MEMMOVE:
6938 target = expand_builtin_memmove (exp, target);
6939 if (target)
6940 return target;
6941 break;
6943 case BUILT_IN_MEMPCPY:
6944 target = expand_builtin_mempcpy (exp, target);
6945 if (target)
6946 return target;
6947 break;
6949 case BUILT_IN_MEMSET:
6950 target = expand_builtin_memset (exp, target, mode);
6951 if (target)
6952 return target;
6953 break;
6955 case BUILT_IN_BZERO:
6956 target = expand_builtin_bzero (exp);
6957 if (target)
6958 return target;
6959 break;
6961 case BUILT_IN_STRCMP:
6962 target = expand_builtin_strcmp (exp, target);
6963 if (target)
6964 return target;
6965 break;
6967 case BUILT_IN_STRNCMP:
6968 target = expand_builtin_strncmp (exp, target, mode);
6969 if (target)
6970 return target;
6971 break;
6973 case BUILT_IN_BCMP:
6974 case BUILT_IN_MEMCMP:
6975 case BUILT_IN_MEMCMP_EQ:
6976 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6977 if (target)
6978 return target;
6979 if (fcode == BUILT_IN_MEMCMP_EQ)
6981 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6982 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6984 break;
6986 case BUILT_IN_SETJMP:
6987 /* This should have been lowered to the builtins below. */
6988 gcc_unreachable ();
6990 case BUILT_IN_SETJMP_SETUP:
6991 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6992 and the receiver label. */
6993 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6995 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6996 VOIDmode, EXPAND_NORMAL);
6997 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6998 rtx_insn *label_r = label_rtx (label);
7000 /* This is copied from the handling of non-local gotos. */
7001 expand_builtin_setjmp_setup (buf_addr, label_r);
7002 nonlocal_goto_handler_labels
7003 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7004 nonlocal_goto_handler_labels);
7005 /* ??? Do not let expand_label treat us as such since we would
7006 not want to be both on the list of non-local labels and on
7007 the list of forced labels. */
7008 FORCED_LABEL (label) = 0;
7009 return const0_rtx;
7011 break;
7013 case BUILT_IN_SETJMP_RECEIVER:
7014 /* __builtin_setjmp_receiver is passed the receiver label. */
7015 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7017 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7018 rtx_insn *label_r = label_rtx (label);
7020 expand_builtin_setjmp_receiver (label_r);
7021 return const0_rtx;
7023 break;
7025 /* __builtin_longjmp is passed a pointer to an array of five words.
7026 It's similar to the C library longjmp function but works with
7027 __builtin_setjmp above. */
7028 case BUILT_IN_LONGJMP:
7029 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7031 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7032 VOIDmode, EXPAND_NORMAL);
7033 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7035 if (value != const1_rtx)
7037 error ("%<__builtin_longjmp%> second argument must be 1");
7038 return const0_rtx;
7041 expand_builtin_longjmp (buf_addr, value);
7042 return const0_rtx;
7044 break;
7046 case BUILT_IN_NONLOCAL_GOTO:
7047 target = expand_builtin_nonlocal_goto (exp);
7048 if (target)
7049 return target;
7050 break;
7052 /* This updates the setjmp buffer that is its argument with the value
7053 of the current stack pointer. */
7054 case BUILT_IN_UPDATE_SETJMP_BUF:
7055 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7057 rtx buf_addr
7058 = expand_normal (CALL_EXPR_ARG (exp, 0));
7060 expand_builtin_update_setjmp_buf (buf_addr);
7061 return const0_rtx;
7063 break;
7065 case BUILT_IN_TRAP:
7066 expand_builtin_trap ();
7067 return const0_rtx;
7069 case BUILT_IN_UNREACHABLE:
7070 expand_builtin_unreachable ();
7071 return const0_rtx;
7073 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7074 case BUILT_IN_SIGNBITD32:
7075 case BUILT_IN_SIGNBITD64:
7076 case BUILT_IN_SIGNBITD128:
7077 target = expand_builtin_signbit (exp, target);
7078 if (target)
7079 return target;
7080 break;
7082 /* Various hooks for the DWARF 2 __throw routine. */
7083 case BUILT_IN_UNWIND_INIT:
7084 expand_builtin_unwind_init ();
7085 return const0_rtx;
7086 case BUILT_IN_DWARF_CFA:
7087 return virtual_cfa_rtx;
7088 #ifdef DWARF2_UNWIND_INFO
7089 case BUILT_IN_DWARF_SP_COLUMN:
7090 return expand_builtin_dwarf_sp_column ();
7091 case BUILT_IN_INIT_DWARF_REG_SIZES:
7092 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7093 return const0_rtx;
7094 #endif
7095 case BUILT_IN_FROB_RETURN_ADDR:
7096 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7097 case BUILT_IN_EXTRACT_RETURN_ADDR:
7098 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7099 case BUILT_IN_EH_RETURN:
7100 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7101 CALL_EXPR_ARG (exp, 1));
7102 return const0_rtx;
7103 case BUILT_IN_EH_RETURN_DATA_REGNO:
7104 return expand_builtin_eh_return_data_regno (exp);
7105 case BUILT_IN_EXTEND_POINTER:
7106 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7107 case BUILT_IN_EH_POINTER:
7108 return expand_builtin_eh_pointer (exp);
7109 case BUILT_IN_EH_FILTER:
7110 return expand_builtin_eh_filter (exp);
7111 case BUILT_IN_EH_COPY_VALUES:
7112 return expand_builtin_eh_copy_values (exp);
7114 case BUILT_IN_VA_START:
7115 return expand_builtin_va_start (exp);
7116 case BUILT_IN_VA_END:
7117 return expand_builtin_va_end (exp);
7118 case BUILT_IN_VA_COPY:
7119 return expand_builtin_va_copy (exp);
7120 case BUILT_IN_EXPECT:
7121 return expand_builtin_expect (exp, target);
7122 case BUILT_IN_ASSUME_ALIGNED:
7123 return expand_builtin_assume_aligned (exp, target);
7124 case BUILT_IN_PREFETCH:
7125 expand_builtin_prefetch (exp);
7126 return const0_rtx;
7128 case BUILT_IN_INIT_TRAMPOLINE:
7129 return expand_builtin_init_trampoline (exp, true);
7130 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7131 return expand_builtin_init_trampoline (exp, false);
7132 case BUILT_IN_ADJUST_TRAMPOLINE:
7133 return expand_builtin_adjust_trampoline (exp);
7135 case BUILT_IN_INIT_DESCRIPTOR:
7136 return expand_builtin_init_descriptor (exp);
7137 case BUILT_IN_ADJUST_DESCRIPTOR:
7138 return expand_builtin_adjust_descriptor (exp);
7140 case BUILT_IN_FORK:
7141 case BUILT_IN_EXECL:
7142 case BUILT_IN_EXECV:
7143 case BUILT_IN_EXECLP:
7144 case BUILT_IN_EXECLE:
7145 case BUILT_IN_EXECVP:
7146 case BUILT_IN_EXECVE:
7147 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7148 if (target)
7149 return target;
7150 break;
7152 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7153 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7154 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7155 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7156 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7157 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7158 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7159 if (target)
7160 return target;
7161 break;
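/* Illustrative mapping for the sync cases in this block (P and V are
   hypothetical): __sync_fetch_and_add_4 (p, v) reaches
   expand_builtin_sync_operation with CODE == PLUS and AFTER == false,
   so the old value is returned; the __sync_add_and_fetch_* cases
   further down pass AFTER == true and return the new value.  The
   NAND variants pass NOT and compute ~(*p & v).  */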
7163 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7164 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7165 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7166 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7167 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7168 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7169 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7170 if (target)
7171 return target;
7172 break;
7174 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7175 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7176 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7177 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7178 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7179 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7180 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7181 if (target)
7182 return target;
7183 break;
7185 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7186 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7187 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7188 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7189 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7190 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7191 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7192 if (target)
7193 return target;
7194 break;
7196 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7197 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7198 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7199 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7200 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7201 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7202 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7203 if (target)
7204 return target;
7205 break;
7207 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7208 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7209 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7210 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7211 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7212 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7213 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7214 if (target)
7215 return target;
7216 break;
7218 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7219 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7220 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7221 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7222 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7223 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7224 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7225 if (target)
7226 return target;
7227 break;
7229 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7230 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7231 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7232 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7233 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7234 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7235 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7236 if (target)
7237 return target;
7238 break;
7240 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7241 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7242 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7243 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7244 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7245 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7246 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7247 if (target)
7248 return target;
7249 break;
7251 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7252 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7253 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7254 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7255 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7256 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7257 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7258 if (target)
7259 return target;
7260 break;
7262 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7263 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7264 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7265 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7266 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7267 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7268 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7269 if (target)
7270 return target;
7271 break;
7273 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7274 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7275 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7276 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7277 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7278 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7279 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7280 if (target)
7281 return target;
7282 break;
7284 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7285 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7286 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7287 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7288 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7289 if (mode == VOIDmode)
7290 mode = TYPE_MODE (boolean_type_node);
7291 if (!target || !register_operand (target, mode))
7292 target = gen_reg_rtx (mode);
7294 mode = get_builtin_sync_mode
7295 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7296 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7297 if (target)
7298 return target;
7299 break;
7301 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7302 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7303 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7304 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7305 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7306 mode = get_builtin_sync_mode
7307 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7308 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7309 if (target)
7310 return target;
7311 break;
7313 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7314 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7315 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7316 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7317 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7318 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7319 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7320 if (target)
7321 return target;
7322 break;
7324 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7325 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7326 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7327 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7328 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7329 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7330 expand_builtin_sync_lock_release (mode, exp);
7331 return const0_rtx;
7333 case BUILT_IN_SYNC_SYNCHRONIZE:
7334 expand_builtin_sync_synchronize ();
7335 return const0_rtx;
7337 case BUILT_IN_ATOMIC_EXCHANGE_1:
7338 case BUILT_IN_ATOMIC_EXCHANGE_2:
7339 case BUILT_IN_ATOMIC_EXCHANGE_4:
7340 case BUILT_IN_ATOMIC_EXCHANGE_8:
7341 case BUILT_IN_ATOMIC_EXCHANGE_16:
7342 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7343 target = expand_builtin_atomic_exchange (mode, exp, target);
7344 if (target)
7345 return target;
7346 break;
7348 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7349 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7350 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7351 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7352 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7354 unsigned int nargs, z;
7355 vec<tree, va_gc> *vec;
7357 mode =
7358 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7359 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7360 if (target)
7361 return target;
7363 /* If this is turned into an external library call, the weak parameter
7364 must be dropped to match the expected parameter list. */
7365 nargs = call_expr_nargs (exp);
7366 vec_alloc (vec, nargs - 1);
7367 for (z = 0; z < 3; z++)
7368 vec->quick_push (CALL_EXPR_ARG (exp, z));
7369 /* Skip the boolean weak parameter. */
7370 for (z = 4; z < 6; z++)
7371 vec->quick_push (CALL_EXPR_ARG (exp, z));
7372 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7373 break;
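/* Sketch of the rewrite above, assuming the documented six-argument
   form of the builtin:

     __atomic_compare_exchange_N (ptr, expected, desired, weak,
                                  success_order, failure_order)

   is rebuilt as the five-argument library call with WEAK (argument
   index 3) dropped, i.e. arguments 0-2 and 4-5 kept in order.  */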
7376 case BUILT_IN_ATOMIC_LOAD_1:
7377 case BUILT_IN_ATOMIC_LOAD_2:
7378 case BUILT_IN_ATOMIC_LOAD_4:
7379 case BUILT_IN_ATOMIC_LOAD_8:
7380 case BUILT_IN_ATOMIC_LOAD_16:
7381 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7382 target = expand_builtin_atomic_load (mode, exp, target);
7383 if (target)
7384 return target;
7385 break;
7387 case BUILT_IN_ATOMIC_STORE_1:
7388 case BUILT_IN_ATOMIC_STORE_2:
7389 case BUILT_IN_ATOMIC_STORE_4:
7390 case BUILT_IN_ATOMIC_STORE_8:
7391 case BUILT_IN_ATOMIC_STORE_16:
7392 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7393 target = expand_builtin_atomic_store (mode, exp);
7394 if (target)
7395 return const0_rtx;
7396 break;
7398 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7399 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7400 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7401 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7402 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7404 enum built_in_function lib;
7405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7406 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7407 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7408 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7409 ignore, lib);
7410 if (target)
7411 return target;
7412 break;
7414 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7415 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7416 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7417 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7418 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7420 enum built_in_function lib;
7421 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7422 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7423 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7424 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7425 ignore, lib);
7426 if (target)
7427 return target;
7428 break;
7430 case BUILT_IN_ATOMIC_AND_FETCH_1:
7431 case BUILT_IN_ATOMIC_AND_FETCH_2:
7432 case BUILT_IN_ATOMIC_AND_FETCH_4:
7433 case BUILT_IN_ATOMIC_AND_FETCH_8:
7434 case BUILT_IN_ATOMIC_AND_FETCH_16:
7436 enum built_in_function lib;
7437 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7438 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7439 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7440 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7441 ignore, lib);
7442 if (target)
7443 return target;
7444 break;
7446 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7447 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7448 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7449 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7450 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7452 enum built_in_function lib;
7453 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7454 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7455 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7456 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7457 ignore, lib);
7458 if (target)
7459 return target;
7460 break;
7462 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7463 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7464 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7465 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7466 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7468 enum built_in_function lib;
7469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7470 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7471 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7472 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7473 ignore, lib);
7474 if (target)
7475 return target;
7476 break;
7478 case BUILT_IN_ATOMIC_OR_FETCH_1:
7479 case BUILT_IN_ATOMIC_OR_FETCH_2:
7480 case BUILT_IN_ATOMIC_OR_FETCH_4:
7481 case BUILT_IN_ATOMIC_OR_FETCH_8:
7482 case BUILT_IN_ATOMIC_OR_FETCH_16:
7484 enum built_in_function lib;
7485 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7486 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7487 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7488 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7489 ignore, lib);
7490 if (target)
7491 return target;
7492 break;
7494 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7495 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7496 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7497 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7498 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7499 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7500 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7501 ignore, BUILT_IN_NONE);
7502 if (target)
7503 return target;
7504 break;
7506 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7507 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7508 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7509 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7510 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7511 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7512 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7513 ignore, BUILT_IN_NONE);
7514 if (target)
7515 return target;
7516 break;
7518 case BUILT_IN_ATOMIC_FETCH_AND_1:
7519 case BUILT_IN_ATOMIC_FETCH_AND_2:
7520 case BUILT_IN_ATOMIC_FETCH_AND_4:
7521 case BUILT_IN_ATOMIC_FETCH_AND_8:
7522 case BUILT_IN_ATOMIC_FETCH_AND_16:
7523 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7524 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7525 ignore, BUILT_IN_NONE);
7526 if (target)
7527 return target;
7528 break;
7530 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7531 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7532 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7533 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7534 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7535 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7536 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7537 ignore, BUILT_IN_NONE);
7538 if (target)
7539 return target;
7540 break;
7542 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7543 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7544 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7545 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7546 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7547 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7548 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7549 ignore, BUILT_IN_NONE);
7550 if (target)
7551 return target;
7552 break;
7554 case BUILT_IN_ATOMIC_FETCH_OR_1:
7555 case BUILT_IN_ATOMIC_FETCH_OR_2:
7556 case BUILT_IN_ATOMIC_FETCH_OR_4:
7557 case BUILT_IN_ATOMIC_FETCH_OR_8:
7558 case BUILT_IN_ATOMIC_FETCH_OR_16:
7559 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7560 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7561 ignore, BUILT_IN_NONE);
7562 if (target)
7563 return target;
7564 break;
7566 case BUILT_IN_ATOMIC_TEST_AND_SET:
7567 return expand_builtin_atomic_test_and_set (exp, target);
7569 case BUILT_IN_ATOMIC_CLEAR:
7570 return expand_builtin_atomic_clear (exp);
7572 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7573 return expand_builtin_atomic_always_lock_free (exp);
7575 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7576 target = expand_builtin_atomic_is_lock_free (exp);
7577 if (target)
7578 return target;
7579 break;
7581 case BUILT_IN_ATOMIC_THREAD_FENCE:
7582 expand_builtin_atomic_thread_fence (exp);
7583 return const0_rtx;
7585 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7586 expand_builtin_atomic_signal_fence (exp);
7587 return const0_rtx;
7589 case BUILT_IN_OBJECT_SIZE:
7590 return expand_builtin_object_size (exp);
7592 case BUILT_IN_MEMCPY_CHK:
7593 case BUILT_IN_MEMPCPY_CHK:
7594 case BUILT_IN_MEMMOVE_CHK:
7595 case BUILT_IN_MEMSET_CHK:
7596 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7597 if (target)
7598 return target;
7599 break;
7601 case BUILT_IN_STRCPY_CHK:
7602 case BUILT_IN_STPCPY_CHK:
7603 case BUILT_IN_STRNCPY_CHK:
7604 case BUILT_IN_STPNCPY_CHK:
7605 case BUILT_IN_STRCAT_CHK:
7606 case BUILT_IN_STRNCAT_CHK:
7607 case BUILT_IN_SNPRINTF_CHK:
7608 case BUILT_IN_VSNPRINTF_CHK:
7609 maybe_emit_chk_warning (exp, fcode);
7610 break;
7612 case BUILT_IN_SPRINTF_CHK:
7613 case BUILT_IN_VSPRINTF_CHK:
7614 maybe_emit_sprintf_chk_warning (exp, fcode);
7615 break;
7617 case BUILT_IN_FREE:
7618 if (warn_free_nonheap_object)
7619 maybe_emit_free_warning (exp);
7620 break;
7622 case BUILT_IN_THREAD_POINTER:
7623 return expand_builtin_thread_pointer (exp, target);
7625 case BUILT_IN_SET_THREAD_POINTER:
7626 expand_builtin_set_thread_pointer (exp);
7627 return const0_rtx;
7629 case BUILT_IN_CILK_DETACH:
7630 expand_builtin_cilk_detach (exp);
7631 return const0_rtx;
7633 case BUILT_IN_CILK_POP_FRAME:
7634 expand_builtin_cilk_pop_frame (exp);
7635 return const0_rtx;
7637 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7638 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7639 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7640 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7641 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7642 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7643 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7644 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7645 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7646 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7647 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7648 /* We allow user CHKP builtins if the Pointer Bounds
7649 Checker is off. */
7650 if (!chkp_function_instrumented_p (current_function_decl))
7652 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7653 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7654 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7655 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7656 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7657 return expand_normal (CALL_EXPR_ARG (exp, 0));
7658 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7659 return expand_normal (size_zero_node);
7660 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7661 return expand_normal (size_int (-1));
7662 else
7663 return const0_rtx;
7665 /* FALLTHROUGH */
7667 case BUILT_IN_CHKP_BNDMK:
7668 case BUILT_IN_CHKP_BNDSTX:
7669 case BUILT_IN_CHKP_BNDCL:
7670 case BUILT_IN_CHKP_BNDCU:
7671 case BUILT_IN_CHKP_BNDLDX:
7672 case BUILT_IN_CHKP_BNDRET:
7673 case BUILT_IN_CHKP_INTERSECT:
7674 case BUILT_IN_CHKP_NARROW:
7675 case BUILT_IN_CHKP_EXTRACT_LOWER:
7676 case BUILT_IN_CHKP_EXTRACT_UPPER:
7677 /* A software implementation of the Pointer Bounds Checker is not
7678 yet implemented; target support is required. */
7679 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7680 break;
7682 case BUILT_IN_ACC_ON_DEVICE:
7683 /* Fall back to a library call if we failed to expand the builtin
7684 when folding. */
7685 break;
7687 default: /* Just do a library call if this is an unknown builtin. */
7688 break;
7691 /* The switch statement above can drop through to cause the function
7692 to be called normally. */
7693 return expand_call (exp, target, ignore);
7696 /* Similar to expand_builtin but is used for instrumented calls. */
7699 expand_builtin_with_bounds (tree exp, rtx target,
7700 rtx subtarget ATTRIBUTE_UNUSED,
7701 machine_mode mode, int ignore)
7703 tree fndecl = get_callee_fndecl (exp);
7704 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7706 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7708 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7709 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7711 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7712 && fcode < END_CHKP_BUILTINS);
7714 switch (fcode)
7716 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7717 target = expand_builtin_memcpy_with_bounds (exp, target);
7718 if (target)
7719 return target;
7720 break;
7722 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7723 target = expand_builtin_mempcpy_with_bounds (exp, target);
7724 if (target)
7725 return target;
7726 break;
7728 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7729 target = expand_builtin_memset_with_bounds (exp, target, mode);
7730 if (target)
7731 return target;
7732 break;
7734 default:
7735 break;
7738 /* The switch statement above can drop through to cause the function
7739 to be called normally. */
7740 return expand_call (exp, target, ignore);
7743 /* Determine whether a tree node represents a call to a built-in
7744 function. If the tree T is a call to a built-in function with
7745 the right number of arguments of the appropriate types, return
7746 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7747 Otherwise the return value is END_BUILTINS. */
7749 enum built_in_function
7750 builtin_mathfn_code (const_tree t)
7752 const_tree fndecl, arg, parmlist;
7753 const_tree argtype, parmtype;
7754 const_call_expr_arg_iterator iter;
7756 if (TREE_CODE (t) != CALL_EXPR
7757 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7758 return END_BUILTINS;
7760 fndecl = get_callee_fndecl (t);
7761 if (fndecl == NULL_TREE
7762 || TREE_CODE (fndecl) != FUNCTION_DECL
7763 || ! DECL_BUILT_IN (fndecl)
7764 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7765 return END_BUILTINS;
7767 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7768 init_const_call_expr_arg_iterator (t, &iter);
7769 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7771 /* If a function doesn't take a variable number of arguments,
7772 the last element in the list will have type `void'. */
7773 parmtype = TREE_VALUE (parmlist);
7774 if (VOID_TYPE_P (parmtype))
7776 if (more_const_call_expr_args_p (&iter))
7777 return END_BUILTINS;
7778 return DECL_FUNCTION_CODE (fndecl);
7781 if (! more_const_call_expr_args_p (&iter))
7782 return END_BUILTINS;
7784 arg = next_const_call_expr_arg (&iter);
7785 argtype = TREE_TYPE (arg);
7787 if (SCALAR_FLOAT_TYPE_P (parmtype))
7789 if (! SCALAR_FLOAT_TYPE_P (argtype))
7790 return END_BUILTINS;
7792 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7794 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7795 return END_BUILTINS;
7797 else if (POINTER_TYPE_P (parmtype))
7799 if (! POINTER_TYPE_P (argtype))
7800 return END_BUILTINS;
7802 else if (INTEGRAL_TYPE_P (parmtype))
7804 if (! INTEGRAL_TYPE_P (argtype))
7805 return END_BUILTINS;
7807 else
7808 return END_BUILTINS;
7811 /* Variable-length argument list. */
7812 return DECL_FUNCTION_CODE (fndecl);
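/* Illustrative use of builtin_mathfn_code (CALL is a hypothetical
   tree):

     if (builtin_mathfn_code (call) == BUILT_IN_SQRT)
       ... the call is a well-formed sqrt call ...

   Because the argument types are checked against the prototype above,
   END_BUILTINS is returned for mismatched calls and callers need no
   further validation.  */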
7815 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7816 evaluate to a constant. */
7818 static tree
7819 fold_builtin_constant_p (tree arg)
7821 /* We return 1 for a numeric type that's known to be a constant
7822 value at compile-time or for an aggregate type that's a
7823 literal constant. */
7824 STRIP_NOPS (arg);
7826 /* If we know this is a constant, return the constant one. */
7827 if (CONSTANT_CLASS_P (arg)
7828 || (TREE_CODE (arg) == CONSTRUCTOR
7829 && TREE_CONSTANT (arg)))
7830 return integer_one_node;
7831 if (TREE_CODE (arg) == ADDR_EXPR)
7833 tree op = TREE_OPERAND (arg, 0);
7834 if (TREE_CODE (op) == STRING_CST
7835 || (TREE_CODE (op) == ARRAY_REF
7836 && integer_zerop (TREE_OPERAND (op, 1))
7837 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7838 return integer_one_node;
7841 /* If this expression has side effects, show we don't know it to be a
7842 constant.  Likewise if it's a pointer or aggregate type, since in
7843 those cases we only want literals, as those are only optimized
7844 when generating RTL, not later.
7845 And finally, if we are compiling an initializer, not code, we
7846 need to return a definite result now; there's not going to be any
7847 more optimization done. */
7848 if (TREE_SIDE_EFFECTS (arg)
7849 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7850 || POINTER_TYPE_P (TREE_TYPE (arg))
7851 || cfun == 0
7852 || folding_initializer
7853 || force_folding_builtin_constant_p)
7854 return integer_zero_node;
7856 return NULL_TREE;
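/* Examples of the folding above (illustrative):

     __builtin_constant_p (4 * 1024)  -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")     -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)       -> 0   (side effects)

   A plain variable yields NULL_TREE here, deferring the answer until
   later passes can prove the value constant or not.  */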
7859 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7860 return it as a truthvalue. */
7862 static tree
7863 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7864 tree predictor)
7866 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7868 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7869 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7870 ret_type = TREE_TYPE (TREE_TYPE (fn));
7871 pred_type = TREE_VALUE (arg_types);
7872 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7874 pred = fold_convert_loc (loc, pred_type, pred);
7875 expected = fold_convert_loc (loc, expected_type, expected);
7876 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7877 predictor);
7879 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7880 build_int_cst (ret_type, 0));
7883 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7884 Return NULL_TREE if no simplification is possible. */
7886 tree
7887 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7889 tree inner, fndecl, inner_arg0;
7890 enum tree_code code;
7892 /* Distribute the expected value over short-circuiting operators.
7893 See through the cast from truthvalue_type_node to long. */
7894 inner_arg0 = arg0;
7895 while (CONVERT_EXPR_P (inner_arg0)
7896 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7897 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7898 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7900 /* If this is a builtin_expect within a builtin_expect, keep the
7901 inner one.  See through a comparison against a constant; it
7902 might have been added to create a truthvalue. */
7903 inner = inner_arg0;
7905 if (COMPARISON_CLASS_P (inner)
7906 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7907 inner = TREE_OPERAND (inner, 0);
7909 if (TREE_CODE (inner) == CALL_EXPR
7910 && (fndecl = get_callee_fndecl (inner))
7911 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7912 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7913 return arg0;
7915 inner = inner_arg0;
7916 code = TREE_CODE (inner);
7917 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7919 tree op0 = TREE_OPERAND (inner, 0);
7920 tree op1 = TREE_OPERAND (inner, 1);
7922 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7923 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7924 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7926 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7929 /* If the argument isn't invariant then there's nothing else we can do. */
7930 if (!TREE_CONSTANT (inner_arg0))
7931 return NULL_TREE;
7933 /* If we expect that a comparison against the argument will fold to
7934 a constant return the constant. In practice, this means a true
7935 constant or the address of a non-weak symbol. */
7936 inner = inner_arg0;
7937 STRIP_NOPS (inner);
7938 if (TREE_CODE (inner) == ADDR_EXPR)
7942 inner = TREE_OPERAND (inner, 0);
7944 while (TREE_CODE (inner) == COMPONENT_REF
7945 || TREE_CODE (inner) == ARRAY_REF);
7946 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7947 return NULL_TREE;
7950 /* Otherwise, ARG0 already has the proper type for the return value. */
7951 return arg0;
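/* Sketch of the short-circuit distribution above:

     __builtin_expect (a && b, 1)

   is folded to the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the TRUTH_ANDIF_EXPR carries its own hint.  */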
7954 /* Fold a call to __builtin_classify_type with argument ARG. */
7956 static tree
7957 fold_builtin_classify_type (tree arg)
7959 if (arg == 0)
7960 return build_int_cst (integer_type_node, no_type_class);
7962 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7965 /* Fold a call to __builtin_strlen with argument ARG. */
7967 static tree
7968 fold_builtin_strlen (location_t loc, tree type, tree arg)
7970 if (!validate_arg (arg, POINTER_TYPE))
7971 return NULL_TREE;
7972 else
7974 tree len = c_strlen (arg, 0);
7976 if (len)
7977 return fold_convert_loc (loc, type, len);
7979 return NULL_TREE;
7983 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7985 static tree
7986 fold_builtin_inf (location_t loc, tree type, int warn)
7988 REAL_VALUE_TYPE real;
7990 /* __builtin_inff is intended to be usable to define INFINITY on all
7991 targets. If an infinity is not available, INFINITY expands "to a
7992 positive constant of type float that overflows at translation
7993 time", footnote "In this case, using INFINITY will violate the
7994 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7995 Thus we pedwarn to ensure this constraint violation is
7996 diagnosed. */
7997 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7998 pedwarn (loc, 0, "target format does not support infinity");
8000 real_inf (&real);
8001 return build_real (type, real);
8004 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8005 NULL_TREE if no simplification can be made. */
8007 static tree
8008 fold_builtin_sincos (location_t loc,
8009 tree arg0, tree arg1, tree arg2)
8011 tree type;
8012 tree fndecl, call = NULL_TREE;
8014 if (!validate_arg (arg0, REAL_TYPE)
8015 || !validate_arg (arg1, POINTER_TYPE)
8016 || !validate_arg (arg2, POINTER_TYPE))
8017 return NULL_TREE;
8019 type = TREE_TYPE (arg0);
8021 /* Canonicalize sincos to cexpi. */
8022 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8023 if (fn == END_BUILTINS)
8024 return NULL_TREE;
8026 /* Calculate the result when the argument is a constant. */
8027 if (TREE_CODE (arg0) == REAL_CST)
8029 tree complex_type = build_complex_type (type);
8030 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8032 if (!call)
8034 if (!targetm.libc_has_function (function_c99_math_complex)
8035 || !builtin_decl_implicit_p (fn))
8036 return NULL_TREE;
8037 fndecl = builtin_decl_explicit (fn);
8038 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8039 call = builtin_save_expr (call);
8042 return build2 (COMPOUND_EXPR, void_type_node,
8043 build2 (MODIFY_EXPR, void_type_node,
8044 build_fold_indirect_ref_loc (loc, arg1),
8045 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8046 build2 (MODIFY_EXPR, void_type_node,
8047 build_fold_indirect_ref_loc (loc, arg2),
8048 fold_build1_loc (loc, REALPART_EXPR, type, call)));
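/* Sketch of the canonicalization above (S and C are hypothetical
   lvalues):

     sincos (x, &s, &c)

   becomes, with TMP the saved cexpi call,

     TMP = cexpi (x), s = __imag TMP, c = __real TMP;

   since cexpi (x) == cos (x) + i*sin (x).  */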
8051 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8052 Return NULL_TREE if no simplification can be made. */
8054 static tree
8055 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8057 if (!validate_arg (arg1, POINTER_TYPE)
8058 || !validate_arg (arg2, POINTER_TYPE)
8059 || !validate_arg (len, INTEGER_TYPE))
8060 return NULL_TREE;
8062 /* If the LEN parameter is zero, return zero. */
8063 if (integer_zerop (len))
8064 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8065 arg1, arg2);
8067 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8068 if (operand_equal_p (arg1, arg2, 0))
8069 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8071 /* If the LEN parameter is one, return an expression corresponding to
8072 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8073 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8075 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8076 tree cst_uchar_ptr_node
8077 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8079 tree ind1
8080 = fold_convert_loc (loc, integer_type_node,
8081 build1 (INDIRECT_REF, cst_uchar_node,
8082 fold_convert_loc (loc,
8083 cst_uchar_ptr_node,
8084 arg1)));
8085 tree ind2
8086 = fold_convert_loc (loc, integer_type_node,
8087 build1 (INDIRECT_REF, cst_uchar_node,
8088 fold_convert_loc (loc,
8089 cst_uchar_ptr_node,
8090 arg2)));
8091 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8094 return NULL_TREE;
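/* Examples of the folds above (illustrative):

     memcmp (p, q, 0) -> 0
     memcmp (p, p, n) -> 0
     memcmp (p, q, 1) -> *(const unsigned char *) p
                         - *(const unsigned char *) q  */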
8097 /* Fold a call to builtin isascii with argument ARG. */
8099 static tree
8100 fold_builtin_isascii (location_t loc, tree arg)
8102 if (!validate_arg (arg, INTEGER_TYPE))
8103 return NULL_TREE;
8104 else
8106 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8107 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8108 build_int_cst (integer_type_node,
8109 ~ (unsigned HOST_WIDE_INT) 0x7f));
8110 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8111 arg, integer_zero_node);
8115 /* Fold a call to builtin toascii with argument ARG. */
8117 static tree
8118 fold_builtin_toascii (location_t loc, tree arg)
8120 if (!validate_arg (arg, INTEGER_TYPE))
8121 return NULL_TREE;
8123 /* Transform toascii(c) -> (c & 0x7f). */
8124 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8125 build_int_cst (integer_type_node, 0x7f));
8128 /* Fold a call to builtin isdigit with argument ARG. */
8130 static tree
8131 fold_builtin_isdigit (location_t loc, tree arg)
8133 if (!validate_arg (arg, INTEGER_TYPE))
8134 return NULL_TREE;
8135 else
8137 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8138 /* According to the C standard, isdigit is unaffected by locale.
8139 However, it definitely is affected by the target character set. */
8140 unsigned HOST_WIDE_INT target_digit0
8141 = lang_hooks.to_target_charset ('0');
8143 if (target_digit0 == 0)
8144 return NULL_TREE;
8146 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8147 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8148 build_int_cst (unsigned_type_node, target_digit0));
8149 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8150 build_int_cst (unsigned_type_node, 9));
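/* Worked example of the transform above, assuming an ASCII target
   where '0' is 48:

     isdigit (c) -> (unsigned) c - 48 <= 9

   which is true exactly for the ten characters '0' .. '9'.  */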
8154 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8156 static tree
8157 fold_builtin_fabs (location_t loc, tree arg, tree type)
8159 if (!validate_arg (arg, REAL_TYPE))
8160 return NULL_TREE;
8162 arg = fold_convert_loc (loc, type, arg);
8163 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8166 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8168 static tree
8169 fold_builtin_abs (location_t loc, tree arg, tree type)
8171 if (!validate_arg (arg, INTEGER_TYPE))
8172 return NULL_TREE;
8174 arg = fold_convert_loc (loc, type, arg);
8175 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8178 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8180 static tree
8181 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8183 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8184 if (validate_arg (arg0, REAL_TYPE)
8185 && validate_arg (arg1, REAL_TYPE)
8186 && validate_arg (arg2, REAL_TYPE)
8187 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8188 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8190 return NULL_TREE;
8193 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8195 static tree
8196 fold_builtin_carg (location_t loc, tree arg, tree type)
8198 if (validate_arg (arg, COMPLEX_TYPE)
8199 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8201 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8203 if (atan2_fn)
8205 tree new_arg = builtin_save_expr (arg);
8206 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8207 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8208 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8212 return NULL_TREE;
8215 /* Fold a call to builtin frexp; we can assume the base is 2. */
8217 static tree
8218 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8220 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8221 return NULL_TREE;
8223 STRIP_NOPS (arg0);
8225 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8226 return NULL_TREE;
8228 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8230 /* Proceed if a valid pointer type was passed in. */
8231 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8233 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8234 tree frac, exp;
8236 switch (value->cl)
8238 case rvc_zero:
8239 /* For +-0, return (*exp = 0, +-0). */
8240 exp = integer_zero_node;
8241 frac = arg0;
8242 break;
8243 case rvc_nan:
8244 case rvc_inf:
8245 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8246 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8247 case rvc_normal:
8249 /* Since the frexp function always expects base 2, and in
8250 GCC normalized significands are already in the range
8251 [0.5, 1.0), we have exactly what frexp wants. */
8252 REAL_VALUE_TYPE frac_rvt = *value;
8253 SET_REAL_EXP (&frac_rvt, 0);
8254 frac = build_real (rettype, frac_rvt);
8255 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8257 break;
8258 default:
8259 gcc_unreachable ();
8262 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8263 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8264 TREE_SIDE_EFFECTS (arg1) = 1;
8265 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8268 return NULL_TREE;
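/* Worked example of the rvc_normal case above: for ARG0 == 8.0 the
   normalized significand is 0.5 and REAL_EXP is 4, so the result is
   (*arg1 = 4, 0.5), matching frexp (8.0, &e) since 8.0 == 0.5 * 2**4.  */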
8271 /* Fold a call to builtin modf. */
8273 static tree
8274 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8276 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8277 return NULL_TREE;
8279 STRIP_NOPS (arg0);
8281 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8282 return NULL_TREE;
8284 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8286 /* Proceed if a valid pointer type was passed in. */
8287 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8289 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8290 REAL_VALUE_TYPE trunc, frac;
8292 switch (value->cl)
8294 case rvc_nan:
8295 case rvc_zero:
8296 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8297 trunc = frac = *value;
8298 break;
8299 case rvc_inf:
8300 /* For +-Inf, return (*arg1 = arg0, +-0). */
8301 frac = dconst0;
8302 frac.sign = value->sign;
8303 trunc = *value;
8304 break;
8305 case rvc_normal:
8306 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8307 real_trunc (&trunc, VOIDmode, value);
8308 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8309 /* If the original number was negative and already
8310 integral, then the fractional part is -0.0. */
8311 if (value->sign && frac.cl == rvc_zero)
8312 frac.sign = value->sign;
8313 break;
8316 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8317 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8318 build_real (rettype, trunc));
8319 TREE_SIDE_EFFECTS (arg1) = 1;
8320 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8321 build_real (rettype, frac));
8324 return NULL_TREE;
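/* Worked examples of the cases above: modf (2.5, &i) folds to
   (*arg1 = 2.0, 0.5), while modf (-2.0, &i) folds to
   (*arg1 = -2.0, -0.0), the -0.0 coming from the sign fix-up for
   already-integral negative values.  */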
8327 /* Given a location LOC, an interclass builtin function decl FNDECL
8328 and its single argument ARG, return a folded expression computing
8329 the same, or NULL_TREE if we either couldn't or didn't want to fold
8330 (the latter happens if there's an RTL instruction available). */
8332 static tree
8333 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8335 machine_mode mode;
8337 if (!validate_arg (arg, REAL_TYPE))
8338 return NULL_TREE;
8340 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8341 return NULL_TREE;
8343 mode = TYPE_MODE (TREE_TYPE (arg));
8345 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8347 /* If there is no optab, try generic code. */
8348 switch (DECL_FUNCTION_CODE (fndecl))
8350 tree result;
8352 CASE_FLT_FN (BUILT_IN_ISINF):
8354 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8355 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8356 tree type = TREE_TYPE (arg);
8357 REAL_VALUE_TYPE r;
8358 char buf[128];
8360 if (is_ibm_extended)
8362 /* NaN and Inf are encoded in the high-order double value
8363 only. The low-order value is not significant. */
8364 type = double_type_node;
8365 mode = DFmode;
8366 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8368 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8369 real_from_string (&r, buf);
8370 result = build_call_expr (isgr_fn, 2,
8371 fold_build1_loc (loc, ABS_EXPR, type, arg),
8372 build_real (type, r));
8373 return result;
8375 CASE_FLT_FN (BUILT_IN_FINITE):
8376 case BUILT_IN_ISFINITE:
8378 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8379 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8380 tree type = TREE_TYPE (arg);
8381 REAL_VALUE_TYPE r;
8382 char buf[128];
8384 if (is_ibm_extended)
8386 /* NaN and Inf are encoded in the high-order double value
8387 only. The low-order value is not significant. */
8388 type = double_type_node;
8389 mode = DFmode;
8390 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8392 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8393 real_from_string (&r, buf);
8394 result = build_call_expr (isle_fn, 2,
8395 fold_build1_loc (loc, ABS_EXPR, type, arg),
8396 build_real (type, r));
8397 /*result = fold_build2_loc (loc, UNGT_EXPR,
8398 TREE_TYPE (TREE_TYPE (fndecl)),
8399 fold_build1_loc (loc, ABS_EXPR, type, arg),
8400 build_real (type, r));
8401 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8402 TREE_TYPE (TREE_TYPE (fndecl)),
8403 result);*/
8404 return result;
8406 case BUILT_IN_ISNORMAL:
8408 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8409 islessequal(fabs(x),DBL_MAX). */
8410 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8411 tree type = TREE_TYPE (arg);
8412 tree orig_arg, max_exp, min_exp;
8413 machine_mode orig_mode = mode;
8414 REAL_VALUE_TYPE rmax, rmin;
8415 char buf[128];
8417 orig_arg = arg = builtin_save_expr (arg);
8418 if (is_ibm_extended)
8420 /* Use double to test the normal range of IBM extended
8421 precision. Emin for IBM extended precision is
8422 different to emin for IEEE double, being 53 higher
8423 since the low double exponent is at least 53 lower
8424 than the high double exponent. */
8425 type = double_type_node;
8426 mode = DFmode;
8427 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8429 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8431 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8432 real_from_string (&rmax, buf);
8433 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8434 real_from_string (&rmin, buf);
8435 max_exp = build_real (type, rmax);
8436 min_exp = build_real (type, rmin);
8438 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8439 if (is_ibm_extended)
8441 /* Testing the high end of the range is done just using
8442 the high double, using the same test as isfinite().
8443 For the subnormal end of the range we first test the
8444 high double, then if its magnitude is equal to the
8445 limit of 0x1p-969, we test whether the low double is
8446 non-zero and opposite sign to the high double. */
8447 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8448 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8449 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8450 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8451 arg, min_exp);
8452 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8453 complex_double_type_node, orig_arg);
8454 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8455 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8456 tree zero = build_real (type, dconst0);
8457 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8458 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8459 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8460 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8461 fold_build3 (COND_EXPR,
8462 integer_type_node,
8463 hilt, logt, lolt));
8464 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8465 eq_min, ok_lo);
8466 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8467 gt_min, eq_min);
8469 else
8471 tree const isge_fn
8472 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8473 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8475 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8476 max_exp, min_exp);
8477 return result;
8479 default:
8480 break;
8483 return NULL_TREE;
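/* Sketch of the generic expansions above for IEEE double, where
   get_max_float yields DBL_MAX and the "0x1p%d" string built from
   emin - 1 yields DBL_MIN:

     isinf (x)    -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> islessequal (fabs (x), DBL_MAX)
                     & isgreaterequal (fabs (x), DBL_MIN)  */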
8486 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8487 ARG is the argument for the call. */
8489 static tree
8490 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8492 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8494 if (!validate_arg (arg, REAL_TYPE))
8495 return NULL_TREE;
8497 switch (builtin_index)
8499 case BUILT_IN_ISINF:
8500 if (!HONOR_INFINITIES (arg))
8501 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8503 return NULL_TREE;
8505 case BUILT_IN_ISINF_SIGN:
8507 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8508 /* In a boolean context, GCC will fold the inner COND_EXPR to
8509 1. So e.g. "if (isinf_sign(x))" would be folded to just
8510 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8511 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8512 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8513 tree tmp = NULL_TREE;
8515 arg = builtin_save_expr (arg);
8517 if (signbit_fn && isinf_fn)
8519 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8520 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8522 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8523 signbit_call, integer_zero_node);
8524 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8525 isinf_call, integer_zero_node);
8527 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8528 integer_minus_one_node, integer_one_node);
8529 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8530 isinf_call, tmp,
8531 integer_zero_node);
8534 return tmp;
8537 case BUILT_IN_ISFINITE:
8538 if (!HONOR_NANS (arg)
8539 && !HONOR_INFINITIES (arg))
8540 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8542 return NULL_TREE;
8544 case BUILT_IN_ISNAN:
8545 if (!HONOR_NANS (arg))
8546 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8549 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8550 if (is_ibm_extended)
8552 /* NaN and Inf are encoded in the high-order double value
8553 only. The low-order value is not significant. */
8554 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8557 arg = builtin_save_expr (arg);
8558 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8560 default:
8561 gcc_unreachable ();
8565 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8566 This builtin will generate code to return the appropriate floating
8567 point classification depending on the value of the floating point
8568 number passed in. The possible return values must be supplied as
8569 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8570 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8571 one floating-point argument, which is "type generic". */
8573 static tree
8574 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8576 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8577 arg, type, res, tmp;
8578 machine_mode mode;
8579 REAL_VALUE_TYPE r;
8580 char buf[128];
8582 /* Verify the required arguments in the original call. */
8583 if (nargs != 6
8584 || !validate_arg (args[0], INTEGER_TYPE)
8585 || !validate_arg (args[1], INTEGER_TYPE)
8586 || !validate_arg (args[2], INTEGER_TYPE)
8587 || !validate_arg (args[3], INTEGER_TYPE)
8588 || !validate_arg (args[4], INTEGER_TYPE)
8589 || !validate_arg (args[5], REAL_TYPE))
8590 return NULL_TREE;
8592 fp_nan = args[0];
8593 fp_infinite = args[1];
8594 fp_normal = args[2];
8595 fp_subnormal = args[3];
8596 fp_zero = args[4];
8597 arg = args[5];
8598 type = TREE_TYPE (arg);
8599 mode = TYPE_MODE (type);
8600 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8602 /* fpclassify(x) ->
8603 isnan(x) ? FP_NAN :
8604 (fabs(x) == Inf ? FP_INFINITE :
8605 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8606 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8608 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8609 build_real (type, dconst0));
8610 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8611 tmp, fp_zero, fp_subnormal);
8613 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8614 real_from_string (&r, buf);
8615 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8616 arg, build_real (type, r));
8617 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8619 if (HONOR_INFINITIES (mode))
8621 real_inf (&r);
8622 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8623 build_real (type, r));
8624 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8625 fp_infinite, res);
8628 if (HONOR_NANS (mode))
8630 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8631 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8634 return res;
8637 /* Fold a call to an unordered comparison function such as
8638 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8639 being called and ARG0 and ARG1 are the arguments for the call.
8640 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8641 the opposite of the desired result. UNORDERED_CODE is used
8642 for modes that can hold NaNs and ORDERED_CODE is used for
8643 the rest. */
8645 static tree
8646 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8647 enum tree_code unordered_code,
8648 enum tree_code ordered_code)
8650 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8651 enum tree_code code;
8652 tree type0, type1;
8653 enum tree_code code0, code1;
8654 tree cmp_type = NULL_TREE;
8656 type0 = TREE_TYPE (arg0);
8657 type1 = TREE_TYPE (arg1);
8659 code0 = TREE_CODE (type0);
8660 code1 = TREE_CODE (type1);
8662 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8663 /* Choose the wider of two real types. */
8664 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8665 ? type0 : type1;
8666 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8667 cmp_type = type0;
8668 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8669 cmp_type = type1;
8671 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8672 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8674 if (unordered_code == UNORDERED_EXPR)
8676 if (!HONOR_NANS (arg0))
8677 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8678 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8681 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8682 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8683 fold_build2_loc (loc, code, type, arg0, arg1));
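/* Example of the inversion above: isgreater (x, y) arrives with
   UNORDERED_CODE == UNLE_EXPR and ORDERED_CODE == LE_EXPR, so when
   NaNs are honored it folds to !(x unle y) and otherwise to
   !(x <= y); either way a NaN operand makes the result false.  */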
8686 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8687 arithmetic if it can never overflow, or into internal functions that
8688 return both the result of the arithmetic and an overflow boolean flag
8689 in a complex integer result, or into some other check for overflow.
8690 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8691 checking part of that. */
8693 static tree
8694 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8695 tree arg0, tree arg1, tree arg2)
8697 enum internal_fn ifn = IFN_LAST;
8698 /* The code of the expression corresponding to the type-generic
8699 built-in, or ERROR_MARK for the type-specific ones. */
8700 enum tree_code opcode = ERROR_MARK;
8701 bool ovf_only = false;
8703 switch (fcode)
8705 case BUILT_IN_ADD_OVERFLOW_P:
8706 ovf_only = true;
8707 /* FALLTHRU */
8708 case BUILT_IN_ADD_OVERFLOW:
8709 opcode = PLUS_EXPR;
8710 /* FALLTHRU */
8711 case BUILT_IN_SADD_OVERFLOW:
8712 case BUILT_IN_SADDL_OVERFLOW:
8713 case BUILT_IN_SADDLL_OVERFLOW:
8714 case BUILT_IN_UADD_OVERFLOW:
8715 case BUILT_IN_UADDL_OVERFLOW:
8716 case BUILT_IN_UADDLL_OVERFLOW:
8717 ifn = IFN_ADD_OVERFLOW;
8718 break;
8719 case BUILT_IN_SUB_OVERFLOW_P:
8720 ovf_only = true;
8721 /* FALLTHRU */
8722 case BUILT_IN_SUB_OVERFLOW:
8723 opcode = MINUS_EXPR;
8724 /* FALLTHRU */
8725 case BUILT_IN_SSUB_OVERFLOW:
8726 case BUILT_IN_SSUBL_OVERFLOW:
8727 case BUILT_IN_SSUBLL_OVERFLOW:
8728 case BUILT_IN_USUB_OVERFLOW:
8729 case BUILT_IN_USUBL_OVERFLOW:
8730 case BUILT_IN_USUBLL_OVERFLOW:
8731 ifn = IFN_SUB_OVERFLOW;
8732 break;
8733 case BUILT_IN_MUL_OVERFLOW_P:
8734 ovf_only = true;
8735 /* FALLTHRU */
8736 case BUILT_IN_MUL_OVERFLOW:
8737 opcode = MULT_EXPR;
8738 /* FALLTHRU */
8739 case BUILT_IN_SMUL_OVERFLOW:
8740 case BUILT_IN_SMULL_OVERFLOW:
8741 case BUILT_IN_SMULLL_OVERFLOW:
8742 case BUILT_IN_UMUL_OVERFLOW:
8743 case BUILT_IN_UMULL_OVERFLOW:
8744 case BUILT_IN_UMULLL_OVERFLOW:
8745 ifn = IFN_MUL_OVERFLOW;
8746 break;
8747 default:
8748 gcc_unreachable ();
8751 /* For the "generic" overloads, the first two arguments can have different
8752 types and the last argument determines the target type to use to check
8753 for overflow. The arguments of the other overloads all have the same
8754 type. */
8755 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8757 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8758 arguments are constant, attempt to fold the built-in call into a constant
8759 expression indicating whether or not it detected an overflow. */
8760 if (ovf_only
8761 && TREE_CODE (arg0) == INTEGER_CST
8762 && TREE_CODE (arg1) == INTEGER_CST)
8763 /* Perform the computation in the target type and check for overflow. */
8764 return omit_one_operand_loc (loc, boolean_type_node,
8765 arith_overflowed_p (opcode, type, arg0, arg1)
8766 ? boolean_true_node : boolean_false_node,
8767 arg2);
8769 tree ctype = build_complex_type (type);
8770 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8771 2, arg0, arg1);
8772 tree tgt = save_expr (call);
8773 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8774 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8775 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8777 if (ovf_only)
8778 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8780 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8781 tree store
8782 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8783 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
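/* Sketch of the generic fold above for __builtin_add_overflow
   (A, B and R are hypothetical), with TGT the saved internal call:

     TGT = .ADD_OVERFLOW (a, b);
     *&r = REALPART_EXPR <TGT>, (_Bool) IMAGPART_EXPR <TGT>;

   For __builtin_add_overflow_p with two constant operands the whole
   call collapses to a boolean constant via arith_overflowed_p.  */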
8786 /* Fold a call to __builtin_FILE to a constant string. */
8788 static inline tree
8789 fold_builtin_FILE (location_t loc)
8791 if (const char *fname = LOCATION_FILE (loc))
8792 return build_string_literal (strlen (fname) + 1, fname);
8794 return build_string_literal (1, "");
8797 /* Fold a call to __builtin_FUNCTION to a constant string. */
8799 static inline tree
8800 fold_builtin_FUNCTION ()
8802 const char *name = "";
8804 if (current_function_decl)
8805 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8807 return build_string_literal (strlen (name) + 1, name);
8810 /* Fold a call to __builtin_LINE to an integer constant. */
8812 static inline tree
8813 fold_builtin_LINE (location_t loc, tree type)
8815 return build_int_cst (type, LOCATION_LINE (loc));
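/* Example of the three folds above (values illustrative): inside a
   function `f' at line 42 of "a.c", __builtin_FILE () folds to the
   string "a.c", __builtin_FUNCTION () to "f" and __builtin_LINE ()
   to 42, all as compile-time constants taken from the call's
   location.  */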
8818 /* Fold a call to built-in function FNDECL with 0 arguments.
8819 This function returns NULL_TREE if no simplification was possible. */
8821 static tree
8822 fold_builtin_0 (location_t loc, tree fndecl)
8824 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8825 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8826 switch (fcode)
8828 case BUILT_IN_FILE:
8829 return fold_builtin_FILE (loc);
8831 case BUILT_IN_FUNCTION:
8832 return fold_builtin_FUNCTION ();
8834 case BUILT_IN_LINE:
8835 return fold_builtin_LINE (loc, type);
8837 CASE_FLT_FN (BUILT_IN_INF):
8838 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8839 case BUILT_IN_INFD32:
8840 case BUILT_IN_INFD64:
8841 case BUILT_IN_INFD128:
8842 return fold_builtin_inf (loc, type, true);
8844 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8845 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8846 return fold_builtin_inf (loc, type, false);
8848 case BUILT_IN_CLASSIFY_TYPE:
8849 return fold_builtin_classify_type (NULL_TREE);
8851 default:
8852 break;
8854 return NULL_TREE;
8857 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8858 This function returns NULL_TREE if no simplification was possible. */
8860 static tree
8861 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8864 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8866 if (TREE_CODE (arg0) == ERROR_MARK)
8867 return NULL_TREE;
8869 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8870 return ret;
8872 switch (fcode)
8874 case BUILT_IN_CONSTANT_P:
8876 tree val = fold_builtin_constant_p (arg0);
8878 /* Gimplification will pull the CALL_EXPR for the builtin out of
8879 an if condition. When not optimizing, we'll not CSE it back.
8880 To avoid regressions that manifest as link errors, return false now. */
8881 if (!val && !optimize)
8882 val = integer_zero_node;
8884 return val;
8887 case BUILT_IN_CLASSIFY_TYPE:
8888 return fold_builtin_classify_type (arg0);
8890 case BUILT_IN_STRLEN:
8891 return fold_builtin_strlen (loc, type, arg0);
8893 CASE_FLT_FN (BUILT_IN_FABS):
8894 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8895 case BUILT_IN_FABSD32:
8896 case BUILT_IN_FABSD64:
8897 case BUILT_IN_FABSD128:
8898 return fold_builtin_fabs (loc, arg0, type);
8900 case BUILT_IN_ABS:
8901 case BUILT_IN_LABS:
8902 case BUILT_IN_LLABS:
8903 case BUILT_IN_IMAXABS:
8904 return fold_builtin_abs (loc, arg0, type);
8906 CASE_FLT_FN (BUILT_IN_CONJ):
8907 if (validate_arg (arg0, COMPLEX_TYPE)
8908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8909 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8910 break;
8912 CASE_FLT_FN (BUILT_IN_CREAL):
8913 if (validate_arg (arg0, COMPLEX_TYPE)
8914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8915 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8916 break;
8918 CASE_FLT_FN (BUILT_IN_CIMAG):
8919 if (validate_arg (arg0, COMPLEX_TYPE)
8920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8921 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8922 break;
8924 CASE_FLT_FN (BUILT_IN_CARG):
8925 return fold_builtin_carg (loc, arg0, type);
8927 case BUILT_IN_ISASCII:
8928 return fold_builtin_isascii (loc, arg0);
8930 case BUILT_IN_TOASCII:
8931 return fold_builtin_toascii (loc, arg0);
8933 case BUILT_IN_ISDIGIT:
8934 return fold_builtin_isdigit (loc, arg0);
8936 CASE_FLT_FN (BUILT_IN_FINITE):
8937 case BUILT_IN_FINITED32:
8938 case BUILT_IN_FINITED64:
8939 case BUILT_IN_FINITED128:
8940 case BUILT_IN_ISFINITE:
8942 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8943 if (ret)
8944 return ret;
8945 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8948 CASE_FLT_FN (BUILT_IN_ISINF):
8949 case BUILT_IN_ISINFD32:
8950 case BUILT_IN_ISINFD64:
8951 case BUILT_IN_ISINFD128:
8953 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8954 if (ret)
8955 return ret;
8956 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8959 case BUILT_IN_ISNORMAL:
8960 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8962 case BUILT_IN_ISINF_SIGN:
8963 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8965 CASE_FLT_FN (BUILT_IN_ISNAN):
8966 case BUILT_IN_ISNAND32:
8967 case BUILT_IN_ISNAND64:
8968 case BUILT_IN_ISNAND128:
8969 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8971 case BUILT_IN_FREE:
8972 if (integer_zerop (arg0))
8973 return build_empty_stmt (loc);
8974 break;
8976 default:
8977 break;
8980 return NULL_TREE;
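/* Editorial sketch (not part of builtins.c): the BUILT_IN_CONSTANT_P case
   above keeps the builtin alive when optimizing, so later passes can still
   prove constancy, but forces a 0 answer right away at -O0.  */
#if 0 /* Illustrative example, compiled separately.  */
int
probe (int x)
{
  /* -O0: folds to 0 here.  With optimization it survives this folder and
     may later become 1, e.g. for probe (42) after inlining.  */
  return __builtin_constant_p (x);
}
#endif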
8984 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8985 This function returns NULL_TREE if no simplification was possible. */
8987 static tree
8988 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8990 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8993 if (TREE_CODE (arg0) == ERROR_MARK
8994 || TREE_CODE (arg1) == ERROR_MARK)
8995 return NULL_TREE;
8997 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8998 return ret;
9000 switch (fcode)
9002 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9003 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9004 if (validate_arg (arg0, REAL_TYPE)
9005 && validate_arg (arg1, POINTER_TYPE))
9006 return do_mpfr_lgamma_r (arg0, arg1, type);
9007 break;
9009 CASE_FLT_FN (BUILT_IN_FREXP):
9010 return fold_builtin_frexp (loc, arg0, arg1, type);
9012 CASE_FLT_FN (BUILT_IN_MODF):
9013 return fold_builtin_modf (loc, arg0, arg1, type);
9015 case BUILT_IN_STRSPN:
9016 return fold_builtin_strspn (loc, arg0, arg1);
9018 case BUILT_IN_STRCSPN:
9019 return fold_builtin_strcspn (loc, arg0, arg1);
9021 case BUILT_IN_STRPBRK:
9022 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9024 case BUILT_IN_EXPECT:
9025 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9027 case BUILT_IN_ISGREATER:
9028 return fold_builtin_unordered_cmp (loc, fndecl,
9029 arg0, arg1, UNLE_EXPR, LE_EXPR);
9030 case BUILT_IN_ISGREATEREQUAL:
9031 return fold_builtin_unordered_cmp (loc, fndecl,
9032 arg0, arg1, UNLT_EXPR, LT_EXPR);
9033 case BUILT_IN_ISLESS:
9034 return fold_builtin_unordered_cmp (loc, fndecl,
9035 arg0, arg1, UNGE_EXPR, GE_EXPR);
9036 case BUILT_IN_ISLESSEQUAL:
9037 return fold_builtin_unordered_cmp (loc, fndecl,
9038 arg0, arg1, UNGT_EXPR, GT_EXPR);
9039 case BUILT_IN_ISLESSGREATER:
9040 return fold_builtin_unordered_cmp (loc, fndecl,
9041 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9042 case BUILT_IN_ISUNORDERED:
9043 return fold_builtin_unordered_cmp (loc, fndecl,
9044 arg0, arg1, UNORDERED_EXPR,
9045 NOP_EXPR);
9047 /* We do the folding for va_start in the expander. */
9048 case BUILT_IN_VA_START:
9049 break;
9051 case BUILT_IN_OBJECT_SIZE:
9052 return fold_builtin_object_size (arg0, arg1);
9054 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9055 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9057 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9058 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9060 default:
9061 break;
9063 return NULL_TREE;
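/* Editorial sketch (not part of builtins.c): the unordered-comparison cases
   above fold e.g. isgreater (x, y) into the negation of an UNLE_EXPR, so no
   "invalid" exception is raised for quiet NaNs, unlike a plain x > y.  */
#if 0 /* Illustrative example, compiled separately.  */
#include <math.h>
int
safe_gt (double x, double y)
{
  /* Folds to roughly: !(x UNLE y) -- false whenever either operand is NaN.  */
  return isgreater (x, y);
}
#endif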
9066 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9067 and ARG2.
9068 This function returns NULL_TREE if no simplification was possible. */
9070 static tree
9071 fold_builtin_3 (location_t loc, tree fndecl,
9072 tree arg0, tree arg1, tree arg2)
9074 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9075 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9077 if (TREE_CODE (arg0) == ERROR_MARK
9078 || TREE_CODE (arg1) == ERROR_MARK
9079 || TREE_CODE (arg2) == ERROR_MARK)
9080 return NULL_TREE;
9082 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9083 arg0, arg1, arg2))
9084 return ret;
9086 switch (fcode)
9089 CASE_FLT_FN (BUILT_IN_SINCOS):
9090 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9092 CASE_FLT_FN (BUILT_IN_FMA):
9093 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9094 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9096 CASE_FLT_FN (BUILT_IN_REMQUO):
9097 if (validate_arg (arg0, REAL_TYPE)
9098 && validate_arg (arg1, REAL_TYPE)
9099 && validate_arg (arg2, POINTER_TYPE))
9100 return do_mpfr_remquo (arg0, arg1, arg2);
9101 break;
9103 case BUILT_IN_MEMCMP:
9104 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9106 case BUILT_IN_EXPECT:
9107 return fold_builtin_expect (loc, arg0, arg1, arg2);
9109 case BUILT_IN_ADD_OVERFLOW:
9110 case BUILT_IN_SUB_OVERFLOW:
9111 case BUILT_IN_MUL_OVERFLOW:
9112 case BUILT_IN_ADD_OVERFLOW_P:
9113 case BUILT_IN_SUB_OVERFLOW_P:
9114 case BUILT_IN_MUL_OVERFLOW_P:
9115 case BUILT_IN_SADD_OVERFLOW:
9116 case BUILT_IN_SADDL_OVERFLOW:
9117 case BUILT_IN_SADDLL_OVERFLOW:
9118 case BUILT_IN_SSUB_OVERFLOW:
9119 case BUILT_IN_SSUBL_OVERFLOW:
9120 case BUILT_IN_SSUBLL_OVERFLOW:
9121 case BUILT_IN_SMUL_OVERFLOW:
9122 case BUILT_IN_SMULL_OVERFLOW:
9123 case BUILT_IN_SMULLL_OVERFLOW:
9124 case BUILT_IN_UADD_OVERFLOW:
9125 case BUILT_IN_UADDL_OVERFLOW:
9126 case BUILT_IN_UADDLL_OVERFLOW:
9127 case BUILT_IN_USUB_OVERFLOW:
9128 case BUILT_IN_USUBL_OVERFLOW:
9129 case BUILT_IN_USUBLL_OVERFLOW:
9130 case BUILT_IN_UMUL_OVERFLOW:
9131 case BUILT_IN_UMULL_OVERFLOW:
9132 case BUILT_IN_UMULLL_OVERFLOW:
9133 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9135 default:
9136 break;
9138 return NULL_TREE;
9141 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9142 arguments. IGNORE is true if the result of the
9143 function call is ignored. This function returns NULL_TREE if no
9144 simplification was possible. */
9146 tree
9147 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9149 tree ret = NULL_TREE;
9151 switch (nargs)
9153 case 0:
9154 ret = fold_builtin_0 (loc, fndecl);
9155 break;
9156 case 1:
9157 ret = fold_builtin_1 (loc, fndecl, args[0]);
9158 break;
9159 case 2:
9160 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9161 break;
9162 case 3:
9163 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9164 break;
9165 default:
9166 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9167 break;
9169 if (ret)
9171 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9172 SET_EXPR_LOCATION (ret, loc);
9173 TREE_NO_WARNING (ret) = 1;
9174 return ret;
9176 return NULL_TREE;
9179 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9180 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9181 of arguments in ARGS to be omitted. OLDNARGS is the number of
9182 elements in ARGS. */
9184 static tree
9185 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9186 int skip, tree fndecl, int n, va_list newargs)
9188 int nargs = oldnargs - skip + n;
9189 tree *buffer;
9191 if (n > 0)
9193 int i, j;
9195 buffer = XALLOCAVEC (tree, nargs);
9196 for (i = 0; i < n; i++)
9197 buffer[i] = va_arg (newargs, tree);
9198 for (j = skip; j < oldnargs; j++, i++)
9199 buffer[i] = args[j];
9201 else
9202 buffer = args + skip;
9204 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9207 /* Return true if FNDECL shouldn't be folded right now.
9208 If a built-in function has a wrapper declared always_inline,
9209 defer folding it until after always_inline functions have
9210 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9211 might not be performed. */
9213 bool
9214 avoid_folding_inline_builtin (tree fndecl)
9216 return (DECL_DECLARED_INLINE_P (fndecl)
9217 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9218 && cfun
9219 && !cfun->always_inline_functions_inlined
9220 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
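/* Editorial sketch (not part of builtins.c): the kind of wrapper the
   predicate above protects.  Glibc's _FORTIFY_SOURCE headers declare
   always_inline wrappers in this style (names simplified here); folding
   the builtin before the wrapper is inlined would skip the size check.  */
#if 0 /* Illustrative example, compiled separately.  */
extern inline __attribute__ ((always_inline, gnu_inline)) void *
memcpy (void *dst, const void *src, __SIZE_TYPE__ n)
{
  return __builtin___memcpy_chk (dst, src, n, __builtin_object_size (dst, 0));
}
#endif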
9223 /* A wrapper function for builtin folding that prevents warnings for
9224 "statement without effect" and the like, caused by removing the
9225 call node earlier than the warning is generated. */
9227 tree
9228 fold_call_expr (location_t loc, tree exp, bool ignore)
9230 tree ret = NULL_TREE;
9231 tree fndecl = get_callee_fndecl (exp);
9232 if (fndecl
9233 && TREE_CODE (fndecl) == FUNCTION_DECL
9234 && DECL_BUILT_IN (fndecl)
9235 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9236 yet. Defer folding until we see all the arguments
9237 (after inlining). */
9238 && !CALL_EXPR_VA_ARG_PACK (exp))
9240 int nargs = call_expr_nargs (exp);
9242 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9243 instead the last argument is __builtin_va_arg_pack (). Defer folding
9244 even in that case, until arguments are finalized. */
9245 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9247 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9248 if (fndecl2
9249 && TREE_CODE (fndecl2) == FUNCTION_DECL
9250 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9251 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9252 return NULL_TREE;
9255 if (avoid_folding_inline_builtin (fndecl))
9256 return NULL_TREE;
9258 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9259 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9260 CALL_EXPR_ARGP (exp), ignore);
9261 else
9263 tree *args = CALL_EXPR_ARGP (exp);
9264 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9265 if (ret)
9266 return ret;
9269 return NULL_TREE;
9272 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9273 N arguments are passed in the array ARGARRAY. Return a folded
9274 expression or NULL_TREE if no simplification was possible. */
9276 tree
9277 fold_builtin_call_array (location_t loc, tree,
9278 tree fn,
9279 int n,
9280 tree *argarray)
9282 if (TREE_CODE (fn) != ADDR_EXPR)
9283 return NULL_TREE;
9285 tree fndecl = TREE_OPERAND (fn, 0);
9286 if (TREE_CODE (fndecl) == FUNCTION_DECL
9287 && DECL_BUILT_IN (fndecl))
9289 /* If last argument is __builtin_va_arg_pack (), arguments to this
9290 function are not finalized yet. Defer folding until they are. */
9291 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9293 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9294 if (fndecl2
9295 && TREE_CODE (fndecl2) == FUNCTION_DECL
9296 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9297 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9298 return NULL_TREE;
9300 if (avoid_folding_inline_builtin (fndecl))
9301 return NULL_TREE;
9302 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9303 return targetm.fold_builtin (fndecl, n, argarray, false);
9304 else
9305 return fold_builtin_n (loc, fndecl, argarray, n, false);
9308 return NULL_TREE;
9311 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9312 along with N new arguments specified as the "..." parameters. SKIP
9313 is the number of arguments in EXP to be omitted. This function is used
9314 to do varargs-to-varargs transformations. */
9316 static tree
9317 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9319 va_list ap;
9320 tree t;
9322 va_start (ap, n);
9323 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9324 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9325 va_end (ap);
9327 return t;
9330 /* Validate a single argument ARG against a tree code CODE representing
9331 a type. Return true when argument is valid. */
9333 static bool
9334 validate_arg (const_tree arg, enum tree_code code)
9336 if (!arg)
9337 return false;
9338 else if (code == POINTER_TYPE)
9339 return POINTER_TYPE_P (TREE_TYPE (arg));
9340 else if (code == INTEGER_TYPE)
9341 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9342 return code == TREE_CODE (TREE_TYPE (arg));
9345 /* This function validates the types of a function call argument list
9346 against a specified list of tree_codes. If the last specifier is a 0,
9347 that represents an ellipsis, otherwise the last specifier must be a
9348 VOID_TYPE.
9350 This is the GIMPLE version of validate_arglist. Eventually we want to
9351 completely convert builtins.c to work from GIMPLEs and the tree based
9352 validate_arglist will then be removed. */
9354 bool
9355 validate_gimple_arglist (const gcall *call, ...)
9357 enum tree_code code;
9358 bool res = false;
9359 va_list ap;
9360 const_tree arg;
9361 size_t i;
9363 va_start (ap, call);
9364 i = 0;
9366 do
9368 code = (enum tree_code) va_arg (ap, int);
9369 switch (code)
9371 case 0:
9372 /* This signifies an ellipsis; any further arguments are all ok. */
9373 res = true;
9374 goto end;
9375 case VOID_TYPE:
9376 /* This signifies an endlink, if no arguments remain, return
9377 true, otherwise return false. */
9378 res = (i == gimple_call_num_args (call));
9379 goto end;
9380 default:
9381 /* If no parameters remain or the parameter's code does not
9382 match the specified code, return false. Otherwise continue
9383 checking any remaining arguments. */
9384 arg = gimple_call_arg (call, i++);
9385 if (!validate_arg (arg, code))
9386 goto end;
9387 break;
9390 while (1);
9392 /* We need gotos here since we can only have one VA_CLOSE in a
9393 function. */
9394 end: ;
9395 va_end (ap);
9397 return res;
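/* Editorial sketch (not part of builtins.c): how callers typically use the
   checker above -- a list of tree codes terminated by VOID_TYPE (exact
   arity) or by 0 (trailing ellipsis).  CALL is a gcall * in the caller.  */
#if 0 /* Illustrative example.  */
/* Accept exactly (pointer, integer):  */
if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
  return false;
/* Accept (pointer, ...):  */
if (!validate_gimple_arglist (call, POINTER_TYPE, 0))
  return false;
#endif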
9400 /* Default target-specific builtin expander that does nothing. */
9402 rtx
9403 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9404 rtx target ATTRIBUTE_UNUSED,
9405 rtx subtarget ATTRIBUTE_UNUSED,
9406 machine_mode mode ATTRIBUTE_UNUSED,
9407 int ignore ATTRIBUTE_UNUSED)
9409 return NULL_RTX;
9412 /* Returns true if EXP represents data that would potentially reside
9413 in a readonly section. */
9415 bool
9416 readonly_data_expr (tree exp)
9418 STRIP_NOPS (exp);
9420 if (TREE_CODE (exp) != ADDR_EXPR)
9421 return false;
9423 exp = get_base_address (TREE_OPERAND (exp, 0));
9424 if (!exp)
9425 return false;
9427 /* Make sure we call decl_readonly_section only for trees it
9428 can handle (since it returns true for everything it doesn't
9429 understand). */
9430 if (TREE_CODE (exp) == STRING_CST
9431 || TREE_CODE (exp) == CONSTRUCTOR
9432 || (VAR_P (exp) && TREE_STATIC (exp)))
9433 return decl_readonly_section (exp, 0);
9434 else
9435 return false;
9438 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9439 to the call, and TYPE is its return type.
9441 Return NULL_TREE if no simplification was possible, otherwise return the
9442 simplified form of the call as a tree.
9444 The simplified form may be a constant or other expression which
9445 computes the same value, but in a more efficient manner (including
9446 calls to other builtin functions).
9448 The call may contain arguments which need to be evaluated, but
9449 which are not useful to determine the result of the call. In
9450 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9451 COMPOUND_EXPR will be an argument which must be evaluated.
9452 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9453 COMPOUND_EXPR in the chain will contain the tree for the simplified
9454 form of the builtin function call. */
9456 static tree
9457 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9459 if (!validate_arg (s1, POINTER_TYPE)
9460 || !validate_arg (s2, POINTER_TYPE))
9461 return NULL_TREE;
9462 else
9464 tree fn;
9465 const char *p1, *p2;
9467 p2 = c_getstr (s2);
9468 if (p2 == NULL)
9469 return NULL_TREE;
9471 p1 = c_getstr (s1);
9472 if (p1 != NULL)
9474 const char *r = strpbrk (p1, p2);
9475 tree tem;
9477 if (r == NULL)
9478 return build_int_cst (TREE_TYPE (s1), 0);
9480 /* Return an offset into the constant string argument. */
9481 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9482 return fold_convert_loc (loc, type, tem);
9485 if (p2[0] == '\0')
9486 /* strpbrk(x, "") == NULL.
9487 Evaluate and ignore s1 in case it had side-effects. */
9488 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9490 if (p2[1] != '\0')
9491 return NULL_TREE; /* Really call strpbrk. */
9493 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9494 if (!fn)
9495 return NULL_TREE;
9497 /* New argument list transforming strpbrk(s1, s2) to
9498 strchr(s1, s2[0]). */
9499 return build_call_expr_loc (loc, fn, 2, s1,
9500 build_int_cst (integer_type_node, p2[0]));
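/* Editorial sketch (not part of builtins.c): the three outcomes of the
   strpbrk folding above, seen from the source level.  */
#if 0 /* Illustrative example, compiled separately.  */
#include <string.h>
void
strpbrk_examples (char *s)
{
  char *a = strpbrk ("hello", "lo"); /* both constant: folds to "hello" + 2 */
  char *b = strpbrk (s, "");         /* folds to a null pointer; s still evaluated */
  char *c = strpbrk (s, "/");        /* folds to strchr (s, '/') */
}
#endif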
9504 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9505 to the call.
9507 Return NULL_TREE if no simplification was possible, otherwise return the
9508 simplified form of the call as a tree.
9510 The simplified form may be a constant or other expression which
9511 computes the same value, but in a more efficient manner (including
9512 calls to other builtin functions).
9514 The call may contain arguments which need to be evaluated, but
9515 which are not useful to determine the result of the call. In
9516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9517 COMPOUND_EXPR will be an argument which must be evaluated.
9518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9519 COMPOUND_EXPR in the chain will contain the tree for the simplified
9520 form of the builtin function call. */
9522 static tree
9523 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9525 if (!validate_arg (s1, POINTER_TYPE)
9526 || !validate_arg (s2, POINTER_TYPE))
9527 return NULL_TREE;
9528 else
9530 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9532 /* If either argument is "", the result is zero. */
9533 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9534 /* Evaluate and ignore both arguments in case either one has
9535 side-effects. */
9536 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9537 s1, s2);
9538 return NULL_TREE;
9542 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9543 to the call.
9545 Return NULL_TREE if no simplification was possible, otherwise return the
9546 simplified form of the call as a tree.
9548 The simplified form may be a constant or other expression which
9549 computes the same value, but in a more efficient manner (including
9550 calls to other builtin functions).
9552 The call may contain arguments which need to be evaluated, but
9553 which are not useful to determine the result of the call. In
9554 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9555 COMPOUND_EXPR will be an argument which must be evaluated.
9556 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9557 COMPOUND_EXPR in the chain will contain the tree for the simplified
9558 form of the builtin function call. */
9560 static tree
9561 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9563 if (!validate_arg (s1, POINTER_TYPE)
9564 || !validate_arg (s2, POINTER_TYPE))
9565 return NULL_TREE;
9566 else
9568 /* If the first argument is "", the result is zero. */
9569 const char *p1 = c_getstr (s1);
9570 if (p1 && *p1 == '\0')
9572 /* Evaluate and ignore argument s2 in case it has
9573 side-effects. */
9574 return omit_one_operand_loc (loc, size_type_node,
9575 size_zero_node, s2);
9578 /* If the second argument is "", return __builtin_strlen(s1). */
9579 const char *p2 = c_getstr (s2);
9580 if (p2 && *p2 == '\0')
9582 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9584 /* If the replacement _DECL isn't initialized, don't do the
9585 transformation. */
9586 if (!fn)
9587 return NULL_TREE;
9589 return build_call_expr_loc (loc, fn, 1, s1);
9591 return NULL_TREE;
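/* Editorial sketch (not part of builtins.c): the strspn/strcspn foldings
   above at the source level.  */
#if 0 /* Illustrative example, compiled separately.  */
#include <string.h>
void
span_examples (const char *s)
{
  size_t a = strspn (s, "");   /* folds to 0; s still evaluated */
  size_t b = strcspn ("", s);  /* folds to 0; s still evaluated */
  size_t c = strcspn (s, "");  /* folds to strlen (s) */
}
#endif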
9595 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9596 produced, false otherwise. This is done so that we don't output the error
9597 or warning more than once. */
9599 bool
9600 fold_builtin_next_arg (tree exp, bool va_start_p)
9602 tree fntype = TREE_TYPE (current_function_decl);
9603 int nargs = call_expr_nargs (exp);
9604 tree arg;
9605 /* There is a good chance the current input_location points inside the
9606 definition of the va_start macro (perhaps on the token for
9607 builtin) in a system header, so warnings will not be emitted.
9608 Use the location in real source code. */
9609 source_location current_location =
9610 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9611 NULL);
9613 if (!stdarg_p (fntype))
9615 error ("%<va_start%> used in function with fixed args");
9616 return true;
9619 if (va_start_p)
9621 if (nargs != 2)
9623 error ("wrong number of arguments to function %<va_start%>");
9624 return true;
9626 arg = CALL_EXPR_ARG (exp, 1);
9628 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9629 when we checked the arguments and if needed issued a warning. */
9630 else
9632 if (nargs == 0)
9634 /* Evidently an out of date version of <stdarg.h>; can't validate
9635 va_start's second argument, but can still work as intended. */
9636 warning_at (current_location,
9637 OPT_Wvarargs,
9638 "%<__builtin_next_arg%> called without an argument");
9639 return true;
9641 else if (nargs > 1)
9643 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9644 return true;
9646 arg = CALL_EXPR_ARG (exp, 0);
9649 if (TREE_CODE (arg) == SSA_NAME)
9650 arg = SSA_NAME_VAR (arg);
9652 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9653 or __builtin_next_arg (0) the first time we see it, after checking
9654 the arguments and if needed issuing a warning. */
9655 if (!integer_zerop (arg))
9657 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9659 /* Strip off all nops for the sake of the comparison. This
9660 is not quite the same as STRIP_NOPS. It does more.
9661 We must also strip off INDIRECT_EXPR for C++ reference
9662 parameters. */
9663 while (CONVERT_EXPR_P (arg)
9664 || TREE_CODE (arg) == INDIRECT_REF)
9665 arg = TREE_OPERAND (arg, 0);
9666 if (arg != last_parm)
9668 /* FIXME: Sometimes with the tree optimizers we can end up with
9669 something other than the last argument even though the user used
9670 the last argument. We just warn and set the arg to be the last
9671 argument so that we will get wrong code because of
9672 it. */
9673 warning_at (current_location,
9674 OPT_Wvarargs,
9675 "second parameter of %<va_start%> not last named argument");
9678 /* Undefined by C99 7.15.1.4p4 (va_start):
9679 "If the parameter parmN is declared with the register storage
9680 class, with a function or array type, or with a type that is
9681 not compatible with the type that results after application of
9682 the default argument promotions, the behavior is undefined."
9683 */
9684 else if (DECL_REGISTER (arg))
9686 warning_at (current_location,
9687 OPT_Wvarargs,
9688 "undefined behavior when second parameter of "
9689 "%<va_start%> is declared with %<register%> storage");
9692 /* We want to verify the second parameter just once before the tree
9693 optimizers are run and then avoid keeping it in the tree,
9694 as otherwise we could warn even for correct code like:
9695 void foo (int i, ...)
9696 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9697 if (va_start_p)
9698 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9699 else
9700 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9702 return false;
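/* Editorial sketch (not part of builtins.c): code that trips the
   OPT_Wvarargs diagnostic issued above.  */
#if 0 /* Illustrative example, compiled separately.  */
#include <stdarg.h>
void
f (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);  /* warning: second parameter of va_start not last
			named argument */
  va_end (ap);
}
#endif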
9706 /* Expand a call EXP to __builtin_object_size. */
9708 static rtx
9709 expand_builtin_object_size (tree exp)
9711 tree ost;
9712 int object_size_type;
9713 tree fndecl = get_callee_fndecl (exp);
9715 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9717 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9718 exp, fndecl);
9719 expand_builtin_trap ();
9720 return const0_rtx;
9723 ost = CALL_EXPR_ARG (exp, 1);
9724 STRIP_NOPS (ost);
9726 if (TREE_CODE (ost) != INTEGER_CST
9727 || tree_int_cst_sgn (ost) < 0
9728 || compare_tree_int (ost, 3) > 0)
9730 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9731 exp, fndecl);
9732 expand_builtin_trap ();
9733 return const0_rtx;
9736 object_size_type = tree_to_shwi (ost);
9738 return object_size_type < 2 ? constm1_rtx : const0_rtx;
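/* Editorial sketch (not part of builtins.c): the fallback values chosen
   above when the object size is unknown at expansion time.  */
#if 0 /* Illustrative example, compiled separately.  */
__SIZE_TYPE__
fallbacks (void *p)
{
  /* Types 0 and 1 default to (size_t) -1 (maximum estimate); types 2 and 3
     default to 0 (minimum estimate) when nothing better is known.  */
  return __builtin_object_size (p, 0) + __builtin_object_size (p, 2);
}
#endif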
9741 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9742 FCODE is the BUILT_IN_* to use.
9743 Return NULL_RTX if we failed; the caller should emit a normal call,
9744 otherwise try to get the result in TARGET, if convenient (and in
9745 mode MODE if that's convenient). */
9747 static rtx
9748 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9749 enum built_in_function fcode)
9751 tree dest, src, len, size;
9753 if (!validate_arglist (exp,
9754 POINTER_TYPE,
9755 fcode == BUILT_IN_MEMSET_CHK
9756 ? INTEGER_TYPE : POINTER_TYPE,
9757 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9758 return NULL_RTX;
9760 dest = CALL_EXPR_ARG (exp, 0);
9761 src = CALL_EXPR_ARG (exp, 1);
9762 len = CALL_EXPR_ARG (exp, 2);
9763 size = CALL_EXPR_ARG (exp, 3);
9765 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9766 exp, len, /*maxlen=*/NULL_TREE,
9767 /*str=*/NULL_TREE, size);
9769 if (!tree_fits_uhwi_p (size))
9770 return NULL_RTX;
9772 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9774 /* Avoid transforming the checking call to an ordinary one when
9775 an overflow has been detected or when the call couldn't be
9776 validated because the size is not constant. */
9777 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9778 return NULL_RTX;
9780 tree fn = NULL_TREE;
9781 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9782 mem{cpy,pcpy,move,set} is available. */
9783 switch (fcode)
9785 case BUILT_IN_MEMCPY_CHK:
9786 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9787 break;
9788 case BUILT_IN_MEMPCPY_CHK:
9789 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9790 break;
9791 case BUILT_IN_MEMMOVE_CHK:
9792 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9793 break;
9794 case BUILT_IN_MEMSET_CHK:
9795 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9796 break;
9797 default:
9798 break;
9801 if (! fn)
9802 return NULL_RTX;
9804 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9805 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9806 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9807 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9809 else if (fcode == BUILT_IN_MEMSET_CHK)
9810 return NULL_RTX;
9811 else
9813 unsigned int dest_align = get_pointer_alignment (dest);
9815 /* If DEST is not a pointer type, call the normal function. */
9816 if (dest_align == 0)
9817 return NULL_RTX;
9819 /* If SRC and DEST are the same (and not volatile), do nothing. */
9820 if (operand_equal_p (src, dest, 0))
9822 tree expr;
9824 if (fcode != BUILT_IN_MEMPCPY_CHK)
9826 /* Evaluate and ignore LEN in case it has side-effects. */
9827 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9828 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9831 expr = fold_build_pointer_plus (dest, len);
9832 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9835 /* __memmove_chk special case. */
9836 if (fcode == BUILT_IN_MEMMOVE_CHK)
9838 unsigned int src_align = get_pointer_alignment (src);
9840 if (src_align == 0)
9841 return NULL_RTX;
9843 /* If src is categorized for a readonly section we can use
9844 normal __memcpy_chk. */
9845 if (readonly_data_expr (src))
9847 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9848 if (!fn)
9849 return NULL_RTX;
9850 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9851 dest, src, len, size);
9852 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9853 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9854 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9857 return NULL_RTX;
9861 /* Emit warning if a buffer overflow is detected at compile time. */
9863 static void
9864 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9866 /* The source string. */
9867 tree srcstr = NULL_TREE;
9868 /* The size of the destination object. */
9869 tree objsize = NULL_TREE;
9870 /* The string that is being concatenated with (as in __strcat_chk)
9871 or null if it isn't. */
9872 tree catstr = NULL_TREE;
9873 /* The maximum length of the source sequence in a bounded operation
9874 (such as __strncat_chk) or null if the operation isn't bounded
9875 (such as __strcat_chk). */
9876 tree maxlen = NULL_TREE;
9878 switch (fcode)
9880 case BUILT_IN_STRCPY_CHK:
9881 case BUILT_IN_STPCPY_CHK:
9882 srcstr = CALL_EXPR_ARG (exp, 1);
9883 objsize = CALL_EXPR_ARG (exp, 2);
9884 break;
9886 case BUILT_IN_STRCAT_CHK:
9887 /* For __strcat_chk the warning will be emitted only if overflowing
9888 by at least strlen (dest) + 1 bytes. */
9889 catstr = CALL_EXPR_ARG (exp, 0);
9890 srcstr = CALL_EXPR_ARG (exp, 1);
9891 objsize = CALL_EXPR_ARG (exp, 2);
9892 break;
9894 case BUILT_IN_STRNCAT_CHK:
9895 catstr = CALL_EXPR_ARG (exp, 0);
9896 srcstr = CALL_EXPR_ARG (exp, 1);
9897 maxlen = CALL_EXPR_ARG (exp, 2);
9898 objsize = CALL_EXPR_ARG (exp, 3);
9899 break;
9901 case BUILT_IN_STRNCPY_CHK:
9902 case BUILT_IN_STPNCPY_CHK:
9903 srcstr = CALL_EXPR_ARG (exp, 1);
9904 maxlen = CALL_EXPR_ARG (exp, 2);
9905 objsize = CALL_EXPR_ARG (exp, 3);
9906 break;
9908 case BUILT_IN_SNPRINTF_CHK:
9909 case BUILT_IN_VSNPRINTF_CHK:
9910 maxlen = CALL_EXPR_ARG (exp, 1);
9911 objsize = CALL_EXPR_ARG (exp, 3);
9912 break;
9913 default:
9914 gcc_unreachable ();
9917 if (catstr && maxlen)
9919 /* Check __strncat_chk. There is no way to determine the length
9920 of the string to which the source string is being appended, so
9921 just warn when the length of the source string is not known. */
9922 check_strncat_sizes (exp, objsize);
9923 return;
9926 check_sizes (OPT_Wstringop_overflow_, exp,
9927 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9930 /* Emit warning if a buffer overflow is detected at compile time
9931 in __sprintf_chk/__vsprintf_chk calls. */
9933 static void
9934 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9936 tree size, len, fmt;
9937 const char *fmt_str;
9938 int nargs = call_expr_nargs (exp);
9940 /* Verify the required arguments in the original call. */
9942 if (nargs < 4)
9943 return;
9944 size = CALL_EXPR_ARG (exp, 2);
9945 fmt = CALL_EXPR_ARG (exp, 3);
9947 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9948 return;
9950 /* Check whether the format is a literal string constant. */
9951 fmt_str = c_getstr (fmt);
9952 if (fmt_str == NULL)
9953 return;
9955 if (!init_target_chars ())
9956 return;
9958 /* If the format doesn't contain % args or %%, we know its size. */
9959 if (strchr (fmt_str, target_percent) == 0)
9960 len = build_int_cstu (size_type_node, strlen (fmt_str));
9961 /* If the format is "%s" and first ... argument is a string literal,
9962 we know it too. */
9963 else if (fcode == BUILT_IN_SPRINTF_CHK
9964 && strcmp (fmt_str, target_percent_s) == 0)
9966 tree arg;
9968 if (nargs < 5)
9969 return;
9970 arg = CALL_EXPR_ARG (exp, 4);
9971 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9972 return;
9974 len = c_strlen (arg, 1);
9975 if (!len || ! tree_fits_uhwi_p (len))
9976 return;
9978 else
9979 return;
9981 /* Add one for the terminating nul. */
9982 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9983 check_sizes (OPT_Wstringop_overflow_,
9984 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9987 /* Emit warning if a free is called with address of a variable. */
9989 static void
9990 maybe_emit_free_warning (tree exp)
9992 tree arg = CALL_EXPR_ARG (exp, 0);
9994 STRIP_NOPS (arg);
9995 if (TREE_CODE (arg) != ADDR_EXPR)
9996 return;
9998 arg = get_base_address (TREE_OPERAND (arg, 0));
9999 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10000 return;
10002 if (SSA_VAR_P (arg))
10003 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10004 "%Kattempt to free a non-heap object %qD", exp, arg);
10005 else
10006 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10007 "%Kattempt to free a non-heap object", exp);
10010 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10011 if possible. */
10013 static tree
10014 fold_builtin_object_size (tree ptr, tree ost)
10016 unsigned HOST_WIDE_INT bytes;
10017 int object_size_type;
10019 if (!validate_arg (ptr, POINTER_TYPE)
10020 || !validate_arg (ost, INTEGER_TYPE))
10021 return NULL_TREE;
10023 STRIP_NOPS (ost);
10025 if (TREE_CODE (ost) != INTEGER_CST
10026 || tree_int_cst_sgn (ost) < 0
10027 || compare_tree_int (ost, 3) > 0)
10028 return NULL_TREE;
10030 object_size_type = tree_to_shwi (ost);
10032 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10033 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10034 and (size_t) 0 for types 2 and 3. */
10035 if (TREE_SIDE_EFFECTS (ptr))
10036 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10038 if (TREE_CODE (ptr) == ADDR_EXPR)
10040 compute_builtin_object_size (ptr, object_size_type, &bytes);
10041 if (wi::fits_to_tree_p (bytes, size_type_node))
10042 return build_int_cstu (size_type_node, bytes);
10044 else if (TREE_CODE (ptr) == SSA_NAME)
10046 /* If object size is not known yet, delay folding until
10047 later. Maybe subsequent passes will help determine
10048 it. */
10049 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10050 && wi::fits_to_tree_p (bytes, size_type_node))
10051 return build_int_cstu (size_type_node, bytes);
10054 return NULL_TREE;
10057 /* Builtins with folding operations that operate on "..." arguments
10058 need special handling; we need to store the arguments in a convenient
10059 data structure before attempting any folding. Fortunately there are
10060 only a few builtins that fall into this category. FNDECL is the
10061 function, EXP is the CALL_EXPR for the call. */
10063 static tree
10064 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10066 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10067 tree ret = NULL_TREE;
10069 switch (fcode)
10071 case BUILT_IN_FPCLASSIFY:
10072 ret = fold_builtin_fpclassify (loc, args, nargs);
10073 break;
10075 default:
10076 break;
10078 if (ret)
10080 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10081 SET_EXPR_LOCATION (ret, loc);
10082 TREE_NO_WARNING (ret) = 1;
10083 return ret;
10085 return NULL_TREE;
10088 /* Initialize format string characters in the target charset. */
10090 bool
10091 init_target_chars (void)
10093 static bool init;
10094 if (!init)
10096 target_newline = lang_hooks.to_target_charset ('\n');
10097 target_percent = lang_hooks.to_target_charset ('%');
10098 target_c = lang_hooks.to_target_charset ('c');
10099 target_s = lang_hooks.to_target_charset ('s');
10100 if (target_newline == 0 || target_percent == 0 || target_c == 0
10101 || target_s == 0)
10102 return false;
10104 target_percent_c[0] = target_percent;
10105 target_percent_c[1] = target_c;
10106 target_percent_c[2] = '\0';
10108 target_percent_s[0] = target_percent;
10109 target_percent_s[1] = target_s;
10110 target_percent_s[2] = '\0';
10112 target_percent_s_newline[0] = target_percent;
10113 target_percent_s_newline[1] = target_s;
10114 target_percent_s_newline[2] = target_newline;
10115 target_percent_s_newline[3] = '\0';
10117 init = true;
10119 return true;
10122 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10123 and no overflow/underflow occurred. INEXACT is true if M was not
10124 exactly calculated. TYPE is the tree type for the result. This
10125 function assumes that you cleared the MPFR flags and then
10126 calculated M to see if anything subsequently set a flag prior to
10127 entering this function. Return NULL_TREE if any checks fail. */
10129 static tree
10130 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10132 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10133 overflow/underflow occurred. If -frounding-math, proceed iff the
10134 result of calling FUNC was exact. */
10135 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10136 && (!flag_rounding_math || !inexact))
10138 REAL_VALUE_TYPE rr;
10140 real_from_mpfr (&rr, m, type, GMP_RNDN);
10141 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10142 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10143 but the mpfr_t is not, then we underflowed in the
10144 conversion. */
10145 if (real_isfinite (&rr)
10146 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10148 REAL_VALUE_TYPE rmode;
10150 real_convert (&rmode, TYPE_MODE (type), &rr);
10151 /* Proceed iff the specified mode can hold the value. */
10152 if (real_identical (&rmode, &rr))
10153 return build_real (type, rmode);
10156 return NULL_TREE;
10159 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10160 number and no overflow/underflow occurred. INEXACT is true if M
10161 was not exactly calculated. TYPE is the tree type for the result.
10162 This function assumes that you cleared the MPFR flags and then
10163 calculated M to see if anything subsequently set a flag prior to
10164 entering this function. Return NULL_TREE if any checks fail, if
10165 FORCE_CONVERT is true, then bypass the checks. */
10167 static tree
10168 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10170 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10171 overflow/underflow occurred. If -frounding-math, proceed iff the
10172 result of calling FUNC was exact. */
10173 if (force_convert
10174 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10175 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10176 && (!flag_rounding_math || !inexact)))
10178 REAL_VALUE_TYPE re, im;
10180 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10181 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10182 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10183 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10184 but the mpfr_t is not, then we underflowed in the
10185 conversion. */
10186 if (force_convert
10187 || (real_isfinite (&re) && real_isfinite (&im)
10188 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10189 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10191 REAL_VALUE_TYPE re_mode, im_mode;
10193 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10194 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10195 /* Proceed iff the specified mode can hold the value. */
10196 if (force_convert
10197 || (real_identical (&re_mode, &re)
10198 && real_identical (&im_mode, &im)))
10199 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10200 build_real (TREE_TYPE (type), im_mode));
10203 return NULL_TREE;
10206 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10207 the pointer *(ARG_QUO) and return the result. The type is taken
10208 from the type of ARG0 and is used for setting the precision of the
10209 calculation and results. */
10211 static tree
10212 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10214 tree const type = TREE_TYPE (arg0);
10215 tree result = NULL_TREE;
10217 STRIP_NOPS (arg0);
10218 STRIP_NOPS (arg1);
10220 /* To proceed, MPFR must exactly represent the target floating point
10221 format, which only happens when the target base equals two. */
10222 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10223 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10224 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10226 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10227 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10229 if (real_isfinite (ra0) && real_isfinite (ra1))
10231 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10232 const int prec = fmt->p;
10233 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10234 tree result_rem;
10235 long integer_quo;
10236 mpfr_t m0, m1;
10238 mpfr_inits2 (prec, m0, m1, NULL);
10239 mpfr_from_real (m0, ra0, GMP_RNDN);
10240 mpfr_from_real (m1, ra1, GMP_RNDN);
10241 mpfr_clear_flags ();
10242 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10243 /* Remquo is independent of the rounding mode, so pass
10244 inexact=0 to do_mpfr_ckconv(). */
10245 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10246 mpfr_clears (m0, m1, NULL);
10247 if (result_rem)
10249 /* MPFR calculates quo in the host's long so it may
10250 return more bits in quo than the target int can hold
10251 if sizeof(host long) > sizeof(target int). This can
10252 happen even for native compilers in LP64 mode. In
10253 these cases, modulo the quo value with the largest
10254 number that the target int can hold while leaving one
10255 bit for the sign. */
10256 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10257 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10259 /* Dereference the quo pointer argument. */
10260 arg_quo = build_fold_indirect_ref (arg_quo);
10261 /* Proceed iff a valid pointer type was passed in. */
10262 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10264 /* Set the value. */
10265 tree result_quo
10266 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10267 build_int_cst (TREE_TYPE (arg_quo),
10268 integer_quo));
10269 TREE_SIDE_EFFECTS (result_quo) = 1;
10270 /* Combine the quo assignment with the rem. */
10271 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10272 result_quo, result_rem));
10277 return result;
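/* Editorial sketch (not part of builtins.c): what the MPFR-based folder
   above achieves -- remquo on constant arguments collapses to a constant
   remainder plus a store of the quotient bits.  */
#if 0 /* Illustrative example, compiled separately.  */
#include <math.h>
double
fold_me (int *q)
{
  /* Folds to roughly: *q = 2, result 1.0 -- no library call emitted.  */
  return remquo (7.0, 3.0, q);
}
#endif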
10280 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10281 resulting value as a tree with type TYPE. The mpfr precision is
10282 set to the precision of TYPE. We assume that this mpfr function
10283 returns zero if the result could be calculated exactly within the
10284 requested precision. In addition, the integer pointer represented
10285 by ARG_SG will be dereferenced and set to the appropriate signgam
10286 (-1,1) value. */
10288 static tree
10289 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10291 tree result = NULL_TREE;
10293 STRIP_NOPS (arg);
10295 /* To proceed, MPFR must exactly represent the target floating point
10296 format, which only happens when the target base equals two. Also
10297 verify ARG is a constant and that ARG_SG is an int pointer. */
10298 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10299 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10300 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10301 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10303 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10305 /* In addition to NaN and Inf, the argument cannot be zero or a
10306 negative integer. */
10307 if (real_isfinite (ra)
10308 && ra->cl != rvc_zero
10309 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10311 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10312 const int prec = fmt->p;
10313 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10314 int inexact, sg;
10315 mpfr_t m;
10316 tree result_lg;
10318 mpfr_init2 (m, prec);
10319 mpfr_from_real (m, ra, GMP_RNDN);
10320 mpfr_clear_flags ();
10321 inexact = mpfr_lgamma (m, &sg, m, rnd);
10322 result_lg = do_mpfr_ckconv (m, type, inexact);
10323 mpfr_clear (m);
10324 if (result_lg)
10326 tree result_sg;
10328 /* Dereference the arg_sg pointer argument. */
10329 arg_sg = build_fold_indirect_ref (arg_sg);
10330 /* Assign the signgam value into *arg_sg. */
10331 result_sg = fold_build2 (MODIFY_EXPR,
10332 TREE_TYPE (arg_sg), arg_sg,
10333 build_int_cst (TREE_TYPE (arg_sg), sg));
10334 TREE_SIDE_EFFECTS (result_sg) = 1;
10335 /* Combine the signgam assignment with the lgamma result. */
10336 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10337 result_sg, result_lg));
10342 return result;
10345 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10346 mpc function FUNC on it and return the resulting value as a tree
10347 with type TYPE. The mpfr precision is set to the precision of
10348 TYPE. We assume that function FUNC returns zero if the result
10349 could be calculated exactly within the requested precision. If
10350 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10351 in the arguments and/or results. */
10353 tree
10354 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10355 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10357 tree result = NULL_TREE;
10359 STRIP_NOPS (arg0);
10360 STRIP_NOPS (arg1);
10362 /* To proceed, MPFR must exactly represent the target floating point
10363 format, which only happens when the target base equals two. */
10364 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10366 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10367 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10368 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10370 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10371 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10372 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10373 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10375 if (do_nonfinite
10376 || (real_isfinite (re0) && real_isfinite (im0)
10377 && real_isfinite (re1) && real_isfinite (im1)))
10379 const struct real_format *const fmt =
10380 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10381 const int prec = fmt->p;
10382 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10383 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10384 int inexact;
10385 mpc_t m0, m1;
10387 mpc_init2 (m0, prec);
10388 mpc_init2 (m1, prec);
10389 mpfr_from_real (mpc_realref (m0), re0, rnd);
10390 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10391 mpfr_from_real (mpc_realref (m1), re1, rnd);
10392 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10393 mpfr_clear_flags ();
10394 inexact = func (m0, m0, m1, crnd);
10395 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10396 mpc_clear (m0);
10397 mpc_clear (m1);
10401 return result;
10404 /* A wrapper function for builtin folding that prevents warnings for
10405 "statement without effect" and the like, caused by removing the
10406 call node earlier than the warning is generated. */
10408 tree
10409 fold_call_stmt (gcall *stmt, bool ignore)
10411 tree ret = NULL_TREE;
10412 tree fndecl = gimple_call_fndecl (stmt);
10413 location_t loc = gimple_location (stmt);
10414 if (fndecl
10415 && TREE_CODE (fndecl) == FUNCTION_DECL
10416 && DECL_BUILT_IN (fndecl)
10417 && !gimple_call_va_arg_pack_p (stmt))
10419 int nargs = gimple_call_num_args (stmt);
10420 tree *args = (nargs > 0
10421 ? gimple_call_arg_ptr (stmt, 0)
10422 : &error_mark_node);
10424 if (avoid_folding_inline_builtin (fndecl))
10425 return NULL_TREE;
10426 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10428 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10430 else
10432 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10433 if (ret)
10435 /* Propagate location information from original call to
10436 expansion of builtin. Otherwise things like
10437 maybe_emit_chk_warning, that operate on the expansion
10438 of a builtin, will use the wrong location information. */
10439 if (gimple_has_location (stmt))
10441 tree realret = ret;
10442 if (TREE_CODE (ret) == NOP_EXPR)
10443 realret = TREE_OPERAND (ret, 0);
10444 if (CAN_HAVE_LOCATION_P (realret)
10445 && !EXPR_HAS_LOCATION (realret))
10446 SET_EXPR_LOCATION (realret, loc);
10447 return realret;
10449 return ret;
10453 return NULL_TREE;
10456 /* Look up the function in builtin_decl that corresponds to DECL
10457 and set ASMSPEC as its user assembler name. DECL must be a
10458 function decl that declares a builtin. */
10460 void
10461 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10463 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10464 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10465 && asmspec != 0);
10467 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10468 set_user_assembler_name (builtin, asmspec);
10470 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10471 && INT_TYPE_SIZE < BITS_PER_WORD)
10473 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10474 set_user_assembler_libfunc ("ffs", asmspec);
10475 set_optab_libfunc (ffs_optab, mode, "ffs");
10479 /* Return true if DECL is a builtin that expands to a constant or similarly
10480 simple code. */
10481 bool
10482 is_simple_builtin (tree decl)
10484 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10485 switch (DECL_FUNCTION_CODE (decl))
10487 /* Builtins that expand to constants. */
10488 case BUILT_IN_CONSTANT_P:
10489 case BUILT_IN_EXPECT:
10490 case BUILT_IN_OBJECT_SIZE:
10491 case BUILT_IN_UNREACHABLE:
10492 /* Simple register moves or loads from stack. */
10493 case BUILT_IN_ASSUME_ALIGNED:
10494 case BUILT_IN_RETURN_ADDRESS:
10495 case BUILT_IN_EXTRACT_RETURN_ADDR:
10496 case BUILT_IN_FROB_RETURN_ADDR:
10497 case BUILT_IN_RETURN:
10498 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10499 case BUILT_IN_FRAME_ADDRESS:
10500 case BUILT_IN_VA_END:
10501 case BUILT_IN_STACK_SAVE:
10502 case BUILT_IN_STACK_RESTORE:
10503 /* Exception state returns or moves registers around. */
10504 case BUILT_IN_EH_FILTER:
10505 case BUILT_IN_EH_POINTER:
10506 case BUILT_IN_EH_COPY_VALUES:
10507 return true;
10509 default:
10510 return false;
10513 return false;
10516 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
10517 most probably expanded inline into reasonably simple code. This is a
10518 superset of is_simple_builtin. */
10519 bool
10520 is_inexpensive_builtin (tree decl)
10522 if (!decl)
10523 return false;
10524 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10525 return true;
10526 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10527 switch (DECL_FUNCTION_CODE (decl))
10529 case BUILT_IN_ABS:
10530 CASE_BUILT_IN_ALLOCA:
10531 case BUILT_IN_BSWAP16:
10532 case BUILT_IN_BSWAP32:
10533 case BUILT_IN_BSWAP64:
10534 case BUILT_IN_CLZ:
10535 case BUILT_IN_CLZIMAX:
10536 case BUILT_IN_CLZL:
10537 case BUILT_IN_CLZLL:
10538 case BUILT_IN_CTZ:
10539 case BUILT_IN_CTZIMAX:
10540 case BUILT_IN_CTZL:
10541 case BUILT_IN_CTZLL:
10542 case BUILT_IN_FFS:
10543 case BUILT_IN_FFSIMAX:
10544 case BUILT_IN_FFSL:
10545 case BUILT_IN_FFSLL:
10546 case BUILT_IN_IMAXABS:
10547 case BUILT_IN_FINITE:
10548 case BUILT_IN_FINITEF:
10549 case BUILT_IN_FINITEL:
10550 case BUILT_IN_FINITED32:
10551 case BUILT_IN_FINITED64:
10552 case BUILT_IN_FINITED128:
10553 case BUILT_IN_FPCLASSIFY:
10554 case BUILT_IN_ISFINITE:
10555 case BUILT_IN_ISINF_SIGN:
10556 case BUILT_IN_ISINF:
10557 case BUILT_IN_ISINFF:
10558 case BUILT_IN_ISINFL:
10559 case BUILT_IN_ISINFD32:
10560 case BUILT_IN_ISINFD64:
10561 case BUILT_IN_ISINFD128:
10562 case BUILT_IN_ISNAN:
10563 case BUILT_IN_ISNANF:
10564 case BUILT_IN_ISNANL:
10565 case BUILT_IN_ISNAND32:
10566 case BUILT_IN_ISNAND64:
10567 case BUILT_IN_ISNAND128:
10568 case BUILT_IN_ISNORMAL:
10569 case BUILT_IN_ISGREATER:
10570 case BUILT_IN_ISGREATEREQUAL:
10571 case BUILT_IN_ISLESS:
10572 case BUILT_IN_ISLESSEQUAL:
10573 case BUILT_IN_ISLESSGREATER:
10574 case BUILT_IN_ISUNORDERED:
10575 case BUILT_IN_VA_ARG_PACK:
10576 case BUILT_IN_VA_ARG_PACK_LEN:
10577 case BUILT_IN_VA_COPY:
10578 case BUILT_IN_TRAP:
10579 case BUILT_IN_SAVEREGS:
10580 case BUILT_IN_POPCOUNTL:
10581 case BUILT_IN_POPCOUNTLL:
10582 case BUILT_IN_POPCOUNTIMAX:
10583 case BUILT_IN_POPCOUNT:
10584 case BUILT_IN_PARITYL:
10585 case BUILT_IN_PARITYLL:
10586 case BUILT_IN_PARITYIMAX:
10587 case BUILT_IN_PARITY:
10588 case BUILT_IN_LABS:
10589 case BUILT_IN_LLABS:
10590 case BUILT_IN_PREFETCH:
10591 case BUILT_IN_ACC_ON_DEVICE:
10592 return true;
10594 default:
10595 return is_simple_builtin (decl);
10598 return false;
10601 /* Return true if T is a constant and the value cast to a target char
10602 can be represented by a host char.
10603 Store the cast char constant in *P if so. */
10605 bool
10606 target_char_cst_p (tree t, char *p)
10608 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10609 return false;
10611 *p = (char)tree_to_uhwi (t);
10612 return true;