/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
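
/* For example (user code, shown here only for illustration): a call
   written as

     size_t n = __builtin_strlen (s);

   reaches the expanders in this file under its internal "__builtin_"
   name, so called_as_built_in returns true for it and it is considered
   for inline expansion even when not optimizing, whereas a plain
   strlen (s) call is not.  */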
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
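
/* For example, if get_object_alignment_1 determines that the address of
   EXP is 16-byte aligned plus a byte offset of 4 (ALIGN == 128 bits,
   BITPOS == 32 bits), the overall guaranteed alignment of EXP is only the
   lowest set bit of that offset: least_bit_hwi (32) == 32 bits, i.e. 4
   bytes.  */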
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
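
/* For example, for the wide string constant L"ab" on a target with 4-byte
   wchar_t, c_strlen below calls this with ELTSIZE == 4 and MAXELTS == 2
   (the terminating null element is excluded from MAXELTS); neither of the
   two elements is a zero element, so 2 is returned.  For the narrow
   constant "a\0b" with MAXELTS == 3, the zero byte at index 1 stops the
   scan and 1 is returned.  */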
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
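
/* For example, c_strlen applied to the argument of
   __builtin_strlen ("hello" + 1) finds the STRING_CST "hello", a constant
   byte offset of 1 and an element size of 1, and so yields the constant 4.
   For __builtin_strlen (p ? "ab" : "cd"), both arms of the COND_EXPR have
   length 2, so 2 is returned without looking at p.  */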
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
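
/* For example, on a little-endian target c_readstr ("abc", SImode) yields
   the SImode constant 0x00636261: 'a' lands in the least significant byte,
   and once CH becomes zero at the terminating null every remaining byte is
   filled with zeros.  Callers such as builtin_memcpy_read_str (declared
   above) use this to materialize one constant word of the source string at
   a time when expanding string operations by pieces.  */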
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
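
/* For example (user code, for illustration only): a simple tracing helper

     void *
     who_called_me (void)
     {
       return __builtin_return_address (0);
     }

   reaches this expander with FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and
   COUNT == 0, while __builtin_frame_address (1) arrives with
   FNDECL_CODE == BUILT_IN_FRAME_ADDRESS and COUNT == 1 and walks one level
   of the dynamic chain.  */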
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
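
/* Buffer layout produced above, in Pmode words starting at BUF_ADDR:

     word 0   frame value (targetm.builtin_setjmp_frame_value ())
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area (sa_mode)

   expand_builtin_longjmp below reads the words back at the same offsets,
   which is consistent with the five-word buffer described for
   __builtin_longjmp there.  */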
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
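
/* Typical use (illustration only; do_work and bail are placeholder user
   functions, and these builtins are intended for the compiler's own
   exception-handling and thread-library support code):

     static void *jmpbuf[5];

     int
     try_it (void)
     {
       if (__builtin_setjmp (jmpbuf))
	 return 1;
       do_work ();
       return 0;
     }

     void
     bail (void)
     {
       __builtin_longjmp (jmpbuf, 1);
     }

   The second argument to __builtin_longjmp must be the constant 1, which
   is what the gcc_assert above enforces.  */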
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
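
/* For example, the call in expand_builtin_prefetch below,

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts any call whose first argument is a pointer, regardless of what
   follows, while

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   (as used by expand_builtin_nonlocal_goto) requires exactly two pointer
   arguments and nothing else.  */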
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
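
/* Example calls handled above (user code, for illustration only):

     __builtin_prefetch (p);		prefetch for read, locality 3
     __builtin_prefetch (p, 1);		prefetch for write, locality 3
     __builtin_prefetch (p, 0, 1);	prefetch for read, low locality

   The read/write and locality arguments must be integer constants; a
   non-constant value is diagnosed with error (), and an out-of-range
   constant is replaced by zero after a warning.  */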
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1542 /* Perform an untyped call and save the state required to perform an
1543 untyped return of whatever value was returned by the given function. */
1545 static rtx
1546 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1548 int size, align, regno;
1549 machine_mode mode;
1550 rtx incoming_args, result, reg, dest, src;
1551 rtx_call_insn *call_insn;
1552 rtx old_stack_level = 0;
1553 rtx call_fusage = 0;
1554 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1556 arguments = convert_memory_address (Pmode, arguments);
1558 /* Create a block where the return registers can be saved. */
1559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1561 /* Fetch the arg pointer from the ARGUMENTS block. */
1562 incoming_args = gen_reg_rtx (Pmode);
1563 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1564 if (!STACK_GROWS_DOWNWARD)
1565 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1566 incoming_args, 0, OPTAB_LIB_WIDEN);
1568 /* Push a new argument block and copy the arguments. Do not allow
1569 the (potential) memcpy call below to interfere with our stack
1570 manipulations. */
1571 do_pending_stack_adjust ();
1572 NO_DEFER_POP;
1574 /* Save the stack with nonlocal if available. */
1575 if (targetm.have_save_stack_nonlocal ())
1576 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1577 else
1578 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1580 /* Allocate a block of memory onto the stack and copy the memory
1581 arguments to the outgoing arguments address. We can pass TRUE
1582 as the 4th argument because we just saved the stack pointer
1583 and will restore it right after the call. */
1584 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1586 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1587 may have already set current_function_calls_alloca to true.
1588 current_function_calls_alloca won't be set if argsize is zero,
1589 so we have to guarantee need_drap is true here. */
1590 if (SUPPORTS_STACK_ALIGNMENT)
1591 crtl->need_drap = true;
1593 dest = virtual_outgoing_args_rtx;
1594 if (!STACK_GROWS_DOWNWARD)
1596 if (CONST_INT_P (argsize))
1597 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1598 else
1599 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1601 dest = gen_rtx_MEM (BLKmode, dest);
1602 set_mem_align (dest, PARM_BOUNDARY);
1603 src = gen_rtx_MEM (BLKmode, incoming_args);
1604 set_mem_align (src, PARM_BOUNDARY);
1605 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1607 /* Refer to the argument block. */
1608 apply_args_size ();
1609 arguments = gen_rtx_MEM (BLKmode, arguments);
1610 set_mem_align (arguments, PARM_BOUNDARY);
1612 /* Walk past the arg-pointer and structure value address. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1615 size += GET_MODE_SIZE (Pmode);
1617 /* Restore each of the registers previously saved. Make USE insns
1618 for each of these registers for use in making the call. */
1619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1620 if ((mode = apply_args_mode[regno]) != VOIDmode)
1622 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1623 if (size % align != 0)
1624 size = CEIL (size, align) * align;
1625 reg = gen_rtx_REG (mode, regno);
1626 emit_move_insn (reg, adjust_address (arguments, mode, size));
1627 use_reg (&call_fusage, reg);
1628 size += GET_MODE_SIZE (mode);
1631 /* Restore the structure value address unless this is passed as an
1632 "invisible" first argument. */
1633 size = GET_MODE_SIZE (Pmode);
1634 if (struct_value)
1636 rtx value = gen_reg_rtx (Pmode);
1637 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1638 emit_move_insn (struct_value, value);
1639 if (REG_P (struct_value))
1640 use_reg (&call_fusage, struct_value);
1641 size += GET_MODE_SIZE (Pmode);
1644 /* All arguments and registers used for the call are set up by now! */
1645 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1647 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1648 and we don't want to load it into a register as an optimization,
1649 because prepare_call_address already did it if it should be done. */
1650 if (GET_CODE (function) != SYMBOL_REF)
1651 function = memory_address (FUNCTION_MODE, function);
1653 /* Generate the actual call instruction and save the return value. */
1654 if (targetm.have_untyped_call ())
1656 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1657 emit_call_insn (targetm.gen_untyped_call (mem, result,
1658 result_vector (1, result)));
1660 else if (targetm.have_call_value ())
1662 rtx valreg = 0;
1664 /* Locate the unique return register. It is not possible to
1665 express a call that sets more than one return register using
1666 call_value; use untyped_call for that. In fact, untyped_call
1667 only needs to save the return registers in the given block. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_result_mode[regno]) != VOIDmode)
1671 gcc_assert (!valreg); /* have_untyped_call required. */
1673 valreg = gen_rtx_REG (mode, regno);
1676 emit_insn (targetm.gen_call_value (valreg,
1677 gen_rtx_MEM (FUNCTION_MODE, function),
1678 const0_rtx, NULL_RTX, const0_rtx));
1680 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1682 else
1683 gcc_unreachable ();
1685 /* Find the CALL insn we just emitted, and attach the register usage
1686 information. */
1687 call_insn = last_call_insn ();
1688 add_function_usage_to (call_insn, call_fusage);
1690 /* Restore the stack. */
1691 if (targetm.have_save_stack_nonlocal ())
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1695 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1697 OK_DEFER_POP;
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
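/* For illustration: this is the expander for __builtin_apply (fn, args,
   argsize), which re-issues a call to FN using the argument block captured
   by __builtin_apply_args and hands back the address of a block holding the
   callee's return-value registers.  */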
1704 /* Perform an untyped return. */
1706 static void
1707 expand_builtin_return (rtx result)
1709 int size, align, regno;
1710 machine_mode mode;
1711 rtx reg;
1712 rtx_insn *call_fusage = 0;
1714 result = convert_memory_address (Pmode, result);
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1719 if (targetm.have_untyped_return ())
1721 rtx vector = result_vector (0, result);
1722 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1723 emit_barrier ();
1724 return;
1727 /* Restore the return value and note that each value is used. */
1728 size = 0;
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1738 push_to_sequence (call_fusage);
1739 emit_use (reg);
1740 call_fusage = get_insns ();
1741 end_sequence ();
1742 size += GET_MODE_SIZE (mode);
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1748 /* Return whatever values were restored by jumping directly to the end
1749 of the function. */
1750 expand_naked_return ();
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1755 static enum type_class
1756 type_to_class (tree type)
1758 switch (TREE_CODE (type))
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1772 case UNION_TYPE:
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1781 /* Expand a call EXP to __builtin_classify_type. */
1783 static rtx
1784 expand_builtin_classify_type (tree exp)
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
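/* For illustration: the result depends only on the argument's type, so e.g.
   __builtin_classify_type (3.5) expands to real_type_class and
   __builtin_classify_type (42) to integer_type_class; with no argument the
   call yields no_type_class.  */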
1791 /* This helper macro, meant to be used in mathfn_built_in below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(MATHFN) \
1796 CASE_CFN_##MATHFN: \
1797 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1798 fcodel = BUILT_IN_##MATHFN##L ; break;
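/* For illustration, roughly what one instance produces: CASE_MATHFN (SQRT)
   expands to the CASE_CFN_SQRT case labels (CFN_SQRT plus the
   CFN_BUILT_IN_SQRT/F/L variants) and records fcode = BUILT_IN_SQRT,
   fcodef = BUILT_IN_SQRTF and fcodel = BUILT_IN_SQRTL for the type dispatch
   at the end of mathfn_built_in_2 below.  */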
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(MATHFN) \
1801 case CFN_BUILT_IN_##MATHFN##_R: \
1802 case CFN_BUILT_IN_##MATHFN##F_R: \
1803 case CFN_BUILT_IN_##MATHFN##L_R: \
1804 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1805 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1807 /* Return a function equivalent to FN but operating on floating-point
1808 values of type TYPE, or END_BUILTINS if no such function exists.
1809 This is purely an operation on function codes; it does not guarantee
1810 that the target actually has an implementation of the function. */
1812 static built_in_function
1813 mathfn_built_in_2 (tree type, combined_fn fn)
1815 built_in_function fcode, fcodef, fcodel;
1817 switch (fn)
1819 CASE_MATHFN (ACOS)
1820 CASE_MATHFN (ACOSH)
1821 CASE_MATHFN (ASIN)
1822 CASE_MATHFN (ASINH)
1823 CASE_MATHFN (ATAN)
1824 CASE_MATHFN (ATAN2)
1825 CASE_MATHFN (ATANH)
1826 CASE_MATHFN (CBRT)
1827 CASE_MATHFN (CEIL)
1828 CASE_MATHFN (CEXPI)
1829 CASE_MATHFN (COPYSIGN)
1830 CASE_MATHFN (COS)
1831 CASE_MATHFN (COSH)
1832 CASE_MATHFN (DREM)
1833 CASE_MATHFN (ERF)
1834 CASE_MATHFN (ERFC)
1835 CASE_MATHFN (EXP)
1836 CASE_MATHFN (EXP10)
1837 CASE_MATHFN (EXP2)
1838 CASE_MATHFN (EXPM1)
1839 CASE_MATHFN (FABS)
1840 CASE_MATHFN (FDIM)
1841 CASE_MATHFN (FLOOR)
1842 CASE_MATHFN (FMA)
1843 CASE_MATHFN (FMAX)
1844 CASE_MATHFN (FMIN)
1845 CASE_MATHFN (FMOD)
1846 CASE_MATHFN (FREXP)
1847 CASE_MATHFN (GAMMA)
1848 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1849 CASE_MATHFN (HUGE_VAL)
1850 CASE_MATHFN (HYPOT)
1851 CASE_MATHFN (ILOGB)
1852 CASE_MATHFN (ICEIL)
1853 CASE_MATHFN (IFLOOR)
1854 CASE_MATHFN (INF)
1855 CASE_MATHFN (IRINT)
1856 CASE_MATHFN (IROUND)
1857 CASE_MATHFN (ISINF)
1858 CASE_MATHFN (J0)
1859 CASE_MATHFN (J1)
1860 CASE_MATHFN (JN)
1861 CASE_MATHFN (LCEIL)
1862 CASE_MATHFN (LDEXP)
1863 CASE_MATHFN (LFLOOR)
1864 CASE_MATHFN (LGAMMA)
1865 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1866 CASE_MATHFN (LLCEIL)
1867 CASE_MATHFN (LLFLOOR)
1868 CASE_MATHFN (LLRINT)
1869 CASE_MATHFN (LLROUND)
1870 CASE_MATHFN (LOG)
1871 CASE_MATHFN (LOG10)
1872 CASE_MATHFN (LOG1P)
1873 CASE_MATHFN (LOG2)
1874 CASE_MATHFN (LOGB)
1875 CASE_MATHFN (LRINT)
1876 CASE_MATHFN (LROUND)
1877 CASE_MATHFN (MODF)
1878 CASE_MATHFN (NAN)
1879 CASE_MATHFN (NANS)
1880 CASE_MATHFN (NEARBYINT)
1881 CASE_MATHFN (NEXTAFTER)
1882 CASE_MATHFN (NEXTTOWARD)
1883 CASE_MATHFN (POW)
1884 CASE_MATHFN (POWI)
1885 CASE_MATHFN (POW10)
1886 CASE_MATHFN (REMAINDER)
1887 CASE_MATHFN (REMQUO)
1888 CASE_MATHFN (RINT)
1889 CASE_MATHFN (ROUND)
1890 CASE_MATHFN (SCALB)
1891 CASE_MATHFN (SCALBLN)
1892 CASE_MATHFN (SCALBN)
1893 CASE_MATHFN (SIGNBIT)
1894 CASE_MATHFN (SIGNIFICAND)
1895 CASE_MATHFN (SIN)
1896 CASE_MATHFN (SINCOS)
1897 CASE_MATHFN (SINH)
1898 CASE_MATHFN (SQRT)
1899 CASE_MATHFN (TAN)
1900 CASE_MATHFN (TANH)
1901 CASE_MATHFN (TGAMMA)
1902 CASE_MATHFN (TRUNC)
1903 CASE_MATHFN (Y0)
1904 CASE_MATHFN (Y1)
1905 CASE_MATHFN (YN)
1907 default:
1908 return END_BUILTINS;
1911 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1912 return fcode;
1913 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1914 return fcodef;
1915 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1916 return fcodel;
1917 else
1918 return END_BUILTINS;
1921 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1922 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1923 otherwise use the explicit declaration. If we can't do the conversion,
1924 return null. */
1926 static tree
1927 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1929 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1930 if (fcode2 == END_BUILTINS)
1931 return NULL_TREE;
1933 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1934 return NULL_TREE;
1936 return builtin_decl_explicit (fcode2);
1939 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1941 tree
1942 mathfn_built_in (tree type, combined_fn fn)
1944 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1947 /* Like mathfn_built_in_1, but take a built_in_function and
1948 always use the implicit builtin declarations. */
1950 tree
1951 mathfn_built_in (tree type, enum built_in_function fn)
1953 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
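/* Illustrative example: mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
   returns the declaration of BUILT_IN_SINL, or NULL_TREE if that variant is
   not implicitly available for the current target and language.  */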
1956 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1957 return its code, otherwise return IFN_LAST. Note that this function
1958 only tests whether the function is defined in internal-fn.def, not whether
1959 it is actually available on the target. */
1961 internal_fn
1962 associated_internal_fn (tree fndecl)
1964 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1965 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1966 switch (DECL_FUNCTION_CODE (fndecl))
1968 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1969 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1970 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1971 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1972 #include "internal-fn.def"
1974 CASE_FLT_FN (BUILT_IN_POW10):
1975 return IFN_EXP10;
1977 CASE_FLT_FN (BUILT_IN_DREM):
1978 return IFN_REMAINDER;
1980 CASE_FLT_FN (BUILT_IN_SCALBN):
1981 CASE_FLT_FN (BUILT_IN_SCALBLN):
1982 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1983 return IFN_LDEXP;
1984 return IFN_LAST;
1986 default:
1987 return IFN_LAST;
1991 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1992 on the current target by a call to an internal function, return the
1993 code of that internal function, otherwise return IFN_LAST. The caller
1994 is responsible for ensuring that any side-effects of the built-in
1995 call are dealt with correctly. E.g. if CALL sets errno, the caller
1996 must decide that the errno result isn't needed or make it available
1997 in some other way. */
1999 internal_fn
2000 replacement_internal_fn (gcall *call)
2002 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2004 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2005 if (ifn != IFN_LAST)
2007 tree_pair types = direct_internal_fn_types (ifn, call);
2008 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2009 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2010 return ifn;
2013 return IFN_LAST;
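/* For illustration: a GIMPLE call to __builtin_sqrt maps to IFN_SQRT in
   associated_internal_fn; replacement_internal_fn additionally requires that
   the target can expand IFN_SQRT directly for the call's types before
   suggesting the replacement.  */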
2016 /* Expand a call to the builtin trinary math functions (fma).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2021 operands. */
2023 static rtx
2024 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, op2, result;
2028 rtx_insn *insns;
2029 tree fndecl = get_callee_fndecl (exp);
2030 tree arg0, arg1, arg2;
2031 machine_mode mode;
2033 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2034 return NULL_RTX;
2036 arg0 = CALL_EXPR_ARG (exp, 0);
2037 arg1 = CALL_EXPR_ARG (exp, 1);
2038 arg2 = CALL_EXPR_ARG (exp, 2);
2040 switch (DECL_FUNCTION_CODE (fndecl))
2042 CASE_FLT_FN (BUILT_IN_FMA):
2043 builtin_optab = fma_optab; break;
2044 default:
2045 gcc_unreachable ();
2048 /* Make a suitable register to place result in. */
2049 mode = TYPE_MODE (TREE_TYPE (exp));
2051 /* Before working hard, check whether the instruction is available. */
2052 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2053 return NULL_RTX;
2055 result = gen_reg_rtx (mode);
2057 /* Always stabilize the argument list. */
2058 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2059 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2060 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2062 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2063 op1 = expand_normal (arg1);
2064 op2 = expand_normal (arg2);
2066 start_sequence ();
2068 /* Compute into RESULT.
2069 Set RESULT to wherever the result comes back. */
2070 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2071 result, 0);
2073 /* If we were unable to expand via the builtin, stop the sequence
2074 (without outputting the insns) and emit a call to the library function
2075 with the stabilized argument list. */
2076 if (result == 0)
2078 end_sequence ();
2079 return expand_call (exp, target, target == const0_rtx);
2082 /* Output the entire sequence. */
2083 insns = get_insns ();
2084 end_sequence ();
2085 emit_insn (insns);
2087 return result;
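/* For illustration: __builtin_fma (a, b, c) becomes a single fused
   multiply-add insn when the target provides an fma pattern for the
   operands' mode; otherwise the partially built sequence is discarded and a
   normal library call to fma is emitted instead.  */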
2090 /* Expand a call to the builtin sin and cos math functions.
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2095 operands. */
2097 static rtx
2098 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2100 optab builtin_optab;
2101 rtx op0;
2102 rtx_insn *insns;
2103 tree fndecl = get_callee_fndecl (exp);
2104 machine_mode mode;
2105 tree arg;
2107 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2108 return NULL_RTX;
2110 arg = CALL_EXPR_ARG (exp, 0);
2112 switch (DECL_FUNCTION_CODE (fndecl))
2114 CASE_FLT_FN (BUILT_IN_SIN):
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 builtin_optab = sincos_optab; break;
2117 default:
2118 gcc_unreachable ();
2121 /* Make a suitable register to place result in. */
2122 mode = TYPE_MODE (TREE_TYPE (exp));
2124 /* Check if the sincos insn is available, otherwise fall back
2125 to sin or cos insn. */
2126 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2127 switch (DECL_FUNCTION_CODE (fndecl))
2129 CASE_FLT_FN (BUILT_IN_SIN):
2130 builtin_optab = sin_optab; break;
2131 CASE_FLT_FN (BUILT_IN_COS):
2132 builtin_optab = cos_optab; break;
2133 default:
2134 gcc_unreachable ();
2137 /* Before working hard, check whether the instruction is available. */
2138 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2140 rtx result = gen_reg_rtx (mode);
2142 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2143 need to expand the argument again. This way, we will not perform
2144 side-effects more than once. */
2145 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2147 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2149 start_sequence ();
2151 /* Compute into RESULT.
2152 Set RESULT to wherever the result comes back. */
2153 if (builtin_optab == sincos_optab)
2155 int ok;
2157 switch (DECL_FUNCTION_CODE (fndecl))
2159 CASE_FLT_FN (BUILT_IN_SIN):
2160 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2161 break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2164 break;
2165 default:
2166 gcc_unreachable ();
2168 gcc_assert (ok);
2170 else
2171 result = expand_unop (mode, builtin_optab, op0, result, 0);
2173 if (result != 0)
2175 /* Output the entire sequence. */
2176 insns = get_insns ();
2177 end_sequence ();
2178 emit_insn (insns);
2179 return result;
2182 /* If we were unable to expand via the builtin, stop the sequence
2183 (without outputting the insns) and emit a call to the library function
2184 with the stabilized argument list. */
2185 end_sequence ();
2188 return expand_call (exp, target, target == const0_rtx);
2191 /* Given an interclass math builtin decl FNDECL and its argument ARG
2192 return an RTL instruction code that implements the functionality.
2193 If that isn't possible or available return CODE_FOR_nothing. */
2195 static enum insn_code
2196 interclass_mathfn_icode (tree arg, tree fndecl)
2198 bool errno_set = false;
2199 optab builtin_optab = unknown_optab;
2200 machine_mode mode;
2202 switch (DECL_FUNCTION_CODE (fndecl))
2204 CASE_FLT_FN (BUILT_IN_ILOGB):
2205 errno_set = true; builtin_optab = ilogb_optab; break;
2206 CASE_FLT_FN (BUILT_IN_ISINF):
2207 builtin_optab = isinf_optab; break;
2208 case BUILT_IN_ISNORMAL:
2209 case BUILT_IN_ISFINITE:
2210 CASE_FLT_FN (BUILT_IN_FINITE):
2211 case BUILT_IN_FINITED32:
2212 case BUILT_IN_FINITED64:
2213 case BUILT_IN_FINITED128:
2214 case BUILT_IN_ISINFD32:
2215 case BUILT_IN_ISINFD64:
2216 case BUILT_IN_ISINFD128:
2217 /* These builtins have no optabs (yet). */
2218 break;
2219 default:
2220 gcc_unreachable ();
2223 /* There's no easy way to detect the case we need to set EDOM. */
2224 if (flag_errno_math && errno_set)
2225 return CODE_FOR_nothing;
2227 /* Optab mode depends on the mode of the input argument. */
2228 mode = TYPE_MODE (TREE_TYPE (arg));
2230 if (builtin_optab)
2231 return optab_handler (builtin_optab, mode);
2232 return CODE_FOR_nothing;
2235 /* Expand a call to one of the builtin math functions that operate on
2236 a floating-point argument and output an integer result (ilogb, isinf,
2237 isnan, etc).
2238 Return 0 if a normal call should be emitted rather than expanding the
2239 function in-line. EXP is the expression that is a call to the builtin
2240 function; if convenient, the result should be placed in TARGET. */
2242 static rtx
2243 expand_builtin_interclass_mathfn (tree exp, rtx target)
2245 enum insn_code icode = CODE_FOR_nothing;
2246 rtx op0;
2247 tree fndecl = get_callee_fndecl (exp);
2248 machine_mode mode;
2249 tree arg;
2251 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2252 return NULL_RTX;
2254 arg = CALL_EXPR_ARG (exp, 0);
2255 icode = interclass_mathfn_icode (arg, fndecl);
2256 mode = TYPE_MODE (TREE_TYPE (arg));
2258 if (icode != CODE_FOR_nothing)
2260 struct expand_operand ops[1];
2261 rtx_insn *last = get_last_insn ();
2262 tree orig_arg = arg;
2264 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2265 need to expand the argument again. This way, we will not perform
2266 side-effects more than once. */
2267 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2269 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2271 if (mode != GET_MODE (op0))
2272 op0 = convert_to_mode (mode, op0, 0);
2274 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2275 if (maybe_legitimize_operands (icode, 0, 1, ops)
2276 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2277 return ops[0].value;
2279 delete_insns_since (last);
2280 CALL_EXPR_ARG (exp, 0) = orig_arg;
2283 return NULL_RTX;
2286 /* Expand a call to the builtin sincos math function.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function. */
2291 static rtx
2292 expand_builtin_sincos (tree exp)
2294 rtx op0, op1, op2, target1, target2;
2295 machine_mode mode;
2296 tree arg, sinp, cosp;
2297 int result;
2298 location_t loc = EXPR_LOCATION (exp);
2299 tree alias_type, alias_off;
2301 if (!validate_arglist (exp, REAL_TYPE,
2302 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2303 return NULL_RTX;
2305 arg = CALL_EXPR_ARG (exp, 0);
2306 sinp = CALL_EXPR_ARG (exp, 1);
2307 cosp = CALL_EXPR_ARG (exp, 2);
2309 /* Make a suitable register to place result in. */
2310 mode = TYPE_MODE (TREE_TYPE (arg));
2312 /* Check if sincos insn is available, otherwise emit the call. */
2313 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2314 return NULL_RTX;
2316 target1 = gen_reg_rtx (mode);
2317 target2 = gen_reg_rtx (mode);
2319 op0 = expand_normal (arg);
2320 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2321 alias_off = build_int_cst (alias_type, 0);
2322 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2323 sinp, alias_off));
2324 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2325 cosp, alias_off));
2327 /* Compute into target1 and target2.
2328 Set TARGET to wherever the result comes back. */
2329 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2330 gcc_assert (result);
2332 /* Move target1 and target2 to the memory locations indicated
2333 by op1 and op2. */
2334 emit_move_insn (op1, target1);
2335 emit_move_insn (op2, target2);
2337 return const0_rtx;
2340 /* Expand a call to the internal cexpi builtin to the sincos math function.
2341 EXP is the expression that is a call to the builtin function; if convenient,
2342 the result should be placed in TARGET. */
2344 static rtx
2345 expand_builtin_cexpi (tree exp, rtx target)
2347 tree fndecl = get_callee_fndecl (exp);
2348 tree arg, type;
2349 machine_mode mode;
2350 rtx op0, op1, op2;
2351 location_t loc = EXPR_LOCATION (exp);
2353 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2354 return NULL_RTX;
2356 arg = CALL_EXPR_ARG (exp, 0);
2357 type = TREE_TYPE (arg);
2358 mode = TYPE_MODE (TREE_TYPE (arg));
2360 /* Try expanding via a sincos optab, fall back to emitting a libcall
2361 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2362 is only generated from sincos, cexp or if we have either of them. */
2363 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2365 op1 = gen_reg_rtx (mode);
2366 op2 = gen_reg_rtx (mode);
2368 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2370 /* Compute into op1 and op2. */
2371 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2373 else if (targetm.libc_has_function (function_sincos))
2375 tree call, fn = NULL_TREE;
2376 tree top1, top2;
2377 rtx op1a, op2a;
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2385 else
2386 gcc_unreachable ();
2388 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2389 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2390 op1a = copy_addr_to_reg (XEXP (op1, 0));
2391 op2a = copy_addr_to_reg (XEXP (op2, 0));
2392 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2393 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2395 /* Make sure not to fold the sincos call again. */
2396 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2397 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2398 call, 3, arg, top1, top2));
2400 else
2402 tree call, fn = NULL_TREE, narg;
2403 tree ctype = build_complex_type (type);
2405 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2406 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2408 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2410 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2411 else
2412 gcc_unreachable ();
2414 /* If we don't have a decl for cexp, create one. This is the
2415 friendliest fallback if the user calls __builtin_cexpi
2416 without full target C99 function support. */
2417 if (fn == NULL_TREE)
2419 tree fntype;
2420 const char *name = NULL;
2422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2423 name = "cexpf";
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2425 name = "cexp";
2426 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2427 name = "cexpl";
2429 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2430 fn = build_fn_decl (name, fntype);
2433 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2434 build_real (type, dconst0), arg);
2436 /* Make sure not to fold the cexp call again. */
2437 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2438 return expand_expr (build_call_nary (ctype, call, 1, narg),
2439 target, VOIDmode, EXPAND_NORMAL);
2442 /* Now build the proper return type. */
2443 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2444 make_tree (TREE_TYPE (arg), op2),
2445 make_tree (TREE_TYPE (arg), op1)),
2446 target, VOIDmode, EXPAND_NORMAL);
2449 /* Conveniently construct a function call expression. FNDECL names the
2450 function to be called, N is the number of arguments, and the "..."
2451 parameters are the argument expressions. Unlike build_call_expr
2452 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2454 static tree
2455 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2457 va_list ap;
2458 tree fntype = TREE_TYPE (fndecl);
2459 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2461 va_start (ap, n);
2462 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2463 va_end (ap);
2464 SET_EXPR_LOCATION (fn, loc);
2465 return fn;
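/* Illustrative usage: expand_builtin_int_roundingfn below calls
   build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg) to
   obtain a plain CALL_EXPR that will not be folded back into the very
   builtin currently being lowered.  */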
2468 /* Expand a call to one of the builtin rounding functions gcc defines
2469 as an extension (lfloor and lceil). As these are gcc extensions we
2470 do not need to worry about setting errno to EDOM.
2471 If expanding via optab fails, lower expression to (int)(floor(x)).
2472 EXP is the expression that is a call to the builtin function;
2473 if convenient, the result should be placed in TARGET. */
2475 static rtx
2476 expand_builtin_int_roundingfn (tree exp, rtx target)
2478 convert_optab builtin_optab;
2479 rtx op0, tmp;
2480 rtx_insn *insns;
2481 tree fndecl = get_callee_fndecl (exp);
2482 enum built_in_function fallback_fn;
2483 tree fallback_fndecl;
2484 machine_mode mode;
2485 tree arg;
2487 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2488 gcc_unreachable ();
2490 arg = CALL_EXPR_ARG (exp, 0);
2492 switch (DECL_FUNCTION_CODE (fndecl))
2494 CASE_FLT_FN (BUILT_IN_ICEIL):
2495 CASE_FLT_FN (BUILT_IN_LCEIL):
2496 CASE_FLT_FN (BUILT_IN_LLCEIL):
2497 builtin_optab = lceil_optab;
2498 fallback_fn = BUILT_IN_CEIL;
2499 break;
2501 CASE_FLT_FN (BUILT_IN_IFLOOR):
2502 CASE_FLT_FN (BUILT_IN_LFLOOR):
2503 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2504 builtin_optab = lfloor_optab;
2505 fallback_fn = BUILT_IN_FLOOR;
2506 break;
2508 default:
2509 gcc_unreachable ();
2512 /* Make a suitable register to place result in. */
2513 mode = TYPE_MODE (TREE_TYPE (exp));
2515 target = gen_reg_rtx (mode);
2517 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2518 need to expand the argument again. This way, we will not perform
2519 side-effects more than once. */
2520 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2522 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2524 start_sequence ();
2526 /* Compute into TARGET. */
2527 if (expand_sfix_optab (target, op0, builtin_optab))
2529 /* Output the entire sequence. */
2530 insns = get_insns ();
2531 end_sequence ();
2532 emit_insn (insns);
2533 return target;
2536 /* If we were unable to expand via the builtin, stop the sequence
2537 (without outputting the insns). */
2538 end_sequence ();
2540 /* Fall back to floating point rounding optab. */
2541 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2543 /* For non-C99 targets we may end up without a fallback fndecl here
2544 if the user called __builtin_lfloor directly. In this case emit
2545 a call to the floor/ceil variants nevertheless. This should result
2546 in the best user experience on targets lacking full C99 support. */
2547 if (fallback_fndecl == NULL_TREE)
2549 tree fntype;
2550 const char *name = NULL;
2552 switch (DECL_FUNCTION_CODE (fndecl))
2554 case BUILT_IN_ICEIL:
2555 case BUILT_IN_LCEIL:
2556 case BUILT_IN_LLCEIL:
2557 name = "ceil";
2558 break;
2559 case BUILT_IN_ICEILF:
2560 case BUILT_IN_LCEILF:
2561 case BUILT_IN_LLCEILF:
2562 name = "ceilf";
2563 break;
2564 case BUILT_IN_ICEILL:
2565 case BUILT_IN_LCEILL:
2566 case BUILT_IN_LLCEILL:
2567 name = "ceill";
2568 break;
2569 case BUILT_IN_IFLOOR:
2570 case BUILT_IN_LFLOOR:
2571 case BUILT_IN_LLFLOOR:
2572 name = "floor";
2573 break;
2574 case BUILT_IN_IFLOORF:
2575 case BUILT_IN_LFLOORF:
2576 case BUILT_IN_LLFLOORF:
2577 name = "floorf";
2578 break;
2579 case BUILT_IN_IFLOORL:
2580 case BUILT_IN_LFLOORL:
2581 case BUILT_IN_LLFLOORL:
2582 name = "floorl";
2583 break;
2584 default:
2585 gcc_unreachable ();
2588 fntype = build_function_type_list (TREE_TYPE (arg),
2589 TREE_TYPE (arg), NULL_TREE);
2590 fallback_fndecl = build_fn_decl (name, fntype);
2593 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2595 tmp = expand_normal (exp);
2596 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2598 /* Truncate the result of the floating point optab to an integer
2599 via expand_fix (). */
2600 target = gen_reg_rtx (mode);
2601 expand_fix (target, tmp, 0);
2603 return target;
2606 /* Expand a call to one of the builtin math functions doing integer
2607 conversion (lrint).
2608 Return 0 if a normal call should be emitted rather than expanding the
2609 function in-line. EXP is the expression that is a call to the builtin
2610 function; if convenient, the result should be placed in TARGET. */
2612 static rtx
2613 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2615 convert_optab builtin_optab;
2616 rtx op0;
2617 rtx_insn *insns;
2618 tree fndecl = get_callee_fndecl (exp);
2619 tree arg;
2620 machine_mode mode;
2621 enum built_in_function fallback_fn = BUILT_IN_NONE;
2623 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2624 gcc_unreachable ();
2626 arg = CALL_EXPR_ARG (exp, 0);
2628 switch (DECL_FUNCTION_CODE (fndecl))
2630 CASE_FLT_FN (BUILT_IN_IRINT):
2631 fallback_fn = BUILT_IN_LRINT;
2632 gcc_fallthrough ();
2633 CASE_FLT_FN (BUILT_IN_LRINT):
2634 CASE_FLT_FN (BUILT_IN_LLRINT):
2635 builtin_optab = lrint_optab;
2636 break;
2638 CASE_FLT_FN (BUILT_IN_IROUND):
2639 fallback_fn = BUILT_IN_LROUND;
2640 gcc_fallthrough ();
2641 CASE_FLT_FN (BUILT_IN_LROUND):
2642 CASE_FLT_FN (BUILT_IN_LLROUND):
2643 builtin_optab = lround_optab;
2644 break;
2646 default:
2647 gcc_unreachable ();
2650 /* There's no easy way to detect the case we need to set EDOM. */
2651 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2652 return NULL_RTX;
2654 /* Make a suitable register to place result in. */
2655 mode = TYPE_MODE (TREE_TYPE (exp));
2657 /* There's no easy way to detect the case we need to set EDOM. */
2658 if (!flag_errno_math)
2660 rtx result = gen_reg_rtx (mode);
2662 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2663 need to expand the argument again. This way, we will not perform
2664 side-effects more than once. */
2665 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2667 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2669 start_sequence ();
2671 if (expand_sfix_optab (result, op0, builtin_optab))
2673 /* Output the entire sequence. */
2674 insns = get_insns ();
2675 end_sequence ();
2676 emit_insn (insns);
2677 return result;
2680 /* If we were unable to expand via the builtin, stop the sequence
2681 (without outputting the insns) and emit a call to the library function
2682 with the stabilized argument list. */
2683 end_sequence ();
2686 if (fallback_fn != BUILT_IN_NONE)
2688 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2689 targets, (int) round (x) should never be transformed into
2690 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2691 a call to lround in the hope that the target provides at least some
2692 C99 functions. This should result in the best user experience for
2693 targets lacking full C99 support. */
2694 tree fallback_fndecl = mathfn_built_in_1
2695 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2697 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2698 fallback_fndecl, 1, arg);
2700 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2701 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2702 return convert_to_mode (mode, target, 0);
2705 return expand_call (exp, target, target == const0_rtx);
2708 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2709 a normal call should be emitted rather than expanding the function
2710 in-line. EXP is the expression that is a call to the builtin
2711 function; if convenient, the result should be placed in TARGET. */
2713 static rtx
2714 expand_builtin_powi (tree exp, rtx target)
2716 tree arg0, arg1;
2717 rtx op0, op1;
2718 machine_mode mode;
2719 machine_mode mode2;
2721 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2722 return NULL_RTX;
2724 arg0 = CALL_EXPR_ARG (exp, 0);
2725 arg1 = CALL_EXPR_ARG (exp, 1);
2726 mode = TYPE_MODE (TREE_TYPE (exp));
2728 /* Emit a libcall to libgcc. */
2730 /* Mode of the 2nd argument must match that of an int. */
2731 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2733 if (target == NULL_RTX)
2734 target = gen_reg_rtx (mode);
2736 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2737 if (GET_MODE (op0) != mode)
2738 op0 = convert_to_mode (mode, op0, 0);
2739 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2740 if (GET_MODE (op1) != mode2)
2741 op1 = convert_to_mode (mode2, op1, 0);
2743 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2744 target, LCT_CONST, mode, 2,
2745 op0, mode, op1, mode2);
2747 return target;
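/* For illustration: __builtin_powi (x, n) is lowered here to a libcall
   resolved through powi_optab (typically libgcc's __powidf2 and friends),
   with X passed in the result mode and N in the mode corresponding to int.  */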
2750 /* Expand expression EXP, which is a call to the strlen builtin. Return
2751 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2752 try to get the result in TARGET, if convenient. */
2754 static rtx
2755 expand_builtin_strlen (tree exp, rtx target,
2756 machine_mode target_mode)
2758 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2759 return NULL_RTX;
2760 else
2762 struct expand_operand ops[4];
2763 rtx pat;
2764 tree len;
2765 tree src = CALL_EXPR_ARG (exp, 0);
2766 rtx src_reg;
2767 rtx_insn *before_strlen;
2768 machine_mode insn_mode = target_mode;
2769 enum insn_code icode = CODE_FOR_nothing;
2770 unsigned int align;
2772 /* If the length can be computed at compile-time, return it. */
2773 len = c_strlen (src, 0);
2774 if (len)
2775 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2777 /* If the length can be computed at compile-time and is a constant
2778 integer, but there are side-effects in src, evaluate
2779 src for side-effects, then return len.
2780 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2781 can be optimized into: i++; x = 3; */
2782 len = c_strlen (src, 1);
2783 if (len && TREE_CODE (len) == INTEGER_CST)
2785 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2786 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2789 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2791 /* If SRC is not a pointer type, don't do this operation inline. */
2792 if (align == 0)
2793 return NULL_RTX;
2795 /* Bail out if we can't compute strlen in the right mode. */
2796 while (insn_mode != VOIDmode)
2798 icode = optab_handler (strlen_optab, insn_mode);
2799 if (icode != CODE_FOR_nothing)
2800 break;
2802 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2804 if (insn_mode == VOIDmode)
2805 return NULL_RTX;
2807 /* Make a place to hold the source address. We will not expand
2808 the actual source until we are sure that the expansion will
2809 not fail -- there are trees that cannot be expanded twice. */
2810 src_reg = gen_reg_rtx (Pmode);
2812 /* Mark the beginning of the strlen sequence so we can emit the
2813 source operand later. */
2814 before_strlen = get_last_insn ();
2816 create_output_operand (&ops[0], target, insn_mode);
2817 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2818 create_integer_operand (&ops[2], 0);
2819 create_integer_operand (&ops[3], align);
2820 if (!maybe_expand_insn (icode, 4, ops))
2821 return NULL_RTX;
2823 /* Now that we are assured of success, expand the source. */
2824 start_sequence ();
2825 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2826 if (pat != src_reg)
2828 #ifdef POINTERS_EXTEND_UNSIGNED
2829 if (GET_MODE (pat) != Pmode)
2830 pat = convert_to_mode (Pmode, pat,
2831 POINTERS_EXTEND_UNSIGNED);
2832 #endif
2833 emit_move_insn (src_reg, pat);
2835 pat = get_insns ();
2836 end_sequence ();
2838 if (before_strlen)
2839 emit_insn_after (pat, before_strlen);
2840 else
2841 emit_insn_before (pat, get_insns ());
2843 /* Return the value in the proper mode for this function. */
2844 if (GET_MODE (ops[0].value) == target_mode)
2845 target = ops[0].value;
2846 else if (target != 0)
2847 convert_move (target, ops[0].value, 0);
2848 else
2849 target = convert_to_mode (target_mode, ops[0].value, 0);
2851 return target;
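/* For illustration: the operands prepared above follow the documented
   strlen<mode> pattern interface: operand 0 is the length result, operand 1
   the string as a BLKmode MEM, operand 2 the character to search for
   (always 0 here) and operand 3 the known alignment of the string's start.  */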
2855 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2856 bytes from constant string DATA + OFFSET and return it as target
2857 constant. */
2859 static rtx
2860 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2861 machine_mode mode)
2863 const char *str = (const char *) data;
2865 gcc_assert (offset >= 0
2866 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2867 <= strlen (str) + 1));
2869 return c_readstr (str + offset, mode);
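/* For illustration: store_by_pieces calls this back once per piece; with a
   4-byte MODE and OFFSET 4, for example, it returns an immediate constant
   holding bytes 4..7 of the source string (including the terminating NUL if
   it falls within the piece), so the string constant never has to be loaded
   from memory.  */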
2872 /* LEN specifies the length of the block for the memcpy/memset operation.
2873 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2874 In some cases we can make a very likely guess at the maximum size, which
2875 we then store in PROBABLE_MAX_SIZE. */
2877 static void
2878 determine_block_size (tree len, rtx len_rtx,
2879 unsigned HOST_WIDE_INT *min_size,
2880 unsigned HOST_WIDE_INT *max_size,
2881 unsigned HOST_WIDE_INT *probable_max_size)
2883 if (CONST_INT_P (len_rtx))
2885 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2886 return;
2888 else
2890 wide_int min, max;
2891 enum value_range_type range_type = VR_UNDEFINED;
2893 /* Determine bounds from the type. */
2894 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2895 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2896 else
2897 *min_size = 0;
2898 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2899 *probable_max_size = *max_size
2900 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2901 else
2902 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2904 if (TREE_CODE (len) == SSA_NAME)
2905 range_type = get_range_info (len, &min, &max);
2906 if (range_type == VR_RANGE)
2908 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2909 *min_size = min.to_uhwi ();
2910 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2911 *probable_max_size = *max_size = max.to_uhwi ();
2913 else if (range_type == VR_ANTI_RANGE)
2915 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2916 if (min == 0)
2918 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2919 *min_size = max.to_uhwi () + 1;
2921 /* Code like
2923 int n;
2924 if (n < 100)
2925 memcpy (a, b, n)
2927 produces an anti-range allowing negative values of N. We can still
2928 use that information to guess that N is not negative.
2930 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2931 *probable_max_size = min.to_uhwi () - 1;
2934 gcc_checking_assert (*max_size <=
2935 (unsigned HOST_WIDE_INT)
2936 GET_MODE_MASK (GET_MODE (len_rtx)));
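/* Illustrative example: if LEN is an SSA_NAME with a known value range of
   [8, 128], this sets *MIN_SIZE to 8 and both *MAX_SIZE and
   *PROBABLE_MAX_SIZE to 128; without range information the bounds fall back
   to the limits of LEN's type.  */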
2939 /* Helper function to do the actual work for expand_builtin_memcpy. */
2941 static rtx
2942 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2944 const char *src_str;
2945 unsigned int src_align = get_pointer_alignment (src);
2946 unsigned int dest_align = get_pointer_alignment (dest);
2947 rtx dest_mem, src_mem, dest_addr, len_rtx;
2948 HOST_WIDE_INT expected_size = -1;
2949 unsigned int expected_align = 0;
2950 unsigned HOST_WIDE_INT min_size;
2951 unsigned HOST_WIDE_INT max_size;
2952 unsigned HOST_WIDE_INT probable_max_size;
2954 /* If DEST is not a pointer type, call the normal function. */
2955 if (dest_align == 0)
2956 return NULL_RTX;
2958 /* If SRC is not a pointer type, don't do this
2959 operation in-line. */
2960 if (src_align == 0)
2961 return NULL_RTX;
2963 if (currently_expanding_gimple_stmt)
2964 stringop_block_profile (currently_expanding_gimple_stmt,
2965 &expected_align, &expected_size);
2967 if (expected_align < dest_align)
2968 expected_align = dest_align;
2969 dest_mem = get_memory_rtx (dest, len);
2970 set_mem_align (dest_mem, dest_align);
2971 len_rtx = expand_normal (len);
2972 determine_block_size (len, len_rtx, &min_size, &max_size,
2973 &probable_max_size);
2974 src_str = c_getstr (src);
2976 /* If SRC is a string constant and block move would be done
2977 by pieces, we can avoid loading the string from memory
2978 and only store the computed constants. */
2979 if (src_str
2980 && CONST_INT_P (len_rtx)
2981 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2982 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2983 CONST_CAST (char *, src_str),
2984 dest_align, false))
2986 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2987 builtin_memcpy_read_str,
2988 CONST_CAST (char *, src_str),
2989 dest_align, false, 0);
2990 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2991 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2992 return dest_mem;
2995 src_mem = get_memory_rtx (src, len);
2996 set_mem_align (src_mem, src_align);
2998 /* Copy word part most expediently. */
2999 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3000 CALL_EXPR_TAILCALL (exp)
3001 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3002 expected_align, expected_size,
3003 min_size, max_size, probable_max_size);
3005 if (dest_addr == 0)
3007 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3008 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3011 return dest_addr;
3014 /* Expand a call EXP to the memcpy builtin.
3015 Return NULL_RTX if we failed; the caller should emit a normal call,
3016 otherwise try to get the result in TARGET, if convenient (and in
3017 mode MODE if that's convenient). */
3019 static rtx
3020 expand_builtin_memcpy (tree exp, rtx target)
3022 if (!validate_arglist (exp,
3023 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3024 return NULL_RTX;
3025 else
3027 tree dest = CALL_EXPR_ARG (exp, 0);
3028 tree src = CALL_EXPR_ARG (exp, 1);
3029 tree len = CALL_EXPR_ARG (exp, 2);
3030 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3034 /* Expand an instrumented call EXP to the memcpy builtin.
3035 Return NULL_RTX if we failed; the caller should emit a normal call,
3036 otherwise try to get the result in TARGET, if convenient (and in
3037 mode MODE if that's convenient). */
3039 static rtx
3040 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3042 if (!validate_arglist (exp,
3043 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3044 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3045 INTEGER_TYPE, VOID_TYPE))
3046 return NULL_RTX;
3047 else
3049 tree dest = CALL_EXPR_ARG (exp, 0);
3050 tree src = CALL_EXPR_ARG (exp, 2);
3051 tree len = CALL_EXPR_ARG (exp, 4);
3052 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3054 /* Return src bounds with the result. */
3055 if (res)
3057 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3058 expand_normal (CALL_EXPR_ARG (exp, 1)));
3059 res = chkp_join_splitted_slot (res, bnd);
3061 return res;
3065 /* Expand a call EXP to the mempcpy builtin.
3066 Return NULL_RTX if we failed; the caller should emit a normal call,
3067 otherwise try to get the result in TARGET, if convenient (and in
3068 mode MODE if that's convenient). If ENDP is 0 return the
3069 destination pointer, if ENDP is 1 return the end pointer ala
3070 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3071 stpcpy. */
3073 static rtx
3074 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3076 if (!validate_arglist (exp,
3077 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3078 return NULL_RTX;
3079 else
3081 tree dest = CALL_EXPR_ARG (exp, 0);
3082 tree src = CALL_EXPR_ARG (exp, 1);
3083 tree len = CALL_EXPR_ARG (exp, 2);
3084 return expand_builtin_mempcpy_args (dest, src, len,
3085 target, mode, /*endp=*/ 1,
3086 exp);
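/* For illustration: mempcpy (d, s, n) returns d + n, hence ENDP == 1 above;
   plain memcpy corresponds to ENDP == 0 (return the destination pointer)
   and stpcpy to ENDP == 2 (return d + n - 1, i.e. a pointer to the copied
   NUL).  */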
3090 /* Expand an instrumented call EXP to the mempcpy builtin.
3091 Return NULL_RTX if we failed; the caller should emit a normal call,
3092 otherwise try to get the result in TARGET, if convenient (and in
3093 mode MODE if that's convenient). */
3095 static rtx
3096 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3098 if (!validate_arglist (exp,
3099 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3100 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3101 INTEGER_TYPE, VOID_TYPE))
3102 return NULL_RTX;
3103 else
3105 tree dest = CALL_EXPR_ARG (exp, 0);
3106 tree src = CALL_EXPR_ARG (exp, 2);
3107 tree len = CALL_EXPR_ARG (exp, 4);
3108 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3109 mode, 1, exp);
3111 /* Return src bounds with the result. */
3112 if (res)
3114 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3115 expand_normal (CALL_EXPR_ARG (exp, 1)));
3116 res = chkp_join_splitted_slot (res, bnd);
3118 return res;
3122 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3123 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3124 so that this can also be called without constructing an actual CALL_EXPR.
3125 The other arguments and return value are the same as for
3126 expand_builtin_mempcpy. */
3128 static rtx
3129 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3130 rtx target, machine_mode mode, int endp,
3131 tree orig_exp)
3133 tree fndecl = get_callee_fndecl (orig_exp);
3135 /* If return value is ignored, transform mempcpy into memcpy. */
3136 if (target == const0_rtx
3137 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3138 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3140 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3141 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3142 dest, src, len);
3143 return expand_expr (result, target, mode, EXPAND_NORMAL);
3145 else if (target == const0_rtx
3146 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3148 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3149 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3150 dest, src, len);
3151 return expand_expr (result, target, mode, EXPAND_NORMAL);
3153 else
3155 const char *src_str;
3156 unsigned int src_align = get_pointer_alignment (src);
3157 unsigned int dest_align = get_pointer_alignment (dest);
3158 rtx dest_mem, src_mem, len_rtx;
3160 /* If either SRC or DEST is not a pointer type, don't do this
3161 operation in-line. */
3162 if (dest_align == 0 || src_align == 0)
3163 return NULL_RTX;
3165 /* If LEN is not constant, call the normal function. */
3166 if (! tree_fits_uhwi_p (len))
3167 return NULL_RTX;
3169 len_rtx = expand_normal (len);
3170 src_str = c_getstr (src);
3172 /* If SRC is a string constant and block move would be done
3173 by pieces, we can avoid loading the string from memory
3174 and only store the computed constants. */
3175 if (src_str
3176 && CONST_INT_P (len_rtx)
3177 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3178 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3179 CONST_CAST (char *, src_str),
3180 dest_align, false))
3182 dest_mem = get_memory_rtx (dest, len);
3183 set_mem_align (dest_mem, dest_align);
3184 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3185 builtin_memcpy_read_str,
3186 CONST_CAST (char *, src_str),
3187 dest_align, false, endp);
3188 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3189 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3190 return dest_mem;
3193 if (CONST_INT_P (len_rtx)
3194 && can_move_by_pieces (INTVAL (len_rtx),
3195 MIN (dest_align, src_align)))
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 src_mem = get_memory_rtx (src, len);
3200 set_mem_align (src_mem, src_align);
3201 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3202 MIN (dest_align, src_align), endp);
3203 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3204 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3205 return dest_mem;
3208 return NULL_RTX;
3212 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3213 we failed; the caller should emit a normal call, otherwise try to
3214 get the result in TARGET, if convenient. If ENDP is 0 return the
3215 destination pointer, if ENDP is 1 return the end pointer ala
3216 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3217 stpcpy. */
3219 static rtx
3220 expand_movstr (tree dest, tree src, rtx target, int endp)
3222 struct expand_operand ops[3];
3223 rtx dest_mem;
3224 rtx src_mem;
3226 if (!targetm.have_movstr ())
3227 return NULL_RTX;
3229 dest_mem = get_memory_rtx (dest, NULL);
3230 src_mem = get_memory_rtx (src, NULL);
3231 if (!endp)
3233 target = force_reg (Pmode, XEXP (dest_mem, 0));
3234 dest_mem = replace_equiv_address (dest_mem, target);
3237 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3238 create_fixed_operand (&ops[1], dest_mem);
3239 create_fixed_operand (&ops[2], src_mem);
3240 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3241 return NULL_RTX;
3243 if (endp && target != const0_rtx)
3245 target = ops[0].value;
3246 /* movstr is supposed to set end to the address of the NUL
3247 terminator. If the caller requested a mempcpy-like return value,
3248 adjust it. */
3249 if (endp == 1)
3251 rtx tem = plus_constant (GET_MODE (target),
3252 gen_lowpart (GET_MODE (target), target), 1);
3253 emit_move_insn (target, force_operand (tem, NULL_RTX));
3256 return target;
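/* For illustration: the movstr pattern leaves its output pointing at the
   copied NUL terminator, which is already the stpcpy-style (ENDP == 2)
   result; only the mempcpy-style ENDP == 1 result needs the plus-one
   adjustment above.  */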
3259 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3260 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3261 try to get the result in TARGET, if convenient (and in mode MODE if that's
3262 convenient). */
3264 static rtx
3265 expand_builtin_strcpy (tree exp, rtx target)
3267 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3269 tree dest = CALL_EXPR_ARG (exp, 0);
3270 tree src = CALL_EXPR_ARG (exp, 1);
3271 return expand_builtin_strcpy_args (dest, src, target);
3273 return NULL_RTX;
3276 /* Helper function to do the actual work for expand_builtin_strcpy. The
3277 arguments to the builtin_strcpy call DEST and SRC are broken out
3278 so that this can also be called without constructing an actual CALL_EXPR.
3279 The other arguments and return value are the same as for
3280 expand_builtin_strcpy. */
3282 static rtx
3283 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3285 return expand_movstr (dest, src, target, /*endp=*/0);
3288 /* Expand a call EXP to the stpcpy builtin.
3289 Return NULL_RTX if we failed; the caller should emit a normal call,
3290 otherwise try to get the result in TARGET, if convenient (and in
3291 mode MODE if that's convenient). */
3293 static rtx
3294 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3296 tree dst, src;
3297 location_t loc = EXPR_LOCATION (exp);
3299 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3300 return NULL_RTX;
3302 dst = CALL_EXPR_ARG (exp, 0);
3303 src = CALL_EXPR_ARG (exp, 1);
3305 /* If return value is ignored, transform stpcpy into strcpy. */
3306 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3308 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3309 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3310 return expand_expr (result, target, mode, EXPAND_NORMAL);
3312 else
3314 tree len, lenp1;
3315 rtx ret;
3317 /* Ensure we get an actual string whose length can be evaluated at
3318 compile-time, not an expression containing a string. This is
3319 because the latter will potentially produce pessimized code
3320 when used to produce the return value. */
3321 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3322 return expand_movstr (dst, src, target, /*endp=*/2);
3324 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3325 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3326 target, mode, /*endp=*/2,
3327 exp);
3329 if (ret)
3330 return ret;
3332 if (TREE_CODE (len) == INTEGER_CST)
3334 rtx len_rtx = expand_normal (len);
3336 if (CONST_INT_P (len_rtx))
3338 ret = expand_builtin_strcpy_args (dst, src, target);
3340 if (ret)
3342 if (! target)
3344 if (mode != VOIDmode)
3345 target = gen_reg_rtx (mode);
3346 else
3347 target = gen_reg_rtx (GET_MODE (ret));
3349 if (GET_MODE (target) != GET_MODE (ret))
3350 ret = gen_lowpart (GET_MODE (target), ret);
3352 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3353 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3354 gcc_assert (ret);
3356 return target;
3361 return expand_movstr (dst, src, target, /*endp=*/2);
3365 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3366 bytes from constant string DATA + OFFSET and return it as target
3367 constant. */
3370 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3371 machine_mode mode)
3373 const char *str = (const char *) data;
3375 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3376 return const0_rtx;
3378 return c_readstr (str + offset, mode);
3381 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3382 NULL_RTX if we failed; the caller should emit a normal call. */
3384 static rtx
3385 expand_builtin_strncpy (tree exp, rtx target)
3387 location_t loc = EXPR_LOCATION (exp);
3389 if (validate_arglist (exp,
3390 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3392 tree dest = CALL_EXPR_ARG (exp, 0);
3393 tree src = CALL_EXPR_ARG (exp, 1);
3394 tree len = CALL_EXPR_ARG (exp, 2);
3395 tree slen = c_strlen (src, 1);
3397 /* We must be passed a constant len and src parameter. */
3398 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3399 return NULL_RTX;
3401 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3403 /* We're required to pad with trailing zeros if the requested
3404 len is greater than strlen(s2)+1. In that case try to
3405 use store_by_pieces; if it fails, punt. */
3406 if (tree_int_cst_lt (slen, len))
3408 unsigned int dest_align = get_pointer_alignment (dest);
3409 const char *p = c_getstr (src);
3410 rtx dest_mem;
3412 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3413 || !can_store_by_pieces (tree_to_uhwi (len),
3414 builtin_strncpy_read_str,
3415 CONST_CAST (char *, p),
3416 dest_align, false))
3417 return NULL_RTX;
3419 dest_mem = get_memory_rtx (dest, len);
3420 store_by_pieces (dest_mem, tree_to_uhwi (len),
3421 builtin_strncpy_read_str,
3422 CONST_CAST (char *, p), dest_align, false, 0);
3423 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3424 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3425 return dest_mem;
3428 return NULL_RTX;
3431 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3432 bytes from constant string DATA + OFFSET and return it as target
3433 constant. */
3436 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3437 machine_mode mode)
3439 const char *c = (const char *) data;
3440 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3442 memset (p, *c, GET_MODE_SIZE (mode));
3444 return c_readstr (p, mode);
3447 /* Callback routine for store_by_pieces. Return the RTL of a register
3448 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3449 char value given in the RTL register data. For example, if mode is
3450 4 bytes wide, return the RTL for 0x01010101*data. */
3452 static rtx
3453 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3454 machine_mode mode)
3456 rtx target, coeff;
3457 size_t size;
3458 char *p;
3460 size = GET_MODE_SIZE (mode);
3461 if (size == 1)
3462 return (rtx) data;
3464 p = XALLOCAVEC (char, size);
3465 memset (p, 1, size);
3466 coeff = c_readstr (p, mode);
3468 target = convert_to_mode (mode, (rtx) data, 1);
3469 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3470 return force_reg (mode, target);
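/* Worked example: for a 4-byte MODE the coefficient built above is
   0x01010101, so a byte value of 0xAB yields 0xAB * 0x01010101 ==
   0xABABABAB, i.e. the byte replicated into every byte of the word.  */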
3473 /* Expand expression EXP, which is a call to the memset builtin. Return
3474 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3475 try to get the result in TARGET, if convenient (and in mode MODE if that's
3476 convenient). */
3478 static rtx
3479 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3481 if (!validate_arglist (exp,
3482 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3483 return NULL_RTX;
3484 else
3486 tree dest = CALL_EXPR_ARG (exp, 0);
3487 tree val = CALL_EXPR_ARG (exp, 1);
3488 tree len = CALL_EXPR_ARG (exp, 2);
3489 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3493 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3494 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3495 try to get the result in TARGET, if convenient (and in mode MODE if that's
3496 convenient). */
3498 static rtx
3499 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3501 if (!validate_arglist (exp,
3502 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3503 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3504 return NULL_RTX;
3505 else
3507 tree dest = CALL_EXPR_ARG (exp, 0);
3508 tree val = CALL_EXPR_ARG (exp, 2);
3509 tree len = CALL_EXPR_ARG (exp, 3);
3510 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3512 /* Return src bounds with the result. */
3513 if (res)
3515 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3516 expand_normal (CALL_EXPR_ARG (exp, 1)));
3517 res = chkp_join_splitted_slot (res, bnd);
3519 return res;
3523 /* Helper function to do the actual work for expand_builtin_memset. The
3524 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_memset. */
3529 static rtx
3530 expand_builtin_memset_args (tree dest, tree val, tree len,
3531 rtx target, machine_mode mode, tree orig_exp)
3533 tree fndecl, fn;
3534 enum built_in_function fcode;
3535 machine_mode val_mode;
3536 char c;
3537 unsigned int dest_align;
3538 rtx dest_mem, dest_addr, len_rtx;
3539 HOST_WIDE_INT expected_size = -1;
3540 unsigned int expected_align = 0;
3541 unsigned HOST_WIDE_INT min_size;
3542 unsigned HOST_WIDE_INT max_size;
3543 unsigned HOST_WIDE_INT probable_max_size;
3545 dest_align = get_pointer_alignment (dest);
3547 /* If DEST is not a pointer type, don't do this operation in-line. */
3548 if (dest_align == 0)
3549 return NULL_RTX;
3551 if (currently_expanding_gimple_stmt)
3552 stringop_block_profile (currently_expanding_gimple_stmt,
3553 &expected_align, &expected_size);
3555 if (expected_align < dest_align)
3556 expected_align = dest_align;
3558 /* If the LEN parameter is zero, return DEST. */
3559 if (integer_zerop (len))
3561 /* Evaluate and ignore VAL in case it has side-effects. */
3562 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3563 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3566 /* Stabilize the arguments in case we fail. */
3567 dest = builtin_save_expr (dest);
3568 val = builtin_save_expr (val);
3569 len = builtin_save_expr (len);
3571 len_rtx = expand_normal (len);
3572 determine_block_size (len, len_rtx, &min_size, &max_size,
3573 &probable_max_size);
3574 dest_mem = get_memory_rtx (dest, len);
3575 val_mode = TYPE_MODE (unsigned_char_type_node);
3577 if (TREE_CODE (val) != INTEGER_CST)
3579 rtx val_rtx;
3581 val_rtx = expand_normal (val);
3582 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3584 /* Assume that we can memset by pieces if we can store
3585 * the coefficients by pieces (in the required modes).
3586 * We can't pass builtin_memset_gen_str as that emits RTL. */
3587 c = 1;
3588 if (tree_fits_uhwi_p (len)
3589 && can_store_by_pieces (tree_to_uhwi (len),
3590 builtin_memset_read_str, &c, dest_align,
3591 true))
3593 val_rtx = force_reg (val_mode, val_rtx);
3594 store_by_pieces (dest_mem, tree_to_uhwi (len),
3595 builtin_memset_gen_str, val_rtx, dest_align,
3596 true, 0);
3598 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3599 dest_align, expected_align,
3600 expected_size, min_size, max_size,
3601 probable_max_size))
3602 goto do_libcall;
3604 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3605 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3606 return dest_mem;
3609 if (target_char_cast (val, &c))
3610 goto do_libcall;
3612 if (c)
3614 if (tree_fits_uhwi_p (len)
3615 && can_store_by_pieces (tree_to_uhwi (len),
3616 builtin_memset_read_str, &c, dest_align,
3617 true))
3618 store_by_pieces (dest_mem, tree_to_uhwi (len),
3619 builtin_memset_read_str, &c, dest_align, true, 0);
3620 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3621 gen_int_mode (c, val_mode),
3622 dest_align, expected_align,
3623 expected_size, min_size, max_size,
3624 probable_max_size))
3625 goto do_libcall;
3627 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3628 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3629 return dest_mem;
3632 set_mem_align (dest_mem, dest_align);
3633 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3634 CALL_EXPR_TAILCALL (orig_exp)
3635 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3636 expected_align, expected_size,
3637 min_size, max_size,
3638 probable_max_size);
3640 if (dest_addr == 0)
3642 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3643 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3646 return dest_addr;
3648 do_libcall:
3649 fndecl = get_callee_fndecl (orig_exp);
3650 fcode = DECL_FUNCTION_CODE (fndecl);
3651 if (fcode == BUILT_IN_MEMSET
3652 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3653 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3654 dest, val, len);
3655 else if (fcode == BUILT_IN_BZERO)
3656 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3657 dest, len);
3658 else
3659 gcc_unreachable ();
3660 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3661 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3662 return expand_call (fn, target, target == const0_rtx);
3665 /* Expand expression EXP, which is a call to the bzero builtin. Return
3666 NULL_RTX if we failed; the caller should emit a normal call. */
3668 static rtx
3669 expand_builtin_bzero (tree exp)
3671 tree dest, size;
3672 location_t loc = EXPR_LOCATION (exp);
3674 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3675 return NULL_RTX;
3677 dest = CALL_EXPR_ARG (exp, 0);
3678 size = CALL_EXPR_ARG (exp, 1);
3680 /* New argument list transforming bzero(ptr x, int y) to
3681 memset(ptr x, int 0, size_t y). This is done this way
3682 so that if it isn't expanded inline, we fall back to
3683 calling bzero instead of memset. */
3685 return expand_builtin_memset_args (dest, integer_zero_node,
3686 fold_convert_loc (loc,
3687 size_type_node, size),
3688 const0_rtx, VOIDmode, exp);
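/* Illustrative sketch only (hypothetical user code): the rewrite done
   above treats the first call below like the second, except that when
   inline expansion fails the emitted library call still targets bzero,
   not memset.  */
#if 0
#include <string.h>
#include <strings.h>

void
example_bzero_rewrite (void *p, int n)
{
  bzero (p, n);                /* original call */
  memset (p, 0, (size_t) n);   /* what the expander models it as */
}
#endif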
3691 /* Try to expand cmpstr operation ICODE with the given operands.
3692 Return the result rtx on success, otherwise return null. */
3694 static rtx
3695 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3696 HOST_WIDE_INT align)
3698 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3700 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3701 target = NULL_RTX;
3703 struct expand_operand ops[4];
3704 create_output_operand (&ops[0], target, insn_mode);
3705 create_fixed_operand (&ops[1], arg1_rtx);
3706 create_fixed_operand (&ops[2], arg2_rtx);
3707 create_integer_operand (&ops[3], align);
3708 if (maybe_expand_insn (icode, 4, ops))
3709 return ops[0].value;
3710 return NULL_RTX;
3713 /* Expand expression EXP, which is a call to the memcmp built-in function.
3714 Return NULL_RTX if we failed and the caller should emit a normal call,
3715 otherwise try to get the result in TARGET, if convenient.
3716 RESULT_EQ is true if we can relax the returned value to be either zero
3717 or nonzero, without caring about the sign. */
3719 static rtx
3720 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3722 if (!validate_arglist (exp,
3723 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724 return NULL_RTX;
3726 tree arg1 = CALL_EXPR_ARG (exp, 0);
3727 tree arg2 = CALL_EXPR_ARG (exp, 1);
3728 tree len = CALL_EXPR_ARG (exp, 2);
3729 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3730 location_t loc = EXPR_LOCATION (exp);
3732 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3733 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3735 /* If we don't have POINTER_TYPE, call the function. */
3736 if (arg1_align == 0 || arg2_align == 0)
3737 return NULL_RTX;
3739 rtx arg1_rtx = get_memory_rtx (arg1, len);
3740 rtx arg2_rtx = get_memory_rtx (arg2, len);
3741 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743 /* Set MEM_SIZE as appropriate. */
3744 if (CONST_INT_P (len_rtx))
3746 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3747 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3750 by_pieces_constfn constfn = NULL;
3752 const char *src_str = c_getstr (arg2);
3753 if (result_eq && src_str == NULL)
3755 src_str = c_getstr (arg1);
3756 if (src_str != NULL)
3757 std::swap (arg1_rtx, arg2_rtx);
3760 /* If SRC is a string constant and block move would be done
3761 by pieces, we can avoid loading the string from memory
3762 and only store the computed constants. */
3763 if (src_str
3764 && CONST_INT_P (len_rtx)
3765 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3766 constfn = builtin_memcpy_read_str;
3768 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3769 TREE_TYPE (len), target,
3770 result_eq, constfn,
3771 CONST_CAST (char *, src_str));
3773 if (result)
3775 /* Return the value in the proper mode for this function. */
3776 if (GET_MODE (result) == mode)
3777 return result;
3779 if (target != 0)
3781 convert_move (target, result, 0);
3782 return target;
3785 return convert_to_mode (mode, result, 0);
3788 return NULL_RTX;
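/* Illustrative sketch only (hypothetical user code): the RESULT_EQ
   relaxation applies when the memcmp result is only tested against
   zero, as below, so any nonzero value may stand in for the signed
   difference.  */
#if 0
#include <string.h>

static int
example_blocks_equal (const void *a, const void *b, size_t n)
{
  return memcmp (a, b, n) == 0;
}
#endif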
3791 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3792 if we failed; the caller should emit a normal call, otherwise try to get
3793 the result in TARGET, if convenient. */
3795 static rtx
3796 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3798 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3799 return NULL_RTX;
3801 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3802 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3803 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3805 rtx arg1_rtx, arg2_rtx;
3806 tree fndecl, fn;
3807 tree arg1 = CALL_EXPR_ARG (exp, 0);
3808 tree arg2 = CALL_EXPR_ARG (exp, 1);
3809 rtx result = NULL_RTX;
3811 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3812 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3814 /* If we don't have POINTER_TYPE, call the function. */
3815 if (arg1_align == 0 || arg2_align == 0)
3816 return NULL_RTX;
3818 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3819 arg1 = builtin_save_expr (arg1);
3820 arg2 = builtin_save_expr (arg2);
3822 arg1_rtx = get_memory_rtx (arg1, NULL);
3823 arg2_rtx = get_memory_rtx (arg2, NULL);
3825 /* Try to call cmpstrsi. */
3826 if (cmpstr_icode != CODE_FOR_nothing)
3827 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3828 MIN (arg1_align, arg2_align));
3830 /* Try to determine at least one length and call cmpstrnsi. */
3831 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3833 tree len;
3834 rtx arg3_rtx;
3836 tree len1 = c_strlen (arg1, 1);
3837 tree len2 = c_strlen (arg2, 1);
3839 if (len1)
3840 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3841 if (len2)
3842 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3844 /* If we don't have a constant length for the first, use the length
3845 of the second, if we know it. We don't require a constant for
3846 this case; some cost analysis could be done if both are available
3847 but neither is constant. For now, assume they're equally cheap,
3848 unless one has side effects. If both strings have constant lengths,
3849 use the smaller. */
3851 if (!len1)
3852 len = len2;
3853 else if (!len2)
3854 len = len1;
3855 else if (TREE_SIDE_EFFECTS (len1))
3856 len = len2;
3857 else if (TREE_SIDE_EFFECTS (len2))
3858 len = len1;
3859 else if (TREE_CODE (len1) != INTEGER_CST)
3860 len = len2;
3861 else if (TREE_CODE (len2) != INTEGER_CST)
3862 len = len1;
3863 else if (tree_int_cst_lt (len1, len2))
3864 len = len1;
3865 else
3866 len = len2;
3868 /* If both arguments have side effects, we cannot optimize. */
3869 if (len && !TREE_SIDE_EFFECTS (len))
3871 arg3_rtx = expand_normal (len);
3872 result = expand_cmpstrn_or_cmpmem
3873 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3874 arg3_rtx, MIN (arg1_align, arg2_align));
3878 if (result)
3880 /* Return the value in the proper mode for this function. */
3881 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3882 if (GET_MODE (result) == mode)
3883 return result;
3884 if (target == 0)
3885 return convert_to_mode (mode, result, 0);
3886 convert_move (target, result, 0);
3887 return target;
3890 /* Expand the library call ourselves using a stabilized argument
3891 list to avoid re-evaluating the function's arguments twice. */
3892 fndecl = get_callee_fndecl (exp);
3893 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3894 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3895 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3896 return expand_call (fn, target, target == const0_rtx);
3898 return NULL_RTX;
3901 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3902 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
3903 the result in TARGET, if convenient. */
3905 static rtx
3906 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3907 ATTRIBUTE_UNUSED machine_mode mode)
3909 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3911 if (!validate_arglist (exp,
3912 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3915 /* If c_strlen can determine an expression for one of the string
3916 lengths, and it doesn't have side effects, then emit cmpstrnsi
3917 using length MIN(strlen(string)+1, arg3). */
3918 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3919 if (cmpstrn_icode != CODE_FOR_nothing)
3921 tree len, len1, len2, len3;
3922 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3923 rtx result;
3924 tree fndecl, fn;
3925 tree arg1 = CALL_EXPR_ARG (exp, 0);
3926 tree arg2 = CALL_EXPR_ARG (exp, 1);
3927 tree arg3 = CALL_EXPR_ARG (exp, 2);
3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3932 len1 = c_strlen (arg1, 1);
3933 len2 = c_strlen (arg2, 1);
3935 if (len1)
3936 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3937 if (len2)
3938 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3940 len3 = fold_convert_loc (loc, sizetype, arg3);
3942 /* If we don't have a constant length for the first, use the length
3943 of the second, if we know it. If neither string is constant length,
3944 use the given length argument. We don't require a constant for
3945 this case; some cost analysis could be done if both are available
3946 but neither is constant. For now, assume they're equally cheap,
3947 unless one has side effects. If both strings have constant lengths,
3948 use the smaller. */
3950 if (!len1 && !len2)
3951 len = len3;
3952 else if (!len1)
3953 len = len2;
3954 else if (!len2)
3955 len = len1;
3956 else if (TREE_SIDE_EFFECTS (len1))
3957 len = len2;
3958 else if (TREE_SIDE_EFFECTS (len2))
3959 len = len1;
3960 else if (TREE_CODE (len1) != INTEGER_CST)
3961 len = len2;
3962 else if (TREE_CODE (len2) != INTEGER_CST)
3963 len = len1;
3964 else if (tree_int_cst_lt (len1, len2))
3965 len = len1;
3966 else
3967 len = len2;
3969 /* If we are not using the given length, we must incorporate it here.
3970 The actual new length parameter will be MIN(len,arg3) in this case. */
3971 if (len != len3)
3972 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
3973 arg1_rtx = get_memory_rtx (arg1, len);
3974 arg2_rtx = get_memory_rtx (arg2, len);
3975 arg3_rtx = expand_normal (len);
3976 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3977 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3978 MIN (arg1_align, arg2_align));
3979 if (result)
3981 /* Return the value in the proper mode for this function. */
3982 mode = TYPE_MODE (TREE_TYPE (exp));
3983 if (GET_MODE (result) == mode)
3984 return result;
3985 if (target == 0)
3986 return convert_to_mode (mode, result, 0);
3987 convert_move (target, result, 0);
3988 return target;
3991 /* Expand the library call ourselves using a stabilized argument
3992 list to avoid re-evaluating the function's arguments twice. */
3993 fndecl = get_callee_fndecl (exp);
3994 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3995 arg1, arg2, len);
3996 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3997 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3998 return expand_call (fn, target, target == const0_rtx);
4000 return NULL_RTX;
4003 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4004 if that's convenient. */
4006 rtx
4007 expand_builtin_saveregs (void)
4009 rtx val;
4010 rtx_insn *seq;
4012 /* Don't do __builtin_saveregs more than once in a function.
4013 Save the result of the first call and reuse it. */
4014 if (saveregs_value != 0)
4015 return saveregs_value;
4017 /* When this function is called, it means that registers must be
4018 saved on entry to this function. So we migrate the call to the
4019 first insn of this function. */
4021 start_sequence ();
4023 /* Do whatever the machine needs done in this case. */
4024 val = targetm.calls.expand_builtin_saveregs ();
4026 seq = get_insns ();
4027 end_sequence ();
4029 saveregs_value = val;
4031 /* Put the insns after the NOTE that starts the function. If this
4032 is inside a start_sequence, make the outer-level insn chain current, so
4033 the code is placed at the start of the function. */
4034 push_topmost_sequence ();
4035 emit_insn_after (seq, entry_of_function ());
4036 pop_topmost_sequence ();
4038 return val;
4041 /* Expand a call to __builtin_next_arg. */
4043 static rtx
4044 expand_builtin_next_arg (void)
4046 /* Checking arguments is already done in fold_builtin_next_arg
4047 that must be called before this function. */
4048 return expand_binop (ptr_mode, add_optab,
4049 crtl->args.internal_arg_pointer,
4050 crtl->args.arg_offset_rtx,
4051 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4054 /* Make it easier for the backends by protecting the valist argument
4055 from multiple evaluations. */
4057 static tree
4058 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4060 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4062 /* The current way of determining the type of valist is completely
4063 bogus. We should have the information on the va builtin instead. */
4064 if (!vatype)
4065 vatype = targetm.fn_abi_va_list (cfun->decl);
4067 if (TREE_CODE (vatype) == ARRAY_TYPE)
4069 if (TREE_SIDE_EFFECTS (valist))
4070 valist = save_expr (valist);
4072 /* For this case, the backends will be expecting a pointer to
4073 vatype, but it's possible we've actually been given an array
4074 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4075 So fix it. */
4076 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4078 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4079 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4082 else
4084 tree pt = build_pointer_type (vatype);
4086 if (! needs_lvalue)
4088 if (! TREE_SIDE_EFFECTS (valist))
4089 return valist;
4091 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4092 TREE_SIDE_EFFECTS (valist) = 1;
4095 if (TREE_SIDE_EFFECTS (valist))
4096 valist = save_expr (valist);
4097 valist = fold_build2_loc (loc, MEM_REF,
4098 vatype, valist, build_int_cst (pt, 0));
4101 return valist;
4104 /* The "standard" definition of va_list is void*. */
4106 tree
4107 std_build_builtin_va_list (void)
4109 return ptr_type_node;
4112 /* The "standard" abi va_list is va_list_type_node. */
4114 tree
4115 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4117 return va_list_type_node;
4120 /* The "standard" type of va_list is va_list_type_node. */
4122 tree
4123 std_canonical_va_list_type (tree type)
4125 tree wtype, htype;
4127 wtype = va_list_type_node;
4128 htype = type;
4130 if (TREE_CODE (wtype) == ARRAY_TYPE)
4132 /* If va_list is an array type, the argument may have decayed
4133 to a pointer type, e.g. by being passed to another function.
4134 In that case, unwrap both types so that we can compare the
4135 underlying records. */
4136 if (TREE_CODE (htype) == ARRAY_TYPE
4137 || POINTER_TYPE_P (htype))
4139 wtype = TREE_TYPE (wtype);
4140 htype = TREE_TYPE (htype);
4143 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4144 return va_list_type_node;
4146 return NULL_TREE;
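/* Illustrative sketch only (hypothetical user code): why the unwrapping
   above is needed.  With a hypothetical
   'typedef struct __va_list_tag va_list[1];', the parameter below
   arrives decayed to 'struct __va_list_tag *', so both the canonical
   type and the argument type must be stripped before comparing.  */
#if 0
#include <stdarg.h>

static int
example_takes_valist (va_list ap)
{
  return va_arg (ap, int);
}
#endif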
4149 /* The "standard" implementation of va_start: just assign `nextarg' to
4150 the variable. */
4152 void
4153 std_expand_builtin_va_start (tree valist, rtx nextarg)
4155 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4156 convert_move (va_r, nextarg, 0);
4158 /* We do not have any valid bounds for the pointer, so
4159 just store zero bounds for it. */
4160 if (chkp_function_instrumented_p (current_function_decl))
4161 chkp_expand_bounds_reset_for_mem (valist,
4162 make_tree (TREE_TYPE (valist),
4163 nextarg));
4166 /* Expand EXP, a call to __builtin_va_start. */
4168 static rtx
4169 expand_builtin_va_start (tree exp)
4171 rtx nextarg;
4172 tree valist;
4173 location_t loc = EXPR_LOCATION (exp);
4175 if (call_expr_nargs (exp) < 2)
4177 error_at (loc, "too few arguments to function %<va_start%>");
4178 return const0_rtx;
4181 if (fold_builtin_next_arg (exp, true))
4182 return const0_rtx;
4184 nextarg = expand_builtin_next_arg ();
4185 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4187 if (targetm.expand_builtin_va_start)
4188 targetm.expand_builtin_va_start (valist, nextarg);
4189 else
4190 std_expand_builtin_va_start (valist, nextarg);
4192 return const0_rtx;
4195 /* Expand EXP, a call to __builtin_va_end. */
4197 static rtx
4198 expand_builtin_va_end (tree exp)
4200 tree valist = CALL_EXPR_ARG (exp, 0);
4202 /* Evaluate for side effects, if needed. I hate macros that don't
4203 do that. */
4204 if (TREE_SIDE_EFFECTS (valist))
4205 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4207 return const0_rtx;
4210 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4211 builtin rather than just as an assignment in stdarg.h because of the
4212 nastiness of array-type va_list types. */
4214 static rtx
4215 expand_builtin_va_copy (tree exp)
4217 tree dst, src, t;
4218 location_t loc = EXPR_LOCATION (exp);
4220 dst = CALL_EXPR_ARG (exp, 0);
4221 src = CALL_EXPR_ARG (exp, 1);
4223 dst = stabilize_va_list_loc (loc, dst, 1);
4224 src = stabilize_va_list_loc (loc, src, 0);
4226 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4228 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4230 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4231 TREE_SIDE_EFFECTS (t) = 1;
4232 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4234 else
4236 rtx dstb, srcb, size;
4238 /* Evaluate to pointers. */
4239 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4240 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4241 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4242 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4244 dstb = convert_memory_address (Pmode, dstb);
4245 srcb = convert_memory_address (Pmode, srcb);
4247 /* "Dereference" to BLKmode memories. */
4248 dstb = gen_rtx_MEM (BLKmode, dstb);
4249 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4250 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4251 srcb = gen_rtx_MEM (BLKmode, srcb);
4252 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4253 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4255 /* Copy. */
4256 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4259 return const0_rtx;
4262 /* Expand a call to one of the builtin functions __builtin_frame_address or
4263 __builtin_return_address. */
4265 static rtx
4266 expand_builtin_frame_address (tree fndecl, tree exp)
4268 /* The argument must be a nonnegative integer constant.
4269 It counts the number of frames to scan up the stack.
4270 The value is either the frame pointer value or the return
4271 address saved in that frame. */
4272 if (call_expr_nargs (exp) == 0)
4273 /* Warning about missing arg was already issued. */
4274 return const0_rtx;
4275 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4277 error ("invalid argument to %qD", fndecl);
4278 return const0_rtx;
4280 else
4282 /* Number of frames to scan up the stack. */
4283 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4285 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4287 /* Some ports cannot access arbitrary stack frames. */
4288 if (tem == NULL)
4290 warning (0, "unsupported argument to %qD", fndecl);
4291 return const0_rtx;
4294 if (count)
4296 /* Warn since no effort is made to ensure that any frame
4297 beyond the current one exists or can be safely reached. */
4298 warning (OPT_Wframe_address, "calling %qD with "
4299 "a nonzero argument is unsafe", fndecl);
4302 /* For __builtin_frame_address, return what we've got. */
4303 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4304 return tem;
4306 if (!REG_P (tem)
4307 && ! CONSTANT_P (tem))
4308 tem = copy_addr_to_reg (tem);
4309 return tem;
4313 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4314 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4315 is the same as for allocate_dynamic_stack_space. */
4317 static rtx
4318 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4320 rtx op0;
4321 rtx result;
4322 bool valid_arglist;
4323 unsigned int align;
4324 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4325 == BUILT_IN_ALLOCA_WITH_ALIGN);
4327 valid_arglist
4328 = (alloca_with_align
4329 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4330 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4332 if (!valid_arglist)
4333 return NULL_RTX;
4335 /* Compute the argument. */
4336 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4338 /* Compute the alignment. */
4339 align = (alloca_with_align
4340 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4341 : BIGGEST_ALIGNMENT);
4343 /* Allocate the desired space. */
4344 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4345 result = convert_memory_address (ptr_mode, result);
4347 return result;
4350 /* Expand a call to bswap builtin in EXP.
4351 Return NULL_RTX if a normal call should be emitted rather than expanding the
4352 function in-line. If convenient, the result should be placed in TARGET.
4353 SUBTARGET may be used as the target for computing one of EXP's operands. */
4355 static rtx
4356 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4357 rtx subtarget)
4359 tree arg;
4360 rtx op0;
4362 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4363 return NULL_RTX;
4365 arg = CALL_EXPR_ARG (exp, 0);
4366 op0 = expand_expr (arg,
4367 subtarget && GET_MODE (subtarget) == target_mode
4368 ? subtarget : NULL_RTX,
4369 target_mode, EXPAND_NORMAL);
4370 if (GET_MODE (op0) != target_mode)
4371 op0 = convert_to_mode (target_mode, op0, 1);
4373 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4375 gcc_assert (target);
4377 return convert_to_mode (target_mode, target, 1);
4380 /* Expand a call to a unary builtin in EXP.
4381 Return NULL_RTX if a normal call should be emitted rather than expanding the
4382 function in-line. If convenient, the result should be placed in TARGET.
4383 SUBTARGET may be used as the target for computing one of EXP's operands. */
4385 static rtx
4386 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4387 rtx subtarget, optab op_optab)
4389 rtx op0;
4391 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4392 return NULL_RTX;
4394 /* Compute the argument. */
4395 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4396 (subtarget
4397 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4398 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4399 VOIDmode, EXPAND_NORMAL);
4400 /* Compute op, into TARGET if possible.
4401 Set TARGET to wherever the result comes back. */
4402 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4403 op_optab, op0, target, op_optab != clrsb_optab);
4404 gcc_assert (target);
4406 return convert_to_mode (target_mode, target, 0);
4409 /* Expand a call to __builtin_expect. We just return our argument
4410 as the builtin_expect semantics should already have been applied by
4411 the tree branch prediction pass. */
4413 static rtx
4414 expand_builtin_expect (tree exp, rtx target)
4416 tree arg;
4418 if (call_expr_nargs (exp) < 2)
4419 return const0_rtx;
4420 arg = CALL_EXPR_ARG (exp, 0);
4422 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4423 /* When guessing was done, the hints should be already stripped away. */
4424 gcc_assert (!flag_guess_branch_prob
4425 || optimize == 0 || seen_error ());
4426 return target;
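/* Illustrative sketch only (hypothetical user code): by the time we get
   here the tree branch prediction pass has consumed the hint, so the
   call below simply evaluates to its first argument.  */
#if 0
static int
example_expect_usage (int x)
{
  /* Hint that the condition is almost always true; the value of the
     __builtin_expect call is just X != 0.  */
  if (__builtin_expect (x != 0, 1))
    return x * 2;
  return 0;
}
#endif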
4429 /* Expand a call to __builtin_assume_aligned. We just return our first
4430 argument, as the builtin_assume_aligned semantics should already have
4431 been applied by CCP. */
4433 static rtx
4434 expand_builtin_assume_aligned (tree exp, rtx target)
4436 if (call_expr_nargs (exp) < 2)
4437 return const0_rtx;
4438 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4439 EXPAND_NORMAL);
4440 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4441 && (call_expr_nargs (exp) < 3
4442 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4443 return target;
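/* Illustrative sketch only (hypothetical user code): CCP has already
   recorded the asserted alignment, so the expansion above just returns
   the first argument unchanged.  */
#if 0
static float
example_assume_aligned_usage (const float *p)
{
  /* Tell the optimizer P is 32-byte aligned; the returned pointer has
     the same value as P.  */
  const float *q = (const float *) __builtin_assume_aligned (p, 32);
  return q[0] + q[1];
}
#endif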
4446 void
4447 expand_builtin_trap (void)
4449 if (targetm.have_trap ())
4451 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4452 /* For trap insns when not accumulating outgoing args force
4453 REG_ARGS_SIZE note to prevent crossjumping of calls with
4454 different args sizes. */
4455 if (!ACCUMULATE_OUTGOING_ARGS)
4456 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4458 else
4460 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4461 tree call_expr = build_call_expr (fn, 0);
4462 expand_call (call_expr, NULL_RTX, false);
4465 emit_barrier ();
4468 /* Expand a call to __builtin_unreachable. We do nothing except emit
4469 a barrier saying that control flow will not pass here.
4471 It is the responsibility of the program being compiled to ensure
4472 that control flow never reaches __builtin_unreachable. */
4473 static void
4474 expand_builtin_unreachable (void)
4476 emit_barrier ();
4479 /* Expand EXP, a call to fabs, fabsf or fabsl.
4480 Return NULL_RTX if a normal call should be emitted rather than expanding
4481 the function inline. If convenient, the result should be placed
4482 in TARGET. SUBTARGET may be used as the target for computing
4483 the operand. */
4485 static rtx
4486 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4488 machine_mode mode;
4489 tree arg;
4490 rtx op0;
4492 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4493 return NULL_RTX;
4495 arg = CALL_EXPR_ARG (exp, 0);
4496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4497 mode = TYPE_MODE (TREE_TYPE (arg));
4498 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4499 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4502 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4503 Return NULL if a normal call should be emitted rather than expanding the
4504 function inline. If convenient, the result should be placed in TARGET.
4505 SUBTARGET may be used as the target for computing the operand. */
4507 static rtx
4508 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4510 rtx op0, op1;
4511 tree arg;
4513 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4516 arg = CALL_EXPR_ARG (exp, 0);
4517 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4519 arg = CALL_EXPR_ARG (exp, 1);
4520 op1 = expand_normal (arg);
4522 return expand_copysign (op0, op1, target);
4525 /* Expand a call to __builtin___clear_cache. */
4527 static rtx
4528 expand_builtin___clear_cache (tree exp)
4530 if (!targetm.code_for_clear_cache)
4532 #ifdef CLEAR_INSN_CACHE
4533 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4534 does something. Just do the default expansion to a call to
4535 __clear_cache(). */
4536 return NULL_RTX;
4537 #else
4538 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4539 does nothing. There is no need to call it. Do nothing. */
4540 return const0_rtx;
4541 #endif /* CLEAR_INSN_CACHE */
4544 /* We have a "clear_cache" insn, and it will handle everything. */
4545 tree begin, end;
4546 rtx begin_rtx, end_rtx;
4548 /* We must not expand to a library call. If we did, any
4549 fallback library function in libgcc that might contain a call to
4550 __builtin___clear_cache() would recurse infinitely. */
4551 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4553 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4554 return const0_rtx;
4557 if (targetm.have_clear_cache ())
4559 struct expand_operand ops[2];
4561 begin = CALL_EXPR_ARG (exp, 0);
4562 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4564 end = CALL_EXPR_ARG (exp, 1);
4565 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4567 create_address_operand (&ops[0], begin_rtx);
4568 create_address_operand (&ops[1], end_rtx);
4569 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4570 return const0_rtx;
4572 return const0_rtx;
4575 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4577 static rtx
4578 round_trampoline_addr (rtx tramp)
4580 rtx temp, addend, mask;
4582 /* If we don't need too much alignment, we'll have been guaranteed
4583 proper alignment by get_trampoline_type. */
4584 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4585 return tramp;
4587 /* Round address up to desired boundary. */
4588 temp = gen_reg_rtx (Pmode);
4589 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4590 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4592 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4593 temp, 0, OPTAB_LIB_WIDEN);
4594 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4595 temp, 0, OPTAB_LIB_WIDEN);
4597 return tramp;
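/* Illustrative sketch only: the add-then-mask rounding performed above,
   shown in plain C for a hypothetical 16-byte trampoline alignment
   (addend = 15, mask = -16).  */
#if 0
#include <stdint.h>

static uintptr_t
example_round_up_16 (uintptr_t tramp)
{
  /* E.g. 0x1001 rounds up to 0x1010, while 0x1010 is unchanged.  */
  return (tramp + 15) & (uintptr_t) -16;
}
#endif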
4600 static rtx
4601 expand_builtin_init_trampoline (tree exp, bool onstack)
4603 tree t_tramp, t_func, t_chain;
4604 rtx m_tramp, r_tramp, r_chain, tmp;
4606 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4607 POINTER_TYPE, VOID_TYPE))
4608 return NULL_RTX;
4610 t_tramp = CALL_EXPR_ARG (exp, 0);
4611 t_func = CALL_EXPR_ARG (exp, 1);
4612 t_chain = CALL_EXPR_ARG (exp, 2);
4614 r_tramp = expand_normal (t_tramp);
4615 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4616 MEM_NOTRAP_P (m_tramp) = 1;
4618 /* If ONSTACK, the TRAMP argument should be the address of a field
4619 within the local function's FRAME decl. Either way, let's see if
4620 we can fill in the MEM_ATTRs for this memory. */
4621 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4622 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4624 /* Creator of a heap trampoline is responsible for making sure the
4625 address is aligned to at least STACK_BOUNDARY. Normally malloc
4626 will ensure this anyhow. */
4627 tmp = round_trampoline_addr (r_tramp);
4628 if (tmp != r_tramp)
4630 m_tramp = change_address (m_tramp, BLKmode, tmp);
4631 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4632 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4635 /* The FUNC argument should be the address of the nested function.
4636 Extract the actual function decl to pass to the hook. */
4637 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4638 t_func = TREE_OPERAND (t_func, 0);
4639 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4641 r_chain = expand_normal (t_chain);
4643 /* Generate insns to initialize the trampoline. */
4644 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4646 if (onstack)
4648 trampolines_created = 1;
4650 if (targetm.calls.custom_function_descriptors != 0)
4651 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4652 "trampoline generated for nested function %qD", t_func);
4655 return const0_rtx;
4658 static rtx
4659 expand_builtin_adjust_trampoline (tree exp)
4661 rtx tramp;
4663 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4664 return NULL_RTX;
4666 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4667 tramp = round_trampoline_addr (tramp);
4668 if (targetm.calls.trampoline_adjust_address)
4669 tramp = targetm.calls.trampoline_adjust_address (tramp);
4671 return tramp;
4674 /* Expand a call to the builtin descriptor initialization routine.
4675 A descriptor is made up of a couple of pointers to the static
4676 chain and the code entry in this order. */
4678 static rtx
4679 expand_builtin_init_descriptor (tree exp)
4681 tree t_descr, t_func, t_chain;
4682 rtx m_descr, r_descr, r_func, r_chain;
4684 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
4685 VOID_TYPE))
4686 return NULL_RTX;
4688 t_descr = CALL_EXPR_ARG (exp, 0);
4689 t_func = CALL_EXPR_ARG (exp, 1);
4690 t_chain = CALL_EXPR_ARG (exp, 2);
4692 r_descr = expand_normal (t_descr);
4693 m_descr = gen_rtx_MEM (BLKmode, r_descr);
4694 MEM_NOTRAP_P (m_descr) = 1;
4696 r_func = expand_normal (t_func);
4697 r_chain = expand_normal (t_chain);
4699 /* Generate insns to initialize the descriptor. */
4700 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
4701 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
4702 POINTER_SIZE / BITS_PER_UNIT), r_func);
4704 return const0_rtx;
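/* Illustrative sketch only (hypothetical type, not used by GCC): the
   in-memory layout initialized above -- the static chain pointer first,
   then the code entry point, each POINTER_SIZE bits wide.  */
#if 0
struct example_function_descriptor
{
  void *static_chain;   /* stored at offset 0 */
  void *entry_point;    /* stored at offset POINTER_SIZE / BITS_PER_UNIT */
};
#endif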
4707 /* Expand a call to the builtin descriptor adjustment routine. */
4709 static rtx
4710 expand_builtin_adjust_descriptor (tree exp)
4712 rtx tramp;
4714 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4715 return NULL_RTX;
4717 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4719 /* Unalign the descriptor to allow runtime identification. */
4720 tramp = plus_constant (ptr_mode, tramp,
4721 targetm.calls.custom_function_descriptors);
4723 return force_operand (tramp, NULL_RTX);
4726 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4727 function. The function first checks whether the back end provides
4728 an insn to implement signbit for the respective mode. If not, it
4729 checks whether the floating point format of the value is such that
4730 the sign bit can be extracted. If that is not the case, error out.
4731 EXP is the expression that is a call to the builtin function; if
4732 convenient, the result should be placed in TARGET. */
4733 static rtx
4734 expand_builtin_signbit (tree exp, rtx target)
4736 const struct real_format *fmt;
4737 machine_mode fmode, imode, rmode;
4738 tree arg;
4739 int word, bitpos;
4740 enum insn_code icode;
4741 rtx temp;
4742 location_t loc = EXPR_LOCATION (exp);
4744 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4745 return NULL_RTX;
4747 arg = CALL_EXPR_ARG (exp, 0);
4748 fmode = TYPE_MODE (TREE_TYPE (arg));
4749 rmode = TYPE_MODE (TREE_TYPE (exp));
4750 fmt = REAL_MODE_FORMAT (fmode);
4752 arg = builtin_save_expr (arg);
4754 /* Expand the argument yielding an RTX expression. */
4755 temp = expand_normal (arg);
4757 /* Check if the back end provides an insn that handles signbit for the
4758 argument's mode. */
4759 icode = optab_handler (signbit_optab, fmode);
4760 if (icode != CODE_FOR_nothing)
4762 rtx_insn *last = get_last_insn ();
4763 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4764 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4765 return target;
4766 delete_insns_since (last);
4769 /* For floating point formats without a sign bit, implement signbit
4770 as "ARG < 0.0". */
4771 bitpos = fmt->signbit_ro;
4772 if (bitpos < 0)
4774 /* But we can't do this if the format supports signed zero. */
4775 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4777 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4778 build_real (TREE_TYPE (arg), dconst0));
4779 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4782 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4784 imode = int_mode_for_mode (fmode);
4785 gcc_assert (imode != BLKmode);
4786 temp = gen_lowpart (imode, temp);
4788 else
4790 imode = word_mode;
4791 /* Handle targets with different FP word orders. */
4792 if (FLOAT_WORDS_BIG_ENDIAN)
4793 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4794 else
4795 word = bitpos / BITS_PER_WORD;
4796 temp = operand_subword_force (temp, word, fmode);
4797 bitpos = bitpos % BITS_PER_WORD;
4800 /* Force the intermediate word_mode (or narrower) result into a
4801 register. This avoids attempting to create paradoxical SUBREGs
4802 of floating point modes below. */
4803 temp = force_reg (imode, temp);
4805 /* If the bitpos is within the "result mode" lowpart, the operation
4806 can be implemented with a single bitwise AND. Otherwise, we need
4807 a right shift and an AND. */
4809 if (bitpos < GET_MODE_BITSIZE (rmode))
4811 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4813 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4814 temp = gen_lowpart (rmode, temp);
4815 temp = expand_binop (rmode, and_optab, temp,
4816 immed_wide_int_const (mask, rmode),
4817 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4819 else
4821 /* Perform a logical right shift to place the signbit in the least
4822 significant bit, then truncate the result to the desired mode
4823 and mask just this bit. */
4824 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4825 temp = gen_lowpart (rmode, temp);
4826 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4827 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4830 return temp;
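/* Illustrative sketch only: the shift-and-mask extraction used above,
   written in plain C for IEEE binary32, where the sign bit is bit 31 of
   the 32-bit image of the value.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
example_signbitf (float x)
{
  uint32_t image;
  memcpy (&image, &x, sizeof image);   /* reinterpret the float's bits */
  return (image >> 31) & 1;            /* logical shift, then mask */
}
#endif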
4833 /* Expand fork or exec calls. TARGET is the desired target of the
4834 call. EXP is the call. FN is the
4835 identifier of the actual function. IGNORE is nonzero if the
4836 value is to be ignored. */
4838 static rtx
4839 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4841 tree id, decl;
4842 tree call;
4844 /* If we are not profiling, just call the function. */
4845 if (!profile_arc_flag)
4846 return NULL_RTX;
4848 /* Otherwise call the wrapper. This should be equivalent for the rest of
4849 compiler, so the code does not diverge, and the wrapper may run the
4850 code necessary for keeping the profiling sane. */
4852 switch (DECL_FUNCTION_CODE (fn))
4854 case BUILT_IN_FORK:
4855 id = get_identifier ("__gcov_fork");
4856 break;
4858 case BUILT_IN_EXECL:
4859 id = get_identifier ("__gcov_execl");
4860 break;
4862 case BUILT_IN_EXECV:
4863 id = get_identifier ("__gcov_execv");
4864 break;
4866 case BUILT_IN_EXECLP:
4867 id = get_identifier ("__gcov_execlp");
4868 break;
4870 case BUILT_IN_EXECLE:
4871 id = get_identifier ("__gcov_execle");
4872 break;
4874 case BUILT_IN_EXECVP:
4875 id = get_identifier ("__gcov_execvp");
4876 break;
4878 case BUILT_IN_EXECVE:
4879 id = get_identifier ("__gcov_execve");
4880 break;
4882 default:
4883 gcc_unreachable ();
4886 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4887 FUNCTION_DECL, id, TREE_TYPE (fn));
4888 DECL_EXTERNAL (decl) = 1;
4889 TREE_PUBLIC (decl) = 1;
4890 DECL_ARTIFICIAL (decl) = 1;
4891 TREE_NOTHROW (decl) = 1;
4892 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4893 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4894 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4895 return expand_call (call, target, ignore);
4900 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4901 the pointer in these functions is void*, the tree optimizers may remove
4902 casts. The mode computed in expand_builtin isn't reliable either, due
4903 to __sync_bool_compare_and_swap.
4905 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4906 group of builtins. This gives us log2 of the mode size. */
4908 static inline machine_mode
4909 get_builtin_sync_mode (int fcode_diff)
4911 /* The size is not negotiable, so ask not to get BLKmode in return
4912 if the target indicates that a smaller size would be better. */
4913 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
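/* Illustrative sketch only: the FCODE_DIFF -> mode-size mapping used
   above.  FCODE_DIFF is log2 of the access size in bytes, so shifting
   BITS_PER_UNIT left by it yields the width in bits.  */
#if 0
static unsigned
example_sync_mode_bits (int fcode_diff)
{
  /* 0 -> 8 (QImode), 1 -> 16 (HImode), 2 -> 32 (SImode),
     3 -> 64 (DImode), 4 -> 128 (TImode), assuming 8-bit units.  */
  return 8u << fcode_diff;
}
#endif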
4916 /* Expand the memory expression LOC and return the appropriate memory operand
4917 for the builtin_sync operations. */
4919 static rtx
4920 get_builtin_sync_mem (tree loc, machine_mode mode)
4922 rtx addr, mem;
4924 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4925 addr = convert_memory_address (Pmode, addr);
4927 /* Note that we explicitly do not want any alias information for this
4928 memory, so that we kill all other live memories. Otherwise we don't
4929 satisfy the full barrier semantics of the intrinsic. */
4930 mem = validize_mem (gen_rtx_MEM (mode, addr));
4932 /* The alignment needs to be at least that of the mode. */
4933 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4934 get_pointer_alignment (loc)));
4935 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4936 MEM_VOLATILE_P (mem) = 1;
4938 return mem;
4941 /* Make sure an argument is in the right mode.
4942 EXP is the tree argument.
4943 MODE is the mode it should be in. */
4945 static rtx
4946 expand_expr_force_mode (tree exp, machine_mode mode)
4948 rtx val;
4949 machine_mode old_mode;
4951 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4952 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4953 of CONST_INTs, where we know the old_mode only from the call argument. */
4955 old_mode = GET_MODE (val);
4956 if (old_mode == VOIDmode)
4957 old_mode = TYPE_MODE (TREE_TYPE (exp));
4958 val = convert_modes (mode, old_mode, val, 1);
4959 return val;
4963 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4964 EXP is the CALL_EXPR. CODE is the rtx code
4965 that corresponds to the arithmetic or logical operation from the name;
4966 an exception here is that NOT actually means NAND. TARGET is an optional
4967 place for us to store the results; AFTER is true if this is the
4968 fetch_and_xxx form. */
4970 static rtx
4971 expand_builtin_sync_operation (machine_mode mode, tree exp,
4972 enum rtx_code code, bool after,
4973 rtx target)
4975 rtx val, mem;
4976 location_t loc = EXPR_LOCATION (exp);
4978 if (code == NOT && warn_sync_nand)
4980 tree fndecl = get_callee_fndecl (exp);
4981 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4983 static bool warned_f_a_n, warned_n_a_f;
4985 switch (fcode)
4987 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4988 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4989 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4990 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4991 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4992 if (warned_f_a_n)
4993 break;
4995 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4996 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4997 warned_f_a_n = true;
4998 break;
5000 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5001 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5002 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5003 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5004 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5005 if (warned_n_a_f)
5006 break;
5008 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5009 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5010 warned_n_a_f = true;
5011 break;
5013 default:
5014 gcc_unreachable ();
5018 /* Expand the operands. */
5019 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5020 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5022 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5023 after);
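/* Illustrative sketch only: the GCC 4.4 semantic change the warning
   above refers to.  Since GCC 4.4 __sync_fetch_and_nand stores
   ~(*ptr & val); older releases stored (~*ptr) & val.  */
#if 0
static unsigned
example_fetch_and_nand (unsigned *ptr, unsigned val)
{
  unsigned old = *ptr;
  *ptr = ~(old & val);   /* current (GCC >= 4.4) semantics */
  return old;            /* the fetch_and_xxx forms return the old value */
}
#endif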
5026 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5027 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5028 true if this is the boolean form. TARGET is a place for us to store the
5029 results; this is NOT optional if IS_BOOL is true. */
5031 static rtx
5032 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5033 bool is_bool, rtx target)
5035 rtx old_val, new_val, mem;
5036 rtx *pbool, *poval;
5038 /* Expand the operands. */
5039 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5040 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5041 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5043 pbool = poval = NULL;
5044 if (target != const0_rtx)
5046 if (is_bool)
5047 pbool = &target;
5048 else
5049 poval = &target;
5051 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5052 false, MEMMODEL_SYNC_SEQ_CST,
5053 MEMMODEL_SYNC_SEQ_CST))
5054 return NULL_RTX;
5056 return target;
5059 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5060 general form is actually an atomic exchange, and some targets only
5061 support a reduced form with the second argument being a constant 1.
5062 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5063 the results. */
5065 static rtx
5066 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5067 rtx target)
5069 rtx val, mem;
5071 /* Expand the operands. */
5072 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5073 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5075 return expand_sync_lock_test_and_set (target, mem, val);
5078 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5080 static void
5081 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5083 rtx mem;
5085 /* Expand the operands. */
5086 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5088 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5091 /* Given an integer representing an ``enum memmodel'', verify its
5092 correctness and return the memory model enum. */
5094 static enum memmodel
5095 get_memmodel (tree exp)
5097 rtx op;
5098 unsigned HOST_WIDE_INT val;
5099 source_location loc
5100 = expansion_point_location_if_in_system_header (input_location);
5102 /* If the parameter is not a constant, it's a run time value so we'll just
5103 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5104 if (TREE_CODE (exp) != INTEGER_CST)
5105 return MEMMODEL_SEQ_CST;
5107 op = expand_normal (exp);
5109 val = INTVAL (op);
5110 if (targetm.memmodel_check)
5111 val = targetm.memmodel_check (val);
5112 else if (val & ~MEMMODEL_MASK)
5114 warning_at (loc, OPT_Winvalid_memory_model,
5115 "unknown architecture specifier in memory model to builtin");
5116 return MEMMODEL_SEQ_CST;
5119 /* Should never see a user explicit SYNC memmodel, so >= LAST works. */
5120 if (memmodel_base (val) >= MEMMODEL_LAST)
5122 warning_at (loc, OPT_Winvalid_memory_model,
5123 "invalid memory model argument to builtin");
5124 return MEMMODEL_SEQ_CST;
5127 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5128 be conservative and promote consume to acquire. */
5129 if (val == MEMMODEL_CONSUME)
5130 val = MEMMODEL_ACQUIRE;
5132 return (enum memmodel) val;
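/* Illustrative sketch only (hypothetical user code): a non-constant or
   out-of-range model is treated as __ATOMIC_SEQ_CST above, and
   __ATOMIC_CONSUME is promoted to __ATOMIC_ACQUIRE (the Bugzilla 59448
   workaround).  */
#if 0
static int
example_consume_load (const int *p)
{
  /* Expanded as if __ATOMIC_ACQUIRE had been requested.  */
  return __atomic_load_n (p, __ATOMIC_CONSUME);
}
#endif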
5135 /* Expand the __atomic_exchange intrinsic:
5136 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5137 EXP is the CALL_EXPR.
5138 TARGET is an optional place for us to store the results. */
5140 static rtx
5141 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5143 rtx val, mem;
5144 enum memmodel model;
5146 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5148 if (!flag_inline_atomics)
5149 return NULL_RTX;
5151 /* Expand the operands. */
5152 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5153 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5155 return expand_atomic_exchange (target, mem, val, model);
5158 /* Expand the __atomic_compare_exchange intrinsic:
5159 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5160 TYPE desired, BOOL weak,
5161 enum memmodel success,
5162 enum memmodel failure)
5163 EXP is the CALL_EXPR.
5164 TARGET is an optional place for us to store the results. */
5166 static rtx
5167 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5168 rtx target)
5170 rtx expect, desired, mem, oldval;
5171 rtx_code_label *label;
5172 enum memmodel success, failure;
5173 tree weak;
5174 bool is_weak;
5175 source_location loc
5176 = expansion_point_location_if_in_system_header (input_location);
5178 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5179 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5181 if (failure > success)
5183 warning_at (loc, OPT_Winvalid_memory_model,
5184 "failure memory model cannot be stronger than success "
5185 "memory model for %<__atomic_compare_exchange%>");
5186 success = MEMMODEL_SEQ_CST;
5189 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5191 warning_at (loc, OPT_Winvalid_memory_model,
5192 "invalid failure memory model for "
5193 "%<__atomic_compare_exchange%>");
5194 failure = MEMMODEL_SEQ_CST;
5195 success = MEMMODEL_SEQ_CST;
5199 if (!flag_inline_atomics)
5200 return NULL_RTX;
5202 /* Expand the operands. */
5203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5205 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5206 expect = convert_memory_address (Pmode, expect);
5207 expect = gen_rtx_MEM (mode, expect);
5208 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5210 weak = CALL_EXPR_ARG (exp, 3);
5211 is_weak = false;
5212 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5213 is_weak = true;
5215 if (target == const0_rtx)
5216 target = NULL;
5218 /* Lest the rtl backend create a race condition with an improper store
5219 to memory, always create a new pseudo for OLDVAL. */
5220 oldval = NULL;
5222 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5223 is_weak, success, failure))
5224 return NULL_RTX;
5226 /* Conditionally store back to EXPECT, lest we create a race condition
5227 with an improper store to memory. */
5228 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5229 the normal case where EXPECT is totally private, i.e. a register. At
5230 which point the store can be unconditional. */
5231 label = gen_label_rtx ();
5232 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5233 GET_MODE (target), 1, label);
5234 emit_move_insn (expect, oldval);
5235 emit_label (label);
5237 return target;
5240 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5241 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5242 call. The weak parameter must be dropped to match the expected parameter
5243 list and the expected argument changed from value to pointer to memory
5244 slot. */
5246 static void
5247 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5249 unsigned int z;
5250 vec<tree, va_gc> *vec;
5252 vec_alloc (vec, 5);
5253 vec->quick_push (gimple_call_arg (call, 0));
5254 tree expected = gimple_call_arg (call, 1);
5255 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5256 TREE_TYPE (expected));
5257 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5258 if (expd != x)
5259 emit_move_insn (x, expd);
5260 tree v = make_tree (TREE_TYPE (expected), x);
5261 vec->quick_push (build1 (ADDR_EXPR,
5262 build_pointer_type (TREE_TYPE (expected)), v));
5263 vec->quick_push (gimple_call_arg (call, 2));
5264 /* Skip the boolean weak parameter. */
5265 for (z = 4; z < 6; z++)
5266 vec->quick_push (gimple_call_arg (call, z));
5267 built_in_function fncode
5268 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5269 + exact_log2 (GET_MODE_SIZE (mode)));
5270 tree fndecl = builtin_decl_explicit (fncode);
5271 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5272 fndecl);
5273 tree exp = build_call_vec (boolean_type_node, fn, vec);
5274 tree lhs = gimple_call_lhs (call);
5275 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5276 if (lhs)
5278 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5279 if (GET_MODE (boolret) != mode)
5280 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5281 x = force_reg (mode, x);
5282 write_complex_part (target, boolret, true);
5283 write_complex_part (target, x, false);
5287 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5289 void
5290 expand_ifn_atomic_compare_exchange (gcall *call)
5292 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5293 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5294 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5295 rtx expect, desired, mem, oldval, boolret;
5296 enum memmodel success, failure;
5297 tree lhs;
5298 bool is_weak;
5299 source_location loc
5300 = expansion_point_location_if_in_system_header (gimple_location (call));
5302 success = get_memmodel (gimple_call_arg (call, 4));
5303 failure = get_memmodel (gimple_call_arg (call, 5));
5305 if (failure > success)
5307 warning_at (loc, OPT_Winvalid_memory_model,
5308 "failure memory model cannot be stronger than success "
5309 "memory model for %<__atomic_compare_exchange%>");
5310 success = MEMMODEL_SEQ_CST;
5313 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5315 warning_at (loc, OPT_Winvalid_memory_model,
5316 "invalid failure memory model for "
5317 "%<__atomic_compare_exchange%>");
5318 failure = MEMMODEL_SEQ_CST;
5319 success = MEMMODEL_SEQ_CST;
5322 if (!flag_inline_atomics)
5324 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5325 return;
5328 /* Expand the operands. */
5329 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5331 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5332 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5334 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5336 boolret = NULL;
5337 oldval = NULL;
5339 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5340 is_weak, success, failure))
5342 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5343 return;
5346 lhs = gimple_call_lhs (call);
5347 if (lhs)
5349 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5350 if (GET_MODE (boolret) != mode)
5351 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5352 write_complex_part (target, boolret, true);
5353 write_complex_part (target, oldval, false);
5357 /* Expand the __atomic_load intrinsic:
5358 TYPE __atomic_load (TYPE *object, enum memmodel)
5359 EXP is the CALL_EXPR.
5360 TARGET is an optional place for us to store the results. */
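/* For illustration only: a source-level call such as
     v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
   reaches this expander as one of the sized BUILT_IN_ATOMIC_LOAD_{1,2,4,8,16}
   builtins, assuming the front end has already resolved the _n form to a
   specific size.  */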
5362 static rtx
5363 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5365 rtx mem;
5366 enum memmodel model;
5368 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5369 if (is_mm_release (model) || is_mm_acq_rel (model))
5371 source_location loc
5372 = expansion_point_location_if_in_system_header (input_location);
5373 warning_at (loc, OPT_Winvalid_memory_model,
5374 "invalid memory model for %<__atomic_load%>");
5375 model = MEMMODEL_SEQ_CST;
5378 if (!flag_inline_atomics)
5379 return NULL_RTX;
5381 /* Expand the operand. */
5382 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5384 return expand_atomic_load (target, mem, model);
5388 /* Expand the __atomic_store intrinsic:
5389 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5390 EXP is the CALL_EXPR. */
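/* Illustrative example of a call that reaches this expander:
     __atomic_store_n (&x, 0, __ATOMIC_RELEASE);
   which the front end resolves to a sized BUILT_IN_ATOMIC_STORE_N variant.  */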
5393 static rtx
5394 expand_builtin_atomic_store (machine_mode mode, tree exp)
5396 rtx mem, val;
5397 enum memmodel model;
5399 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5400 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5401 || is_mm_release (model)))
5403 source_location loc
5404 = expansion_point_location_if_in_system_header (input_location);
5405 warning_at (loc, OPT_Winvalid_memory_model,
5406 "invalid memory model for %<__atomic_store%>");
5407 model = MEMMODEL_SEQ_CST;
5410 if (!flag_inline_atomics)
5411 return NULL_RTX;
5413 /* Expand the operands. */
5414 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5415 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5417 return expand_atomic_store (mem, val, model, false);
5420 /* Expand the __atomic_fetch_XXX intrinsic:
5421 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5422 EXP is the CALL_EXPR.
5423 TARGET is an optional place for us to store the results.
5424 CODE is the operation: PLUS, MINUS, AND, XOR, IOR or NOT.
5425 FETCH_AFTER is true if returning the result of the operation.
5426 FETCH_AFTER is false if returning the value before the operation.
5427 IGNORE is true if the result is not used.
5428 EXT_CALL is the correct builtin for an external call if this cannot be
5429 resolved to an instruction sequence. */
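/* Descriptive note: when this falls back to the external __atomic_fetch_XXX
   call but the user asked for the XXX_fetch form (FETCH_AFTER true), the
   code below re-applies the operation to the returned value, e.g. for
   __atomic_add_fetch it computes ret + val, and for the NAND case it
   computes ~(ret & val).  */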
5431 static rtx
5432 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5433 enum rtx_code code, bool fetch_after,
5434 bool ignore, enum built_in_function ext_call)
5436 rtx val, mem, ret;
5437 enum memmodel model;
5438 tree fndecl;
5439 tree addr;
5441 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5443 /* Expand the operands. */
5444 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5445 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5447 /* Only try generating instructions if inlining is turned on. */
5448 if (flag_inline_atomics)
5450 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5451 if (ret)
5452 return ret;
5455 /* Return if a different routine isn't needed for the library call. */
5456 if (ext_call == BUILT_IN_NONE)
5457 return NULL_RTX;
5459 /* Change the call to the specified function. */
5460 fndecl = get_callee_fndecl (exp);
5461 addr = CALL_EXPR_FN (exp);
5462 STRIP_NOPS (addr);
5464 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5465 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5467 /* Expand the call here so we can emit trailing code. */
5468 ret = expand_call (exp, target, ignore);
5470 /* Replace the original function just in case it matters. */
5471 TREE_OPERAND (addr, 0) = fndecl;
5473 /* Then issue the arithmetic correction to return the right result. */
5474 if (!ignore)
5476 if (code == NOT)
5478 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5479 OPTAB_LIB_WIDEN);
5480 ret = expand_simple_unop (mode, NOT, ret, target, true);
5482 else
5483 ret = expand_simple_binop (mode, code, ret, val, target, true,
5484 OPTAB_LIB_WIDEN);
5486 return ret;
5489 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5491 void
5492 expand_ifn_atomic_bit_test_and (gcall *call)
5494 tree ptr = gimple_call_arg (call, 0);
5495 tree bit = gimple_call_arg (call, 1);
5496 tree flag = gimple_call_arg (call, 2);
5497 tree lhs = gimple_call_lhs (call);
5498 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5499 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5500 enum rtx_code code;
5501 optab optab;
5502 struct expand_operand ops[5];
5504 gcc_assert (flag_inline_atomics);
5506 if (gimple_call_num_args (call) == 4)
5507 model = get_memmodel (gimple_call_arg (call, 3));
5509 rtx mem = get_builtin_sync_mem (ptr, mode);
5510 rtx val = expand_expr_force_mode (bit, mode);
5512 switch (gimple_call_internal_fn (call))
5514 case IFN_ATOMIC_BIT_TEST_AND_SET:
5515 code = IOR;
5516 optab = atomic_bit_test_and_set_optab;
5517 break;
5518 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5519 code = XOR;
5520 optab = atomic_bit_test_and_complement_optab;
5521 break;
5522 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5523 code = AND;
5524 optab = atomic_bit_test_and_reset_optab;
5525 break;
5526 default:
5527 gcc_unreachable ();
5530 if (lhs == NULL_TREE)
5532 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5533 val, NULL_RTX, true, OPTAB_DIRECT);
5534 if (code == AND)
5535 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5536 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5537 return;
5540 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5541 enum insn_code icode = direct_optab_handler (optab, mode);
5542 gcc_assert (icode != CODE_FOR_nothing);
5543 create_output_operand (&ops[0], target, mode);
5544 create_fixed_operand (&ops[1], mem);
5545 create_convert_operand_to (&ops[2], val, mode, true);
5546 create_integer_operand (&ops[3], model);
5547 create_integer_operand (&ops[4], integer_onep (flag));
5548 if (maybe_expand_insn (icode, 5, ops))
5549 return;
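  /* Fallback when no direct pattern is available (descriptive comment):
     perform a generic atomic fetch-op, then either shift the chosen bit
     down to bit zero (when the result is wanted as a 0/1 flag) or mask it
     in place.  */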
5551 rtx bitval = val;
5552 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5553 val, NULL_RTX, true, OPTAB_DIRECT);
5554 rtx maskval = val;
5555 if (code == AND)
5556 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5557 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5558 code, model, false);
5559 if (integer_onep (flag))
5561 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5562 NULL_RTX, true, OPTAB_DIRECT);
5563 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5564 true, OPTAB_DIRECT);
5566 else
5567 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5568 OPTAB_DIRECT);
5569 if (result != target)
5570 emit_move_insn (target, result);
5573 /* Expand an atomic clear operation.
5574 void __atomic_clear (BOOL *obj, enum memmodel)
5575 EXP is the call expression. */
5577 static rtx
5578 expand_builtin_atomic_clear (tree exp)
5580 machine_mode mode;
5581 rtx mem, ret;
5582 enum memmodel model;
5584 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5585 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5588 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5590 source_location loc
5591 = expansion_point_location_if_in_system_header (input_location);
5592 warning_at (loc, OPT_Winvalid_memory_model,
5593 "invalid memory model for %<__atomic_store%>");
5594 model = MEMMODEL_SEQ_CST;
5597 /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release.
5598 Failing that, issue a plain store. The only way the atomic forms can
5599 fail is if the bool type is larger than a word size. Unlikely, but
5600 handle it anyway for completeness. Assume a single threaded model since
5601 there is no atomic support in this case, and no barriers are required. */
5602 ret = expand_atomic_store (mem, const0_rtx, model, true);
5603 if (!ret)
5604 emit_move_insn (mem, const0_rtx);
5605 return const0_rtx;
5608 /* Expand an atomic test_and_set operation.
5609 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5610 EXP is the call expression. */
5612 static rtx
5613 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5615 rtx mem;
5616 enum memmodel model;
5617 machine_mode mode;
5619 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5620 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5621 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5623 return expand_atomic_test_and_set (target, mem, model);
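/* Usage sketch (illustrative only, not part of the expanders above): these
   two builtins are commonly paired to build a simple spinlock, e.g.
     while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&lock, __ATOMIC_RELEASE);  */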
5627 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5628 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5630 static tree
5631 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5633 int size;
5634 machine_mode mode;
5635 unsigned int mode_align, type_align;
5637 if (TREE_CODE (arg0) != INTEGER_CST)
5638 return NULL_TREE;
5640 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5641 mode = mode_for_size (size, MODE_INT, 0);
5642 mode_align = GET_MODE_ALIGNMENT (mode);
5644 if (TREE_CODE (arg1) == INTEGER_CST)
5646 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5648 /* Either this argument is null, or it's a fake pointer encoding
5649 the alignment of the object. */
5650 val = least_bit_hwi (val);
5651 val *= BITS_PER_UNIT;
5653 if (val == 0 || mode_align < val)
5654 type_align = mode_align;
5655 else
5656 type_align = val;
5658 else
5660 tree ttype = TREE_TYPE (arg1);
5662 /* This function is usually invoked and folded immediately by the front
5663 end before anything else has a chance to look at it. The pointer
5664 parameter at this point is usually cast to a void *, so check for that
5665 and look past the cast. */
5666 if (CONVERT_EXPR_P (arg1)
5667 && POINTER_TYPE_P (ttype)
5668 && VOID_TYPE_P (TREE_TYPE (ttype))
5669 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5670 arg1 = TREE_OPERAND (arg1, 0);
5672 ttype = TREE_TYPE (arg1);
5673 gcc_assert (POINTER_TYPE_P (ttype));
5675 /* Get the underlying type of the object. */
5676 ttype = TREE_TYPE (ttype);
5677 type_align = TYPE_ALIGN (ttype);
5680 /* If the object has smaller alignment, the lock free routines cannot
5681 be used. */
5682 if (type_align < mode_align)
5683 return boolean_false_node;
5685 /* Check if a compare_and_swap pattern exists for the mode which represents
5686 the required size. The pattern is not allowed to fail, so the existence
5687 of the pattern indicates support is present. */
5688 if (can_compare_and_swap_p (mode, true))
5689 return boolean_true_node;
5690 else
5691 return boolean_false_node;
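/* Illustrative example: __atomic_always_lock_free (sizeof (int), 0) is
   expected to fold here to 1 on a target with a 32-bit compare-and-swap
   pattern and to 0 otherwise; a null second argument means the typical
   alignment for an object of that size is assumed.  */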
5694 /* Return true if the parameters to call EXP represent an object which will
5695 always generate lock free instructions. The first argument represents the
5696 size of the object, and the second parameter is a pointer to the object
5697 itself. If NULL is passed for the object, then the result is based on
5698 typical alignment for an object of the specified size. Otherwise return
5699 false. */
5701 static rtx
5702 expand_builtin_atomic_always_lock_free (tree exp)
5704 tree size;
5705 tree arg0 = CALL_EXPR_ARG (exp, 0);
5706 tree arg1 = CALL_EXPR_ARG (exp, 1);
5708 if (TREE_CODE (arg0) != INTEGER_CST)
5710 error ("non-constant argument 1 to __atomic_always_lock_free");
5711 return const0_rtx;
5714 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5715 if (size == boolean_true_node)
5716 return const1_rtx;
5717 return const0_rtx;
5720 /* Return one or zero if it can be determined whether object ARG1 of size
5721 ARG0 is lock free on this architecture. */
5723 static tree
5724 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5726 if (!flag_inline_atomics)
5727 return NULL_TREE;
5729 /* If it isn't always lock free, don't generate a result. */
5730 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5731 return boolean_true_node;
5733 return NULL_TREE;
5736 /* Return true if the parameters to call EXP represent an object which will
5737 always generate lock free instructions. The first argument represents the
5738 size of the object, and the second parameter is a pointer to the object
5739 itself. If NULL is passed for the object, then the result is based on
5740 typical alignment for an object of the specified size. Otherwise return
5741 NULL. */
5743 static rtx
5744 expand_builtin_atomic_is_lock_free (tree exp)
5746 tree size;
5747 tree arg0 = CALL_EXPR_ARG (exp, 0);
5748 tree arg1 = CALL_EXPR_ARG (exp, 1);
5750 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5752 error ("non-integer argument 1 to __atomic_is_lock_free");
5753 return NULL_RTX;
5756 if (!flag_inline_atomics)
5757 return NULL_RTX;
5759 /* If the value is known at compile time, return the RTX for it. */
5760 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5761 if (size == boolean_true_node)
5762 return const1_rtx;
5764 return NULL_RTX;
5767 /* Expand the __atomic_thread_fence intrinsic:
5768 void __atomic_thread_fence (enum memmodel)
5769 EXP is the CALL_EXPR. */
5771 static void
5772 expand_builtin_atomic_thread_fence (tree exp)
5774 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5775 expand_mem_thread_fence (model);
5778 /* Expand the __atomic_signal_fence intrinsic:
5779 void __atomic_signal_fence (enum memmodel)
5780 EXP is the CALL_EXPR. */
5782 static void
5783 expand_builtin_atomic_signal_fence (tree exp)
5785 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5786 expand_mem_signal_fence (model);
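/* Descriptive note: __atomic_thread_fence (e.g. with __ATOMIC_SEQ_CST)
   emits a real memory barrier as required by the model, whereas
   __atomic_signal_fence only needs to stop the compiler from moving memory
   accesses across it, since the other "thread" is a signal handler running
   on the same CPU.  */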
5789 /* Expand the __sync_synchronize intrinsic. */
5791 static void
5792 expand_builtin_sync_synchronize (void)
5794 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5797 static rtx
5798 expand_builtin_thread_pointer (tree exp, rtx target)
5800 enum insn_code icode;
5801 if (!validate_arglist (exp, VOID_TYPE))
5802 return const0_rtx;
5803 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5804 if (icode != CODE_FOR_nothing)
5806 struct expand_operand op;
5807 /* If the target is not suitable then create a new target. */
5808 if (target == NULL_RTX
5809 || !REG_P (target)
5810 || GET_MODE (target) != Pmode)
5811 target = gen_reg_rtx (Pmode);
5812 create_output_operand (&op, target, Pmode);
5813 expand_insn (icode, 1, &op);
5814 return target;
5816 error ("__builtin_thread_pointer is not supported on this target");
5817 return const0_rtx;
5820 static void
5821 expand_builtin_set_thread_pointer (tree exp)
5823 enum insn_code icode;
5824 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5825 return;
5826 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5827 if (icode != CODE_FOR_nothing)
5829 struct expand_operand op;
5830 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5831 Pmode, EXPAND_NORMAL);
5832 create_input_operand (&op, val, Pmode);
5833 expand_insn (icode, 1, &op);
5834 return;
5836 error ("__builtin_set_thread_pointer is not supported on this target");
5840 /* Emit code to restore the current value of stack. */
5842 static void
5843 expand_stack_restore (tree var)
5845 rtx_insn *prev;
5846 rtx sa = expand_normal (var);
5848 sa = convert_memory_address (Pmode, sa);
5850 prev = get_last_insn ();
5851 emit_stack_restore (SAVE_BLOCK, sa);
5853 record_new_stack_level ();
5855 fixup_args_size_notes (prev, get_last_insn (), 0);
5858 /* Emit code to save the current value of stack. */
5860 static rtx
5861 expand_stack_save (void)
5863 rtx ret = NULL_RTX;
5865 emit_stack_save (SAVE_BLOCK, &ret);
5866 return ret;
5870 /* Expand an expression EXP that calls a built-in function,
5871 with result going to TARGET if that's convenient
5872 (and in mode MODE if that's convenient).
5873 SUBTARGET may be used as the target for computing one of EXP's operands.
5874 IGNORE is nonzero if the value is to be ignored. */
5877 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5878 int ignore)
5880 tree fndecl = get_callee_fndecl (exp);
5881 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5882 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5883 int flags;
5885 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5886 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5888 /* When ASan is enabled, we don't want to expand some memory/string
5889 builtins and rely on libsanitizer's hooks. This allows us to avoid
5890 redundant checks and be sure, that possible overflow will be detected
5891 by ASan. */
5893 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5894 return expand_call (exp, target, ignore);
5896 /* When not optimizing, generate calls to library functions for a certain
5897 set of builtins. */
5898 if (!optimize
5899 && !called_as_built_in (fndecl)
5900 && fcode != BUILT_IN_FORK
5901 && fcode != BUILT_IN_EXECL
5902 && fcode != BUILT_IN_EXECV
5903 && fcode != BUILT_IN_EXECLP
5904 && fcode != BUILT_IN_EXECLE
5905 && fcode != BUILT_IN_EXECVP
5906 && fcode != BUILT_IN_EXECVE
5907 && fcode != BUILT_IN_ALLOCA
5908 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5909 && fcode != BUILT_IN_FREE
5910 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5912 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5913 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5915 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5916 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5917 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5918 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5919 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5920 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5921 && fcode != BUILT_IN_CHKP_BNDRET)
5922 return expand_call (exp, target, ignore);
5924 /* The built-in function expanders test for target == const0_rtx
5925 to determine whether the function's result will be ignored. */
5926 if (ignore)
5927 target = const0_rtx;
5929 /* If the result of a pure or const built-in function is ignored, and
5930 none of its arguments are volatile, we can avoid expanding the
5931 built-in call and just evaluate the arguments for side-effects. */
5932 if (target == const0_rtx
5933 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5934 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5936 bool volatilep = false;
5937 tree arg;
5938 call_expr_arg_iterator iter;
5940 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5941 if (TREE_THIS_VOLATILE (arg))
5943 volatilep = true;
5944 break;
5947 if (! volatilep)
5949 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5950 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5951 return const0_rtx;
5955 /* expand_builtin_with_bounds is supposed to be used for
5956 instrumented builtin calls. */
5957 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5959 switch (fcode)
5961 CASE_FLT_FN (BUILT_IN_FABS):
5962 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5963 case BUILT_IN_FABSD32:
5964 case BUILT_IN_FABSD64:
5965 case BUILT_IN_FABSD128:
5966 target = expand_builtin_fabs (exp, target, subtarget);
5967 if (target)
5968 return target;
5969 break;
5971 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5972 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5973 target = expand_builtin_copysign (exp, target, subtarget);
5974 if (target)
5975 return target;
5976 break;
5978 /* Just do a normal library call if we were unable to fold
5979 the values. */
5980 CASE_FLT_FN (BUILT_IN_CABS):
5981 break;
5983 CASE_FLT_FN (BUILT_IN_FMA):
5984 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5985 if (target)
5986 return target;
5987 break;
5989 CASE_FLT_FN (BUILT_IN_ILOGB):
5990 if (! flag_unsafe_math_optimizations)
5991 break;
5992 gcc_fallthrough ();
5993 CASE_FLT_FN (BUILT_IN_ISINF):
5994 CASE_FLT_FN (BUILT_IN_FINITE):
5995 case BUILT_IN_ISFINITE:
5996 case BUILT_IN_ISNORMAL:
5997 target = expand_builtin_interclass_mathfn (exp, target);
5998 if (target)
5999 return target;
6000 break;
6002 CASE_FLT_FN (BUILT_IN_ICEIL):
6003 CASE_FLT_FN (BUILT_IN_LCEIL):
6004 CASE_FLT_FN (BUILT_IN_LLCEIL):
6005 CASE_FLT_FN (BUILT_IN_LFLOOR):
6006 CASE_FLT_FN (BUILT_IN_IFLOOR):
6007 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6008 target = expand_builtin_int_roundingfn (exp, target);
6009 if (target)
6010 return target;
6011 break;
6013 CASE_FLT_FN (BUILT_IN_IRINT):
6014 CASE_FLT_FN (BUILT_IN_LRINT):
6015 CASE_FLT_FN (BUILT_IN_LLRINT):
6016 CASE_FLT_FN (BUILT_IN_IROUND):
6017 CASE_FLT_FN (BUILT_IN_LROUND):
6018 CASE_FLT_FN (BUILT_IN_LLROUND):
6019 target = expand_builtin_int_roundingfn_2 (exp, target);
6020 if (target)
6021 return target;
6022 break;
6024 CASE_FLT_FN (BUILT_IN_POWI):
6025 target = expand_builtin_powi (exp, target);
6026 if (target)
6027 return target;
6028 break;
6030 CASE_FLT_FN (BUILT_IN_CEXPI):
6031 target = expand_builtin_cexpi (exp, target);
6032 gcc_assert (target);
6033 return target;
6035 CASE_FLT_FN (BUILT_IN_SIN):
6036 CASE_FLT_FN (BUILT_IN_COS):
6037 if (! flag_unsafe_math_optimizations)
6038 break;
6039 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6040 if (target)
6041 return target;
6042 break;
6044 CASE_FLT_FN (BUILT_IN_SINCOS):
6045 if (! flag_unsafe_math_optimizations)
6046 break;
6047 target = expand_builtin_sincos (exp);
6048 if (target)
6049 return target;
6050 break;
6052 case BUILT_IN_APPLY_ARGS:
6053 return expand_builtin_apply_args ();
6055 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6056 FUNCTION with a copy of the parameters described by
6057 ARGUMENTS, and ARGSIZE. It returns a block of memory
6058 allocated on the stack into which is stored all the registers
6059 that might possibly be used for returning the result of a
6060 function. ARGUMENTS is the value returned by
6061 __builtin_apply_args. ARGSIZE is the number of bytes of
6062 arguments that must be copied. ??? How should this value be
6063 computed? We'll also need a safe worst case value for varargs
6064 functions. */
6065 case BUILT_IN_APPLY:
6066 if (!validate_arglist (exp, POINTER_TYPE,
6067 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6068 && !validate_arglist (exp, REFERENCE_TYPE,
6069 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6070 return const0_rtx;
6071 else
6073 rtx ops[3];
6075 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6076 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6077 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6079 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6082 /* __builtin_return (RESULT) causes the function to return the
6083 value described by RESULT. RESULT is address of the block of
6084 memory returned by __builtin_apply. */
6085 case BUILT_IN_RETURN:
6086 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6087 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6088 return const0_rtx;
6090 case BUILT_IN_SAVEREGS:
6091 return expand_builtin_saveregs ();
6093 case BUILT_IN_VA_ARG_PACK:
6094 /* All valid uses of __builtin_va_arg_pack () are removed during
6095 inlining. */
6096 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6097 return const0_rtx;
6099 case BUILT_IN_VA_ARG_PACK_LEN:
6100 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6101 inlining. */
6102 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6103 return const0_rtx;
6105 /* Return the address of the first anonymous stack arg. */
6106 case BUILT_IN_NEXT_ARG:
6107 if (fold_builtin_next_arg (exp, false))
6108 return const0_rtx;
6109 return expand_builtin_next_arg ();
6111 case BUILT_IN_CLEAR_CACHE:
6112 target = expand_builtin___clear_cache (exp);
6113 if (target)
6114 return target;
6115 break;
6117 case BUILT_IN_CLASSIFY_TYPE:
6118 return expand_builtin_classify_type (exp);
6120 case BUILT_IN_CONSTANT_P:
6121 return const0_rtx;
6123 case BUILT_IN_FRAME_ADDRESS:
6124 case BUILT_IN_RETURN_ADDRESS:
6125 return expand_builtin_frame_address (fndecl, exp);
6127 /* Returns the address of the area where the structure is returned.
6128 0 otherwise. */
6129 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6130 if (call_expr_nargs (exp) != 0
6131 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6132 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6133 return const0_rtx;
6134 else
6135 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6137 case BUILT_IN_ALLOCA:
6138 case BUILT_IN_ALLOCA_WITH_ALIGN:
6139 /* If the allocation stems from the declaration of a variable-sized
6140 object, it cannot accumulate. */
6141 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6142 if (target)
6143 return target;
6144 break;
6146 case BUILT_IN_STACK_SAVE:
6147 return expand_stack_save ();
6149 case BUILT_IN_STACK_RESTORE:
6150 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6151 return const0_rtx;
6153 case BUILT_IN_BSWAP16:
6154 case BUILT_IN_BSWAP32:
6155 case BUILT_IN_BSWAP64:
6156 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6157 if (target)
6158 return target;
6159 break;
6161 CASE_INT_FN (BUILT_IN_FFS):
6162 target = expand_builtin_unop (target_mode, exp, target,
6163 subtarget, ffs_optab);
6164 if (target)
6165 return target;
6166 break;
6168 CASE_INT_FN (BUILT_IN_CLZ):
6169 target = expand_builtin_unop (target_mode, exp, target,
6170 subtarget, clz_optab);
6171 if (target)
6172 return target;
6173 break;
6175 CASE_INT_FN (BUILT_IN_CTZ):
6176 target = expand_builtin_unop (target_mode, exp, target,
6177 subtarget, ctz_optab);
6178 if (target)
6179 return target;
6180 break;
6182 CASE_INT_FN (BUILT_IN_CLRSB):
6183 target = expand_builtin_unop (target_mode, exp, target,
6184 subtarget, clrsb_optab);
6185 if (target)
6186 return target;
6187 break;
6189 CASE_INT_FN (BUILT_IN_POPCOUNT):
6190 target = expand_builtin_unop (target_mode, exp, target,
6191 subtarget, popcount_optab);
6192 if (target)
6193 return target;
6194 break;
6196 CASE_INT_FN (BUILT_IN_PARITY):
6197 target = expand_builtin_unop (target_mode, exp, target,
6198 subtarget, parity_optab);
6199 if (target)
6200 return target;
6201 break;
6203 case BUILT_IN_STRLEN:
6204 target = expand_builtin_strlen (exp, target, target_mode);
6205 if (target)
6206 return target;
6207 break;
6209 case BUILT_IN_STRCPY:
6210 target = expand_builtin_strcpy (exp, target);
6211 if (target)
6212 return target;
6213 break;
6215 case BUILT_IN_STRNCPY:
6216 target = expand_builtin_strncpy (exp, target);
6217 if (target)
6218 return target;
6219 break;
6221 case BUILT_IN_STPCPY:
6222 target = expand_builtin_stpcpy (exp, target, mode);
6223 if (target)
6224 return target;
6225 break;
6227 case BUILT_IN_MEMCPY:
6228 target = expand_builtin_memcpy (exp, target);
6229 if (target)
6230 return target;
6231 break;
6233 case BUILT_IN_MEMPCPY:
6234 target = expand_builtin_mempcpy (exp, target, mode);
6235 if (target)
6236 return target;
6237 break;
6239 case BUILT_IN_MEMSET:
6240 target = expand_builtin_memset (exp, target, mode);
6241 if (target)
6242 return target;
6243 break;
6245 case BUILT_IN_BZERO:
6246 target = expand_builtin_bzero (exp);
6247 if (target)
6248 return target;
6249 break;
6251 case BUILT_IN_STRCMP:
6252 target = expand_builtin_strcmp (exp, target);
6253 if (target)
6254 return target;
6255 break;
6257 case BUILT_IN_STRNCMP:
6258 target = expand_builtin_strncmp (exp, target, mode);
6259 if (target)
6260 return target;
6261 break;
6263 case BUILT_IN_BCMP:
6264 case BUILT_IN_MEMCMP:
6265 case BUILT_IN_MEMCMP_EQ:
6266 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6267 if (target)
6268 return target;
6269 if (fcode == BUILT_IN_MEMCMP_EQ)
6271 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6272 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6274 break;
6276 case BUILT_IN_SETJMP:
6277 /* This should have been lowered to the builtins below. */
6278 gcc_unreachable ();
6280 case BUILT_IN_SETJMP_SETUP:
6281 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6282 and the receiver label. */
6283 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6285 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6286 VOIDmode, EXPAND_NORMAL);
6287 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6288 rtx_insn *label_r = label_rtx (label);
6290 /* This is copied from the handling of non-local gotos. */
6291 expand_builtin_setjmp_setup (buf_addr, label_r);
6292 nonlocal_goto_handler_labels
6293 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6294 nonlocal_goto_handler_labels);
6295 /* ??? Do not let expand_label treat us as such since we would
6296 not want to be both on the list of non-local labels and on
6297 the list of forced labels. */
6298 FORCED_LABEL (label) = 0;
6299 return const0_rtx;
6301 break;
6303 case BUILT_IN_SETJMP_RECEIVER:
6304 /* __builtin_setjmp_receiver is passed the receiver label. */
6305 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6307 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6308 rtx_insn *label_r = label_rtx (label);
6310 expand_builtin_setjmp_receiver (label_r);
6311 return const0_rtx;
6313 break;
6315 /* __builtin_longjmp is passed a pointer to an array of five words.
6316 It's similar to the C library longjmp function but works with
6317 __builtin_setjmp above. */
6318 case BUILT_IN_LONGJMP:
6319 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6321 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6322 VOIDmode, EXPAND_NORMAL);
6323 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6325 if (value != const1_rtx)
6327 error ("%<__builtin_longjmp%> second argument must be 1");
6328 return const0_rtx;
6331 expand_builtin_longjmp (buf_addr, value);
6332 return const0_rtx;
6334 break;
6336 case BUILT_IN_NONLOCAL_GOTO:
6337 target = expand_builtin_nonlocal_goto (exp);
6338 if (target)
6339 return target;
6340 break;
6342 /* This updates the setjmp buffer that is its argument with the value
6343 of the current stack pointer. */
6344 case BUILT_IN_UPDATE_SETJMP_BUF:
6345 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6347 rtx buf_addr
6348 = expand_normal (CALL_EXPR_ARG (exp, 0));
6350 expand_builtin_update_setjmp_buf (buf_addr);
6351 return const0_rtx;
6353 break;
6355 case BUILT_IN_TRAP:
6356 expand_builtin_trap ();
6357 return const0_rtx;
6359 case BUILT_IN_UNREACHABLE:
6360 expand_builtin_unreachable ();
6361 return const0_rtx;
6363 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6364 case BUILT_IN_SIGNBITD32:
6365 case BUILT_IN_SIGNBITD64:
6366 case BUILT_IN_SIGNBITD128:
6367 target = expand_builtin_signbit (exp, target);
6368 if (target)
6369 return target;
6370 break;
6372 /* Various hooks for the DWARF 2 __throw routine. */
6373 case BUILT_IN_UNWIND_INIT:
6374 expand_builtin_unwind_init ();
6375 return const0_rtx;
6376 case BUILT_IN_DWARF_CFA:
6377 return virtual_cfa_rtx;
6378 #ifdef DWARF2_UNWIND_INFO
6379 case BUILT_IN_DWARF_SP_COLUMN:
6380 return expand_builtin_dwarf_sp_column ();
6381 case BUILT_IN_INIT_DWARF_REG_SIZES:
6382 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6383 return const0_rtx;
6384 #endif
6385 case BUILT_IN_FROB_RETURN_ADDR:
6386 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6387 case BUILT_IN_EXTRACT_RETURN_ADDR:
6388 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6389 case BUILT_IN_EH_RETURN:
6390 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6391 CALL_EXPR_ARG (exp, 1));
6392 return const0_rtx;
6393 case BUILT_IN_EH_RETURN_DATA_REGNO:
6394 return expand_builtin_eh_return_data_regno (exp);
6395 case BUILT_IN_EXTEND_POINTER:
6396 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6397 case BUILT_IN_EH_POINTER:
6398 return expand_builtin_eh_pointer (exp);
6399 case BUILT_IN_EH_FILTER:
6400 return expand_builtin_eh_filter (exp);
6401 case BUILT_IN_EH_COPY_VALUES:
6402 return expand_builtin_eh_copy_values (exp);
6404 case BUILT_IN_VA_START:
6405 return expand_builtin_va_start (exp);
6406 case BUILT_IN_VA_END:
6407 return expand_builtin_va_end (exp);
6408 case BUILT_IN_VA_COPY:
6409 return expand_builtin_va_copy (exp);
6410 case BUILT_IN_EXPECT:
6411 return expand_builtin_expect (exp, target);
6412 case BUILT_IN_ASSUME_ALIGNED:
6413 return expand_builtin_assume_aligned (exp, target);
6414 case BUILT_IN_PREFETCH:
6415 expand_builtin_prefetch (exp);
6416 return const0_rtx;
6418 case BUILT_IN_INIT_TRAMPOLINE:
6419 return expand_builtin_init_trampoline (exp, true);
6420 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6421 return expand_builtin_init_trampoline (exp, false);
6422 case BUILT_IN_ADJUST_TRAMPOLINE:
6423 return expand_builtin_adjust_trampoline (exp);
6425 case BUILT_IN_INIT_DESCRIPTOR:
6426 return expand_builtin_init_descriptor (exp);
6427 case BUILT_IN_ADJUST_DESCRIPTOR:
6428 return expand_builtin_adjust_descriptor (exp);
6430 case BUILT_IN_FORK:
6431 case BUILT_IN_EXECL:
6432 case BUILT_IN_EXECV:
6433 case BUILT_IN_EXECLP:
6434 case BUILT_IN_EXECLE:
6435 case BUILT_IN_EXECVP:
6436 case BUILT_IN_EXECVE:
6437 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6438 if (target)
6439 return target;
6440 break;
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6444 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6445 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6446 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6447 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6448 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6449 if (target)
6450 return target;
6451 break;
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6455 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6456 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6457 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6458 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6459 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6460 if (target)
6461 return target;
6462 break;
6464 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6466 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6467 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6468 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6470 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6471 if (target)
6472 return target;
6473 break;
6475 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6477 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6478 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6479 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6481 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6488 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6489 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6490 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6492 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6493 if (target)
6494 return target;
6495 break;
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6501 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6503 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6504 if (target)
6505 return target;
6506 break;
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6510 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6511 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6512 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6514 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6515 if (target)
6516 return target;
6517 break;
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6521 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6522 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6523 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6525 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6526 if (target)
6527 return target;
6528 break;
6530 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6532 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6533 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6534 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6536 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6537 if (target)
6538 return target;
6539 break;
6541 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6543 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6544 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6545 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6546 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6547 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6548 if (target)
6549 return target;
6550 break;
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6554 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6555 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6556 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6558 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6559 if (target)
6560 return target;
6561 break;
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6567 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6568 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6569 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6570 if (target)
6571 return target;
6572 break;
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6578 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6579 if (mode == VOIDmode)
6580 mode = TYPE_MODE (boolean_type_node);
6581 if (!target || !register_operand (target, mode))
6582 target = gen_reg_rtx (mode);
6584 mode = get_builtin_sync_mode
6585 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6586 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6587 if (target)
6588 return target;
6589 break;
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6596 mode = get_builtin_sync_mode
6597 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6598 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6599 if (target)
6600 return target;
6601 break;
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6607 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6609 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6610 if (target)
6611 return target;
6612 break;
6614 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6616 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6617 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6618 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6620 expand_builtin_sync_lock_release (mode, exp);
6621 return const0_rtx;
6623 case BUILT_IN_SYNC_SYNCHRONIZE:
6624 expand_builtin_sync_synchronize ();
6625 return const0_rtx;
6627 case BUILT_IN_ATOMIC_EXCHANGE_1:
6628 case BUILT_IN_ATOMIC_EXCHANGE_2:
6629 case BUILT_IN_ATOMIC_EXCHANGE_4:
6630 case BUILT_IN_ATOMIC_EXCHANGE_8:
6631 case BUILT_IN_ATOMIC_EXCHANGE_16:
6632 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6633 target = expand_builtin_atomic_exchange (mode, exp, target);
6634 if (target)
6635 return target;
6636 break;
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6642 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6644 unsigned int nargs, z;
6645 vec<tree, va_gc> *vec;
6647 mode =
6648 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6649 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6650 if (target)
6651 return target;
6653 /* If this is turned into an external library call, the weak parameter
6654 must be dropped to match the expected parameter list. */
6655 nargs = call_expr_nargs (exp);
6656 vec_alloc (vec, nargs - 1);
6657 for (z = 0; z < 3; z++)
6658 vec->quick_push (CALL_EXPR_ARG (exp, z));
6659 /* Skip the boolean weak parameter. */
6660 for (z = 4; z < 6; z++)
6661 vec->quick_push (CALL_EXPR_ARG (exp, z));
6662 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6663 break;
6666 case BUILT_IN_ATOMIC_LOAD_1:
6667 case BUILT_IN_ATOMIC_LOAD_2:
6668 case BUILT_IN_ATOMIC_LOAD_4:
6669 case BUILT_IN_ATOMIC_LOAD_8:
6670 case BUILT_IN_ATOMIC_LOAD_16:
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6672 target = expand_builtin_atomic_load (mode, exp, target);
6673 if (target)
6674 return target;
6675 break;
6677 case BUILT_IN_ATOMIC_STORE_1:
6678 case BUILT_IN_ATOMIC_STORE_2:
6679 case BUILT_IN_ATOMIC_STORE_4:
6680 case BUILT_IN_ATOMIC_STORE_8:
6681 case BUILT_IN_ATOMIC_STORE_16:
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6683 target = expand_builtin_atomic_store (mode, exp);
6684 if (target)
6685 return const0_rtx;
6686 break;
6688 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6690 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6691 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6692 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6694 enum built_in_function lib;
6695 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6696 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6697 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6698 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6699 ignore, lib);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6706 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6707 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6708 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6710 enum built_in_function lib;
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6712 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6713 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6714 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6715 ignore, lib);
6716 if (target)
6717 return target;
6718 break;
6720 case BUILT_IN_ATOMIC_AND_FETCH_1:
6721 case BUILT_IN_ATOMIC_AND_FETCH_2:
6722 case BUILT_IN_ATOMIC_AND_FETCH_4:
6723 case BUILT_IN_ATOMIC_AND_FETCH_8:
6724 case BUILT_IN_ATOMIC_AND_FETCH_16:
6726 enum built_in_function lib;
6727 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6728 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6729 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6731 ignore, lib);
6732 if (target)
6733 return target;
6734 break;
6736 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6738 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6739 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6740 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6742 enum built_in_function lib;
6743 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6744 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6745 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6746 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6747 ignore, lib);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6754 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6755 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6756 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6758 enum built_in_function lib;
6759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6760 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6761 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6762 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6763 ignore, lib);
6764 if (target)
6765 return target;
6766 break;
6768 case BUILT_IN_ATOMIC_OR_FETCH_1:
6769 case BUILT_IN_ATOMIC_OR_FETCH_2:
6770 case BUILT_IN_ATOMIC_OR_FETCH_4:
6771 case BUILT_IN_ATOMIC_OR_FETCH_8:
6772 case BUILT_IN_ATOMIC_OR_FETCH_16:
6774 enum built_in_function lib;
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6776 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6777 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6778 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6779 ignore, lib);
6780 if (target)
6781 return target;
6782 break;
6784 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6786 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6787 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6788 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6789 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6790 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6791 ignore, BUILT_IN_NONE);
6792 if (target)
6793 return target;
6794 break;
6796 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6798 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6799 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6800 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6801 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6802 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6803 ignore, BUILT_IN_NONE);
6804 if (target)
6805 return target;
6806 break;
6808 case BUILT_IN_ATOMIC_FETCH_AND_1:
6809 case BUILT_IN_ATOMIC_FETCH_AND_2:
6810 case BUILT_IN_ATOMIC_FETCH_AND_4:
6811 case BUILT_IN_ATOMIC_FETCH_AND_8:
6812 case BUILT_IN_ATOMIC_FETCH_AND_16:
6813 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6814 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6815 ignore, BUILT_IN_NONE);
6816 if (target)
6817 return target;
6818 break;
6820 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6822 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6823 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6824 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6826 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6827 ignore, BUILT_IN_NONE);
6828 if (target)
6829 return target;
6830 break;
6832 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6834 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6835 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6836 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6837 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6838 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6839 ignore, BUILT_IN_NONE);
6840 if (target)
6841 return target;
6842 break;
6844 case BUILT_IN_ATOMIC_FETCH_OR_1:
6845 case BUILT_IN_ATOMIC_FETCH_OR_2:
6846 case BUILT_IN_ATOMIC_FETCH_OR_4:
6847 case BUILT_IN_ATOMIC_FETCH_OR_8:
6848 case BUILT_IN_ATOMIC_FETCH_OR_16:
6849 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6850 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6851 ignore, BUILT_IN_NONE);
6852 if (target)
6853 return target;
6854 break;
6856 case BUILT_IN_ATOMIC_TEST_AND_SET:
6857 return expand_builtin_atomic_test_and_set (exp, target);
6859 case BUILT_IN_ATOMIC_CLEAR:
6860 return expand_builtin_atomic_clear (exp);
6862 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6863 return expand_builtin_atomic_always_lock_free (exp);
6865 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6866 target = expand_builtin_atomic_is_lock_free (exp);
6867 if (target)
6868 return target;
6869 break;
6871 case BUILT_IN_ATOMIC_THREAD_FENCE:
6872 expand_builtin_atomic_thread_fence (exp);
6873 return const0_rtx;
6875 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6876 expand_builtin_atomic_signal_fence (exp);
6877 return const0_rtx;
6879 case BUILT_IN_OBJECT_SIZE:
6880 return expand_builtin_object_size (exp);
6882 case BUILT_IN_MEMCPY_CHK:
6883 case BUILT_IN_MEMPCPY_CHK:
6884 case BUILT_IN_MEMMOVE_CHK:
6885 case BUILT_IN_MEMSET_CHK:
6886 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6887 if (target)
6888 return target;
6889 break;
6891 case BUILT_IN_STRCPY_CHK:
6892 case BUILT_IN_STPCPY_CHK:
6893 case BUILT_IN_STRNCPY_CHK:
6894 case BUILT_IN_STPNCPY_CHK:
6895 case BUILT_IN_STRCAT_CHK:
6896 case BUILT_IN_STRNCAT_CHK:
6897 case BUILT_IN_SNPRINTF_CHK:
6898 case BUILT_IN_VSNPRINTF_CHK:
6899 maybe_emit_chk_warning (exp, fcode);
6900 break;
6902 case BUILT_IN_SPRINTF_CHK:
6903 case BUILT_IN_VSPRINTF_CHK:
6904 maybe_emit_sprintf_chk_warning (exp, fcode);
6905 break;
6907 case BUILT_IN_FREE:
6908 if (warn_free_nonheap_object)
6909 maybe_emit_free_warning (exp);
6910 break;
6912 case BUILT_IN_THREAD_POINTER:
6913 return expand_builtin_thread_pointer (exp, target);
6915 case BUILT_IN_SET_THREAD_POINTER:
6916 expand_builtin_set_thread_pointer (exp);
6917 return const0_rtx;
6919 case BUILT_IN_CILK_DETACH:
6920 expand_builtin_cilk_detach (exp);
6921 return const0_rtx;
6923 case BUILT_IN_CILK_POP_FRAME:
6924 expand_builtin_cilk_pop_frame (exp);
6925 return const0_rtx;
6927 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6928 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6929 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6931 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6932 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6933 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6934 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6935 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6937 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6938 /* We allow user CHKP builtins if Pointer Bounds
6939 Checker is off. */
6940 if (!chkp_function_instrumented_p (current_function_decl))
6942 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6943 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6944 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6945 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6946 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6947 return expand_normal (CALL_EXPR_ARG (exp, 0));
6948 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6949 return expand_normal (size_zero_node);
6950 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6951 return expand_normal (size_int (-1));
6952 else
6953 return const0_rtx;
6955 /* FALLTHROUGH */
6957 case BUILT_IN_CHKP_BNDMK:
6958 case BUILT_IN_CHKP_BNDSTX:
6959 case BUILT_IN_CHKP_BNDCL:
6960 case BUILT_IN_CHKP_BNDCU:
6961 case BUILT_IN_CHKP_BNDLDX:
6962 case BUILT_IN_CHKP_BNDRET:
6963 case BUILT_IN_CHKP_INTERSECT:
6964 case BUILT_IN_CHKP_NARROW:
6965 case BUILT_IN_CHKP_EXTRACT_LOWER:
6966 case BUILT_IN_CHKP_EXTRACT_UPPER:
6967 /* Software implementation of Pointer Bounds Checker is NYI.
6968 Target support is required. */
6969 error ("Your target platform does not support -fcheck-pointer-bounds");
6970 break;
6972 case BUILT_IN_ACC_ON_DEVICE:
6973 /* Do library call, if we failed to expand the builtin when
6974 folding. */
6975 break;
6977 default: /* just do library call, if unknown builtin */
6978 break;
6981 /* The switch statement above can drop through to cause the function
6982 to be called normally. */
6983 return expand_call (exp, target, ignore);
6986 /* Similar to expand_builtin but is used for instrumented calls. */
6989 expand_builtin_with_bounds (tree exp, rtx target,
6990 rtx subtarget ATTRIBUTE_UNUSED,
6991 machine_mode mode, int ignore)
6993 tree fndecl = get_callee_fndecl (exp);
6994 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6996 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6998 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6999 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7001 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7002 && fcode < END_CHKP_BUILTINS);
7004 switch (fcode)
7006 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7007 target = expand_builtin_memcpy_with_bounds (exp, target);
7008 if (target)
7009 return target;
7010 break;
7012 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7013 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7014 if (target)
7015 return target;
7016 break;
7018 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7019 target = expand_builtin_memset_with_bounds (exp, target, mode);
7020 if (target)
7021 return target;
7022 break;
7024 default:
7025 break;
7028 /* The switch statement above can drop through to cause the function
7029 to be called normally. */
7030 return expand_call (exp, target, ignore);
7033 /* Determine whether a tree node represents a call to a built-in
7034 function. If the tree T is a call to a built-in function with
7035 the right number of arguments of the appropriate types, return
7036 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7037 Otherwise the return value is END_BUILTINS. */
7039 enum built_in_function
7040 builtin_mathfn_code (const_tree t)
7042 const_tree fndecl, arg, parmlist;
7043 const_tree argtype, parmtype;
7044 const_call_expr_arg_iterator iter;
7046 if (TREE_CODE (t) != CALL_EXPR
7047 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7048 return END_BUILTINS;
7050 fndecl = get_callee_fndecl (t);
7051 if (fndecl == NULL_TREE
7052 || TREE_CODE (fndecl) != FUNCTION_DECL
7053 || ! DECL_BUILT_IN (fndecl)
7054 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7055 return END_BUILTINS;
7057 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7058 init_const_call_expr_arg_iterator (t, &iter);
7059 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7061 /* If a function doesn't take a variable number of arguments,
7062 the last element in the list will have type `void'. */
7063 parmtype = TREE_VALUE (parmlist);
7064 if (VOID_TYPE_P (parmtype))
7066 if (more_const_call_expr_args_p (&iter))
7067 return END_BUILTINS;
7068 return DECL_FUNCTION_CODE (fndecl);
7071 if (! more_const_call_expr_args_p (&iter))
7072 return END_BUILTINS;
7074 arg = next_const_call_expr_arg (&iter);
7075 argtype = TREE_TYPE (arg);
7077 if (SCALAR_FLOAT_TYPE_P (parmtype))
7079 if (! SCALAR_FLOAT_TYPE_P (argtype))
7080 return END_BUILTINS;
7082 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7084 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7085 return END_BUILTINS;
7087 else if (POINTER_TYPE_P (parmtype))
7089 if (! POINTER_TYPE_P (argtype))
7090 return END_BUILTINS;
7092 else if (INTEGRAL_TYPE_P (parmtype))
7094 if (! INTEGRAL_TYPE_P (argtype))
7095 return END_BUILTINS;
7097 else
7098 return END_BUILTINS;
7101 /* Variable-length argument list. */
7102 return DECL_FUNCTION_CODE (fndecl);
7105 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7106 evaluate to a constant. */
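/* Examples of the intended folding (illustrative): __builtin_constant_p (3)
   and __builtin_constant_p ("abc") fold to 1 here, while an argument with
   side effects, or one of pointer or aggregate type that is not a literal,
   folds to 0.  */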
7108 static tree
7109 fold_builtin_constant_p (tree arg)
7111 /* We return 1 for a numeric type that's known to be a constant
7112 value at compile-time or for an aggregate type that's a
7113 literal constant. */
7114 STRIP_NOPS (arg);
7116 /* If we know this is a constant, emit the constant of one. */
7117 if (CONSTANT_CLASS_P (arg)
7118 || (TREE_CODE (arg) == CONSTRUCTOR
7119 && TREE_CONSTANT (arg)))
7120 return integer_one_node;
7121 if (TREE_CODE (arg) == ADDR_EXPR)
7123 tree op = TREE_OPERAND (arg, 0);
7124 if (TREE_CODE (op) == STRING_CST
7125 || (TREE_CODE (op) == ARRAY_REF
7126 && integer_zerop (TREE_OPERAND (op, 1))
7127 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7128 return integer_one_node;
7131 /* If this expression has side effects, show we don't know it to be a
7132 constant. Likewise if it's a pointer or aggregate type since in
7133 those cases we only want literals, since those are only optimized
7134 when generating RTL, not later.
7135 And finally, if we are compiling an initializer, not code, we
7136 need to return a definite result now; there's not going to be any
7137 more optimization done. */
7138 if (TREE_SIDE_EFFECTS (arg)
7139 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7140 || POINTER_TYPE_P (TREE_TYPE (arg))
7141 || cfun == 0
7142 || folding_initializer
7143 || force_folding_builtin_constant_p)
7144 return integer_zero_node;
7146 return NULL_TREE;
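/* A rough user-level sketch of the folding above, not part of the
   original source (results assume a function context):

     __builtin_constant_p (42)     -> 1      CONSTANT_CLASS_P argument
     __builtin_constant_p ("abc")  -> 1      ADDR_EXPR of a STRING_CST
     __builtin_constant_p (ptr)    -> 0      pointer type: literals only
     __builtin_constant_p (x + 1)  -> NULL_TREE, deferred so later passes
                                      can still prove X constant  */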
7149 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7150 return it as a truthvalue. */
7152 static tree
7153 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7154 tree predictor)
7156 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7158 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7159 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7160 ret_type = TREE_TYPE (TREE_TYPE (fn));
7161 pred_type = TREE_VALUE (arg_types);
7162 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7164 pred = fold_convert_loc (loc, pred_type, pred);
7165 expected = fold_convert_loc (loc, expected_type, expected);
7166 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7167 predictor);
7169 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7170 build_int_cst (ret_type, 0));
7173 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7174 NULL_TREE if no simplification is possible. */
7176 tree
7177 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7179 tree inner, fndecl, inner_arg0;
7180 enum tree_code code;
7182 /* Distribute the expected value over short-circuiting operators.
7183 See through the cast from truthvalue_type_node to long. */
7184 inner_arg0 = arg0;
7185 while (CONVERT_EXPR_P (inner_arg0)
7186 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7187 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7188 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7190 /* If this is a builtin_expect within a builtin_expect, keep the
7191 inner one. See through a comparison against a constant. It
7192 might have been added to create a truthvalue. */
7193 inner = inner_arg0;
7195 if (COMPARISON_CLASS_P (inner)
7196 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7197 inner = TREE_OPERAND (inner, 0);
7199 if (TREE_CODE (inner) == CALL_EXPR
7200 && (fndecl = get_callee_fndecl (inner))
7201 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7202 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7203 return arg0;
7205 inner = inner_arg0;
7206 code = TREE_CODE (inner);
7207 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7209 tree op0 = TREE_OPERAND (inner, 0);
7210 tree op1 = TREE_OPERAND (inner, 1);
7212 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7213 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7214 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7216 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7219 /* If the argument isn't invariant then there's nothing else we can do. */
7220 if (!TREE_CONSTANT (inner_arg0))
7221 return NULL_TREE;
7223 /* If we expect that a comparison against the argument will fold to
7224 a constant return the constant. In practice, this means a true
7225 constant or the address of a non-weak symbol. */
7226 inner = inner_arg0;
7227 STRIP_NOPS (inner);
7228 if (TREE_CODE (inner) == ADDR_EXPR)
7232 inner = TREE_OPERAND (inner, 0);
7234 while (TREE_CODE (inner) == COMPONENT_REF
7235 || TREE_CODE (inner) == ARRAY_REF);
7236 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7237 return NULL_TREE;
7240 /* Otherwise, ARG0 already has the proper type for the return value. */
7241 return arg0;
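/* Illustrative sketch of the distribution step above, not part of the
   original source.  Given

     __builtin_expect (a && b, 1)

   the expected value is pushed into both short-circuit operands, roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the TRUTH_ANDIF_EXPR carries the prediction.  */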
7244 /* Fold a call to __builtin_classify_type with argument ARG. */
7246 static tree
7247 fold_builtin_classify_type (tree arg)
7249 if (arg == 0)
7250 return build_int_cst (integer_type_node, no_type_class);
7252 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7255 /* Fold a call to __builtin_strlen with argument ARG. */
7257 static tree
7258 fold_builtin_strlen (location_t loc, tree type, tree arg)
7260 if (!validate_arg (arg, POINTER_TYPE))
7261 return NULL_TREE;
7262 else
7264 tree len = c_strlen (arg, 0);
7266 if (len)
7267 return fold_convert_loc (loc, type, len);
7269 return NULL_TREE;
7273 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7275 static tree
7276 fold_builtin_inf (location_t loc, tree type, int warn)
7278 REAL_VALUE_TYPE real;
7280 /* __builtin_inff is intended to be usable to define INFINITY on all
7281 targets. If an infinity is not available, INFINITY expands "to a
7282 positive constant of type float that overflows at translation
7283 time", footnote "In this case, using INFINITY will violate the
7284 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7285 Thus we pedwarn to ensure this constraint violation is
7286 diagnosed. */
7287 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7288 pedwarn (loc, 0, "target format does not support infinity");
7290 real_inf (&real);
7291 return build_real (type, real);
7294 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7295 NULL_TREE if no simplification can be made. */
7297 static tree
7298 fold_builtin_sincos (location_t loc,
7299 tree arg0, tree arg1, tree arg2)
7301 tree type;
7302 tree fndecl, call = NULL_TREE;
7304 if (!validate_arg (arg0, REAL_TYPE)
7305 || !validate_arg (arg1, POINTER_TYPE)
7306 || !validate_arg (arg2, POINTER_TYPE))
7307 return NULL_TREE;
7309 type = TREE_TYPE (arg0);
7311 /* Calculate the result when the argument is a constant. */
7312 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7313 if (fn == END_BUILTINS)
7314 return NULL_TREE;
7316 /* Canonicalize sincos to cexpi. */
7317 if (TREE_CODE (arg0) == REAL_CST)
7319 tree complex_type = build_complex_type (type);
7320 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7322 if (!call)
7324 if (!targetm.libc_has_function (function_c99_math_complex)
7325 || !builtin_decl_implicit_p (fn))
7326 return NULL_TREE;
7327 fndecl = builtin_decl_explicit (fn);
7328 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7329 call = builtin_save_expr (call);
7332 return build2 (COMPOUND_EXPR, void_type_node,
7333 build2 (MODIFY_EXPR, void_type_node,
7334 build_fold_indirect_ref_loc (loc, arg1),
7335 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7336 build2 (MODIFY_EXPR, void_type_node,
7337 build_fold_indirect_ref_loc (loc, arg2),
7338 fold_build1_loc (loc, REALPART_EXPR, type, call)));
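/* Sketch of the sincos canonicalization above, not part of the original
   source.  A call sincos (x, &s, &c) is rewritten roughly as

     t = cexpi (x);     // cos (x) + i*sin (x), possibly a folded constant
     s = __imag__ t;
     c = __real__ t;

   i.e. the COMPOUND_EXPR stores the imaginary part through ARG1 and the
   real part through ARG2.  */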
7341 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7342 Return NULL_TREE if no simplification can be made. */
7344 static tree
7345 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7347 if (!validate_arg (arg1, POINTER_TYPE)
7348 || !validate_arg (arg2, POINTER_TYPE)
7349 || !validate_arg (len, INTEGER_TYPE))
7350 return NULL_TREE;
7352 /* If the LEN parameter is zero, return zero. */
7353 if (integer_zerop (len))
7354 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7355 arg1, arg2);
7357 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7358 if (operand_equal_p (arg1, arg2, 0))
7359 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7361 /* If the len parameter is one, return an expression corresponding to
7362 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7363 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7365 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7366 tree cst_uchar_ptr_node
7367 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7369 tree ind1
7370 = fold_convert_loc (loc, integer_type_node,
7371 build1 (INDIRECT_REF, cst_uchar_node,
7372 fold_convert_loc (loc,
7373 cst_uchar_ptr_node,
7374 arg1)));
7375 tree ind2
7376 = fold_convert_loc (loc, integer_type_node,
7377 build1 (INDIRECT_REF, cst_uchar_node,
7378 fold_convert_loc (loc,
7379 cst_uchar_ptr_node,
7380 arg2)));
7381 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7384 return NULL_TREE;
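/* Illustrative results of the memcmp folding above, not part of the
   original source:

     memcmp (p, q, 0)  -> 0, with p and q still evaluated for side effects
     memcmp (p, p, n)  -> 0, with n still evaluated
     memcmp (p, q, 1)  -> *(const unsigned char *) p
                          - *(const unsigned char *) q  */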
7387 /* Fold a call to builtin isascii with argument ARG. */
7389 static tree
7390 fold_builtin_isascii (location_t loc, tree arg)
7392 if (!validate_arg (arg, INTEGER_TYPE))
7393 return NULL_TREE;
7394 else
7396 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7397 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7398 build_int_cst (integer_type_node,
7399 ~ (unsigned HOST_WIDE_INT) 0x7f));
7400 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7401 arg, integer_zero_node);
7405 /* Fold a call to builtin toascii with argument ARG. */
7407 static tree
7408 fold_builtin_toascii (location_t loc, tree arg)
7410 if (!validate_arg (arg, INTEGER_TYPE))
7411 return NULL_TREE;
7413 /* Transform toascii(c) -> (c & 0x7f). */
7414 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7415 build_int_cst (integer_type_node, 0x7f));
7418 /* Fold a call to builtin isdigit with argument ARG. */
7420 static tree
7421 fold_builtin_isdigit (location_t loc, tree arg)
7423 if (!validate_arg (arg, INTEGER_TYPE))
7424 return NULL_TREE;
7425 else
7427 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7428 /* According to the C standard, isdigit is unaffected by locale.
7429 However, it definitely is affected by the target character set. */
7430 unsigned HOST_WIDE_INT target_digit0
7431 = lang_hooks.to_target_charset ('0');
7433 if (target_digit0 == 0)
7434 return NULL_TREE;
7436 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7437 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7438 build_int_cst (unsigned_type_node, target_digit0));
7439 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7440 build_int_cst (unsigned_type_node, 9));
7444 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7446 static tree
7447 fold_builtin_fabs (location_t loc, tree arg, tree type)
7449 if (!validate_arg (arg, REAL_TYPE))
7450 return NULL_TREE;
7452 arg = fold_convert_loc (loc, type, arg);
7453 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7456 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7458 static tree
7459 fold_builtin_abs (location_t loc, tree arg, tree type)
7461 if (!validate_arg (arg, INTEGER_TYPE))
7462 return NULL_TREE;
7464 arg = fold_convert_loc (loc, type, arg);
7465 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7468 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7470 static tree
7471 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7473 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7474 if (validate_arg (arg0, REAL_TYPE)
7475 && validate_arg (arg1, REAL_TYPE)
7476 && validate_arg (arg2, REAL_TYPE)
7477 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7478 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7480 return NULL_TREE;
7483 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7485 static tree
7486 fold_builtin_carg (location_t loc, tree arg, tree type)
7488 if (validate_arg (arg, COMPLEX_TYPE)
7489 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7491 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7493 if (atan2_fn)
7495 tree new_arg = builtin_save_expr (arg);
7496 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7497 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7498 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7502 return NULL_TREE;
7505 /* Fold a call to builtin frexp; we can assume the base is 2. */
7507 static tree
7508 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7510 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7511 return NULL_TREE;
7513 STRIP_NOPS (arg0);
7515 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7516 return NULL_TREE;
7518 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7520 /* Proceed if a valid pointer type was passed in. */
7521 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7523 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7524 tree frac, exp;
7526 switch (value->cl)
7528 case rvc_zero:
7529 /* For +-0, return (*exp = 0, +-0). */
7530 exp = integer_zero_node;
7531 frac = arg0;
7532 break;
7533 case rvc_nan:
7534 case rvc_inf:
7535 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7536 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7537 case rvc_normal:
7539 /* Since the frexp function always expects base 2, and in
7540 GCC normalized significands are already in the range
7541 [0.5, 1.0), we have exactly what frexp wants. */
7542 REAL_VALUE_TYPE frac_rvt = *value;
7543 SET_REAL_EXP (&frac_rvt, 0);
7544 frac = build_real (rettype, frac_rvt);
7545 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7547 break;
7548 default:
7549 gcc_unreachable ();
7552 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7553 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7554 TREE_SIDE_EFFECTS (arg1) = 1;
7555 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7558 return NULL_TREE;
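/* A worked example of the rvc_normal case above, not part of the original
   source: for frexp (6.0, &e) the normalized significand is 0.75 and the
   exponent is 3, since 6.0 = 0.75 * 2^3, so the call folds to the pair
   (*e = 3, 0.75) with the fraction in [0.5, 1.0) as frexp requires.  */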
7561 /* Fold a call to builtin modf. */
7563 static tree
7564 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7566 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7567 return NULL_TREE;
7569 STRIP_NOPS (arg0);
7571 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7572 return NULL_TREE;
7574 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7576 /* Proceed if a valid pointer type was passed in. */
7577 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7579 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7580 REAL_VALUE_TYPE trunc, frac;
7582 switch (value->cl)
7584 case rvc_nan:
7585 case rvc_zero:
7586 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7587 trunc = frac = *value;
7588 break;
7589 case rvc_inf:
7590 /* For +-Inf, return (*arg1 = arg0, +-0). */
7591 frac = dconst0;
7592 frac.sign = value->sign;
7593 trunc = *value;
7594 break;
7595 case rvc_normal:
7596 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7597 real_trunc (&trunc, VOIDmode, value);
7598 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7599 /* If the original number was negative and already
7600 integral, then the fractional part is -0.0. */
7601 if (value->sign && frac.cl == rvc_zero)
7602 frac.sign = value->sign;
7603 break;
7606 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7607 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7608 build_real (rettype, trunc));
7609 TREE_SIDE_EFFECTS (arg1) = 1;
7610 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7611 build_real (rettype, frac));
7614 return NULL_TREE;
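/* Worked examples of the modf cases above, not part of the original
   source:

     modf (2.5, &ip)       -> (*ip = 2.0, 0.5)        rvc_normal
     modf (-3.0, &ip)      -> (*ip = -3.0, -0.0)      negative, integral
     modf (INFINITY, &ip)  -> (*ip = INFINITY, 0.0)   rvc_inf  */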
7617 /* Given a location LOC, an interclass builtin function decl FNDECL
7618 and its single argument ARG, return a folded expression computing
7619 the same, or NULL_TREE if we either couldn't or didn't want to fold
7620 (the latter happens if there's an RTL instruction available). */
7622 static tree
7623 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7625 machine_mode mode;
7627 if (!validate_arg (arg, REAL_TYPE))
7628 return NULL_TREE;
7630 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7631 return NULL_TREE;
7633 mode = TYPE_MODE (TREE_TYPE (arg));
7635 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7637 /* If there is no optab, try generic code. */
7638 switch (DECL_FUNCTION_CODE (fndecl))
7640 tree result;
7642 CASE_FLT_FN (BUILT_IN_ISINF):
7644 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7645 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7646 tree type = TREE_TYPE (arg);
7647 REAL_VALUE_TYPE r;
7648 char buf[128];
7650 if (is_ibm_extended)
7652 /* NaN and Inf are encoded in the high-order double value
7653 only. The low-order value is not significant. */
7654 type = double_type_node;
7655 mode = DFmode;
7656 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7658 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7659 real_from_string (&r, buf);
7660 result = build_call_expr (isgr_fn, 2,
7661 fold_build1_loc (loc, ABS_EXPR, type, arg),
7662 build_real (type, r));
7663 return result;
7665 CASE_FLT_FN (BUILT_IN_FINITE):
7666 case BUILT_IN_ISFINITE:
7668 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7669 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7670 tree type = TREE_TYPE (arg);
7671 REAL_VALUE_TYPE r;
7672 char buf[128];
7674 if (is_ibm_extended)
7676 /* NaN and Inf are encoded in the high-order double value
7677 only. The low-order value is not significant. */
7678 type = double_type_node;
7679 mode = DFmode;
7680 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7682 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7683 real_from_string (&r, buf);
7684 result = build_call_expr (isle_fn, 2,
7685 fold_build1_loc (loc, ABS_EXPR, type, arg),
7686 build_real (type, r));
7687 /*result = fold_build2_loc (loc, UNGT_EXPR,
7688 TREE_TYPE (TREE_TYPE (fndecl)),
7689 fold_build1_loc (loc, ABS_EXPR, type, arg),
7690 build_real (type, r));
7691 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7692 TREE_TYPE (TREE_TYPE (fndecl)),
7693 result);*/
7694 return result;
7696 case BUILT_IN_ISNORMAL:
7698 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7699 islessequal(fabs(x),DBL_MAX). */
7700 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7701 tree type = TREE_TYPE (arg);
7702 tree orig_arg, max_exp, min_exp;
7703 machine_mode orig_mode = mode;
7704 REAL_VALUE_TYPE rmax, rmin;
7705 char buf[128];
7707 orig_arg = arg = builtin_save_expr (arg);
7708 if (is_ibm_extended)
7710 /* Use double to test the normal range of IBM extended
7711 precision. Emin for IBM extended precision is
7712 different to emin for IEEE double, being 53 higher
7713 since the low double exponent is at least 53 lower
7714 than the high double exponent. */
7715 type = double_type_node;
7716 mode = DFmode;
7717 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7719 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7721 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7722 real_from_string (&rmax, buf);
7723 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7724 real_from_string (&rmin, buf);
7725 max_exp = build_real (type, rmax);
7726 min_exp = build_real (type, rmin);
7728 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7729 if (is_ibm_extended)
7731 /* Testing the high end of the range is done just using
7732 the high double, using the same test as isfinite().
7733 For the subnormal end of the range we first test the
7734 high double, then if its magnitude is equal to the
7735 limit of 0x1p-969, we test whether the low double is
7736 non-zero and opposite sign to the high double. */
7737 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7738 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7739 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7740 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7741 arg, min_exp);
7742 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7743 complex_double_type_node, orig_arg);
7744 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7745 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7746 tree zero = build_real (type, dconst0);
7747 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7748 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7749 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7750 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7751 fold_build3 (COND_EXPR,
7752 integer_type_node,
7753 hilt, logt, lolt));
7754 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7755 eq_min, ok_lo);
7756 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7757 gt_min, eq_min);
7759 else
7761 tree const isge_fn
7762 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7763 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7765 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7766 max_exp, min_exp);
7767 return result;
7769 default:
7770 break;
7773 return NULL_TREE;
7776 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7777 ARG is the argument for the call. */
7779 static tree
7780 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7782 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7784 if (!validate_arg (arg, REAL_TYPE))
7785 return NULL_TREE;
7787 switch (builtin_index)
7789 case BUILT_IN_ISINF:
7790 if (!HONOR_INFINITIES (arg))
7791 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7793 return NULL_TREE;
7795 case BUILT_IN_ISINF_SIGN:
7797 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7798 /* In a boolean context, GCC will fold the inner COND_EXPR to
7799 1. So e.g. "if (isinf_sign(x))" would be folded to just
7800 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7801 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7802 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7803 tree tmp = NULL_TREE;
7805 arg = builtin_save_expr (arg);
7807 if (signbit_fn && isinf_fn)
7809 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7810 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7812 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7813 signbit_call, integer_zero_node);
7814 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7815 isinf_call, integer_zero_node);
7817 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7818 integer_minus_one_node, integer_one_node);
7819 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7820 isinf_call, tmp,
7821 integer_zero_node);
7824 return tmp;
7827 case BUILT_IN_ISFINITE:
7828 if (!HONOR_NANS (arg)
7829 && !HONOR_INFINITIES (arg))
7830 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7832 return NULL_TREE;
7834 case BUILT_IN_ISNAN:
7835 if (!HONOR_NANS (arg))
7836 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7839 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7840 if (is_ibm_extended)
7842 /* NaN and Inf are encoded in the high-order double value
7843 only. The low-order value is not significant. */
7844 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7847 arg = builtin_save_expr (arg);
7848 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7850 default:
7851 gcc_unreachable ();
7855 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7856 This builtin will generate code to return the appropriate floating
7857 point classification depending on the value of the floating point
7858 number passed in. The possible return values must be supplied as
7859 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7860 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7861 one floating point argument which is "type generic". */
7863 static tree
7864 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7866 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7867 arg, type, res, tmp;
7868 machine_mode mode;
7869 REAL_VALUE_TYPE r;
7870 char buf[128];
7872 /* Verify the required arguments in the original call. */
7873 if (nargs != 6
7874 || !validate_arg (args[0], INTEGER_TYPE)
7875 || !validate_arg (args[1], INTEGER_TYPE)
7876 || !validate_arg (args[2], INTEGER_TYPE)
7877 || !validate_arg (args[3], INTEGER_TYPE)
7878 || !validate_arg (args[4], INTEGER_TYPE)
7879 || !validate_arg (args[5], REAL_TYPE))
7880 return NULL_TREE;
7882 fp_nan = args[0];
7883 fp_infinite = args[1];
7884 fp_normal = args[2];
7885 fp_subnormal = args[3];
7886 fp_zero = args[4];
7887 arg = args[5];
7888 type = TREE_TYPE (arg);
7889 mode = TYPE_MODE (type);
7890 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7892 /* fpclassify(x) ->
7893 isnan(x) ? FP_NAN :
7894 (fabs(x) == Inf ? FP_INFINITE :
7895 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7896 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7898 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7899 build_real (type, dconst0));
7900 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7901 tmp, fp_zero, fp_subnormal);
7903 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7904 real_from_string (&r, buf);
7905 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7906 arg, build_real (type, r));
7907 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7909 if (HONOR_INFINITIES (mode))
7911 real_inf (&r);
7912 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7913 build_real (type, r));
7914 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7915 fp_infinite, res);
7918 if (HONOR_NANS (mode))
7920 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7921 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7924 return res;
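/* Usage sketch, not part of the original source: a <math.h>
   implementation is expected to pass the classification macros in the
   documented order, e.g.

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   which the folding above expands into the nested COND_EXPR chain shown
   in the fpclassify(x) comment.  */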
7927 /* Fold a call to an unordered comparison function such as
7928 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7929 being called and ARG0 and ARG1 are the arguments for the call.
7930 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7931 the opposite of the desired result. UNORDERED_CODE is used
7932 for modes that can hold NaNs and ORDERED_CODE is used for
7933 the rest. */
7935 static tree
7936 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7937 enum tree_code unordered_code,
7938 enum tree_code ordered_code)
7940 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7941 enum tree_code code;
7942 tree type0, type1;
7943 enum tree_code code0, code1;
7944 tree cmp_type = NULL_TREE;
7946 type0 = TREE_TYPE (arg0);
7947 type1 = TREE_TYPE (arg1);
7949 code0 = TREE_CODE (type0);
7950 code1 = TREE_CODE (type1);
7952 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7953 /* Choose the wider of two real types. */
7954 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7955 ? type0 : type1;
7956 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7957 cmp_type = type0;
7958 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7959 cmp_type = type1;
7961 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7962 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7964 if (unordered_code == UNORDERED_EXPR)
7966 if (!HONOR_NANS (arg0))
7967 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7968 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7971 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7972 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7973 fold_build2_loc (loc, code, type, arg0, arg1));
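/* Illustrative folding, not part of the original source: for
   isgreater (x, y) the caller passes UNLE_EXPR/LE_EXPR, so the call
   becomes roughly

     !(x <= y)         when NaNs need not be honored, or
     !UNLE (x, y)      otherwise, true only when ordered and x > y,

   matching the "opposite of the desired result" convention above.  */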
7976 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7977 arithmetic if it can never overflow, or into internal functions that
7978 return both the result of the arithmetic and a boolean overflow flag in
7979 a complex integer result, or some other check for overflow.
7980 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7981 checking part of that. */
7983 static tree
7984 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7985 tree arg0, tree arg1, tree arg2)
7987 enum internal_fn ifn = IFN_LAST;
7988 /* The code of the expression corresponding to the type-generic
7989 built-in, or ERROR_MARK for the type-specific ones. */
7990 enum tree_code opcode = ERROR_MARK;
7991 bool ovf_only = false;
7993 switch (fcode)
7995 case BUILT_IN_ADD_OVERFLOW_P:
7996 ovf_only = true;
7997 /* FALLTHRU */
7998 case BUILT_IN_ADD_OVERFLOW:
7999 opcode = PLUS_EXPR;
8000 /* FALLTHRU */
8001 case BUILT_IN_SADD_OVERFLOW:
8002 case BUILT_IN_SADDL_OVERFLOW:
8003 case BUILT_IN_SADDLL_OVERFLOW:
8004 case BUILT_IN_UADD_OVERFLOW:
8005 case BUILT_IN_UADDL_OVERFLOW:
8006 case BUILT_IN_UADDLL_OVERFLOW:
8007 ifn = IFN_ADD_OVERFLOW;
8008 break;
8009 case BUILT_IN_SUB_OVERFLOW_P:
8010 ovf_only = true;
8011 /* FALLTHRU */
8012 case BUILT_IN_SUB_OVERFLOW:
8013 opcode = MINUS_EXPR;
8014 /* FALLTHRU */
8015 case BUILT_IN_SSUB_OVERFLOW:
8016 case BUILT_IN_SSUBL_OVERFLOW:
8017 case BUILT_IN_SSUBLL_OVERFLOW:
8018 case BUILT_IN_USUB_OVERFLOW:
8019 case BUILT_IN_USUBL_OVERFLOW:
8020 case BUILT_IN_USUBLL_OVERFLOW:
8021 ifn = IFN_SUB_OVERFLOW;
8022 break;
8023 case BUILT_IN_MUL_OVERFLOW_P:
8024 ovf_only = true;
8025 /* FALLTHRU */
8026 case BUILT_IN_MUL_OVERFLOW:
8027 opcode = MULT_EXPR;
8028 /* FALLTHRU */
8029 case BUILT_IN_SMUL_OVERFLOW:
8030 case BUILT_IN_SMULL_OVERFLOW:
8031 case BUILT_IN_SMULLL_OVERFLOW:
8032 case BUILT_IN_UMUL_OVERFLOW:
8033 case BUILT_IN_UMULL_OVERFLOW:
8034 case BUILT_IN_UMULLL_OVERFLOW:
8035 ifn = IFN_MUL_OVERFLOW;
8036 break;
8037 default:
8038 gcc_unreachable ();
8041 /* For the "generic" overloads, the first two arguments can have different
8042 types and the last argument determines the target type to use to check
8043 for overflow. The arguments of the other overloads all have the same
8044 type. */
8045 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8047 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8048 arguments are constant, attempt to fold the built-in call into a constant
8049 expression indicating whether or not it detected an overflow. */
8050 if (ovf_only
8051 && TREE_CODE (arg0) == INTEGER_CST
8052 && TREE_CODE (arg1) == INTEGER_CST)
8053 /* Perform the computation in the target type and check for overflow. */
8054 return omit_one_operand_loc (loc, boolean_type_node,
8055 arith_overflowed_p (opcode, type, arg0, arg1)
8056 ? boolean_true_node : boolean_false_node,
8057 arg2);
8059 tree ctype = build_complex_type (type);
8060 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8061 2, arg0, arg1);
8062 tree tgt = save_expr (call);
8063 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8064 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8065 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8067 if (ovf_only)
8068 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8070 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8071 tree store
8072 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8073 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
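/* Sketch of the resulting folds, not part of the original source:

     __builtin_add_overflow (a, b, &r)
       -> c = .ADD_OVERFLOW (a, b); r = REALPART (c); IMAGPART (c) != 0

     __builtin_add_overflow_p (1, 2, (int) 0)
       -> false, constant-folded via arith_overflowed_p

   where .ADD_OVERFLOW stands for the IFN_ADD_OVERFLOW internal call whose
   complex integer result packs the value and the overflow flag.  */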
8076 /* Fold a call to __builtin_FILE to a constant string. */
8078 static inline tree
8079 fold_builtin_FILE (location_t loc)
8081 if (const char *fname = LOCATION_FILE (loc))
8082 return build_string_literal (strlen (fname) + 1, fname);
8084 return build_string_literal (1, "");
8087 /* Fold a call to __builtin_FUNCTION to a constant string. */
8089 static inline tree
8090 fold_builtin_FUNCTION ()
8092 if (current_function_decl)
8094 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8095 return build_string_literal (strlen (name) + 1, name);
8098 return build_string_literal (1, "");
8101 /* Fold a call to __builtin_LINE to an integer constant. */
8103 static inline tree
8104 fold_builtin_LINE (location_t loc, tree type)
8106 return build_int_cst (type, LOCATION_LINE (loc));
8109 /* Fold a call to built-in function FNDECL with 0 arguments.
8110 This function returns NULL_TREE if no simplification was possible. */
8112 static tree
8113 fold_builtin_0 (location_t loc, tree fndecl)
8115 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8116 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8117 switch (fcode)
8119 case BUILT_IN_FILE:
8120 return fold_builtin_FILE (loc);
8122 case BUILT_IN_FUNCTION:
8123 return fold_builtin_FUNCTION ();
8125 case BUILT_IN_LINE:
8126 return fold_builtin_LINE (loc, type);
8128 CASE_FLT_FN (BUILT_IN_INF):
8129 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8130 case BUILT_IN_INFD32:
8131 case BUILT_IN_INFD64:
8132 case BUILT_IN_INFD128:
8133 return fold_builtin_inf (loc, type, true);
8135 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8136 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8137 return fold_builtin_inf (loc, type, false);
8139 case BUILT_IN_CLASSIFY_TYPE:
8140 return fold_builtin_classify_type (NULL_TREE);
8142 default:
8143 break;
8145 return NULL_TREE;
8148 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8149 This function returns NULL_TREE if no simplification was possible. */
8151 static tree
8152 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8154 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8155 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8157 if (TREE_CODE (arg0) == ERROR_MARK)
8158 return NULL_TREE;
8160 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8161 return ret;
8163 switch (fcode)
8165 case BUILT_IN_CONSTANT_P:
8167 tree val = fold_builtin_constant_p (arg0);
8169 /* Gimplification will pull the CALL_EXPR for the builtin out of
8170 an if condition. When not optimizing, we'll not CSE it back.
8171 To avoid regressions such as link errors, return false now. */
8172 if (!val && !optimize)
8173 val = integer_zero_node;
8175 return val;
8178 case BUILT_IN_CLASSIFY_TYPE:
8179 return fold_builtin_classify_type (arg0);
8181 case BUILT_IN_STRLEN:
8182 return fold_builtin_strlen (loc, type, arg0);
8184 CASE_FLT_FN (BUILT_IN_FABS):
8185 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8186 case BUILT_IN_FABSD32:
8187 case BUILT_IN_FABSD64:
8188 case BUILT_IN_FABSD128:
8189 return fold_builtin_fabs (loc, arg0, type);
8191 case BUILT_IN_ABS:
8192 case BUILT_IN_LABS:
8193 case BUILT_IN_LLABS:
8194 case BUILT_IN_IMAXABS:
8195 return fold_builtin_abs (loc, arg0, type);
8197 CASE_FLT_FN (BUILT_IN_CONJ):
8198 if (validate_arg (arg0, COMPLEX_TYPE)
8199 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8200 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8201 break;
8203 CASE_FLT_FN (BUILT_IN_CREAL):
8204 if (validate_arg (arg0, COMPLEX_TYPE)
8205 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8206 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8207 break;
8209 CASE_FLT_FN (BUILT_IN_CIMAG):
8210 if (validate_arg (arg0, COMPLEX_TYPE)
8211 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8212 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8213 break;
8215 CASE_FLT_FN (BUILT_IN_CARG):
8216 return fold_builtin_carg (loc, arg0, type);
8218 case BUILT_IN_ISASCII:
8219 return fold_builtin_isascii (loc, arg0);
8221 case BUILT_IN_TOASCII:
8222 return fold_builtin_toascii (loc, arg0);
8224 case BUILT_IN_ISDIGIT:
8225 return fold_builtin_isdigit (loc, arg0);
8227 CASE_FLT_FN (BUILT_IN_FINITE):
8228 case BUILT_IN_FINITED32:
8229 case BUILT_IN_FINITED64:
8230 case BUILT_IN_FINITED128:
8231 case BUILT_IN_ISFINITE:
8233 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8234 if (ret)
8235 return ret;
8236 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8239 CASE_FLT_FN (BUILT_IN_ISINF):
8240 case BUILT_IN_ISINFD32:
8241 case BUILT_IN_ISINFD64:
8242 case BUILT_IN_ISINFD128:
8244 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8245 if (ret)
8246 return ret;
8247 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8250 case BUILT_IN_ISNORMAL:
8251 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8253 case BUILT_IN_ISINF_SIGN:
8254 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8256 CASE_FLT_FN (BUILT_IN_ISNAN):
8257 case BUILT_IN_ISNAND32:
8258 case BUILT_IN_ISNAND64:
8259 case BUILT_IN_ISNAND128:
8260 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8262 case BUILT_IN_FREE:
8263 if (integer_zerop (arg0))
8264 return build_empty_stmt (loc);
8265 break;
8267 default:
8268 break;
8271 return NULL_TREE;
8275 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8276 This function returns NULL_TREE if no simplification was possible. */
8278 static tree
8279 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8281 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8282 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8284 if (TREE_CODE (arg0) == ERROR_MARK
8285 || TREE_CODE (arg1) == ERROR_MARK)
8286 return NULL_TREE;
8288 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8289 return ret;
8291 switch (fcode)
8293 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8294 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8295 if (validate_arg (arg0, REAL_TYPE)
8296 && validate_arg (arg1, POINTER_TYPE))
8297 return do_mpfr_lgamma_r (arg0, arg1, type);
8298 break;
8300 CASE_FLT_FN (BUILT_IN_FREXP):
8301 return fold_builtin_frexp (loc, arg0, arg1, type);
8303 CASE_FLT_FN (BUILT_IN_MODF):
8304 return fold_builtin_modf (loc, arg0, arg1, type);
8306 case BUILT_IN_STRSTR:
8307 return fold_builtin_strstr (loc, arg0, arg1, type);
8309 case BUILT_IN_STRSPN:
8310 return fold_builtin_strspn (loc, arg0, arg1);
8312 case BUILT_IN_STRCSPN:
8313 return fold_builtin_strcspn (loc, arg0, arg1);
8315 case BUILT_IN_STRPBRK:
8316 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8318 case BUILT_IN_EXPECT:
8319 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8321 case BUILT_IN_ISGREATER:
8322 return fold_builtin_unordered_cmp (loc, fndecl,
8323 arg0, arg1, UNLE_EXPR, LE_EXPR);
8324 case BUILT_IN_ISGREATEREQUAL:
8325 return fold_builtin_unordered_cmp (loc, fndecl,
8326 arg0, arg1, UNLT_EXPR, LT_EXPR);
8327 case BUILT_IN_ISLESS:
8328 return fold_builtin_unordered_cmp (loc, fndecl,
8329 arg0, arg1, UNGE_EXPR, GE_EXPR);
8330 case BUILT_IN_ISLESSEQUAL:
8331 return fold_builtin_unordered_cmp (loc, fndecl,
8332 arg0, arg1, UNGT_EXPR, GT_EXPR);
8333 case BUILT_IN_ISLESSGREATER:
8334 return fold_builtin_unordered_cmp (loc, fndecl,
8335 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8336 case BUILT_IN_ISUNORDERED:
8337 return fold_builtin_unordered_cmp (loc, fndecl,
8338 arg0, arg1, UNORDERED_EXPR,
8339 NOP_EXPR);
8341 /* We do the folding for va_start in the expander. */
8342 case BUILT_IN_VA_START:
8343 break;
8345 case BUILT_IN_OBJECT_SIZE:
8346 return fold_builtin_object_size (arg0, arg1);
8348 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8349 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8351 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8352 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8354 default:
8355 break;
8357 return NULL_TREE;
8360 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8361 and ARG2.
8362 This function returns NULL_TREE if no simplification was possible. */
8364 static tree
8365 fold_builtin_3 (location_t loc, tree fndecl,
8366 tree arg0, tree arg1, tree arg2)
8368 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8369 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8371 if (TREE_CODE (arg0) == ERROR_MARK
8372 || TREE_CODE (arg1) == ERROR_MARK
8373 || TREE_CODE (arg2) == ERROR_MARK)
8374 return NULL_TREE;
8376 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8377 arg0, arg1, arg2))
8378 return ret;
8380 switch (fcode)
8383 CASE_FLT_FN (BUILT_IN_SINCOS):
8384 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8386 CASE_FLT_FN (BUILT_IN_FMA):
8387 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8389 CASE_FLT_FN (BUILT_IN_REMQUO):
8390 if (validate_arg (arg0, REAL_TYPE)
8391 && validate_arg (arg1, REAL_TYPE)
8392 && validate_arg (arg2, POINTER_TYPE))
8393 return do_mpfr_remquo (arg0, arg1, arg2);
8394 break;
8396 case BUILT_IN_BCMP:
8397 case BUILT_IN_MEMCMP:
8398 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8400 case BUILT_IN_EXPECT:
8401 return fold_builtin_expect (loc, arg0, arg1, arg2);
8403 case BUILT_IN_ADD_OVERFLOW:
8404 case BUILT_IN_SUB_OVERFLOW:
8405 case BUILT_IN_MUL_OVERFLOW:
8406 case BUILT_IN_ADD_OVERFLOW_P:
8407 case BUILT_IN_SUB_OVERFLOW_P:
8408 case BUILT_IN_MUL_OVERFLOW_P:
8409 case BUILT_IN_SADD_OVERFLOW:
8410 case BUILT_IN_SADDL_OVERFLOW:
8411 case BUILT_IN_SADDLL_OVERFLOW:
8412 case BUILT_IN_SSUB_OVERFLOW:
8413 case BUILT_IN_SSUBL_OVERFLOW:
8414 case BUILT_IN_SSUBLL_OVERFLOW:
8415 case BUILT_IN_SMUL_OVERFLOW:
8416 case BUILT_IN_SMULL_OVERFLOW:
8417 case BUILT_IN_SMULLL_OVERFLOW:
8418 case BUILT_IN_UADD_OVERFLOW:
8419 case BUILT_IN_UADDL_OVERFLOW:
8420 case BUILT_IN_UADDLL_OVERFLOW:
8421 case BUILT_IN_USUB_OVERFLOW:
8422 case BUILT_IN_USUBL_OVERFLOW:
8423 case BUILT_IN_USUBLL_OVERFLOW:
8424 case BUILT_IN_UMUL_OVERFLOW:
8425 case BUILT_IN_UMULL_OVERFLOW:
8426 case BUILT_IN_UMULLL_OVERFLOW:
8427 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8429 default:
8430 break;
8432 return NULL_TREE;
8435 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8436 arguments. IGNORE is true if the result of the
8437 function call is ignored. This function returns NULL_TREE if no
8438 simplification was possible. */
8440 tree
8441 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8443 tree ret = NULL_TREE;
8445 switch (nargs)
8447 case 0:
8448 ret = fold_builtin_0 (loc, fndecl);
8449 break;
8450 case 1:
8451 ret = fold_builtin_1 (loc, fndecl, args[0]);
8452 break;
8453 case 2:
8454 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8455 break;
8456 case 3:
8457 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8458 break;
8459 default:
8460 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8461 break;
8463 if (ret)
8465 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8466 SET_EXPR_LOCATION (ret, loc);
8467 TREE_NO_WARNING (ret) = 1;
8468 return ret;
8470 return NULL_TREE;
8473 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8474 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8475 of arguments in ARGS to be omitted. OLDNARGS is the number of
8476 elements in ARGS. */
8478 static tree
8479 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8480 int skip, tree fndecl, int n, va_list newargs)
8482 int nargs = oldnargs - skip + n;
8483 tree *buffer;
8485 if (n > 0)
8487 int i, j;
8489 buffer = XALLOCAVEC (tree, nargs);
8490 for (i = 0; i < n; i++)
8491 buffer[i] = va_arg (newargs, tree);
8492 for (j = skip; j < oldnargs; j++, i++)
8493 buffer[i] = args[j];
8495 else
8496 buffer = args + skip;
8498 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8501 /* Return true if FNDECL shouldn't be folded right now.
8502 If a built-in function has an inline attribute always_inline
8503 wrapper, defer folding it until after always_inline functions have
8504 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8505 might not be performed. */
8507 bool
8508 avoid_folding_inline_builtin (tree fndecl)
8510 return (DECL_DECLARED_INLINE_P (fndecl)
8511 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8512 && cfun
8513 && !cfun->always_inline_functions_inlined
8514 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8517 /* A wrapper function for builtin folding that prevents warnings for
8518 "statement without effect" and the like, caused by removing the
8519 call node earlier than the warning is generated. */
8521 tree
8522 fold_call_expr (location_t loc, tree exp, bool ignore)
8524 tree ret = NULL_TREE;
8525 tree fndecl = get_callee_fndecl (exp);
8526 if (fndecl
8527 && TREE_CODE (fndecl) == FUNCTION_DECL
8528 && DECL_BUILT_IN (fndecl)
8529 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8530 yet. Defer folding until we see all the arguments
8531 (after inlining). */
8532 && !CALL_EXPR_VA_ARG_PACK (exp))
8534 int nargs = call_expr_nargs (exp);
8536 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8537 instead last argument is __builtin_va_arg_pack (). Defer folding
8538 even in that case, until arguments are finalized. */
8539 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8541 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8542 if (fndecl2
8543 && TREE_CODE (fndecl2) == FUNCTION_DECL
8544 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8545 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8546 return NULL_TREE;
8549 if (avoid_folding_inline_builtin (fndecl))
8550 return NULL_TREE;
8552 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8553 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8554 CALL_EXPR_ARGP (exp), ignore);
8555 else
8557 tree *args = CALL_EXPR_ARGP (exp);
8558 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8559 if (ret)
8560 return ret;
8563 return NULL_TREE;
8566 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8567 N arguments are passed in the array ARGARRAY. Return a folded
8568 expression or NULL_TREE if no simplification was possible. */
8570 tree
8571 fold_builtin_call_array (location_t loc, tree,
8572 tree fn,
8573 int n,
8574 tree *argarray)
8576 if (TREE_CODE (fn) != ADDR_EXPR)
8577 return NULL_TREE;
8579 tree fndecl = TREE_OPERAND (fn, 0);
8580 if (TREE_CODE (fndecl) == FUNCTION_DECL
8581 && DECL_BUILT_IN (fndecl))
8583 /* If last argument is __builtin_va_arg_pack (), arguments to this
8584 function are not finalized yet. Defer folding until they are. */
8585 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8587 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8588 if (fndecl2
8589 && TREE_CODE (fndecl2) == FUNCTION_DECL
8590 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8591 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8592 return NULL_TREE;
8594 if (avoid_folding_inline_builtin (fndecl))
8595 return NULL_TREE;
8596 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8597 return targetm.fold_builtin (fndecl, n, argarray, false);
8598 else
8599 return fold_builtin_n (loc, fndecl, argarray, n, false);
8602 return NULL_TREE;
8605 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8606 along with N new arguments specified as the "..." parameters. SKIP
8607 is the number of arguments in EXP to be omitted. This function is used
8608 to do varargs-to-varargs transformations. */
8610 static tree
8611 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8613 va_list ap;
8614 tree t;
8616 va_start (ap, n);
8617 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8618 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8619 va_end (ap);
8621 return t;
8624 /* Validate a single argument ARG against a tree code CODE representing
8625 a type. */
8627 static bool
8628 validate_arg (const_tree arg, enum tree_code code)
8630 if (!arg)
8631 return false;
8632 else if (code == POINTER_TYPE)
8633 return POINTER_TYPE_P (TREE_TYPE (arg));
8634 else if (code == INTEGER_TYPE)
8635 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8636 return code == TREE_CODE (TREE_TYPE (arg));
8639 /* This function validates the types of a function call argument list
8640 against a specified list of tree_codes. If the last specifier is a 0,
8641 that represents an ellipsis; otherwise the last specifier must be a
8642 VOID_TYPE.
8644 This is the GIMPLE version of validate_arglist. Eventually we want to
8645 completely convert builtins.c to work from GIMPLEs and the tree based
8646 validate_arglist will then be removed. */
8648 bool
8649 validate_gimple_arglist (const gcall *call, ...)
8651 enum tree_code code;
8652 bool res = 0;
8653 va_list ap;
8654 const_tree arg;
8655 size_t i;
8657 va_start (ap, call);
8658 i = 0;
8662 code = (enum tree_code) va_arg (ap, int);
8663 switch (code)
8665 case 0:
8666 /* This signifies an ellipsis; any further arguments are all ok. */
8667 res = true;
8668 goto end;
8669 case VOID_TYPE:
8670 /* This signifies an endlink: if no arguments remain, return
8671 true, otherwise return false. */
8672 res = (i == gimple_call_num_args (call));
8673 goto end;
8674 default:
8675 /* If no parameters remain or the parameter's code does not
8676 match the specified code, return false. Otherwise continue
8677 checking any remaining arguments. */
8678 arg = gimple_call_arg (call, i++);
8679 if (!validate_arg (arg, code))
8680 goto end;
8681 break;
8684 while (1);
8686 /* We need gotos here since we can only have one VA_CLOSE in a
8687 function. */
8688 end: ;
8689 va_end (ap);
8691 return res;
8694 /* Default target-specific builtin expander that does nothing. */
8696 rtx
8697 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8698 rtx target ATTRIBUTE_UNUSED,
8699 rtx subtarget ATTRIBUTE_UNUSED,
8700 machine_mode mode ATTRIBUTE_UNUSED,
8701 int ignore ATTRIBUTE_UNUSED)
8703 return NULL_RTX;
8706 /* Returns true if EXP represents data that would potentially reside
8707 in a readonly section. */
8709 bool
8710 readonly_data_expr (tree exp)
8712 STRIP_NOPS (exp);
8714 if (TREE_CODE (exp) != ADDR_EXPR)
8715 return false;
8717 exp = get_base_address (TREE_OPERAND (exp, 0));
8718 if (!exp)
8719 return false;
8721 /* Make sure we call decl_readonly_section only for trees it
8722 can handle (since it returns true for everything it doesn't
8723 understand). */
8724 if (TREE_CODE (exp) == STRING_CST
8725 || TREE_CODE (exp) == CONSTRUCTOR
8726 || (VAR_P (exp) && TREE_STATIC (exp)))
8727 return decl_readonly_section (exp, 0);
8728 else
8729 return false;
8732 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8733 to the call, and TYPE is its return type.
8735 Return NULL_TREE if no simplification was possible, otherwise return the
8736 simplified form of the call as a tree.
8738 The simplified form may be a constant or other expression which
8739 computes the same value, but in a more efficient manner (including
8740 calls to other builtin functions).
8742 The call may contain arguments which need to be evaluated, but
8743 which are not useful to determine the result of the call. In
8744 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8745 COMPOUND_EXPR will be an argument which must be evaluated.
8746 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8747 COMPOUND_EXPR in the chain will contain the tree for the simplified
8748 form of the builtin function call. */
8750 static tree
8751 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8753 if (!validate_arg (s1, POINTER_TYPE)
8754 || !validate_arg (s2, POINTER_TYPE))
8755 return NULL_TREE;
8756 else
8758 tree fn;
8759 const char *p1, *p2;
8761 p2 = c_getstr (s2);
8762 if (p2 == NULL)
8763 return NULL_TREE;
8765 p1 = c_getstr (s1);
8766 if (p1 != NULL)
8768 const char *r = strstr (p1, p2);
8769 tree tem;
8771 if (r == NULL)
8772 return build_int_cst (TREE_TYPE (s1), 0);
8774 /* Return an offset into the constant string argument. */
8775 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8776 return fold_convert_loc (loc, type, tem);
8779 /* The argument is const char *, and the result is char *, so we need
8780 a type conversion here to avoid a warning. */
8781 if (p2[0] == '\0')
8782 return fold_convert_loc (loc, type, s1);
8784 if (p2[1] != '\0')
8785 return NULL_TREE;
8787 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8788 if (!fn)
8789 return NULL_TREE;
8791 /* New argument list transforming strstr(s1, s2) to
8792 strchr(s1, s2[0]). */
8793 return build_call_expr_loc (loc, fn, 2, s1,
8794 build_int_cst (integer_type_node, p2[0]));
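/* Illustrative folds performed above, not part of the original source:

     strstr (s, "")         -> (char *) s
     strstr (s, "a")        -> strchr (s, 'a')
     strstr ("hello", "lo") -> a constant offset into the literal,
                               "hello" + 3  */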
8798 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8799 to the call, and TYPE is its return type.
8801 Return NULL_TREE if no simplification was possible, otherwise return the
8802 simplified form of the call as a tree.
8804 The simplified form may be a constant or other expression which
8805 computes the same value, but in a more efficient manner (including
8806 calls to other builtin functions).
8808 The call may contain arguments which need to be evaluated, but
8809 which are not useful to determine the result of the call. In
8810 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8811 COMPOUND_EXPR will be an argument which must be evaluated.
8812 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8813 COMPOUND_EXPR in the chain will contain the tree for the simplified
8814 form of the builtin function call. */
8816 static tree
8817 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8819 if (!validate_arg (s1, POINTER_TYPE)
8820 || !validate_arg (s2, POINTER_TYPE))
8821 return NULL_TREE;
8822 else
8824 tree fn;
8825 const char *p1, *p2;
8827 p2 = c_getstr (s2);
8828 if (p2 == NULL)
8829 return NULL_TREE;
8831 p1 = c_getstr (s1);
8832 if (p1 != NULL)
8834 const char *r = strpbrk (p1, p2);
8835 tree tem;
8837 if (r == NULL)
8838 return build_int_cst (TREE_TYPE (s1), 0);
8840 /* Return an offset into the constant string argument. */
8841 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8842 return fold_convert_loc (loc, type, tem);
8845 if (p2[0] == '\0')
8846 /* strpbrk(x, "") == NULL.
8847 Evaluate and ignore s1 in case it had side-effects. */
8848 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8850 if (p2[1] != '\0')
8851 return NULL_TREE; /* Really call strpbrk. */
8853 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8854 if (!fn)
8855 return NULL_TREE;
8857 /* New argument list transforming strpbrk(s1, s2) to
8858 strchr(s1, s2[0]). */
8859 return build_call_expr_loc (loc, fn, 2, s1,
8860 build_int_cst (integer_type_node, p2[0]));
8864 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8865 to the call.
8867 Return NULL_TREE if no simplification was possible, otherwise return the
8868 simplified form of the call as a tree.
8870 The simplified form may be a constant or other expression which
8871 computes the same value, but in a more efficient manner (including
8872 calls to other builtin functions).
8874 The call may contain arguments which need to be evaluated, but
8875 which are not useful to determine the result of the call. In
8876 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8877 COMPOUND_EXPR will be an argument which must be evaluated.
8878 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8879 COMPOUND_EXPR in the chain will contain the tree for the simplified
8880 form of the builtin function call. */
8882 static tree
8883 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8885 if (!validate_arg (s1, POINTER_TYPE)
8886 || !validate_arg (s2, POINTER_TYPE))
8887 return NULL_TREE;
8888 else
8890 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8892 /* If either argument is "", the result is zero. */
8893 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8894 /* Evaluate and ignore both arguments in case either one has
8895 side-effects. */
8896 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8897 s1, s2);
8898 return NULL_TREE;
8902 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8903 to the call.
8905 Return NULL_TREE if no simplification was possible, otherwise return the
8906 simplified form of the call as a tree.
8908 The simplified form may be a constant or other expression which
8909 computes the same value, but in a more efficient manner (including
8910 calls to other builtin functions).
8912 The call may contain arguments which need to be evaluated, but
8913 which are not useful to determine the result of the call. In
8914 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8915 COMPOUND_EXPR will be an argument which must be evaluated.
8916 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8917 COMPOUND_EXPR in the chain will contain the tree for the simplified
8918 form of the builtin function call. */
8920 static tree
8921 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8923 if (!validate_arg (s1, POINTER_TYPE)
8924 || !validate_arg (s2, POINTER_TYPE))
8925 return NULL_TREE;
8926 else
8928 /* If the first argument is "", the result is zero. */
8929 const char *p1 = c_getstr (s1);
8930 if (p1 && *p1 == '\0')
8932 /* Evaluate and ignore argument s2 in case it has
8933 side-effects. */
8934 return omit_one_operand_loc (loc, size_type_node,
8935 size_zero_node, s2);
8938 /* If the second argument is "", return __builtin_strlen(s1). */
8939 const char *p2 = c_getstr (s2);
8940 if (p2 && *p2 == '\0')
8942 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8944 /* If the replacement _DECL isn't initialized, don't do the
8945 transformation. */
8946 if (!fn)
8947 return NULL_TREE;
8949 return build_call_expr_loc (loc, fn, 1, s1);
8951 return NULL_TREE;
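/* Illustrative folds performed above, not part of the original source:

     strcspn ("", s)  -> 0, with s still evaluated for side effects
     strcspn (s, "")  -> strlen (s)

   anything else is left for the library call.  */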
8955 /* Fold the next_arg or va_start call EXP. Returns true if an error was
8956 produced, false otherwise. This is done so that we don't output the
8957 error or warning two or three times. */
8959 bool
8960 fold_builtin_next_arg (tree exp, bool va_start_p)
8962 tree fntype = TREE_TYPE (current_function_decl);
8963 int nargs = call_expr_nargs (exp);
8964 tree arg;
8965 /* There is a good chance the current input_location points inside the
8966 definition of the va_start macro (perhaps on the token for
8967 builtin) in a system header, so warnings will not be emitted.
8968 Use the location in real source code. */
8969 source_location current_location =
8970 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8971 NULL);
8973 if (!stdarg_p (fntype))
8975 error ("%<va_start%> used in function with fixed args");
8976 return true;
8979 if (va_start_p)
8981 if (va_start_p && (nargs != 2))
8983 error ("wrong number of arguments to function %<va_start%>");
8984 return true;
8986 arg = CALL_EXPR_ARG (exp, 1);
8988 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8989 when we checked the arguments and if needed issued a warning. */
8990 else
8992 if (nargs == 0)
8994 /* Evidently an out of date version of <stdarg.h>; can't validate
8995 va_start's second argument, but can still work as intended. */
8996 warning_at (current_location,
8997 OPT_Wvarargs,
8998 "%<__builtin_next_arg%> called without an argument");
8999 return true;
9001 else if (nargs > 1)
9003 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9004 return true;
9006 arg = CALL_EXPR_ARG (exp, 0);
9009 if (TREE_CODE (arg) == SSA_NAME)
9010 arg = SSA_NAME_VAR (arg);
9012 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9013 or __builtin_next_arg (0) the first time we see it, after checking
9014 the arguments and if needed issuing a warning. */
9015 if (!integer_zerop (arg))
9017 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9019 /* Strip off all nops for the sake of the comparison. This
9020 is not quite the same as STRIP_NOPS. It does more.
9021 We must also strip off INDIRECT_EXPR for C++ reference
9022 parameters. */
9023 while (CONVERT_EXPR_P (arg)
9024 || TREE_CODE (arg) == INDIRECT_REF)
9025 arg = TREE_OPERAND (arg, 0);
9026 if (arg != last_parm)
9028 /* FIXME: Sometimes with the tree optimizers we can end up with
9029 something other than the last argument even though the user used
9030 the last argument.  We just warn and set the arg to be the last
9031 argument so that we will get wrong code because of
9032 it. */
9033 warning_at (current_location,
9034 OPT_Wvarargs,
9035 "second parameter of %<va_start%> not last named argument");
9038 /* Undefined by C99 7.15.1.4p4 (va_start):
9039 "If the parameter parmN is declared with the register storage
9040 class, with a function or array type, or with a type that is
9041 not compatible with the type that results after application of
9042 the default argument promotions, the behavior is undefined."
9044 else if (DECL_REGISTER (arg))
9046 warning_at (current_location,
9047 OPT_Wvarargs,
9048 "undefined behavior when second parameter of "
9049 "%<va_start%> is declared with %<register%> storage");
9052 /* We want to verify the second parameter just once before the tree
9053 optimizers are run and then avoid keeping it in the tree,
9054 as otherwise we could warn even for correct code like:
9055 void foo (int i, ...)
9056 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9057 if (va_start_p)
9058 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9059 else
9060 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9062 return false;
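/* Illustration of the checks above (a sketch; f is a hypothetical
   function):

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter of 'va_start'
                           // not last named argument
       va_end (ap);
     }

   After the check, the second argument of __builtin_va_start is replaced
   by 0 so that later passes do not diagnose the same call again.  */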
9066 /* Expand a call EXP to __builtin_object_size. */
9068 static rtx
9069 expand_builtin_object_size (tree exp)
9071 tree ost;
9072 int object_size_type;
9073 tree fndecl = get_callee_fndecl (exp);
9075 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9077 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9078 exp, fndecl);
9079 expand_builtin_trap ();
9080 return const0_rtx;
9083 ost = CALL_EXPR_ARG (exp, 1);
9084 STRIP_NOPS (ost);
9086 if (TREE_CODE (ost) != INTEGER_CST
9087 || tree_int_cst_sgn (ost) < 0
9088 || compare_tree_int (ost, 3) > 0)
9090 error ("%Klast argument of %D is not integer constant between 0 and 3",
9091 exp, fndecl);
9092 expand_builtin_trap ();
9093 return const0_rtx;
9096 object_size_type = tree_to_shwi (ost);
9098 return object_size_type < 2 ? constm1_rtx : const0_rtx;
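/* Illustration of the fallback behaviour above (a sketch): when the
   object size could not be determined before expansion time,

     __builtin_object_size (p, 0)   // expands to (size_t) -1
     __builtin_object_size (p, 2)   // expands to (size_t) 0

   i.e. the maximum for types 0 and 1 and the minimum for types 2 and 3,
   matching the documented semantics of the builtin.  */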
9101 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9102 FCODE is the BUILT_IN_* to use.
9103 Return NULL_RTX if we failed; the caller should emit a normal call,
9104 otherwise try to get the result in TARGET, if convenient (and in
9105 mode MODE if that's convenient). */
9107 static rtx
9108 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9109 enum built_in_function fcode)
9111 tree dest, src, len, size;
9113 if (!validate_arglist (exp,
9114 POINTER_TYPE,
9115 fcode == BUILT_IN_MEMSET_CHK
9116 ? INTEGER_TYPE : POINTER_TYPE,
9117 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9118 return NULL_RTX;
9120 dest = CALL_EXPR_ARG (exp, 0);
9121 src = CALL_EXPR_ARG (exp, 1);
9122 len = CALL_EXPR_ARG (exp, 2);
9123 size = CALL_EXPR_ARG (exp, 3);
9125 if (! tree_fits_uhwi_p (size))
9126 return NULL_RTX;
9128 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9130 tree fn;
9132 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9134 warning_at (tree_nonartificial_location (exp),
9135 0, "%Kcall to %D will always overflow destination buffer",
9136 exp, get_callee_fndecl (exp));
9137 return NULL_RTX;
9140 fn = NULL_TREE;
9141 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9142 mem{cpy,pcpy,move,set} is available. */
9143 switch (fcode)
9145 case BUILT_IN_MEMCPY_CHK:
9146 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9147 break;
9148 case BUILT_IN_MEMPCPY_CHK:
9149 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9150 break;
9151 case BUILT_IN_MEMMOVE_CHK:
9152 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9153 break;
9154 case BUILT_IN_MEMSET_CHK:
9155 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9156 break;
9157 default:
9158 break;
9161 if (! fn)
9162 return NULL_RTX;
9164 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9165 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9166 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9167 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9169 else if (fcode == BUILT_IN_MEMSET_CHK)
9170 return NULL_RTX;
9171 else
9173 unsigned int dest_align = get_pointer_alignment (dest);
9175 /* If DEST is not a pointer type, call the normal function. */
9176 if (dest_align == 0)
9177 return NULL_RTX;
9179 /* If SRC and DEST are the same (and not volatile), do nothing. */
9180 if (operand_equal_p (src, dest, 0))
9182 tree expr;
9184 if (fcode != BUILT_IN_MEMPCPY_CHK)
9186 /* Evaluate and ignore LEN in case it has side-effects. */
9187 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9188 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9191 expr = fold_build_pointer_plus (dest, len);
9192 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9195 /* __memmove_chk special case. */
9196 if (fcode == BUILT_IN_MEMMOVE_CHK)
9198 unsigned int src_align = get_pointer_alignment (src);
9200 if (src_align == 0)
9201 return NULL_RTX;
9203 /* If src is categorized for a readonly section we can use
9204 normal __memcpy_chk. */
9205 if (readonly_data_expr (src))
9207 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9208 if (!fn)
9209 return NULL_RTX;
9210 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9211 dest, src, len, size);
9212 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9213 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9214 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9217 return NULL_RTX;
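/* Illustration of the transformations above (a sketch; dst, src and n
   are hypothetical variables):

     __builtin___memcpy_chk (dst, src, 16, 32)
       // becomes memcpy (dst, src, 16), since 16 <= 32
     __builtin___memcpy_chk (dst, src, n, (size_t) -1)
       // becomes memcpy (dst, src, n); an all-ones size means "unknown"
     __builtin___memcpy_chk (dst, src, 64, 32)
       // kept as the checking call, after warning that it will always
       // overflow the destination buffer

   The __memmove_chk variant is additionally rewritten to __memcpy_chk
   when the source is known to live in read-only data.  */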
9221 /* Emit warning if a buffer overflow is detected at compile time. */
9223 static void
9224 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9226 int is_strlen = 0;
9227 tree len, size;
9228 location_t loc = tree_nonartificial_location (exp);
9230 switch (fcode)
9232 case BUILT_IN_STRCPY_CHK:
9233 case BUILT_IN_STPCPY_CHK:
9234 /* For __strcat_chk the warning will be emitted only if overflowing
9235 by at least strlen (dest) + 1 bytes. */
9236 case BUILT_IN_STRCAT_CHK:
9237 len = CALL_EXPR_ARG (exp, 1);
9238 size = CALL_EXPR_ARG (exp, 2);
9239 is_strlen = 1;
9240 break;
9241 case BUILT_IN_STRNCAT_CHK:
9242 case BUILT_IN_STRNCPY_CHK:
9243 case BUILT_IN_STPNCPY_CHK:
9244 len = CALL_EXPR_ARG (exp, 2);
9245 size = CALL_EXPR_ARG (exp, 3);
9246 break;
9247 case BUILT_IN_SNPRINTF_CHK:
9248 case BUILT_IN_VSNPRINTF_CHK:
9249 len = CALL_EXPR_ARG (exp, 1);
9250 size = CALL_EXPR_ARG (exp, 3);
9251 break;
9252 default:
9253 gcc_unreachable ();
9256 if (!len || !size)
9257 return;
9259 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9260 return;
9262 if (is_strlen)
9264 len = c_strlen (len, 1);
9265 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9266 return;
9268 else if (fcode == BUILT_IN_STRNCAT_CHK)
9270 tree src = CALL_EXPR_ARG (exp, 1);
9271 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9272 return;
9273 src = c_strlen (src, 1);
9274 if (! src || ! tree_fits_uhwi_p (src))
9276 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9277 exp, get_callee_fndecl (exp));
9278 return;
9280 else if (tree_int_cst_lt (src, size))
9281 return;
9283 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9284 return;
9286 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9287 exp, get_callee_fndecl (exp));
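/* Illustration of a call diagnosed above (a sketch):

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", __builtin_object_size (buf, 0));
       // warning: call will always overflow destination buffer,
       // since strlen ("too long") == 8 is not smaller than the
       // object size 4.  */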
9290 /* Emit warning if a buffer overflow is detected at compile time
9291 in __sprintf_chk/__vsprintf_chk calls. */
9293 static void
9294 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9296 tree size, len, fmt;
9297 const char *fmt_str;
9298 int nargs = call_expr_nargs (exp);
9300 /* Verify the required arguments in the original call. */
9302 if (nargs < 4)
9303 return;
9304 size = CALL_EXPR_ARG (exp, 2);
9305 fmt = CALL_EXPR_ARG (exp, 3);
9307 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9308 return;
9310 /* Check whether the format is a literal string constant. */
9311 fmt_str = c_getstr (fmt);
9312 if (fmt_str == NULL)
9313 return;
9315 if (!init_target_chars ())
9316 return;
9318 /* If the format doesn't contain % args or %%, we know its size. */
9319 if (strchr (fmt_str, target_percent) == 0)
9320 len = build_int_cstu (size_type_node, strlen (fmt_str));
9321 /* If the format is "%s" and the first ... argument is a string literal,
9322 we know it too. */
9323 else if (fcode == BUILT_IN_SPRINTF_CHK
9324 && strcmp (fmt_str, target_percent_s) == 0)
9326 tree arg;
9328 if (nargs < 5)
9329 return;
9330 arg = CALL_EXPR_ARG (exp, 4);
9331 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9332 return;
9334 len = c_strlen (arg, 1);
9335 if (!len || ! tree_fits_uhwi_p (len))
9336 return;
9338 else
9339 return;
9341 if (! tree_int_cst_lt (len, size))
9342 warning_at (tree_nonartificial_location (exp),
9343 0, "%Kcall to %D will always overflow destination buffer",
9344 exp, get_callee_fndecl (exp));
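/* Illustration of a call diagnosed above (a sketch):

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", "too long");
       // warning: call will always overflow destination buffer,
       // since the "%s" argument is a literal of known length 8 >= 4.  */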
9347 /* Emit warning if a free is called with address of a variable. */
9349 static void
9350 maybe_emit_free_warning (tree exp)
9352 tree arg = CALL_EXPR_ARG (exp, 0);
9354 STRIP_NOPS (arg);
9355 if (TREE_CODE (arg) != ADDR_EXPR)
9356 return;
9358 arg = get_base_address (TREE_OPERAND (arg, 0));
9359 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9360 return;
9362 if (SSA_VAR_P (arg))
9363 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9364 "%Kattempt to free a non-heap object %qD", exp, arg);
9365 else
9366 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9367 "%Kattempt to free a non-heap object", exp);
9370 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9371 if possible. */
9373 static tree
9374 fold_builtin_object_size (tree ptr, tree ost)
9376 unsigned HOST_WIDE_INT bytes;
9377 int object_size_type;
9379 if (!validate_arg (ptr, POINTER_TYPE)
9380 || !validate_arg (ost, INTEGER_TYPE))
9381 return NULL_TREE;
9383 STRIP_NOPS (ost);
9385 if (TREE_CODE (ost) != INTEGER_CST
9386 || tree_int_cst_sgn (ost) < 0
9387 || compare_tree_int (ost, 3) > 0)
9388 return NULL_TREE;
9390 object_size_type = tree_to_shwi (ost);
9392 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9393 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9394 and (size_t) 0 for types 2 and 3. */
9395 if (TREE_SIDE_EFFECTS (ptr))
9396 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9398 if (TREE_CODE (ptr) == ADDR_EXPR)
9400 compute_builtin_object_size (ptr, object_size_type, &bytes);
9401 if (wi::fits_to_tree_p (bytes, size_type_node))
9402 return build_int_cstu (size_type_node, bytes);
9404 else if (TREE_CODE (ptr) == SSA_NAME)
9406 /* If object size is not known yet, delay folding until
9407 later.  Maybe subsequent passes will help determine
9408 it. */
9409 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9410 && wi::fits_to_tree_p (bytes, size_type_node))
9411 return build_int_cstu (size_type_node, bytes);
9414 return NULL_TREE;
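/* Illustration of the folding above (a sketch):

     char a[10];
     size_t n = __builtin_object_size (a, 0);   // folds to 10

   When PTR is an SSA_NAME whose object size is not yet known, the call
   is left alone so that later object-size passes can fold it.  */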
9417 /* Builtins with folding operations that operate on "..." arguments
9418 need special handling; we need to store the arguments in a convenient
9419 data structure before attempting any folding. Fortunately there are
9420 only a few builtins that fall into this category. FNDECL is the
9421 function, EXP is the CALL_EXPR for the call. */
9423 static tree
9424 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9426 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9427 tree ret = NULL_TREE;
9429 switch (fcode)
9431 case BUILT_IN_FPCLASSIFY:
9432 ret = fold_builtin_fpclassify (loc, args, nargs);
9433 break;
9435 default:
9436 break;
9438 if (ret)
9440 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9441 SET_EXPR_LOCATION (ret, loc);
9442 TREE_NO_WARNING (ret) = 1;
9443 return ret;
9445 return NULL_TREE;
9448 /* Initialize format string characters in the target charset. */
9450 bool
9451 init_target_chars (void)
9453 static bool init;
9454 if (!init)
9456 target_newline = lang_hooks.to_target_charset ('\n');
9457 target_percent = lang_hooks.to_target_charset ('%');
9458 target_c = lang_hooks.to_target_charset ('c');
9459 target_s = lang_hooks.to_target_charset ('s');
9460 if (target_newline == 0 || target_percent == 0 || target_c == 0
9461 || target_s == 0)
9462 return false;
9464 target_percent_c[0] = target_percent;
9465 target_percent_c[1] = target_c;
9466 target_percent_c[2] = '\0';
9468 target_percent_s[0] = target_percent;
9469 target_percent_s[1] = target_s;
9470 target_percent_s[2] = '\0';
9472 target_percent_s_newline[0] = target_percent;
9473 target_percent_s_newline[1] = target_s;
9474 target_percent_s_newline[2] = target_newline;
9475 target_percent_s_newline[3] = '\0';
9477 init = true;
9479 return true;
9482 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9483 and no overflow/underflow occurred. INEXACT is true if M was not
9484 exactly calculated. TYPE is the tree type for the result. This
9485 function assumes that the caller cleared the MPFR flags before
9486 calculating M, so any flag set since then indicates a problem with
9487 the calculation.  Return NULL_TREE if any checks fail. */
9489 static tree
9490 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9492 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9493 overflow/underflow occurred. If -frounding-math, proceed iff the
9494 result of calling FUNC was exact. */
9495 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9496 && (!flag_rounding_math || !inexact))
9498 REAL_VALUE_TYPE rr;
9500 real_from_mpfr (&rr, m, type, GMP_RNDN);
9501 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9502 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9503 but the mpfr_t is not, then we underflowed in the
9504 conversion. */
9505 if (real_isfinite (&rr)
9506 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9508 REAL_VALUE_TYPE rmode;
9510 real_convert (&rmode, TYPE_MODE (type), &rr);
9511 /* Proceed iff the specified mode can hold the value. */
9512 if (real_identical (&rmode, &rr))
9513 return build_real (type, rmode);
9516 return NULL_TREE;
9519 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9520 number and no overflow/underflow occurred. INEXACT is true if M
9521 was not exactly calculated. TYPE is the tree type for the result.
9522 This function assumes that the caller cleared the MPFR flags before
9523 calculating M, so any flag set since then indicates a problem with
9524 the calculation.  Return NULL_TREE if any checks fail; if
9525 FORCE_CONVERT is true, then bypass the checks. */
9527 static tree
9528 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9530 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9531 overflow/underflow occurred. If -frounding-math, proceed iff the
9532 result of calling FUNC was exact. */
9533 if (force_convert
9534 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9535 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9536 && (!flag_rounding_math || !inexact)))
9538 REAL_VALUE_TYPE re, im;
9540 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9541 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9542 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9543 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9544 but the mpfr_t is not, then we underflowed in the
9545 conversion. */
9546 if (force_convert
9547 || (real_isfinite (&re) && real_isfinite (&im)
9548 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9549 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9551 REAL_VALUE_TYPE re_mode, im_mode;
9553 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9554 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9555 /* Proceed iff the specified mode can hold the value. */
9556 if (force_convert
9557 || (real_identical (&re_mode, &re)
9558 && real_identical (&im_mode, &im)))
9559 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9560 build_real (TREE_TYPE (type), im_mode));
9563 return NULL_TREE;
9566 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9567 the pointer *(ARG_QUO) and return the result. The type is taken
9568 from the type of ARG0 and is used for setting the precision of the
9569 calculation and results. */
9571 static tree
9572 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9574 tree const type = TREE_TYPE (arg0);
9575 tree result = NULL_TREE;
9577 STRIP_NOPS (arg0);
9578 STRIP_NOPS (arg1);
9580 /* To proceed, MPFR must exactly represent the target floating point
9581 format, which only happens when the target base equals two. */
9582 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9583 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9584 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9586 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9587 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9589 if (real_isfinite (ra0) && real_isfinite (ra1))
9591 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9592 const int prec = fmt->p;
9593 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9594 tree result_rem;
9595 long integer_quo;
9596 mpfr_t m0, m1;
9598 mpfr_inits2 (prec, m0, m1, NULL);
9599 mpfr_from_real (m0, ra0, GMP_RNDN);
9600 mpfr_from_real (m1, ra1, GMP_RNDN);
9601 mpfr_clear_flags ();
9602 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9603 /* Remquo is independent of the rounding mode, so pass
9604 inexact=0 to do_mpfr_ckconv(). */
9605 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9606 mpfr_clears (m0, m1, NULL);
9607 if (result_rem)
9609 /* MPFR calculates quo in the host's long so it may
9610 return more bits in quo than the target int can hold
9611 if sizeof(host long) > sizeof(target int). This can
9612 happen even for native compilers in LP64 mode. In
9613 these cases, reduce the quo value modulo the largest
9614 number that the target int can hold while leaving one
9615 bit for the sign. */
9616 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9617 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9619 /* Dereference the quo pointer argument. */
9620 arg_quo = build_fold_indirect_ref (arg_quo);
9621 /* Proceed iff a valid pointer type was passed in. */
9622 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9624 /* Set the value. */
9625 tree result_quo
9626 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9627 build_int_cst (TREE_TYPE (arg_quo),
9628 integer_quo));
9629 TREE_SIDE_EFFECTS (result_quo) = 1;
9630 /* Combine the quo assignment with the rem. */
9631 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9632 result_quo, result_rem));
9637 return result;
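/* Illustration of the folding above (a sketch): with constant arguments,
   e.g.

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the call folds to a COMPOUND_EXPR that assigns the low bits of the
   quotient (here 2) to *(&q) and yields the IEEE remainder (here -1.0)
   as its value, computed with MPFR at the precision of the type.  */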
9640 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9641 resulting value as a tree with type TYPE. The mpfr precision is
9642 set to the precision of TYPE. We assume that this mpfr function
9643 returns zero if the result could be calculated exactly within the
9644 requested precision. In addition, the integer pointer represented
9645 by ARG_SG will be dereferenced and set to the appropriate signgam
9646 (-1,1) value. */
9648 static tree
9649 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9651 tree result = NULL_TREE;
9653 STRIP_NOPS (arg);
9655 /* To proceed, MPFR must exactly represent the target floating point
9656 format, which only happens when the target base equals two. Also
9657 verify ARG is a constant and that ARG_SG is an int pointer. */
9658 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9659 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9660 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9661 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9663 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9665 /* In addition to NaN and Inf, the argument cannot be zero or a
9666 negative integer. */
9667 if (real_isfinite (ra)
9668 && ra->cl != rvc_zero
9669 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9671 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9672 const int prec = fmt->p;
9673 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9674 int inexact, sg;
9675 mpfr_t m;
9676 tree result_lg;
9678 mpfr_init2 (m, prec);
9679 mpfr_from_real (m, ra, GMP_RNDN);
9680 mpfr_clear_flags ();
9681 inexact = mpfr_lgamma (m, &sg, m, rnd);
9682 result_lg = do_mpfr_ckconv (m, type, inexact);
9683 mpfr_clear (m);
9684 if (result_lg)
9686 tree result_sg;
9688 /* Dereference the arg_sg pointer argument. */
9689 arg_sg = build_fold_indirect_ref (arg_sg);
9690 /* Assign the signgam value into *arg_sg. */
9691 result_sg = fold_build2 (MODIFY_EXPR,
9692 TREE_TYPE (arg_sg), arg_sg,
9693 build_int_cst (TREE_TYPE (arg_sg), sg));
9694 TREE_SIDE_EFFECTS (result_sg) = 1;
9695 /* Combine the signgam assignment with the lgamma result. */
9696 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9697 result_sg, result_lg));
9702 return result;
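/* Illustration of the folding above (a sketch):

     int sg;
     double l = __builtin_lgamma_r (3.0, &sg);

   folds to a COMPOUND_EXPR that assigns the sign of gamma (3.0)
   (here 1) to *(&sg) and yields log (gamma (3.0)) = log (2.0) as its
   value, computed with MPFR at the precision of double.  */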
9705 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9706 mpc function FUNC on it and return the resulting value as a tree
9707 with type TYPE. The mpfr precision is set to the precision of
9708 TYPE. We assume that function FUNC returns zero if the result
9709 could be calculated exactly within the requested precision. If
9710 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9711 in the arguments and/or results. */
9713 tree
9714 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9715 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9717 tree result = NULL_TREE;
9719 STRIP_NOPS (arg0);
9720 STRIP_NOPS (arg1);
9722 /* To proceed, MPFR must exactly represent the target floating point
9723 format, which only happens when the target base equals two. */
9724 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9726 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9727 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9728 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9730 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9731 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9732 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9733 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9735 if (do_nonfinite
9736 || (real_isfinite (re0) && real_isfinite (im0)
9737 && real_isfinite (re1) && real_isfinite (im1)))
9739 const struct real_format *const fmt =
9740 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9741 const int prec = fmt->p;
9742 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9743 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9744 int inexact;
9745 mpc_t m0, m1;
9747 mpc_init2 (m0, prec);
9748 mpc_init2 (m1, prec);
9749 mpfr_from_real (mpc_realref (m0), re0, rnd);
9750 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9751 mpfr_from_real (mpc_realref (m1), re1, rnd);
9752 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9753 mpfr_clear_flags ();
9754 inexact = func (m0, m0, m1, crnd);
9755 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9756 mpc_clear (m0);
9757 mpc_clear (m1);
9761 return result;
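/* Illustration of the folding above (a sketch, assuming the cpow builtin
   is routed through this helper with FUNC == mpc_pow):

     _Complex double z = __builtin_cpow (2.0 + 0.0i, 3.0 + 0.0i);

   FUNC is evaluated with MPC at the precision of the component type and
   the constant result, 8.0 + 0.0i, replaces the call when the checks in
   do_mpc_ckconv succeed.  */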
9764 /* A wrapper function for builtin folding that prevents warnings for
9765 "statement without effect" and the like, caused by removing the
9766 call node earlier than the warning is generated. */
9768 tree
9769 fold_call_stmt (gcall *stmt, bool ignore)
9771 tree ret = NULL_TREE;
9772 tree fndecl = gimple_call_fndecl (stmt);
9773 location_t loc = gimple_location (stmt);
9774 if (fndecl
9775 && TREE_CODE (fndecl) == FUNCTION_DECL
9776 && DECL_BUILT_IN (fndecl)
9777 && !gimple_call_va_arg_pack_p (stmt))
9779 int nargs = gimple_call_num_args (stmt);
9780 tree *args = (nargs > 0
9781 ? gimple_call_arg_ptr (stmt, 0)
9782 : &error_mark_node);
9784 if (avoid_folding_inline_builtin (fndecl))
9785 return NULL_TREE;
9786 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9788 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9790 else
9792 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9793 if (ret)
9795 /* Propagate location information from original call to
9796 expansion of builtin. Otherwise things like
9797 maybe_emit_chk_warning, which operate on the expansion
9798 of a builtin, will use the wrong location information. */
9799 if (gimple_has_location (stmt))
9801 tree realret = ret;
9802 if (TREE_CODE (ret) == NOP_EXPR)
9803 realret = TREE_OPERAND (ret, 0);
9804 if (CAN_HAVE_LOCATION_P (realret)
9805 && !EXPR_HAS_LOCATION (realret))
9806 SET_EXPR_LOCATION (realret, loc);
9807 return realret;
9809 return ret;
9813 return NULL_TREE;
9816 /* Look up the function in builtin_decl that corresponds to DECL
9817 and set ASMSPEC as its user assembler name. DECL must be a
9818 function decl that declares a builtin. */
9820 void
9821 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9823 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9824 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9825 && asmspec != 0);
9827 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9828 set_user_assembler_name (builtin, asmspec);
9830 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9831 && INT_TYPE_SIZE < BITS_PER_WORD)
9833 set_user_assembler_libfunc ("ffs", asmspec);
9834 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9835 "ffs");
9839 /* Return true if DECL is a builtin that expands to a constant or similarly
9840 simple code. */
9841 bool
9842 is_simple_builtin (tree decl)
9844 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9845 switch (DECL_FUNCTION_CODE (decl))
9847 /* Builtins that expand to constants. */
9848 case BUILT_IN_CONSTANT_P:
9849 case BUILT_IN_EXPECT:
9850 case BUILT_IN_OBJECT_SIZE:
9851 case BUILT_IN_UNREACHABLE:
9852 /* Simple register moves or loads from stack. */
9853 case BUILT_IN_ASSUME_ALIGNED:
9854 case BUILT_IN_RETURN_ADDRESS:
9855 case BUILT_IN_EXTRACT_RETURN_ADDR:
9856 case BUILT_IN_FROB_RETURN_ADDR:
9857 case BUILT_IN_RETURN:
9858 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9859 case BUILT_IN_FRAME_ADDRESS:
9860 case BUILT_IN_VA_END:
9861 case BUILT_IN_STACK_SAVE:
9862 case BUILT_IN_STACK_RESTORE:
9863 /* Exception state returns or moves registers around. */
9864 case BUILT_IN_EH_FILTER:
9865 case BUILT_IN_EH_POINTER:
9866 case BUILT_IN_EH_COPY_VALUES:
9867 return true;
9869 default:
9870 return false;
9873 return false;
9876 /* Return true if DECL is a builtin that is not expensive, i.e., one that
9877 is most probably expanded inline into reasonably simple code.  This is a
9878 superset of is_simple_builtin. */
9879 bool
9880 is_inexpensive_builtin (tree decl)
9882 if (!decl)
9883 return false;
9884 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9885 return true;
9886 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9887 switch (DECL_FUNCTION_CODE (decl))
9889 case BUILT_IN_ABS:
9890 case BUILT_IN_ALLOCA:
9891 case BUILT_IN_ALLOCA_WITH_ALIGN:
9892 case BUILT_IN_BSWAP16:
9893 case BUILT_IN_BSWAP32:
9894 case BUILT_IN_BSWAP64:
9895 case BUILT_IN_CLZ:
9896 case BUILT_IN_CLZIMAX:
9897 case BUILT_IN_CLZL:
9898 case BUILT_IN_CLZLL:
9899 case BUILT_IN_CTZ:
9900 case BUILT_IN_CTZIMAX:
9901 case BUILT_IN_CTZL:
9902 case BUILT_IN_CTZLL:
9903 case BUILT_IN_FFS:
9904 case BUILT_IN_FFSIMAX:
9905 case BUILT_IN_FFSL:
9906 case BUILT_IN_FFSLL:
9907 case BUILT_IN_IMAXABS:
9908 case BUILT_IN_FINITE:
9909 case BUILT_IN_FINITEF:
9910 case BUILT_IN_FINITEL:
9911 case BUILT_IN_FINITED32:
9912 case BUILT_IN_FINITED64:
9913 case BUILT_IN_FINITED128:
9914 case BUILT_IN_FPCLASSIFY:
9915 case BUILT_IN_ISFINITE:
9916 case BUILT_IN_ISINF_SIGN:
9917 case BUILT_IN_ISINF:
9918 case BUILT_IN_ISINFF:
9919 case BUILT_IN_ISINFL:
9920 case BUILT_IN_ISINFD32:
9921 case BUILT_IN_ISINFD64:
9922 case BUILT_IN_ISINFD128:
9923 case BUILT_IN_ISNAN:
9924 case BUILT_IN_ISNANF:
9925 case BUILT_IN_ISNANL:
9926 case BUILT_IN_ISNAND32:
9927 case BUILT_IN_ISNAND64:
9928 case BUILT_IN_ISNAND128:
9929 case BUILT_IN_ISNORMAL:
9930 case BUILT_IN_ISGREATER:
9931 case BUILT_IN_ISGREATEREQUAL:
9932 case BUILT_IN_ISLESS:
9933 case BUILT_IN_ISLESSEQUAL:
9934 case BUILT_IN_ISLESSGREATER:
9935 case BUILT_IN_ISUNORDERED:
9936 case BUILT_IN_VA_ARG_PACK:
9937 case BUILT_IN_VA_ARG_PACK_LEN:
9938 case BUILT_IN_VA_COPY:
9939 case BUILT_IN_TRAP:
9940 case BUILT_IN_SAVEREGS:
9941 case BUILT_IN_POPCOUNTL:
9942 case BUILT_IN_POPCOUNTLL:
9943 case BUILT_IN_POPCOUNTIMAX:
9944 case BUILT_IN_POPCOUNT:
9945 case BUILT_IN_PARITYL:
9946 case BUILT_IN_PARITYLL:
9947 case BUILT_IN_PARITYIMAX:
9948 case BUILT_IN_PARITY:
9949 case BUILT_IN_LABS:
9950 case BUILT_IN_LLABS:
9951 case BUILT_IN_PREFETCH:
9952 case BUILT_IN_ACC_ON_DEVICE:
9953 return true;
9955 default:
9956 return is_simple_builtin (decl);
9959 return false;
9962 /* Return true if T is a constant and the value cast to a target char
9963 can be represented by a host char.
9964 Store the converted char constant in *P if so. */
9966 bool
9967 target_char_cst_p (tree t, char *p)
9969 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
9970 return false;
9972 *p = (char)tree_to_uhwi (t);
9973 return true;