2016-12-07 François Dumont <fdumont@gcc.gnu.org>
[official-gcc.git] / gcc / builtins.c
blob58ed469fc63c7925db9587ef34035ba2a3ff6ecb
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
84 #include "builtins.def"
87 /* Setup an array of builtin_info_type, make sure each element decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
152 static tree fold_builtin_isascii (location_t, tree);
153 static tree fold_builtin_toascii (location_t, tree);
154 static tree fold_builtin_isdigit (location_t, tree);
155 static tree fold_builtin_fabs (location_t, tree, tree);
156 static tree fold_builtin_abs (location_t, tree, tree);
157 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
158 enum tree_code);
159 static tree fold_builtin_0 (location_t, tree);
160 static tree fold_builtin_1 (location_t, tree, tree);
161 static tree fold_builtin_2 (location_t, tree, tree, tree);
162 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_varargs (location_t, tree, tree*, int);
165 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
166 static tree fold_builtin_strspn (location_t, tree, tree);
167 static tree fold_builtin_strcspn (location_t, tree, tree);
169 static rtx expand_builtin_object_size (tree);
170 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
171 enum built_in_function);
172 static void maybe_emit_chk_warning (tree, enum built_in_function);
173 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
174 static void maybe_emit_free_warning (tree);
175 static tree fold_builtin_object_size (tree, tree);
177 unsigned HOST_WIDE_INT target_newline;
178 unsigned HOST_WIDE_INT target_percent;
179 static unsigned HOST_WIDE_INT target_c;
180 static unsigned HOST_WIDE_INT target_s;
181 char target_percent_c[3];
182 char target_percent_s[3];
183 char target_percent_s_newline[4];
184 static tree do_mpfr_remquo (tree, tree, tree);
185 static tree do_mpfr_lgamma_r (tree, tree, tree);
186 static void expand_builtin_sync_synchronize (void);
188 /* Return true if NAME starts with __builtin_ or __sync_. */
190 static bool
191 is_builtin_name (const char *name)
193 if (strncmp (name, "__builtin_", 10) == 0)
194 return true;
195 if (strncmp (name, "__sync_", 7) == 0)
196 return true;
197 if (strncmp (name, "__atomic_", 9) == 0)
198 return true;
199 if (flag_cilkplus
200 && (!strcmp (name, "__cilkrts_detach")
201 || !strcmp (name, "__cilkrts_pop_frame")))
202 return true;
203 return false;
207 /* Return true if DECL is a function symbol representing a built-in. */
209 bool
210 is_builtin_fn (tree decl)
212 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
215 /* Return true if NODE should be considered for inline expansion regardless
216 of the optimization level. This means whenever a function is invoked with
217 its "internal" name, which normally contains the prefix "__builtin". */
219 bool
220 called_as_built_in (tree node)
222 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
223 we want the name used to call the function, not the name it
224 will have. */
225 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
226 return is_builtin_name (name);
229 /* Compute values M and N such that M divides (address of EXP - N) and such
230 that N < M. If these numbers can be determined, store M in alignp and N in
231 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
232 *alignp and any bit-offset to *bitposp.
234 Note that the address (and thus the alignment) computed here is based
235 on the address to which a symbol resolves, whereas DECL_ALIGN is based
236 on the address at which an object is actually located. These two
237 addresses are not always the same. For example, on ARM targets,
238 the address &foo of a Thumb function foo() has the lowest bit set,
239 whereas foo() itself starts on an even address.
241 If ADDR_P is true we are taking the address of the memory reference EXP
242 and thus cannot rely on the access taking place. */
244 static bool
245 get_object_alignment_2 (tree exp, unsigned int *alignp,
246 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
248 HOST_WIDE_INT bitsize, bitpos;
249 tree offset;
250 machine_mode mode;
251 int unsignedp, reversep, volatilep;
252 unsigned int align = BITS_PER_UNIT;
253 bool known_alignment = false;
255 /* Get the innermost object and the constant (bitpos) and possibly
256 variable (offset) offset of the access. */
257 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
258 &unsignedp, &reversep, &volatilep);
260 /* Extract alignment information from the innermost object and
261 possibly adjust bitpos and offset. */
262 if (TREE_CODE (exp) == FUNCTION_DECL)
264 /* Function addresses can encode extra information besides their
265 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
266 allows the low bit to be used as a virtual bit, we know
267 that the address itself must be at least 2-byte aligned. */
268 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
269 align = 2 * BITS_PER_UNIT;
271 else if (TREE_CODE (exp) == LABEL_DECL)
273 else if (TREE_CODE (exp) == CONST_DECL)
275 /* The alignment of a CONST_DECL is determined by its initializer. */
276 exp = DECL_INITIAL (exp);
277 align = TYPE_ALIGN (TREE_TYPE (exp));
278 if (CONSTANT_CLASS_P (exp))
279 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
281 known_alignment = true;
283 else if (DECL_P (exp))
285 align = DECL_ALIGN (exp);
286 known_alignment = true;
288 else if (TREE_CODE (exp) == INDIRECT_REF
289 || TREE_CODE (exp) == MEM_REF
290 || TREE_CODE (exp) == TARGET_MEM_REF)
292 tree addr = TREE_OPERAND (exp, 0);
293 unsigned ptr_align;
294 unsigned HOST_WIDE_INT ptr_bitpos;
295 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
297 /* If the address is explicitely aligned, handle that. */
298 if (TREE_CODE (addr) == BIT_AND_EXPR
299 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
301 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
302 ptr_bitmask *= BITS_PER_UNIT;
303 align = least_bit_hwi (ptr_bitmask);
304 addr = TREE_OPERAND (addr, 0);
307 known_alignment
308 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
309 align = MAX (ptr_align, align);
311 /* Re-apply explicit alignment to the bitpos. */
312 ptr_bitpos &= ptr_bitmask;
314 /* The alignment of the pointer operand in a TARGET_MEM_REF
315 has to take the variable offset parts into account. */
316 if (TREE_CODE (exp) == TARGET_MEM_REF)
318 if (TMR_INDEX (exp))
320 unsigned HOST_WIDE_INT step = 1;
321 if (TMR_STEP (exp))
322 step = TREE_INT_CST_LOW (TMR_STEP (exp));
323 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
325 if (TMR_INDEX2 (exp))
326 align = BITS_PER_UNIT;
327 known_alignment = false;
330 /* When EXP is an actual memory reference then we can use
331 TYPE_ALIGN of a pointer indirection to derive alignment.
332 Do so only if get_pointer_alignment_1 did not reveal absolute
333 alignment knowledge and if using that alignment would
334 improve the situation. */
335 if (!addr_p && !known_alignment
336 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
337 align = TYPE_ALIGN (TREE_TYPE (exp));
338 else
340 /* Else adjust bitpos accordingly. */
341 bitpos += ptr_bitpos;
342 if (TREE_CODE (exp) == MEM_REF
343 || TREE_CODE (exp) == TARGET_MEM_REF)
344 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
347 else if (TREE_CODE (exp) == STRING_CST)
349 /* STRING_CST are the only constant objects we allow to be not
350 wrapped inside a CONST_DECL. */
351 align = TYPE_ALIGN (TREE_TYPE (exp));
352 if (CONSTANT_CLASS_P (exp))
353 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
355 known_alignment = true;
358 /* If there is a non-constant offset part extract the maximum
359 alignment that can prevail. */
360 if (offset)
362 unsigned int trailing_zeros = tree_ctz (offset);
363 if (trailing_zeros < HOST_BITS_PER_INT)
365 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
366 if (inner)
367 align = MIN (align, inner);
371 *alignp = align;
372 *bitposp = bitpos & (*alignp - 1);
373 return known_alignment;
376 /* For a memory reference expression EXP compute values M and N such that M
377 divides (&EXP - N) and such that N < M. If these numbers can be determined,
378 store M in alignp and N in *BITPOSP and return true. Otherwise return false
379 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
381 bool
382 get_object_alignment_1 (tree exp, unsigned int *alignp,
383 unsigned HOST_WIDE_INT *bitposp)
385 return get_object_alignment_2 (exp, alignp, bitposp, false);
388 /* Return the alignment in bits of EXP, an object. */
390 unsigned int
391 get_object_alignment (tree exp)
393 unsigned HOST_WIDE_INT bitpos = 0;
394 unsigned int align;
396 get_object_alignment_1 (exp, &align, &bitpos);
398 /* align and bitpos now specify known low bits of the pointer.
399 ptr & (align - 1) == bitpos. */
401 if (bitpos != 0)
402 align = least_bit_hwi (bitpos);
403 return align;
406 /* For a pointer valued expression EXP compute values M and N such that M
407 divides (EXP - N) and such that N < M. If these numbers can be determined,
408 store M in alignp and N in *BITPOSP and return true. Return false if
409 the results are just a conservative approximation.
411 If EXP is not a pointer, false is returned too. */
413 bool
414 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
415 unsigned HOST_WIDE_INT *bitposp)
417 STRIP_NOPS (exp);
419 if (TREE_CODE (exp) == ADDR_EXPR)
420 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
421 alignp, bitposp, true);
422 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
424 unsigned int align;
425 unsigned HOST_WIDE_INT bitpos;
426 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
427 &align, &bitpos);
428 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
429 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
430 else
432 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
433 if (trailing_zeros < HOST_BITS_PER_INT)
435 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
436 if (inner)
437 align = MIN (align, inner);
440 *alignp = align;
441 *bitposp = bitpos & (align - 1);
442 return res;
444 else if (TREE_CODE (exp) == SSA_NAME
445 && POINTER_TYPE_P (TREE_TYPE (exp)))
447 unsigned int ptr_align, ptr_misalign;
448 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
450 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
452 *bitposp = ptr_misalign * BITS_PER_UNIT;
453 *alignp = ptr_align * BITS_PER_UNIT;
454 /* Make sure to return a sensible alignment when the multiplication
455 by BITS_PER_UNIT overflowed. */
456 if (*alignp == 0)
457 *alignp = 1u << (HOST_BITS_PER_INT - 1);
458 /* We cannot really tell whether this result is an approximation. */
459 return false;
461 else
463 *bitposp = 0;
464 *alignp = BITS_PER_UNIT;
465 return false;
468 else if (TREE_CODE (exp) == INTEGER_CST)
470 *alignp = BIGGEST_ALIGNMENT;
471 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
472 & (BIGGEST_ALIGNMENT - 1));
473 return true;
476 *bitposp = 0;
477 *alignp = BITS_PER_UNIT;
478 return false;
481 /* Return the alignment in bits of EXP, a pointer valued expression.
482 The alignment returned is, by default, the alignment of the thing that
483 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
485 Otherwise, look at the expression to see if we can do better, i.e., if the
486 expression is actually pointing at an object whose alignment is tighter. */
488 unsigned int
489 get_pointer_alignment (tree exp)
491 unsigned HOST_WIDE_INT bitpos = 0;
492 unsigned int align;
494 get_pointer_alignment_1 (exp, &align, &bitpos);
496 /* align and bitpos now specify known low bits of the pointer.
497 ptr & (align - 1) == bitpos. */
499 if (bitpos != 0)
500 align = least_bit_hwi (bitpos);
502 return align;
505 /* Return the number of non-zero elements in the sequence
506 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
507 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
509 static unsigned
510 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
512 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
514 unsigned n;
516 if (eltsize == 1)
518 /* Optimize the common case of plain char. */
519 for (n = 0; n < maxelts; n++)
521 const char *elt = (const char*) ptr + n;
522 if (!*elt)
523 break;
526 else
528 for (n = 0; n < maxelts; n++)
530 const char *elt = (const char*) ptr + n * eltsize;
531 if (!memcmp (elt, "\0\0\0\0", eltsize))
532 break;
535 return n;
538 /* Compute the length of a null-terminated character string or wide
539 character string handling character sizes of 1, 2, and 4 bytes.
540 TREE_STRING_LENGTH is not the right way because it evaluates to
541 the size of the character array in bytes (as opposed to characters)
542 and because it can contain a zero byte in the middle.
544 ONLY_VALUE should be nonzero if the result is not going to be emitted
545 into the instruction stream and zero if it is going to be expanded.
546 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
547 is returned, otherwise NULL, since
548 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
549 evaluate the side-effects.
551 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
552 accesses. Note that this implies the result is not going to be emitted
553 into the instruction stream.
555 The value returned is of type `ssizetype'.
557 Unfortunately, string_constant can't access the values of const char
558 arrays with initializers, so neither can we do so here. */
560 tree
561 c_strlen (tree src, int only_value)
563 STRIP_NOPS (src);
564 if (TREE_CODE (src) == COND_EXPR
565 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
567 tree len1, len2;
569 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
570 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
571 if (tree_int_cst_equal (len1, len2))
572 return len1;
575 if (TREE_CODE (src) == COMPOUND_EXPR
576 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
577 return c_strlen (TREE_OPERAND (src, 1), only_value);
579 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
581 /* Offset from the beginning of the string in bytes. */
582 tree byteoff;
583 src = string_constant (src, &byteoff);
584 if (src == 0)
585 return NULL_TREE;
587 /* Determine the size of the string element. */
588 unsigned eltsize
589 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
591 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
592 length of SRC. */
593 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
595 /* PTR can point to the byte representation of any string type, including
596 char* and wchar_t*. */
597 const char *ptr = TREE_STRING_POINTER (src);
599 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
601 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
602 compute the offset to the following null if we don't know where to
603 start searching for it. */
604 if (string_length (ptr, eltsize, maxelts) < maxelts)
606 /* Return when an embedded null character is found. */
607 return NULL_TREE;
610 /* We don't know the starting offset, but we do know that the string
611 has no internal zero bytes. We can assume that the offset falls
612 within the bounds of the string; otherwise, the programmer deserves
613 what he gets. Subtract the offset from the length of the string,
614 and return that. This would perhaps not be valid if we were dealing
615 with named arrays in addition to literal string constants. */
617 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
620 /* Offset from the beginning of the string in elements. */
621 HOST_WIDE_INT eltoff;
623 /* We have a known offset into the string. Start searching there for
624 a null character if we can represent it as a single HOST_WIDE_INT. */
625 if (byteoff == 0)
626 eltoff = 0;
627 else if (! tree_fits_shwi_p (byteoff))
628 eltoff = -1;
629 else
630 eltoff = tree_to_shwi (byteoff) / eltsize;
632 /* If the offset is known to be out of bounds, warn, and call strlen at
633 runtime. */
634 if (eltoff < 0 || eltoff > maxelts)
636 /* Suppress multiple warnings for propagated constant strings. */
637 if (only_value != 2
638 && !TREE_NO_WARNING (src))
640 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
641 eltoff);
642 TREE_NO_WARNING (src) = 1;
644 return NULL_TREE;
647 /* Use strlen to search for the first zero byte. Since any strings
648 constructed with build_string will have nulls appended, we win even
649 if we get handed something like (char[4])"abcd".
651 Since ELTOFF is our starting index into the string, no further
652 calculation is needed. */
653 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
654 maxelts - eltoff);
656 return ssize_int (len);
659 /* Return a constant integer corresponding to target reading
660 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
662 static rtx
663 c_readstr (const char *str, machine_mode mode)
665 HOST_WIDE_INT ch;
666 unsigned int i, j;
667 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
669 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
670 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
671 / HOST_BITS_PER_WIDE_INT;
673 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
674 for (i = 0; i < len; i++)
675 tmp[i] = 0;
677 ch = 1;
678 for (i = 0; i < GET_MODE_SIZE (mode); i++)
680 j = i;
681 if (WORDS_BIG_ENDIAN)
682 j = GET_MODE_SIZE (mode) - i - 1;
683 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
684 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
685 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
686 j *= BITS_PER_UNIT;
688 if (ch)
689 ch = (unsigned char) str[i];
690 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
693 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
694 return immed_wide_int_const (c, mode);
697 /* Cast a target constant CST to target CHAR and if that value fits into
698 host char type, return zero and put that value into variable pointed to by
699 P. */
701 static int
702 target_char_cast (tree cst, char *p)
704 unsigned HOST_WIDE_INT val, hostval;
706 if (TREE_CODE (cst) != INTEGER_CST
707 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
708 return 1;
710 /* Do not care if it fits or not right here. */
711 val = TREE_INT_CST_LOW (cst);
713 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
714 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
716 hostval = val;
717 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
718 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
720 if (val != hostval)
721 return 1;
723 *p = hostval;
724 return 0;
727 /* Similar to save_expr, but assumes that arbitrary code is not executed
728 in between the multiple evaluations. In particular, we assume that a
729 non-addressable local variable will not be modified. */
731 static tree
732 builtin_save_expr (tree exp)
734 if (TREE_CODE (exp) == SSA_NAME
735 || (TREE_ADDRESSABLE (exp) == 0
736 && (TREE_CODE (exp) == PARM_DECL
737 || (VAR_P (exp) && !TREE_STATIC (exp)))))
738 return exp;
740 return save_expr (exp);
743 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
744 times to get the address of either a higher stack frame, or a return
745 address located within it (depending on FNDECL_CODE). */
747 static rtx
748 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
750 int i;
751 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
752 if (tem == NULL_RTX)
754 /* For a zero count with __builtin_return_address, we don't care what
755 frame address we return, because target-specific definitions will
756 override us. Therefore frame pointer elimination is OK, and using
757 the soft frame pointer is OK.
759 For a nonzero count, or a zero count with __builtin_frame_address,
760 we require a stable offset from the current frame pointer to the
761 previous one, so we must use the hard frame pointer, and
762 we must disable frame pointer elimination. */
763 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
764 tem = frame_pointer_rtx;
765 else
767 tem = hard_frame_pointer_rtx;
769 /* Tell reload not to eliminate the frame pointer. */
770 crtl->accesses_prior_frames = 1;
774 if (count > 0)
775 SETUP_FRAME_ADDRESSES ();
777 /* On the SPARC, the return address is not in the frame, it is in a
778 register. There is no way to access it off of the current frame
779 pointer, but it can be accessed off the previous frame pointer by
780 reading the value from the register window save area. */
781 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
782 count--;
784 /* Scan back COUNT frames to the specified frame. */
785 for (i = 0; i < count; i++)
787 /* Assume the dynamic chain pointer is in the word that the
788 frame address points to, unless otherwise specified. */
789 tem = DYNAMIC_CHAIN_ADDRESS (tem);
790 tem = memory_address (Pmode, tem);
791 tem = gen_frame_mem (Pmode, tem);
792 tem = copy_to_reg (tem);
795 /* For __builtin_frame_address, return what we've got. But, on
796 the SPARC for example, we may have to add a bias. */
797 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
798 return FRAME_ADDR_RTX (tem);
800 /* For __builtin_return_address, get the return address from that frame. */
801 #ifdef RETURN_ADDR_RTX
802 tem = RETURN_ADDR_RTX (count, tem);
803 #else
804 tem = memory_address (Pmode,
805 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
806 tem = gen_frame_mem (Pmode, tem);
807 #endif
808 return tem;
811 /* Alias set used for setjmp buffer. */
812 static alias_set_type setjmp_alias_set = -1;
814 /* Construct the leading half of a __builtin_setjmp call. Control will
815 return to RECEIVER_LABEL. This is also called directly by the SJLJ
816 exception handling code. */
818 void
819 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
821 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
822 rtx stack_save;
823 rtx mem;
825 if (setjmp_alias_set == -1)
826 setjmp_alias_set = new_alias_set ();
828 buf_addr = convert_memory_address (Pmode, buf_addr);
830 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
832 /* We store the frame pointer and the address of receiver_label in
833 the buffer and use the rest of it for the stack save area, which
834 is machine-dependent. */
836 mem = gen_rtx_MEM (Pmode, buf_addr);
837 set_mem_alias_set (mem, setjmp_alias_set);
838 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
840 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
841 GET_MODE_SIZE (Pmode))),
842 set_mem_alias_set (mem, setjmp_alias_set);
844 emit_move_insn (validize_mem (mem),
845 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
847 stack_save = gen_rtx_MEM (sa_mode,
848 plus_constant (Pmode, buf_addr,
849 2 * GET_MODE_SIZE (Pmode)));
850 set_mem_alias_set (stack_save, setjmp_alias_set);
851 emit_stack_save (SAVE_NONLOCAL, &stack_save);
853 /* If there is further processing to do, do it. */
854 if (targetm.have_builtin_setjmp_setup ())
855 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
857 /* We have a nonlocal label. */
858 cfun->has_nonlocal_label = 1;
861 /* Construct the trailing part of a __builtin_setjmp call. This is
862 also called directly by the SJLJ exception handling code.
863 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
865 void
866 expand_builtin_setjmp_receiver (rtx receiver_label)
868 rtx chain;
870 /* Mark the FP as used when we get here, so we have to make sure it's
871 marked as used by this function. */
872 emit_use (hard_frame_pointer_rtx);
874 /* Mark the static chain as clobbered here so life information
875 doesn't get messed up for it. */
876 chain = targetm.calls.static_chain (current_function_decl, true);
877 if (chain && REG_P (chain))
878 emit_clobber (chain);
880 /* Now put in the code to restore the frame pointer, and argument
881 pointer, if needed. */
882 if (! targetm.have_nonlocal_goto ())
884 /* First adjust our frame pointer to its actual value. It was
885 previously set to the start of the virtual area corresponding to
886 the stacked variables when we branched here and now needs to be
887 adjusted to the actual hardware fp value.
889 Assignments to virtual registers are converted by
890 instantiate_virtual_regs into the corresponding assignment
891 to the underlying register (fp in this case) that makes
892 the original assignment true.
893 So the following insn will actually be decrementing fp by
894 STARTING_FRAME_OFFSET. */
895 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
897 /* Restoring the frame pointer also modifies the hard frame pointer.
898 Mark it used (so that the previous assignment remains live once
899 the frame pointer is eliminated) and clobbered (to represent the
900 implicit update from the assignment). */
901 emit_use (hard_frame_pointer_rtx);
902 emit_clobber (hard_frame_pointer_rtx);
905 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
907 /* If the argument pointer can be eliminated in favor of the
908 frame pointer, we don't need to restore it. We assume here
909 that if such an elimination is present, it can always be used.
910 This is the case on all known machines; if we don't make this
911 assumption, we do unnecessary saving on many machines. */
912 size_t i;
913 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
915 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
916 if (elim_regs[i].from == ARG_POINTER_REGNUM
917 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
918 break;
920 if (i == ARRAY_SIZE (elim_regs))
922 /* Now restore our arg pointer from the address at which it
923 was saved in our stack frame. */
924 emit_move_insn (crtl->args.internal_arg_pointer,
925 copy_to_reg (get_arg_pointer_save_area ()));
929 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
930 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
931 else if (targetm.have_nonlocal_goto_receiver ())
932 emit_insn (targetm.gen_nonlocal_goto_receiver ());
933 else
934 { /* Nothing */ }
936 /* We must not allow the code we just generated to be reordered by
937 scheduling. Specifically, the update of the frame pointer must
938 happen immediately, not later. */
939 emit_insn (gen_blockage ());
942 /* __builtin_longjmp is passed a pointer to an array of five words (not
943 all will be used on all machines). It operates similarly to the C
944 library function of the same name, but is more efficient. Much of
945 the code below is copied from the handling of non-local gotos. */
947 static void
948 expand_builtin_longjmp (rtx buf_addr, rtx value)
950 rtx fp, lab, stack;
951 rtx_insn *insn, *last;
952 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
954 /* DRAP is needed for stack realign if longjmp is expanded to current
955 function */
956 if (SUPPORTS_STACK_ALIGNMENT)
957 crtl->need_drap = true;
959 if (setjmp_alias_set == -1)
960 setjmp_alias_set = new_alias_set ();
962 buf_addr = convert_memory_address (Pmode, buf_addr);
964 buf_addr = force_reg (Pmode, buf_addr);
966 /* We require that the user must pass a second argument of 1, because
967 that is what builtin_setjmp will return. */
968 gcc_assert (value == const1_rtx);
970 last = get_last_insn ();
971 if (targetm.have_builtin_longjmp ())
972 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
973 else
975 fp = gen_rtx_MEM (Pmode, buf_addr);
976 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
977 GET_MODE_SIZE (Pmode)));
979 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
980 2 * GET_MODE_SIZE (Pmode)));
981 set_mem_alias_set (fp, setjmp_alias_set);
982 set_mem_alias_set (lab, setjmp_alias_set);
983 set_mem_alias_set (stack, setjmp_alias_set);
985 /* Pick up FP, label, and SP from the block and jump. This code is
986 from expand_goto in stmt.c; see there for detailed comments. */
987 if (targetm.have_nonlocal_goto ())
988 /* We have to pass a value to the nonlocal_goto pattern that will
989 get copied into the static_chain pointer, but it does not matter
990 what that value is, because builtin_setjmp does not use it. */
991 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
992 else
994 lab = copy_to_reg (lab);
996 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
997 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
999 emit_move_insn (hard_frame_pointer_rtx, fp);
1000 emit_stack_restore (SAVE_NONLOCAL, stack);
1002 emit_use (hard_frame_pointer_rtx);
1003 emit_use (stack_pointer_rtx);
1004 emit_indirect_jump (lab);
1008 /* Search backwards and mark the jump insn as a non-local goto.
1009 Note that this precludes the use of __builtin_longjmp to a
1010 __builtin_setjmp target in the same function. However, we've
1011 already cautioned the user that these functions are for
1012 internal exception handling use only. */
1013 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1015 gcc_assert (insn != last);
1017 if (JUMP_P (insn))
1019 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1020 break;
1022 else if (CALL_P (insn))
1023 break;
1027 static inline bool
1028 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1030 return (iter->i < iter->n);
1033 /* This function validates the types of a function call argument list
1034 against a specified list of tree_codes. If the last specifier is a 0,
1035 that represents an ellipses, otherwise the last specifier must be a
1036 VOID_TYPE. */
1038 static bool
1039 validate_arglist (const_tree callexpr, ...)
1041 enum tree_code code;
1042 bool res = 0;
1043 va_list ap;
1044 const_call_expr_arg_iterator iter;
1045 const_tree arg;
1047 va_start (ap, callexpr);
1048 init_const_call_expr_arg_iterator (callexpr, &iter);
1052 code = (enum tree_code) va_arg (ap, int);
1053 switch (code)
1055 case 0:
1056 /* This signifies an ellipses, any further arguments are all ok. */
1057 res = true;
1058 goto end;
1059 case VOID_TYPE:
1060 /* This signifies an endlink, if no arguments remain, return
1061 true, otherwise return false. */
1062 res = !more_const_call_expr_args_p (&iter);
1063 goto end;
1064 default:
1065 /* If no parameters remain or the parameter's code does not
1066 match the specified code, return false. Otherwise continue
1067 checking any remaining arguments. */
1068 arg = next_const_call_expr_arg (&iter);
1069 if (!validate_arg (arg, code))
1070 goto end;
1071 break;
1074 while (1);
1076 /* We need gotos here since we can only have one VA_CLOSE in a
1077 function. */
1078 end: ;
1079 va_end (ap);
1081 return res;
1084 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1085 and the address of the save area. */
1087 static rtx
1088 expand_builtin_nonlocal_goto (tree exp)
1090 tree t_label, t_save_area;
1091 rtx r_label, r_save_area, r_fp, r_sp;
1092 rtx_insn *insn;
1094 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1095 return NULL_RTX;
1097 t_label = CALL_EXPR_ARG (exp, 0);
1098 t_save_area = CALL_EXPR_ARG (exp, 1);
1100 r_label = expand_normal (t_label);
1101 r_label = convert_memory_address (Pmode, r_label);
1102 r_save_area = expand_normal (t_save_area);
1103 r_save_area = convert_memory_address (Pmode, r_save_area);
1104 /* Copy the address of the save location to a register just in case it was
1105 based on the frame pointer. */
1106 r_save_area = copy_to_reg (r_save_area);
1107 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1108 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1109 plus_constant (Pmode, r_save_area,
1110 GET_MODE_SIZE (Pmode)));
1112 crtl->has_nonlocal_goto = 1;
1114 /* ??? We no longer need to pass the static chain value, afaik. */
1115 if (targetm.have_nonlocal_goto ())
1116 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1117 else
1119 r_label = copy_to_reg (r_label);
1121 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1122 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1124 /* Restore frame pointer for containing function. */
1125 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1126 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1128 /* USE of hard_frame_pointer_rtx added for consistency;
1129 not clear if really needed. */
1130 emit_use (hard_frame_pointer_rtx);
1131 emit_use (stack_pointer_rtx);
1133 /* If the architecture is using a GP register, we must
1134 conservatively assume that the target function makes use of it.
1135 The prologue of functions with nonlocal gotos must therefore
1136 initialize the GP register to the appropriate value, and we
1137 must then make sure that this value is live at the point
1138 of the jump. (Note that this doesn't necessarily apply
1139 to targets with a nonlocal_goto pattern; they are free
1140 to implement it in their own way. Note also that this is
1141 a no-op if the GP register is a global invariant.) */
1142 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1143 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1144 emit_use (pic_offset_table_rtx);
1146 emit_indirect_jump (r_label);
1149 /* Search backwards to the jump insn and mark it as a
1150 non-local goto. */
1151 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1153 if (JUMP_P (insn))
1155 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1156 break;
1158 else if (CALL_P (insn))
1159 break;
1162 return const0_rtx;
1165 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1166 (not all will be used on all machines) that was passed to __builtin_setjmp.
1167 It updates the stack pointer in that block to the current value. This is
1168 also called directly by the SJLJ exception handling code. */
1170 void
1171 expand_builtin_update_setjmp_buf (rtx buf_addr)
1173 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1174 rtx stack_save
1175 = gen_rtx_MEM (sa_mode,
1176 memory_address
1177 (sa_mode,
1178 plus_constant (Pmode, buf_addr,
1179 2 * GET_MODE_SIZE (Pmode))));
1181 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1184 /* Expand a call to __builtin_prefetch. For a target that does not support
1185 data prefetch, evaluate the memory address argument in case it has side
1186 effects. */
1188 static void
1189 expand_builtin_prefetch (tree exp)
1191 tree arg0, arg1, arg2;
1192 int nargs;
1193 rtx op0, op1, op2;
1195 if (!validate_arglist (exp, POINTER_TYPE, 0))
1196 return;
1198 arg0 = CALL_EXPR_ARG (exp, 0);
1200 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1201 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1202 locality). */
1203 nargs = call_expr_nargs (exp);
1204 if (nargs > 1)
1205 arg1 = CALL_EXPR_ARG (exp, 1);
1206 else
1207 arg1 = integer_zero_node;
1208 if (nargs > 2)
1209 arg2 = CALL_EXPR_ARG (exp, 2);
1210 else
1211 arg2 = integer_three_node;
1213 /* Argument 0 is an address. */
1214 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1216 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1217 if (TREE_CODE (arg1) != INTEGER_CST)
1219 error ("second argument to %<__builtin_prefetch%> must be a constant");
1220 arg1 = integer_zero_node;
1222 op1 = expand_normal (arg1);
1223 /* Argument 1 must be either zero or one. */
1224 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1226 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1227 " using zero");
1228 op1 = const0_rtx;
1231 /* Argument 2 (locality) must be a compile-time constant int. */
1232 if (TREE_CODE (arg2) != INTEGER_CST)
1234 error ("third argument to %<__builtin_prefetch%> must be a constant");
1235 arg2 = integer_zero_node;
1237 op2 = expand_normal (arg2);
1238 /* Argument 2 must be 0, 1, 2, or 3. */
1239 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1241 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1242 op2 = const0_rtx;
1245 if (targetm.have_prefetch ())
1247 struct expand_operand ops[3];
1249 create_address_operand (&ops[0], op0);
1250 create_integer_operand (&ops[1], INTVAL (op1));
1251 create_integer_operand (&ops[2], INTVAL (op2));
1252 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1253 return;
1256 /* Don't do anything with direct references to volatile memory, but
1257 generate code to handle other side effects. */
1258 if (!MEM_P (op0) && side_effects_p (op0))
1259 emit_insn (op0);
1262 /* Get a MEM rtx for expression EXP which is the address of an operand
1263 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1264 the maximum length of the block of memory that might be accessed or
1265 NULL if unknown. */
1267 static rtx
1268 get_memory_rtx (tree exp, tree len)
1270 tree orig_exp = exp;
1271 rtx addr, mem;
1273 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1274 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1275 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1276 exp = TREE_OPERAND (exp, 0);
1278 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1279 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1281 /* Get an expression we can use to find the attributes to assign to MEM.
1282 First remove any nops. */
1283 while (CONVERT_EXPR_P (exp)
1284 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1285 exp = TREE_OPERAND (exp, 0);
1287 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1288 (as builtin stringops may alias with anything). */
1289 exp = fold_build2 (MEM_REF,
1290 build_array_type (char_type_node,
1291 build_range_type (sizetype,
1292 size_one_node, len)),
1293 exp, build_int_cst (ptr_type_node, 0));
1295 /* If the MEM_REF has no acceptable address, try to get the base object
1296 from the original address we got, and build an all-aliasing
1297 unknown-sized access to that one. */
1298 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1299 set_mem_attributes (mem, exp, 0);
1300 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1301 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1302 0))))
1304 exp = build_fold_addr_expr (exp);
1305 exp = fold_build2 (MEM_REF,
1306 build_array_type (char_type_node,
1307 build_range_type (sizetype,
1308 size_zero_node,
1309 NULL)),
1310 exp, build_int_cst (ptr_type_node, 0));
1311 set_mem_attributes (mem, exp, 0);
1313 set_mem_alias_set (mem, 0);
1314 return mem;
1317 /* Built-in functions to perform an untyped call and return. */
1319 #define apply_args_mode \
1320 (this_target_builtins->x_apply_args_mode)
1321 #define apply_result_mode \
1322 (this_target_builtins->x_apply_result_mode)
1324 /* Return the size required for the block returned by __builtin_apply_args,
1325 and initialize apply_args_mode. */
1327 static int
1328 apply_args_size (void)
1330 static int size = -1;
1331 int align;
1332 unsigned int regno;
1333 machine_mode mode;
1335 /* The values computed by this function never change. */
1336 if (size < 0)
1338 /* The first value is the incoming arg-pointer. */
1339 size = GET_MODE_SIZE (Pmode);
1341 /* The second value is the structure value address unless this is
1342 passed as an "invisible" first argument. */
1343 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1344 size += GET_MODE_SIZE (Pmode);
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if (FUNCTION_ARG_REGNO_P (regno))
1349 mode = targetm.calls.get_raw_arg_mode (regno);
1351 gcc_assert (mode != VOIDmode);
1353 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1354 if (size % align != 0)
1355 size = CEIL (size, align) * align;
1356 size += GET_MODE_SIZE (mode);
1357 apply_args_mode[regno] = mode;
1359 else
1361 apply_args_mode[regno] = VOIDmode;
1364 return size;
1367 /* Return the size required for the block returned by __builtin_apply,
1368 and initialize apply_result_mode. */
1370 static int
1371 apply_result_size (void)
1373 static int size = -1;
1374 int align, regno;
1375 machine_mode mode;
1377 /* The values computed by this function never change. */
1378 if (size < 0)
1380 size = 0;
1382 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1383 if (targetm.calls.function_value_regno_p (regno))
1385 mode = targetm.calls.get_raw_result_mode (regno);
1387 gcc_assert (mode != VOIDmode);
1389 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1390 if (size % align != 0)
1391 size = CEIL (size, align) * align;
1392 size += GET_MODE_SIZE (mode);
1393 apply_result_mode[regno] = mode;
1395 else
1396 apply_result_mode[regno] = VOIDmode;
1398 /* Allow targets that use untyped_call and untyped_return to override
1399 the size so that machine-specific information can be stored here. */
1400 #ifdef APPLY_RESULT_SIZE
1401 size = APPLY_RESULT_SIZE;
1402 #endif
1404 return size;
1407 /* Create a vector describing the result block RESULT. If SAVEP is true,
1408 the result block is used to save the values; otherwise it is used to
1409 restore the values. */
1411 static rtx
1412 result_vector (int savep, rtx result)
1414 int regno, size, align, nelts;
1415 machine_mode mode;
1416 rtx reg, mem;
1417 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1419 size = nelts = 0;
1420 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1421 if ((mode = apply_result_mode[regno]) != VOIDmode)
1423 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1424 if (size % align != 0)
1425 size = CEIL (size, align) * align;
1426 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1427 mem = adjust_address (result, mode, size);
1428 savevec[nelts++] = (savep
1429 ? gen_rtx_SET (mem, reg)
1430 : gen_rtx_SET (reg, mem));
1431 size += GET_MODE_SIZE (mode);
1433 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1436 /* Save the state required to perform an untyped call with the same
1437 arguments as were passed to the current function. */
1439 static rtx
1440 expand_builtin_apply_args_1 (void)
1442 rtx registers, tem;
1443 int size, align, regno;
1444 machine_mode mode;
1445 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1447 /* Create a block where the arg-pointer, structure value address,
1448 and argument registers can be saved. */
1449 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1451 /* Walk past the arg-pointer and structure value address. */
1452 size = GET_MODE_SIZE (Pmode);
1453 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1454 size += GET_MODE_SIZE (Pmode);
1456 /* Save each register used in calling a function to the block. */
1457 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1458 if ((mode = apply_args_mode[regno]) != VOIDmode)
1460 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1461 if (size % align != 0)
1462 size = CEIL (size, align) * align;
1464 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1466 emit_move_insn (adjust_address (registers, mode, size), tem);
1467 size += GET_MODE_SIZE (mode);
1470 /* Save the arg pointer to the block. */
1471 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1472 /* We need the pointer as the caller actually passed them to us, not
1473 as we might have pretended they were passed. Make sure it's a valid
1474 operand, as emit_move_insn isn't expected to handle a PLUS. */
1475 if (STACK_GROWS_DOWNWARD)
1477 = force_operand (plus_constant (Pmode, tem,
1478 crtl->args.pretend_args_size),
1479 NULL_RTX);
1480 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1482 size = GET_MODE_SIZE (Pmode);
1484 /* Save the structure value address unless this is passed as an
1485 "invisible" first argument. */
1486 if (struct_incoming_value)
1488 emit_move_insn (adjust_address (registers, Pmode, size),
1489 copy_to_reg (struct_incoming_value));
1490 size += GET_MODE_SIZE (Pmode);
1493 /* Return the address of the block. */
1494 return copy_addr_to_reg (XEXP (registers, 0));
1497 /* __builtin_apply_args returns block of memory allocated on
1498 the stack into which is stored the arg pointer, structure
1499 value address, static chain, and all the registers that might
1500 possibly be used in performing a function call. The code is
1501 moved to the start of the function so the incoming values are
1502 saved. */
1504 static rtx
1505 expand_builtin_apply_args (void)
1507 /* Don't do __builtin_apply_args more than once in a function.
1508 Save the result of the first call and reuse it. */
1509 if (apply_args_value != 0)
1510 return apply_args_value;
1512 /* When this function is called, it means that registers must be
1513 saved on entry to this function. So we migrate the
1514 call to the first insn of this function. */
1515 rtx temp;
1517 start_sequence ();
1518 temp = expand_builtin_apply_args_1 ();
1519 rtx_insn *seq = get_insns ();
1520 end_sequence ();
1522 apply_args_value = temp;
1524 /* Put the insns after the NOTE that starts the function.
1525 If this is inside a start_sequence, make the outer-level insn
1526 chain current, so the code is placed at the start of the
1527 function. If internal_arg_pointer is a non-virtual pseudo,
1528 it needs to be placed after the function that initializes
1529 that pseudo. */
1530 push_topmost_sequence ();
1531 if (REG_P (crtl->args.internal_arg_pointer)
1532 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1533 emit_insn_before (seq, parm_birth_insn);
1534 else
1535 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1536 pop_topmost_sequence ();
1537 return temp;
1541 /* Perform an untyped call and save the state required to perform an
1542 untyped return of whatever value was returned by the given function. */
1544 static rtx
1545 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1547 int size, align, regno;
1548 machine_mode mode;
1549 rtx incoming_args, result, reg, dest, src;
1550 rtx_call_insn *call_insn;
1551 rtx old_stack_level = 0;
1552 rtx call_fusage = 0;
1553 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1555 arguments = convert_memory_address (Pmode, arguments);
1557 /* Create a block where the return registers can be saved. */
1558 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1560 /* Fetch the arg pointer from the ARGUMENTS block. */
1561 incoming_args = gen_reg_rtx (Pmode);
1562 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1563 if (!STACK_GROWS_DOWNWARD)
1564 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1565 incoming_args, 0, OPTAB_LIB_WIDEN);
1567 /* Push a new argument block and copy the arguments. Do not allow
1568 the (potential) memcpy call below to interfere with our stack
1569 manipulations. */
1570 do_pending_stack_adjust ();
1571 NO_DEFER_POP;
1573 /* Save the stack with nonlocal if available. */
1574 if (targetm.have_save_stack_nonlocal ())
1575 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1576 else
1577 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1579 /* Allocate a block of memory onto the stack and copy the memory
1580 arguments to the outgoing arguments address. We can pass TRUE
1581 as the 4th argument because we just saved the stack pointer
1582 and will restore it right after the call. */
1583 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1585 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1586 may have already set current_function_calls_alloca to true.
1587 current_function_calls_alloca won't be set if argsize is zero,
1588 so we have to guarantee need_drap is true here. */
1589 if (SUPPORTS_STACK_ALIGNMENT)
1590 crtl->need_drap = true;
1592 dest = virtual_outgoing_args_rtx;
1593 if (!STACK_GROWS_DOWNWARD)
1595 if (CONST_INT_P (argsize))
1596 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1597 else
1598 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1600 dest = gen_rtx_MEM (BLKmode, dest);
1601 set_mem_align (dest, PARM_BOUNDARY);
1602 src = gen_rtx_MEM (BLKmode, incoming_args);
1603 set_mem_align (src, PARM_BOUNDARY);
1604 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1606 /* Refer to the argument block. */
1607 apply_args_size ();
1608 arguments = gen_rtx_MEM (BLKmode, arguments);
1609 set_mem_align (arguments, PARM_BOUNDARY);
1611 /* Walk past the arg-pointer and structure value address. */
1612 size = GET_MODE_SIZE (Pmode);
1613 if (struct_value)
1614 size += GET_MODE_SIZE (Pmode);
1616 /* Restore each of the registers previously saved. Make USE insns
1617 for each of these registers for use in making the call. */
1618 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1619 if ((mode = apply_args_mode[regno]) != VOIDmode)
1621 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1622 if (size % align != 0)
1623 size = CEIL (size, align) * align;
1624 reg = gen_rtx_REG (mode, regno);
1625 emit_move_insn (reg, adjust_address (arguments, mode, size));
1626 use_reg (&call_fusage, reg);
1627 size += GET_MODE_SIZE (mode);
1630 /* Restore the structure value address unless this is passed as an
1631 "invisible" first argument. */
1632 size = GET_MODE_SIZE (Pmode);
1633 if (struct_value)
1635 rtx value = gen_reg_rtx (Pmode);
1636 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1637 emit_move_insn (struct_value, value);
1638 if (REG_P (struct_value))
1639 use_reg (&call_fusage, struct_value);
1640 size += GET_MODE_SIZE (Pmode);
1643 /* All arguments and registers used for the call are set up by now! */
1644 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1646 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1647 and we don't want to load it into a register as an optimization,
1648 because prepare_call_address already did it if it should be done. */
1649 if (GET_CODE (function) != SYMBOL_REF)
1650 function = memory_address (FUNCTION_MODE, function);
1652 /* Generate the actual call instruction and save the return value. */
1653 if (targetm.have_untyped_call ())
1655 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1656 emit_call_insn (targetm.gen_untyped_call (mem, result,
1657 result_vector (1, result)));
1659 else if (targetm.have_call_value ())
1661 rtx valreg = 0;
1663 /* Locate the unique return register. It is not possible to
1664 express a call that sets more than one return register using
1665 call_value; use untyped_call for that. In fact, untyped_call
1666 only needs to save the return registers in the given block. */
1667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1668 if ((mode = apply_result_mode[regno]) != VOIDmode)
1670 gcc_assert (!valreg); /* have_untyped_call required. */
1672 valreg = gen_rtx_REG (mode, regno);
1675 emit_insn (targetm.gen_call_value (valreg,
1676 gen_rtx_MEM (FUNCTION_MODE, function),
1677 const0_rtx, NULL_RTX, const0_rtx));
1679 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1681 else
1682 gcc_unreachable ();
1684 /* Find the CALL insn we just emitted, and attach the register usage
1685 information. */
1686 call_insn = last_call_insn ();
1687 add_function_usage_to (call_insn, call_fusage);
1689 /* Restore the stack. */
1690 if (targetm.have_save_stack_nonlocal ())
1691 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1692 else
1693 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1694 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1696 OK_DEFER_POP;
1698 /* Return the address of the result block. */
1699 result = copy_addr_to_reg (XEXP (result, 0));
1700 return convert_memory_address (ptr_mode, result);
1703 /* Perform an untyped return. */
1705 static void
1706 expand_builtin_return (rtx result)
1708 int size, align, regno;
1709 machine_mode mode;
1710 rtx reg;
1711 rtx_insn *call_fusage = 0;
1713 result = convert_memory_address (Pmode, result);
1715 apply_result_size ();
1716 result = gen_rtx_MEM (BLKmode, result);
1718 if (targetm.have_untyped_return ())
1720 rtx vector = result_vector (0, result);
1721 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1722 emit_barrier ();
1723 return;
1726 /* Restore the return value and note that each value is used. */
1727 size = 0;
1728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1729 if ((mode = apply_result_mode[regno]) != VOIDmode)
1731 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1732 if (size % align != 0)
1733 size = CEIL (size, align) * align;
1734 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1735 emit_move_insn (reg, adjust_address (result, mode, size));
1737 push_to_sequence (call_fusage);
1738 emit_use (reg);
1739 call_fusage = get_insns ();
1740 end_sequence ();
1741 size += GET_MODE_SIZE (mode);
1744 /* Put the USE insns before the return. */
1745 emit_insn (call_fusage);
1747 /* Return whatever value was restored by jumping directly to the end
1748 of the function. */
1749 expand_naked_return ();
1752 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1754 static enum type_class
1755 type_to_class (tree type)
1757 switch (TREE_CODE (type))
1759 case VOID_TYPE: return void_type_class;
1760 case INTEGER_TYPE: return integer_type_class;
1761 case ENUMERAL_TYPE: return enumeral_type_class;
1762 case BOOLEAN_TYPE: return boolean_type_class;
1763 case POINTER_TYPE: return pointer_type_class;
1764 case REFERENCE_TYPE: return reference_type_class;
1765 case OFFSET_TYPE: return offset_type_class;
1766 case REAL_TYPE: return real_type_class;
1767 case COMPLEX_TYPE: return complex_type_class;
1768 case FUNCTION_TYPE: return function_type_class;
1769 case METHOD_TYPE: return method_type_class;
1770 case RECORD_TYPE: return record_type_class;
1771 case UNION_TYPE:
1772 case QUAL_UNION_TYPE: return union_type_class;
1773 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1774 ? string_type_class : array_type_class);
1775 case LANG_TYPE: return lang_type_class;
1776 default: return no_type_class;
1780 /* Expand a call EXP to __builtin_classify_type. */
1782 static rtx
1783 expand_builtin_classify_type (tree exp)
1785 if (call_expr_nargs (exp))
1786 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1787 return GEN_INT (no_type_class);
1790 /* This helper macro, meant to be used in mathfn_built_in below,
1791 determines which among a set of three builtin math functions is
1792 appropriate for a given type mode. The `F' and `L' cases are
1793 automatically generated from the `double' case. */
1794 #define CASE_MATHFN(MATHFN) \
1795 CASE_CFN_##MATHFN: \
1796 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1797 fcodel = BUILT_IN_##MATHFN##L ; break;
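/* As an illustration of the macro above: CASE_MATHFN (SQRT) pastes together
   the combined_fn case labels for the sqrt family and records BUILT_IN_SQRT,
   BUILT_IN_SQRTF and BUILT_IN_SQRTL in fcode, fcodef and fcodel.  */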
1798 /* Similar to above, but appends _R after any F/L suffix. */
1799 #define CASE_MATHFN_REENT(MATHFN) \
1800 case CFN_BUILT_IN_##MATHFN##_R: \
1801 case CFN_BUILT_IN_##MATHFN##F_R: \
1802 case CFN_BUILT_IN_##MATHFN##L_R: \
1803 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1804 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1806 /* Return a function equivalent to FN but operating on floating-point
1807 values of type TYPE, or END_BUILTINS if no such function exists.
1808 This is purely an operation on function codes; it does not guarantee
1809 that the target actually has an implementation of the function. */
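/* For example, mathfn_built_in_2 (float_type_node, CFN_SQRT) returns
   BUILT_IN_SQRTF, while an unhandled FN or TYPE yields END_BUILTINS (see
   the TYPE_MAIN_VARIANT checks at the end of the function).  */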
1811 static built_in_function
1812 mathfn_built_in_2 (tree type, combined_fn fn)
1814 built_in_function fcode, fcodef, fcodel;
1816 switch (fn)
1818 CASE_MATHFN (ACOS)
1819 CASE_MATHFN (ACOSH)
1820 CASE_MATHFN (ASIN)
1821 CASE_MATHFN (ASINH)
1822 CASE_MATHFN (ATAN)
1823 CASE_MATHFN (ATAN2)
1824 CASE_MATHFN (ATANH)
1825 CASE_MATHFN (CBRT)
1826 CASE_MATHFN (CEIL)
1827 CASE_MATHFN (CEXPI)
1828 CASE_MATHFN (COPYSIGN)
1829 CASE_MATHFN (COS)
1830 CASE_MATHFN (COSH)
1831 CASE_MATHFN (DREM)
1832 CASE_MATHFN (ERF)
1833 CASE_MATHFN (ERFC)
1834 CASE_MATHFN (EXP)
1835 CASE_MATHFN (EXP10)
1836 CASE_MATHFN (EXP2)
1837 CASE_MATHFN (EXPM1)
1838 CASE_MATHFN (FABS)
1839 CASE_MATHFN (FDIM)
1840 CASE_MATHFN (FLOOR)
1841 CASE_MATHFN (FMA)
1842 CASE_MATHFN (FMAX)
1843 CASE_MATHFN (FMIN)
1844 CASE_MATHFN (FMOD)
1845 CASE_MATHFN (FREXP)
1846 CASE_MATHFN (GAMMA)
1847 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1848 CASE_MATHFN (HUGE_VAL)
1849 CASE_MATHFN (HYPOT)
1850 CASE_MATHFN (ILOGB)
1851 CASE_MATHFN (ICEIL)
1852 CASE_MATHFN (IFLOOR)
1853 CASE_MATHFN (INF)
1854 CASE_MATHFN (IRINT)
1855 CASE_MATHFN (IROUND)
1856 CASE_MATHFN (ISINF)
1857 CASE_MATHFN (J0)
1858 CASE_MATHFN (J1)
1859 CASE_MATHFN (JN)
1860 CASE_MATHFN (LCEIL)
1861 CASE_MATHFN (LDEXP)
1862 CASE_MATHFN (LFLOOR)
1863 CASE_MATHFN (LGAMMA)
1864 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1865 CASE_MATHFN (LLCEIL)
1866 CASE_MATHFN (LLFLOOR)
1867 CASE_MATHFN (LLRINT)
1868 CASE_MATHFN (LLROUND)
1869 CASE_MATHFN (LOG)
1870 CASE_MATHFN (LOG10)
1871 CASE_MATHFN (LOG1P)
1872 CASE_MATHFN (LOG2)
1873 CASE_MATHFN (LOGB)
1874 CASE_MATHFN (LRINT)
1875 CASE_MATHFN (LROUND)
1876 CASE_MATHFN (MODF)
1877 CASE_MATHFN (NAN)
1878 CASE_MATHFN (NANS)
1879 CASE_MATHFN (NEARBYINT)
1880 CASE_MATHFN (NEXTAFTER)
1881 CASE_MATHFN (NEXTTOWARD)
1882 CASE_MATHFN (POW)
1883 CASE_MATHFN (POWI)
1884 CASE_MATHFN (POW10)
1885 CASE_MATHFN (REMAINDER)
1886 CASE_MATHFN (REMQUO)
1887 CASE_MATHFN (RINT)
1888 CASE_MATHFN (ROUND)
1889 CASE_MATHFN (SCALB)
1890 CASE_MATHFN (SCALBLN)
1891 CASE_MATHFN (SCALBN)
1892 CASE_MATHFN (SIGNBIT)
1893 CASE_MATHFN (SIGNIFICAND)
1894 CASE_MATHFN (SIN)
1895 CASE_MATHFN (SINCOS)
1896 CASE_MATHFN (SINH)
1897 CASE_MATHFN (SQRT)
1898 CASE_MATHFN (TAN)
1899 CASE_MATHFN (TANH)
1900 CASE_MATHFN (TGAMMA)
1901 CASE_MATHFN (TRUNC)
1902 CASE_MATHFN (Y0)
1903 CASE_MATHFN (Y1)
1904 CASE_MATHFN (YN)
1906 default:
1907 return END_BUILTINS;
1910 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1911 return fcode;
1912 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1913 return fcodef;
1914 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1915 return fcodel;
1916 else
1917 return END_BUILTINS;
1920 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1921 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1922 otherwise use the explicit declaration. If we can't do the conversion,
1923 return null. */
1925 static tree
1926 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1928 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1929 if (fcode2 == END_BUILTINS)
1930 return NULL_TREE;
1932 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1933 return NULL_TREE;
1935 return builtin_decl_explicit (fcode2);
1938 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1940 tree
1941 mathfn_built_in (tree type, combined_fn fn)
1943 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1946 /* Like mathfn_built_in_1, but take a built_in_function and
1947 always use the implicit builtin declarations. */
1949 tree
1950 mathfn_built_in (tree type, enum built_in_function fn)
1952 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
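/* Example use: mathfn_built_in (long_double_type_node, BUILT_IN_FLOOR)
   yields the floorl declaration when it is implicitly available, and
   NULL_TREE otherwise; this is how the fallback decls are looked up in
   expand_builtin_int_roundingfn below.  */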
1955 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1956 return its code, otherwise return IFN_LAST. Note that this function
1957 only tests whether the function is defined in internal-fn.def, not whether
1958 it is actually available on the target. */
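/* For instance, the BUILT_IN_POW10 variants are reported as IFN_EXP10 and
   the BUILT_IN_DREM variants as IFN_REMAINDER, in addition to the generic
   mappings pulled in from internal-fn.def below.  */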
1960 internal_fn
1961 associated_internal_fn (tree fndecl)
1963 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1964 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1965 switch (DECL_FUNCTION_CODE (fndecl))
1967 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1968 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1969 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1970 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1971 #include "internal-fn.def"
1973 CASE_FLT_FN (BUILT_IN_POW10):
1974 return IFN_EXP10;
1976 CASE_FLT_FN (BUILT_IN_DREM):
1977 return IFN_REMAINDER;
1979 CASE_FLT_FN (BUILT_IN_SCALBN):
1980 CASE_FLT_FN (BUILT_IN_SCALBLN):
1981 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1982 return IFN_LDEXP;
1983 return IFN_LAST;
1985 default:
1986 return IFN_LAST;
1990 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1991 on the current target by a call to an internal function, return the
1992 code of that internal function, otherwise return IFN_LAST. The caller
1993 is responsible for ensuring that any side-effects of the built-in
1994 call are dealt with correctly. E.g. if CALL sets errno, the caller
1995 must decide that the errno result isn't needed or make it available
1996 in some other way. */
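/* For example, a pow10 call whose errno effect the caller has already
   accounted for can be replaced by IFN_EXP10 when
   direct_internal_fn_supported_p confirms the target implements it for the
   call's types.  */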
1998 internal_fn
1999 replacement_internal_fn (gcall *call)
2001 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2003 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2004 if (ifn != IFN_LAST)
2006 tree_pair types = direct_internal_fn_types (ifn, call);
2007 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2008 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2009 return ifn;
2012 return IFN_LAST;
2015 /* Expand a call to the builtin ternary math functions (fma).
2016 Return NULL_RTX if a normal call should be emitted rather than expanding the
2017 function in-line. EXP is the expression that is a call to the builtin
2018 function; if convenient, the result should be placed in TARGET.
2019 SUBTARGET may be used as the target for computing one of EXP's
2020 operands. */
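/* For fma this means fma (a, b, c) is expanded through the fma optab,
   i.e. a * b + c computed with a single rounding, when the target provides
   such an instruction; otherwise the expansion is abandoned and a normal
   library call is made.  */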
2022 static rtx
2023 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2025 optab builtin_optab;
2026 rtx op0, op1, op2, result;
2027 rtx_insn *insns;
2028 tree fndecl = get_callee_fndecl (exp);
2029 tree arg0, arg1, arg2;
2030 machine_mode mode;
2032 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2033 return NULL_RTX;
2035 arg0 = CALL_EXPR_ARG (exp, 0);
2036 arg1 = CALL_EXPR_ARG (exp, 1);
2037 arg2 = CALL_EXPR_ARG (exp, 2);
2039 switch (DECL_FUNCTION_CODE (fndecl))
2041 CASE_FLT_FN (BUILT_IN_FMA):
2042 builtin_optab = fma_optab; break;
2043 default:
2044 gcc_unreachable ();
2047 /* Make a suitable register to place result in. */
2048 mode = TYPE_MODE (TREE_TYPE (exp));
2050 /* Before working hard, check whether the instruction is available. */
2051 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2052 return NULL_RTX;
2054 result = gen_reg_rtx (mode);
2056 /* Always stabilize the argument list. */
2057 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2058 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2059 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2061 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2062 op1 = expand_normal (arg1);
2063 op2 = expand_normal (arg2);
2065 start_sequence ();
2067 /* Compute into RESULT.
2068 Set RESULT to wherever the result comes back. */
2069 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2070 result, 0);
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call the library function
2074 with the stabilized argument list. */
2075 if (result == 0)
2077 end_sequence ();
2078 return expand_call (exp, target, target == const0_rtx);
2081 /* Output the entire sequence. */
2082 insns = get_insns ();
2083 end_sequence ();
2084 emit_insn (insns);
2086 return result;
2089 /* Expand a call to the builtin sin and cos math functions.
2090 Return NULL_RTX if a normal call should be emitted rather than expanding the
2091 function in-line. EXP is the expression that is a call to the builtin
2092 function; if convenient, the result should be placed in TARGET.
2093 SUBTARGET may be used as the target for computing one of EXP's
2094 operands. */
2096 static rtx
2097 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2099 optab builtin_optab;
2100 rtx op0;
2101 rtx_insn *insns;
2102 tree fndecl = get_callee_fndecl (exp);
2103 machine_mode mode;
2104 tree arg;
2106 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2107 return NULL_RTX;
2109 arg = CALL_EXPR_ARG (exp, 0);
2111 switch (DECL_FUNCTION_CODE (fndecl))
2113 CASE_FLT_FN (BUILT_IN_SIN):
2114 CASE_FLT_FN (BUILT_IN_COS):
2115 builtin_optab = sincos_optab; break;
2116 default:
2117 gcc_unreachable ();
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2123 /* Check if the sincos insn is available, otherwise fall back
2124 to the sin or cos insn. */
2125 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2126 switch (DECL_FUNCTION_CODE (fndecl))
2128 CASE_FLT_FN (BUILT_IN_SIN):
2129 builtin_optab = sin_optab; break;
2130 CASE_FLT_FN (BUILT_IN_COS):
2131 builtin_optab = cos_optab; break;
2132 default:
2133 gcc_unreachable ();
2136 /* Before working hard, check whether the instruction is available. */
2137 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2139 rtx result = gen_reg_rtx (mode);
2141 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2142 need to expand the argument again. This way, we will not perform
2143 side-effects more than once. */
2144 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2146 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2148 start_sequence ();
2150 /* Compute into RESULT.
2151 Set RESULT to wherever the result comes back. */
2152 if (builtin_optab == sincos_optab)
2154 int ok;
2156 switch (DECL_FUNCTION_CODE (fndecl))
2158 CASE_FLT_FN (BUILT_IN_SIN):
2159 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2160 break;
2161 CASE_FLT_FN (BUILT_IN_COS):
2162 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2163 break;
2164 default:
2165 gcc_unreachable ();
2167 gcc_assert (ok);
2169 else
2170 result = expand_unop (mode, builtin_optab, op0, result, 0);
2172 if (result != 0)
2174 /* Output the entire sequence. */
2175 insns = get_insns ();
2176 end_sequence ();
2177 emit_insn (insns);
2178 return result;
2181 /* If we were unable to expand via the builtin, stop the sequence
2182 (without outputting the insns) and call the library function
2183 with the stabilized argument list. */
2184 end_sequence ();
2187 return expand_call (exp, target, target == const0_rtx);
2190 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2191 return an RTL instruction code that implements the functionality.
2192 If that isn't possible or available return CODE_FOR_nothing. */
2194 static enum insn_code
2195 interclass_mathfn_icode (tree arg, tree fndecl)
2197 bool errno_set = false;
2198 optab builtin_optab = unknown_optab;
2199 machine_mode mode;
2201 switch (DECL_FUNCTION_CODE (fndecl))
2203 CASE_FLT_FN (BUILT_IN_ILOGB):
2204 errno_set = true; builtin_optab = ilogb_optab; break;
2205 CASE_FLT_FN (BUILT_IN_ISINF):
2206 builtin_optab = isinf_optab; break;
2207 case BUILT_IN_ISNORMAL:
2208 case BUILT_IN_ISFINITE:
2209 CASE_FLT_FN (BUILT_IN_FINITE):
2210 case BUILT_IN_FINITED32:
2211 case BUILT_IN_FINITED64:
2212 case BUILT_IN_FINITED128:
2213 case BUILT_IN_ISINFD32:
2214 case BUILT_IN_ISINFD64:
2215 case BUILT_IN_ISINFD128:
2216 /* These builtins have no optabs (yet). */
2217 break;
2218 default:
2219 gcc_unreachable ();
2222 /* There's no easy way to detect the case we need to set EDOM. */
2223 if (flag_errno_math && errno_set)
2224 return CODE_FOR_nothing;
2226 /* Optab mode depends on the mode of the input argument. */
2227 mode = TYPE_MODE (TREE_TYPE (arg));
2229 if (builtin_optab)
2230 return optab_handler (builtin_optab, mode);
2231 return CODE_FOR_nothing;
2234 /* Expand a call to one of the builtin math functions that operate on
2235 a floating-point argument and output an integer result (ilogb, isinf,
2236 isnan, etc).
2237 Return 0 if a normal call should be emitted rather than expanding the
2238 function in-line. EXP is the expression that is a call to the builtin
2239 function; if convenient, the result should be placed in TARGET. */
2241 static rtx
2242 expand_builtin_interclass_mathfn (tree exp, rtx target)
2244 enum insn_code icode = CODE_FOR_nothing;
2245 rtx op0;
2246 tree fndecl = get_callee_fndecl (exp);
2247 machine_mode mode;
2248 tree arg;
2250 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2251 return NULL_RTX;
2253 arg = CALL_EXPR_ARG (exp, 0);
2254 icode = interclass_mathfn_icode (arg, fndecl);
2255 mode = TYPE_MODE (TREE_TYPE (arg));
2257 if (icode != CODE_FOR_nothing)
2259 struct expand_operand ops[1];
2260 rtx_insn *last = get_last_insn ();
2261 tree orig_arg = arg;
2263 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2264 need to expand the argument again. This way, we will not perform
2265 side-effects more than once. */
2266 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2268 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2270 if (mode != GET_MODE (op0))
2271 op0 = convert_to_mode (mode, op0, 0);
2273 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2274 if (maybe_legitimize_operands (icode, 0, 1, ops)
2275 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2276 return ops[0].value;
2278 delete_insns_since (last);
2279 CALL_EXPR_ARG (exp, 0) = orig_arg;
2282 return NULL_RTX;
2285 /* Expand a call to the builtin sincos math function.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
2288 function. */
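/* In other words, sincos (x, &s, &c) becomes a single sincos insn whose two
   results are then stored through the SINP and COSP pointers, provided the
   sincos optab exists for the argument's mode.  */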
2290 static rtx
2291 expand_builtin_sincos (tree exp)
2293 rtx op0, op1, op2, target1, target2;
2294 machine_mode mode;
2295 tree arg, sinp, cosp;
2296 int result;
2297 location_t loc = EXPR_LOCATION (exp);
2298 tree alias_type, alias_off;
2300 if (!validate_arglist (exp, REAL_TYPE,
2301 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2302 return NULL_RTX;
2304 arg = CALL_EXPR_ARG (exp, 0);
2305 sinp = CALL_EXPR_ARG (exp, 1);
2306 cosp = CALL_EXPR_ARG (exp, 2);
2308 /* Make a suitable register to place result in. */
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2311 /* Check if sincos insn is available, otherwise emit the call. */
2312 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2313 return NULL_RTX;
2315 target1 = gen_reg_rtx (mode);
2316 target2 = gen_reg_rtx (mode);
2318 op0 = expand_normal (arg);
2319 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2320 alias_off = build_int_cst (alias_type, 0);
2321 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2322 sinp, alias_off));
2323 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2324 cosp, alias_off));
2326 /* Compute into target1 and target2.
2327 Set TARGET to wherever the result comes back. */
2328 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2329 gcc_assert (result);
2331 /* Move target1 and target2 to the memory locations indicated
2332 by op1 and op2. */
2333 emit_move_insn (op1, target1);
2334 emit_move_insn (op2, target2);
2336 return const0_rtx;
2339 /* Expand a call to the internal cexpi builtin to the sincos math function.
2340 EXP is the expression that is a call to the builtin function; if convenient,
2341 the result should be placed in TARGET. */
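/* Three strategies are tried in turn below: the sincos optab, a libcall to
   sincos, and finally a call to cexp built from the complex argument
   0.0 + x*i, whichever the target supports.  */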
2343 static rtx
2344 expand_builtin_cexpi (tree exp, rtx target)
2346 tree fndecl = get_callee_fndecl (exp);
2347 tree arg, type;
2348 machine_mode mode;
2349 rtx op0, op1, op2;
2350 location_t loc = EXPR_LOCATION (exp);
2352 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2353 return NULL_RTX;
2355 arg = CALL_EXPR_ARG (exp, 0);
2356 type = TREE_TYPE (arg);
2357 mode = TYPE_MODE (TREE_TYPE (arg));
2359 /* Try expanding via a sincos optab, fall back to emitting a libcall
2360 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2361 is only generated from sincos or cexp, or when either of them is known to be available. */
2362 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2364 op1 = gen_reg_rtx (mode);
2365 op2 = gen_reg_rtx (mode);
2367 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2369 /* Compute into op1 and op2. */
2370 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2372 else if (targetm.libc_has_function (function_sincos))
2374 tree call, fn = NULL_TREE;
2375 tree top1, top2;
2376 rtx op1a, op2a;
2378 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2379 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2380 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2381 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2382 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2383 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2384 else
2385 gcc_unreachable ();
2387 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2388 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2389 op1a = copy_addr_to_reg (XEXP (op1, 0));
2390 op2a = copy_addr_to_reg (XEXP (op2, 0));
2391 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2392 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2394 /* Make sure not to fold the sincos call again. */
2395 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2396 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2397 call, 3, arg, top1, top2));
2399 else
2401 tree call, fn = NULL_TREE, narg;
2402 tree ctype = build_complex_type (type);
2404 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2405 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2406 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2407 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2409 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2410 else
2411 gcc_unreachable ();
2413 /* If we don't have a decl for cexp, create one. This is the
2414 friendliest fallback if the user calls __builtin_cexpi
2415 without full target C99 function support. */
2416 if (fn == NULL_TREE)
2418 tree fntype;
2419 const char *name = NULL;
2421 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2422 name = "cexpf";
2423 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2424 name = "cexp";
2425 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2426 name = "cexpl";
2428 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2429 fn = build_fn_decl (name, fntype);
2432 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2433 build_real (type, dconst0), arg);
2435 /* Make sure not to fold the cexp call again. */
2436 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2437 return expand_expr (build_call_nary (ctype, call, 1, narg),
2438 target, VOIDmode, EXPAND_NORMAL);
2441 /* Now build the proper return type. */
2442 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2443 make_tree (TREE_TYPE (arg), op2),
2444 make_tree (TREE_TYPE (arg), op1)),
2445 target, VOIDmode, EXPAND_NORMAL);
2448 /* Conveniently construct a function call expression. FNDECL names the
2449 function to be called, N is the number of arguments, and the "..."
2450 parameters are the argument expressions. Unlike build_call_expr,
2451 this doesn't fold the call, hence it will always return a CALL_EXPR. */
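/* A sketch of its use: build_call_nofold_loc (loc, fndecl, 2, dest, src)
   produces a CALL_EXPR invoking FNDECL with the two argument trees, left
   unfolded; see e.g. the stpcpy-to-strcpy transformation below.  */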
2453 static tree
2454 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2456 va_list ap;
2457 tree fntype = TREE_TYPE (fndecl);
2458 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2460 va_start (ap, n);
2461 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2462 va_end (ap);
2463 SET_EXPR_LOCATION (fn, loc);
2464 return fn;
2467 /* Expand a call to one of the builtin rounding functions gcc defines
2468 as an extension (lfloor and lceil). As these are gcc extensions we
2469 do not need to worry about setting errno to EDOM.
2470 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2471 EXP is the expression that is a call to the builtin function;
2472 if convenient, the result should be placed in TARGET. */
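/* E.g. for __builtin_lfloor the lfloor optab is tried first; if that fails
   the call is rewritten as floor (x) (building the decl by hand on non-C99
   targets) and the result is truncated with expand_fix, giving the
   (long int) floor (x) semantics.  */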
2474 static rtx
2475 expand_builtin_int_roundingfn (tree exp, rtx target)
2477 convert_optab builtin_optab;
2478 rtx op0, tmp;
2479 rtx_insn *insns;
2480 tree fndecl = get_callee_fndecl (exp);
2481 enum built_in_function fallback_fn;
2482 tree fallback_fndecl;
2483 machine_mode mode;
2484 tree arg;
2486 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2487 gcc_unreachable ();
2489 arg = CALL_EXPR_ARG (exp, 0);
2491 switch (DECL_FUNCTION_CODE (fndecl))
2493 CASE_FLT_FN (BUILT_IN_ICEIL):
2494 CASE_FLT_FN (BUILT_IN_LCEIL):
2495 CASE_FLT_FN (BUILT_IN_LLCEIL):
2496 builtin_optab = lceil_optab;
2497 fallback_fn = BUILT_IN_CEIL;
2498 break;
2500 CASE_FLT_FN (BUILT_IN_IFLOOR):
2501 CASE_FLT_FN (BUILT_IN_LFLOOR):
2502 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2503 builtin_optab = lfloor_optab;
2504 fallback_fn = BUILT_IN_FLOOR;
2505 break;
2507 default:
2508 gcc_unreachable ();
2511 /* Make a suitable register to place result in. */
2512 mode = TYPE_MODE (TREE_TYPE (exp));
2514 target = gen_reg_rtx (mode);
2516 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2517 need to expand the argument again. This way, we will not perform
2518 side-effects more than once. */
2519 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2521 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2523 start_sequence ();
2525 /* Compute into TARGET. */
2526 if (expand_sfix_optab (target, op0, builtin_optab))
2528 /* Output the entire sequence. */
2529 insns = get_insns ();
2530 end_sequence ();
2531 emit_insn (insns);
2532 return target;
2535 /* If we were unable to expand via the builtin, stop the sequence
2536 (without outputting the insns). */
2537 end_sequence ();
2539 /* Fall back to floating point rounding optab. */
2540 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2542 /* For non-C99 targets we may end up without a fallback fndecl here
2543 if the user called __builtin_lfloor directly. In this case emit
2544 a call to the floor/ceil variants nevertheless. This should result
2545 in the best user experience on targets lacking full C99 support. */
2546 if (fallback_fndecl == NULL_TREE)
2548 tree fntype;
2549 const char *name = NULL;
2551 switch (DECL_FUNCTION_CODE (fndecl))
2553 case BUILT_IN_ICEIL:
2554 case BUILT_IN_LCEIL:
2555 case BUILT_IN_LLCEIL:
2556 name = "ceil";
2557 break;
2558 case BUILT_IN_ICEILF:
2559 case BUILT_IN_LCEILF:
2560 case BUILT_IN_LLCEILF:
2561 name = "ceilf";
2562 break;
2563 case BUILT_IN_ICEILL:
2564 case BUILT_IN_LCEILL:
2565 case BUILT_IN_LLCEILL:
2566 name = "ceill";
2567 break;
2568 case BUILT_IN_IFLOOR:
2569 case BUILT_IN_LFLOOR:
2570 case BUILT_IN_LLFLOOR:
2571 name = "floor";
2572 break;
2573 case BUILT_IN_IFLOORF:
2574 case BUILT_IN_LFLOORF:
2575 case BUILT_IN_LLFLOORF:
2576 name = "floorf";
2577 break;
2578 case BUILT_IN_IFLOORL:
2579 case BUILT_IN_LFLOORL:
2580 case BUILT_IN_LLFLOORL:
2581 name = "floorl";
2582 break;
2583 default:
2584 gcc_unreachable ();
2587 fntype = build_function_type_list (TREE_TYPE (arg),
2588 TREE_TYPE (arg), NULL_TREE);
2589 fallback_fndecl = build_fn_decl (name, fntype);
2592 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2594 tmp = expand_normal (exp);
2595 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2597 /* Truncate the result of the floating point optab to integer
2598 via expand_fix (). */
2599 target = gen_reg_rtx (mode);
2600 expand_fix (target, tmp, 0);
2602 return target;
2605 /* Expand a call to one of the builtin math functions doing integer
2606 conversion (lrint).
2607 Return 0 if a normal call should be emitted rather than expanding the
2608 function in-line. EXP is the expression that is a call to the builtin
2609 function; if convenient, the result should be placed in TARGET. */
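/* E.g. when the lround optab is unavailable, __builtin_iround falls back
   below to a library call to lround whose long result is then converted to
   int; for the plain lrint/lround family under -fmath-errno no fallback
   exists and a normal call is emitted instead.  */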
2611 static rtx
2612 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2614 convert_optab builtin_optab;
2615 rtx op0;
2616 rtx_insn *insns;
2617 tree fndecl = get_callee_fndecl (exp);
2618 tree arg;
2619 machine_mode mode;
2620 enum built_in_function fallback_fn = BUILT_IN_NONE;
2622 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2623 gcc_unreachable ();
2625 arg = CALL_EXPR_ARG (exp, 0);
2627 switch (DECL_FUNCTION_CODE (fndecl))
2629 CASE_FLT_FN (BUILT_IN_IRINT):
2630 fallback_fn = BUILT_IN_LRINT;
2631 gcc_fallthrough ();
2632 CASE_FLT_FN (BUILT_IN_LRINT):
2633 CASE_FLT_FN (BUILT_IN_LLRINT):
2634 builtin_optab = lrint_optab;
2635 break;
2637 CASE_FLT_FN (BUILT_IN_IROUND):
2638 fallback_fn = BUILT_IN_LROUND;
2639 gcc_fallthrough ();
2640 CASE_FLT_FN (BUILT_IN_LROUND):
2641 CASE_FLT_FN (BUILT_IN_LLROUND):
2642 builtin_optab = lround_optab;
2643 break;
2645 default:
2646 gcc_unreachable ();
2649 /* There's no easy way to detect the case we need to set EDOM. */
2650 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2651 return NULL_RTX;
2653 /* Make a suitable register to place result in. */
2654 mode = TYPE_MODE (TREE_TYPE (exp));
2656 /* There's no easy way to detect the case we need to set EDOM. */
2657 if (!flag_errno_math)
2659 rtx result = gen_reg_rtx (mode);
2661 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2662 need to expand the argument again. This way, we will not perform
2663 side-effects more than once. */
2664 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2666 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2668 start_sequence ();
2670 if (expand_sfix_optab (result, op0, builtin_optab))
2672 /* Output the entire sequence. */
2673 insns = get_insns ();
2674 end_sequence ();
2675 emit_insn (insns);
2676 return result;
2679 /* If we were unable to expand via the builtin, stop the sequence
2680 (without outputting the insns) and call the library function
2681 with the stabilized argument list. */
2682 end_sequence ();
2685 if (fallback_fn != BUILT_IN_NONE)
2687 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2688 targets, (int) round (x) should never be transformed into
2689 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2690 a call to lround in the hope that the target provides at least some
2691 C99 functions. This should result in the best user experience on
2692 targets lacking full C99 support. */
2693 tree fallback_fndecl = mathfn_built_in_1
2694 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2696 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2697 fallback_fndecl, 1, arg);
2699 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2700 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2701 return convert_to_mode (mode, target, 0);
2704 return expand_call (exp, target, target == const0_rtx);
2707 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2708 a normal call should be emitted rather than expanding the function
2709 in-line. EXP is the expression that is a call to the builtin
2710 function; if convenient, the result should be placed in TARGET. */
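/* Note that this expansion always goes through a library call, using the
   libfunc attached to powi_optab; the exponent is first converted to the
   mode corresponding to int.  */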
2712 static rtx
2713 expand_builtin_powi (tree exp, rtx target)
2715 tree arg0, arg1;
2716 rtx op0, op1;
2717 machine_mode mode;
2718 machine_mode mode2;
2720 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2721 return NULL_RTX;
2723 arg0 = CALL_EXPR_ARG (exp, 0);
2724 arg1 = CALL_EXPR_ARG (exp, 1);
2725 mode = TYPE_MODE (TREE_TYPE (exp));
2727 /* Emit a libcall to libgcc. */
2729 /* Mode of the 2nd argument must match that of an int. */
2730 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2732 if (target == NULL_RTX)
2733 target = gen_reg_rtx (mode);
2735 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2736 if (GET_MODE (op0) != mode)
2737 op0 = convert_to_mode (mode, op0, 0);
2738 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2739 if (GET_MODE (op1) != mode2)
2740 op1 = convert_to_mode (mode2, op1, 0);
2742 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2743 target, LCT_CONST, mode, 2,
2744 op0, mode, op1, mode2);
2746 return target;
2749 /* Expand expression EXP, which is a call to the strlen builtin. Return
2750 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2751 try to get the result in TARGET, if convenient. */
2753 static rtx
2754 expand_builtin_strlen (tree exp, rtx target,
2755 machine_mode target_mode)
2757 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2758 return NULL_RTX;
2759 else
2761 struct expand_operand ops[4];
2762 rtx pat;
2763 tree len;
2764 tree src = CALL_EXPR_ARG (exp, 0);
2765 rtx src_reg;
2766 rtx_insn *before_strlen;
2767 machine_mode insn_mode = target_mode;
2768 enum insn_code icode = CODE_FOR_nothing;
2769 unsigned int align;
2771 /* If the length can be computed at compile-time, return it. */
2772 len = c_strlen (src, 0);
2773 if (len)
2774 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2776 /* If the length can be computed at compile-time and is a constant
2777 integer, but there are side-effects in src, evaluate
2778 src for side-effects, then return len.
2779 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2780 can be optimized into: i++; x = 3; */
2781 len = c_strlen (src, 1);
2782 if (len && TREE_CODE (len) == INTEGER_CST)
2784 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2785 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2788 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2790 /* If SRC is not a pointer type, don't do this operation inline. */
2791 if (align == 0)
2792 return NULL_RTX;
2794 /* Bail out if we can't compute strlen in the right mode. */
2795 while (insn_mode != VOIDmode)
2797 icode = optab_handler (strlen_optab, insn_mode);
2798 if (icode != CODE_FOR_nothing)
2799 break;
2801 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2803 if (insn_mode == VOIDmode)
2804 return NULL_RTX;
2806 /* Make a place to hold the source address. We will not expand
2807 the actual source until we are sure that the expansion will
2808 not fail -- there are trees that cannot be expanded twice. */
2809 src_reg = gen_reg_rtx (Pmode);
2811 /* Mark the beginning of the strlen sequence so we can emit the
2812 source operand later. */
2813 before_strlen = get_last_insn ();
2815 create_output_operand (&ops[0], target, insn_mode);
2816 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2817 create_integer_operand (&ops[2], 0);
2818 create_integer_operand (&ops[3], align);
2819 if (!maybe_expand_insn (icode, 4, ops))
2820 return NULL_RTX;
2822 /* Now that we are assured of success, expand the source. */
2823 start_sequence ();
2824 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2825 if (pat != src_reg)
2827 #ifdef POINTERS_EXTEND_UNSIGNED
2828 if (GET_MODE (pat) != Pmode)
2829 pat = convert_to_mode (Pmode, pat,
2830 POINTERS_EXTEND_UNSIGNED);
2831 #endif
2832 emit_move_insn (src_reg, pat);
2834 pat = get_insns ();
2835 end_sequence ();
2837 if (before_strlen)
2838 emit_insn_after (pat, before_strlen);
2839 else
2840 emit_insn_before (pat, get_insns ());
2842 /* Return the value in the proper mode for this function. */
2843 if (GET_MODE (ops[0].value) == target_mode)
2844 target = ops[0].value;
2845 else if (target != 0)
2846 convert_move (target, ops[0].value, 0);
2847 else
2848 target = convert_to_mode (target_mode, ops[0].value, 0);
2850 return target;
2854 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2855 bytes from constant string DATA + OFFSET and return it as target
2856 constant. */
2858 static rtx
2859 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2860 machine_mode mode)
2862 const char *str = (const char *) data;
2864 gcc_assert (offset >= 0
2865 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2866 <= strlen (str) + 1));
2868 return c_readstr (str + offset, mode);
2871 /* LEN specifies the length of the block for the memcpy/memset operation.
2872 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2873 In some cases we can make a very likely guess about the maximum size, which
2874 we then set into PROBABLE_MAX_SIZE. */
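/* For instance, if LEN is an SSA_NAME for which VRP recorded the range
   [4, 64], *MIN_SIZE becomes 4 and both *MAX_SIZE and *PROBABLE_MAX_SIZE
   become 64; a constant LEN_RTX simply sets all three to that value.  */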
2876 static void
2877 determine_block_size (tree len, rtx len_rtx,
2878 unsigned HOST_WIDE_INT *min_size,
2879 unsigned HOST_WIDE_INT *max_size,
2880 unsigned HOST_WIDE_INT *probable_max_size)
2882 if (CONST_INT_P (len_rtx))
2884 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2885 return;
2887 else
2889 wide_int min, max;
2890 enum value_range_type range_type = VR_UNDEFINED;
2892 /* Determine bounds from the type. */
2893 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2894 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2895 else
2896 *min_size = 0;
2897 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2898 *probable_max_size = *max_size
2899 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2900 else
2901 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2903 if (TREE_CODE (len) == SSA_NAME)
2904 range_type = get_range_info (len, &min, &max);
2905 if (range_type == VR_RANGE)
2907 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2908 *min_size = min.to_uhwi ();
2909 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2910 *probable_max_size = *max_size = max.to_uhwi ();
2912 else if (range_type == VR_ANTI_RANGE)
2914 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
2915 if (min == 0)
2917 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2918 *min_size = max.to_uhwi () + 1;
2920 /* Code like
2922 int n;
2923 if (n < 100)
2924 memcpy (a, b, n)
2926 produces an anti-range allowing negative values of N. We still
2927 can use the information and make a guess that N is not negative. */
2929 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2930 *probable_max_size = min.to_uhwi () - 1;
2933 gcc_checking_assert (*max_size <=
2934 (unsigned HOST_WIDE_INT)
2935 GET_MODE_MASK (GET_MODE (len_rtx)));
2938 /* Helper function to do the actual work for expand_builtin_memcpy. */
2940 static rtx
2941 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2943 const char *src_str;
2944 unsigned int src_align = get_pointer_alignment (src);
2945 unsigned int dest_align = get_pointer_alignment (dest);
2946 rtx dest_mem, src_mem, dest_addr, len_rtx;
2947 HOST_WIDE_INT expected_size = -1;
2948 unsigned int expected_align = 0;
2949 unsigned HOST_WIDE_INT min_size;
2950 unsigned HOST_WIDE_INT max_size;
2951 unsigned HOST_WIDE_INT probable_max_size;
2953 /* If DEST is not a pointer type, call the normal function. */
2954 if (dest_align == 0)
2955 return NULL_RTX;
2957 /* If SRC is not a pointer type, don't do this
2958 operation in-line. */
2959 if (src_align == 0)
2960 return NULL_RTX;
2962 if (currently_expanding_gimple_stmt)
2963 stringop_block_profile (currently_expanding_gimple_stmt,
2964 &expected_align, &expected_size);
2966 if (expected_align < dest_align)
2967 expected_align = dest_align;
2968 dest_mem = get_memory_rtx (dest, len);
2969 set_mem_align (dest_mem, dest_align);
2970 len_rtx = expand_normal (len);
2971 determine_block_size (len, len_rtx, &min_size, &max_size,
2972 &probable_max_size);
2973 src_str = c_getstr (src);
2975 /* If SRC is a string constant and block move would be done
2976 by pieces, we can avoid loading the string from memory
2977 and only store the computed constants. */
2978 if (src_str
2979 && CONST_INT_P (len_rtx)
2980 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2981 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2982 CONST_CAST (char *, src_str),
2983 dest_align, false))
2985 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2986 builtin_memcpy_read_str,
2987 CONST_CAST (char *, src_str),
2988 dest_align, false, 0);
2989 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2990 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2991 return dest_mem;
2994 src_mem = get_memory_rtx (src, len);
2995 set_mem_align (src_mem, src_align);
2997 /* Copy word part most expediently. */
2998 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2999 CALL_EXPR_TAILCALL (exp)
3000 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3001 expected_align, expected_size,
3002 min_size, max_size, probable_max_size);
3004 if (dest_addr == 0)
3006 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3007 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3010 return dest_addr;
3013 /* Expand a call EXP to the memcpy builtin.
3014 Return NULL_RTX if we failed, the caller should emit a normal call,
3015 otherwise try to get the result in TARGET, if convenient (and in
3016 mode MODE if that's convenient). */
3018 static rtx
3019 expand_builtin_memcpy (tree exp, rtx target)
3021 if (!validate_arglist (exp,
3022 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3023 return NULL_RTX;
3024 else
3026 tree dest = CALL_EXPR_ARG (exp, 0);
3027 tree src = CALL_EXPR_ARG (exp, 1);
3028 tree len = CALL_EXPR_ARG (exp, 2);
3029 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3033 /* Expand an instrumented call EXP to the memcpy builtin.
3034 Return NULL_RTX if we failed, the caller should emit a normal call,
3035 otherwise try to get the result in TARGET, if convenient (and in
3036 mode MODE if that's convenient). */
3038 static rtx
3039 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3041 if (!validate_arglist (exp,
3042 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3043 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3044 INTEGER_TYPE, VOID_TYPE))
3045 return NULL_RTX;
3046 else
3048 tree dest = CALL_EXPR_ARG (exp, 0);
3049 tree src = CALL_EXPR_ARG (exp, 2);
3050 tree len = CALL_EXPR_ARG (exp, 4);
3051 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3053 /* Return src bounds with the result. */
3054 if (res)
3056 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3057 expand_normal (CALL_EXPR_ARG (exp, 1)));
3058 res = chkp_join_splitted_slot (res, bnd);
3060 return res;
3064 /* Expand a call EXP to the mempcpy builtin.
3065 Return NULL_RTX if we failed; the caller should emit a normal call,
3066 otherwise try to get the result in TARGET, if convenient (and in
3067 mode MODE if that's convenient). If ENDP is 0 return the
3068 destination pointer, if ENDP is 1 return the end pointer ala
3069 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3070 stpcpy. */
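/* Concretely, with ENDP == 1 the value returned for mempcpy (d, s, n) is
   d + n, and with ENDP == 2 it is d + n - 1, matching what stpcpy must
   return.  */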
3072 static rtx
3073 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3075 if (!validate_arglist (exp,
3076 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3077 return NULL_RTX;
3078 else
3080 tree dest = CALL_EXPR_ARG (exp, 0);
3081 tree src = CALL_EXPR_ARG (exp, 1);
3082 tree len = CALL_EXPR_ARG (exp, 2);
3083 return expand_builtin_mempcpy_args (dest, src, len,
3084 target, mode, /*endp=*/ 1,
3085 exp);
3089 /* Expand an instrumented call EXP to the mempcpy builtin.
3090 Return NULL_RTX if we failed, the caller should emit a normal call,
3091 otherwise try to get the result in TARGET, if convenient (and in
3092 mode MODE if that's convenient). */
3094 static rtx
3095 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3097 if (!validate_arglist (exp,
3098 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3099 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3100 INTEGER_TYPE, VOID_TYPE))
3101 return NULL_RTX;
3102 else
3104 tree dest = CALL_EXPR_ARG (exp, 0);
3105 tree src = CALL_EXPR_ARG (exp, 2);
3106 tree len = CALL_EXPR_ARG (exp, 4);
3107 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3108 mode, 1, exp);
3110 /* Return src bounds with the result. */
3111 if (res)
3113 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3114 expand_normal (CALL_EXPR_ARG (exp, 1)));
3115 res = chkp_join_splitted_slot (res, bnd);
3117 return res;
3121 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3122 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3123 so that this can also be called without constructing an actual CALL_EXPR.
3124 The other arguments and return value are the same as for
3125 expand_builtin_mempcpy. */
3127 static rtx
3128 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3129 rtx target, machine_mode mode, int endp,
3130 tree orig_exp)
3132 tree fndecl = get_callee_fndecl (orig_exp);
3134 /* If return value is ignored, transform mempcpy into memcpy. */
3135 if (target == const0_rtx
3136 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3137 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3139 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3140 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3141 dest, src, len);
3142 return expand_expr (result, target, mode, EXPAND_NORMAL);
3144 else if (target == const0_rtx
3145 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3147 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3148 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3149 dest, src, len);
3150 return expand_expr (result, target, mode, EXPAND_NORMAL);
3152 else
3154 const char *src_str;
3155 unsigned int src_align = get_pointer_alignment (src);
3156 unsigned int dest_align = get_pointer_alignment (dest);
3157 rtx dest_mem, src_mem, len_rtx;
3159 /* If either SRC or DEST is not a pointer type, don't do this
3160 operation in-line. */
3161 if (dest_align == 0 || src_align == 0)
3162 return NULL_RTX;
3164 /* If LEN is not constant, call the normal function. */
3165 if (! tree_fits_uhwi_p (len))
3166 return NULL_RTX;
3168 len_rtx = expand_normal (len);
3169 src_str = c_getstr (src);
3171 /* If SRC is a string constant and block move would be done
3172 by pieces, we can avoid loading the string from memory
3173 and only store the computed constants. */
3174 if (src_str
3175 && CONST_INT_P (len_rtx)
3176 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3177 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3178 CONST_CAST (char *, src_str),
3179 dest_align, false))
3181 dest_mem = get_memory_rtx (dest, len);
3182 set_mem_align (dest_mem, dest_align);
3183 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3184 builtin_memcpy_read_str,
3185 CONST_CAST (char *, src_str),
3186 dest_align, false, endp);
3187 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3188 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3189 return dest_mem;
3192 if (CONST_INT_P (len_rtx)
3193 && can_move_by_pieces (INTVAL (len_rtx),
3194 MIN (dest_align, src_align)))
3196 dest_mem = get_memory_rtx (dest, len);
3197 set_mem_align (dest_mem, dest_align);
3198 src_mem = get_memory_rtx (src, len);
3199 set_mem_align (src_mem, src_align);
3200 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3201 MIN (dest_align, src_align), endp);
3202 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3203 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3204 return dest_mem;
3207 return NULL_RTX;
3211 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3212 we failed, the caller should emit a normal call, otherwise try to
3213 get the result in TARGET, if convenient. If ENDP is 0 return the
3214 destination pointer, if ENDP is 1 return the end pointer ala
3215 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3216 stpcpy. */
3218 static rtx
3219 expand_movstr (tree dest, tree src, rtx target, int endp)
3221 struct expand_operand ops[3];
3222 rtx dest_mem;
3223 rtx src_mem;
3225 if (!targetm.have_movstr ())
3226 return NULL_RTX;
3228 dest_mem = get_memory_rtx (dest, NULL);
3229 src_mem = get_memory_rtx (src, NULL);
3230 if (!endp)
3232 target = force_reg (Pmode, XEXP (dest_mem, 0));
3233 dest_mem = replace_equiv_address (dest_mem, target);
3236 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3237 create_fixed_operand (&ops[1], dest_mem);
3238 create_fixed_operand (&ops[2], src_mem);
3239 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3240 return NULL_RTX;
3242 if (endp && target != const0_rtx)
3244 target = ops[0].value;
3245 /* movstr is supposed to set end to the address of the NUL
3246 terminator. If the caller requested a mempcpy-like return value,
3247 adjust it. */
3248 if (endp == 1)
3250 rtx tem = plus_constant (GET_MODE (target),
3251 gen_lowpart (GET_MODE (target), target), 1);
3252 emit_move_insn (target, force_operand (tem, NULL_RTX));
3255 return target;
3258 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3259 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3260 try to get the result in TARGET, if convenient (and in mode MODE if that's
3261 convenient). */
3263 static rtx
3264 expand_builtin_strcpy (tree exp, rtx target)
3266 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3268 tree dest = CALL_EXPR_ARG (exp, 0);
3269 tree src = CALL_EXPR_ARG (exp, 1);
3270 return expand_builtin_strcpy_args (dest, src, target);
3272 return NULL_RTX;
3275 /* Helper function to do the actual work for expand_builtin_strcpy. The
3276 arguments to the builtin_strcpy call DEST and SRC are broken out
3277 so that this can also be called without constructing an actual CALL_EXPR.
3278 The other arguments and return value are the same as for
3279 expand_builtin_strcpy. */
3281 static rtx
3282 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3284 return expand_movstr (dest, src, target, /*endp=*/0);
3287 /* Expand a call EXP to the stpcpy builtin.
3288 Return NULL_RTX if we failed; the caller should emit a normal call,
3289 otherwise try to get the result in TARGET, if convenient (and in
3290 mode MODE if that's convenient). */
3292 static rtx
3293 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3295 tree dst, src;
3296 location_t loc = EXPR_LOCATION (exp);
3298 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3299 return NULL_RTX;
3301 dst = CALL_EXPR_ARG (exp, 0);
3302 src = CALL_EXPR_ARG (exp, 1);
3304 /* If return value is ignored, transform stpcpy into strcpy. */
3305 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3307 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3308 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3309 return expand_expr (result, target, mode, EXPAND_NORMAL);
3311 else
3313 tree len, lenp1;
3314 rtx ret;
3316 /* Ensure we get an actual string whose length can be evaluated at
3317 compile-time, not an expression containing a string. This is
3318 because the latter will potentially produce pessimized code
3319 when used to produce the return value. */
3320 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3321 return expand_movstr (dst, src, target, /*endp=*/2);
3323 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3324 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3325 target, mode, /*endp=*/2,
3326 exp);
3328 if (ret)
3329 return ret;
3331 if (TREE_CODE (len) == INTEGER_CST)
3333 rtx len_rtx = expand_normal (len);
3335 if (CONST_INT_P (len_rtx))
3337 ret = expand_builtin_strcpy_args (dst, src, target);
3339 if (ret)
3341 if (! target)
3343 if (mode != VOIDmode)
3344 target = gen_reg_rtx (mode);
3345 else
3346 target = gen_reg_rtx (GET_MODE (ret));
3348 if (GET_MODE (target) != GET_MODE (ret))
3349 ret = gen_lowpart (GET_MODE (target), ret);
3351 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3352 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3353 gcc_assert (ret);
3355 return target;
3360 return expand_movstr (dst, src, target, /*endp=*/2);
3364 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3365 bytes from constant string DATA + OFFSET and return it as target
3366 constant. */
3368 static rtx
3369 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3370 machine_mode mode)
3372 const char *str = (const char *) data;
3374 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3375 return const0_rtx;
3377 return c_readstr (str + offset, mode);
3380 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3381 NULL_RTX if we failed; the caller should emit a normal call. */
3383 static rtx
3384 expand_builtin_strncpy (tree exp, rtx target)
3386 location_t loc = EXPR_LOCATION (exp);
3388 if (validate_arglist (exp,
3389 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3391 tree dest = CALL_EXPR_ARG (exp, 0);
3392 tree src = CALL_EXPR_ARG (exp, 1);
3393 tree len = CALL_EXPR_ARG (exp, 2);
3394 tree slen = c_strlen (src, 1);
3396 /* We must be passed a constant len and src parameter. */
3397 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3398 return NULL_RTX;
3400 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3402 /* We're required to pad with trailing zeros if the requested
3403 len is greater than strlen(s2)+1. In that case try to
3404 use store_by_pieces; if it fails, punt. */
3405 if (tree_int_cst_lt (slen, len))
3407 unsigned int dest_align = get_pointer_alignment (dest);
3408 const char *p = c_getstr (src);
3409 rtx dest_mem;
3411 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3412 || !can_store_by_pieces (tree_to_uhwi (len),
3413 builtin_strncpy_read_str,
3414 CONST_CAST (char *, p),
3415 dest_align, false))
3416 return NULL_RTX;
3418 dest_mem = get_memory_rtx (dest, len);
3419 store_by_pieces (dest_mem, tree_to_uhwi (len),
3420 builtin_strncpy_read_str,
3421 CONST_CAST (char *, p), dest_align, false, 0);
3422 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3423 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3424 return dest_mem;
3427 return NULL_RTX;
3430 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3431 bytes from constant string DATA + OFFSET and return it as target
3432 constant. */
3434 static rtx
3435 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3436 machine_mode mode)
3438 const char *c = (const char *) data;
3439 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3441 memset (p, *c, GET_MODE_SIZE (mode));
3443 return c_readstr (p, mode);
3446 /* Callback routine for store_by_pieces. Return the RTL of a register
3447 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3448 char value given in the RTL register data. For example, if mode is
3449 4 bytes wide, return the RTL for 0x01010101*data. */
3451 static rtx
3452 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3453 machine_mode mode)
3455 rtx target, coeff;
3456 size_t size;
3457 char *p;
3459 size = GET_MODE_SIZE (mode);
3460 if (size == 1)
3461 return (rtx) data;
3463 p = XALLOCAVEC (char, size);
3464 memset (p, 1, size);
3465 coeff = c_readstr (p, mode);
3467 target = convert_to_mode (mode, (rtx) data, 1);
3468 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3469 return force_reg (mode, target);
3472 /* Expand expression EXP, which is a call to the memset builtin. Return
3473 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3474 try to get the result in TARGET, if convenient (and in mode MODE if that's
3475 convenient). */
3477 static rtx
3478 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3480 if (!validate_arglist (exp,
3481 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3482 return NULL_RTX;
3483 else
3485 tree dest = CALL_EXPR_ARG (exp, 0);
3486 tree val = CALL_EXPR_ARG (exp, 1);
3487 tree len = CALL_EXPR_ARG (exp, 2);
3488 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3492 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3493 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3494 try to get the result in TARGET, if convenient (and in mode MODE if that's
3495 convenient). */
3497 static rtx
3498 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3500 if (!validate_arglist (exp,
3501 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3502 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3503 return NULL_RTX;
3504 else
3506 tree dest = CALL_EXPR_ARG (exp, 0);
3507 tree val = CALL_EXPR_ARG (exp, 2);
3508 tree len = CALL_EXPR_ARG (exp, 3);
3509 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3511 /* Return src bounds with the result. */
3512 if (res)
3514 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3515 expand_normal (CALL_EXPR_ARG (exp, 1)));
3516 res = chkp_join_splitted_slot (res, bnd);
3518 return res;
3522 /* Helper function to do the actual work for expand_builtin_memset. The
3523 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3524 so that this can also be called without constructing an actual CALL_EXPR.
3525 The other arguments and return value are the same as for
3526 expand_builtin_memset. */
3528 static rtx
3529 expand_builtin_memset_args (tree dest, tree val, tree len,
3530 rtx target, machine_mode mode, tree orig_exp)
3532 tree fndecl, fn;
3533 enum built_in_function fcode;
3534 machine_mode val_mode;
3535 char c;
3536 unsigned int dest_align;
3537 rtx dest_mem, dest_addr, len_rtx;
3538 HOST_WIDE_INT expected_size = -1;
3539 unsigned int expected_align = 0;
3540 unsigned HOST_WIDE_INT min_size;
3541 unsigned HOST_WIDE_INT max_size;
3542 unsigned HOST_WIDE_INT probable_max_size;
3544 dest_align = get_pointer_alignment (dest);
3546 /* If DEST is not a pointer type, don't do this operation in-line. */
3547 if (dest_align == 0)
3548 return NULL_RTX;
3550 if (currently_expanding_gimple_stmt)
3551 stringop_block_profile (currently_expanding_gimple_stmt,
3552 &expected_align, &expected_size);
3554 if (expected_align < dest_align)
3555 expected_align = dest_align;
3557 /* If the LEN parameter is zero, return DEST. */
3558 if (integer_zerop (len))
3560 /* Evaluate and ignore VAL in case it has side-effects. */
3561 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3562 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3565 /* Stabilize the arguments in case we fail. */
3566 dest = builtin_save_expr (dest);
3567 val = builtin_save_expr (val);
3568 len = builtin_save_expr (len);
3570 len_rtx = expand_normal (len);
3571 determine_block_size (len, len_rtx, &min_size, &max_size,
3572 &probable_max_size);
3573 dest_mem = get_memory_rtx (dest, len);
3574 val_mode = TYPE_MODE (unsigned_char_type_node);
3576 if (TREE_CODE (val) != INTEGER_CST)
3578 rtx val_rtx;
3580 val_rtx = expand_normal (val);
3581 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3583 /* Assume that we can memset by pieces if we can store
3584 * the coefficients by pieces (in the required modes).
3585 * We can't pass builtin_memset_gen_str as that emits RTL. */
3586 c = 1;
3587 if (tree_fits_uhwi_p (len)
3588 && can_store_by_pieces (tree_to_uhwi (len),
3589 builtin_memset_read_str, &c, dest_align,
3590 true))
3592 val_rtx = force_reg (val_mode, val_rtx);
3593 store_by_pieces (dest_mem, tree_to_uhwi (len),
3594 builtin_memset_gen_str, val_rtx, dest_align,
3595 true, 0);
3597 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3598 dest_align, expected_align,
3599 expected_size, min_size, max_size,
3600 probable_max_size))
3601 goto do_libcall;
3603 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3604 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3605 return dest_mem;
3608 if (target_char_cast (val, &c))
3609 goto do_libcall;
3611 if (c)
3613 if (tree_fits_uhwi_p (len)
3614 && can_store_by_pieces (tree_to_uhwi (len),
3615 builtin_memset_read_str, &c, dest_align,
3616 true))
3617 store_by_pieces (dest_mem, tree_to_uhwi (len),
3618 builtin_memset_read_str, &c, dest_align, true, 0);
3619 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3620 gen_int_mode (c, val_mode),
3621 dest_align, expected_align,
3622 expected_size, min_size, max_size,
3623 probable_max_size))
3624 goto do_libcall;
3626 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3627 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3628 return dest_mem;
3631 set_mem_align (dest_mem, dest_align);
3632 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3633 CALL_EXPR_TAILCALL (orig_exp)
3634 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3635 expected_align, expected_size,
3636 min_size, max_size,
3637 probable_max_size);
3639 if (dest_addr == 0)
3641 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3642 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3645 return dest_addr;
3647 do_libcall:
3648 fndecl = get_callee_fndecl (orig_exp);
3649 fcode = DECL_FUNCTION_CODE (fndecl);
3650 if (fcode == BUILT_IN_MEMSET
3651 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3652 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3653 dest, val, len);
3654 else if (fcode == BUILT_IN_BZERO)
3655 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3656 dest, len);
3657 else
3658 gcc_unreachable ();
3659 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3660 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3661 return expand_call (fn, target, target == const0_rtx);
3664 /* Expand expression EXP, which is a call to the bzero builtin. Return
3665 NULL_RTX if we failed and the caller should emit a normal call. */
3667 static rtx
3668 expand_builtin_bzero (tree exp)
3670 tree dest, size;
3671 location_t loc = EXPR_LOCATION (exp);
3673 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3674 return NULL_RTX;
3676 dest = CALL_EXPR_ARG (exp, 0);
3677 size = CALL_EXPR_ARG (exp, 1);
3679 /* New argument list transforming bzero(ptr x, int y) to
3680 memset(ptr x, int 0, size_t y). This is done this way
3681 so that if it isn't expanded inline, we fall back to
3682 calling bzero instead of memset. */
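/* Concretely, the effect is that a user call such as

     bzero (buf, n);

   is expanded as if it had been written

     memset (buf, 0, (size_t) n);

   where buf and n are illustrative operand names.  */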
3684 return expand_builtin_memset_args (dest, integer_zero_node,
3685 fold_convert_loc (loc,
3686 size_type_node, size),
3687 const0_rtx, VOIDmode, exp);
3690 /* Try to expand cmpstr operation ICODE with the given operands.
3691 Return the result rtx on success, otherwise return null. */
3693 static rtx
3694 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3695 HOST_WIDE_INT align)
3697 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3699 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3700 target = NULL_RTX;
3702 struct expand_operand ops[4];
3703 create_output_operand (&ops[0], target, insn_mode);
3704 create_fixed_operand (&ops[1], arg1_rtx);
3705 create_fixed_operand (&ops[2], arg2_rtx);
3706 create_integer_operand (&ops[3], align);
3707 if (maybe_expand_insn (icode, 4, ops))
3708 return ops[0].value;
3709 return NULL_RTX;
3712 /* Expand expression EXP, which is a call to the memcmp built-in function.
3713 Return NULL_RTX if we failed and the caller should emit a normal call,
3714 otherwise try to get the result in TARGET, if convenient.
3715 RESULT_EQ is true if we can relax the returned value to be either zero
3716 or nonzero, without caring about the sign. */
3718 static rtx
3719 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3721 if (!validate_arglist (exp,
3722 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3723 return NULL_RTX;
3725 tree arg1 = CALL_EXPR_ARG (exp, 0);
3726 tree arg2 = CALL_EXPR_ARG (exp, 1);
3727 tree len = CALL_EXPR_ARG (exp, 2);
3728 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3729 location_t loc = EXPR_LOCATION (exp);
3731 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3732 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3734 /* If we don't have POINTER_TYPE, call the function. */
3735 if (arg1_align == 0 || arg2_align == 0)
3736 return NULL_RTX;
3738 rtx arg1_rtx = get_memory_rtx (arg1, len);
3739 rtx arg2_rtx = get_memory_rtx (arg2, len);
3740 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3742 /* Set MEM_SIZE as appropriate. */
3743 if (CONST_INT_P (len_rtx))
3745 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3746 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3749 by_pieces_constfn constfn = NULL;
3751 const char *src_str = c_getstr (arg2);
3752 if (result_eq && src_str == NULL)
3754 src_str = c_getstr (arg1);
3755 if (src_str != NULL)
3756 std::swap (arg1_rtx, arg2_rtx);
3759 /* If SRC is a string constant and block move would be done
3760 by pieces, we can avoid loading the string from memory
3761 and only store the computed constants. */
3762 if (src_str
3763 && CONST_INT_P (len_rtx)
3764 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3765 constfn = builtin_memcpy_read_str;
3767 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3768 TREE_TYPE (len), target,
3769 result_eq, constfn,
3770 CONST_CAST (char *, src_str));
3772 if (result)
3774 /* Return the value in the proper mode for this function. */
3775 if (GET_MODE (result) == mode)
3776 return result;
3778 if (target != 0)
3780 convert_move (target, result, 0);
3781 return target;
3784 return convert_to_mode (mode, result, 0);
3787 return NULL_RTX;
3790 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3791 if we failed and the caller should emit a normal call, otherwise try to get
3792 the result in TARGET, if convenient. */
3794 static rtx
3795 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3797 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3800 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3801 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3802 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3804 rtx arg1_rtx, arg2_rtx;
3805 tree fndecl, fn;
3806 tree arg1 = CALL_EXPR_ARG (exp, 0);
3807 tree arg2 = CALL_EXPR_ARG (exp, 1);
3808 rtx result = NULL_RTX;
3810 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3811 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3813 /* If we don't have POINTER_TYPE, call the function. */
3814 if (arg1_align == 0 || arg2_align == 0)
3815 return NULL_RTX;
3817 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3818 arg1 = builtin_save_expr (arg1);
3819 arg2 = builtin_save_expr (arg2);
3821 arg1_rtx = get_memory_rtx (arg1, NULL);
3822 arg2_rtx = get_memory_rtx (arg2, NULL);
3824 /* Try to call cmpstrsi. */
3825 if (cmpstr_icode != CODE_FOR_nothing)
3826 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3827 MIN (arg1_align, arg2_align));
3829 /* Try to determine at least one length and call cmpstrnsi. */
3830 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3832 tree len;
3833 rtx arg3_rtx;
3835 tree len1 = c_strlen (arg1, 1);
3836 tree len2 = c_strlen (arg2, 1);
3838 if (len1)
3839 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3840 if (len2)
3841 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3843 /* If we don't have a constant length for the first, use the length
3844 of the second, if we know it. We don't require a constant for
3845 this case; some cost analysis could be done if both are available
3846 but neither is constant. For now, assume they're equally cheap,
3847 unless one has side effects. If both strings have constant lengths,
3848 use the smaller. */
3850 if (!len1)
3851 len = len2;
3852 else if (!len2)
3853 len = len1;
3854 else if (TREE_SIDE_EFFECTS (len1))
3855 len = len2;
3856 else if (TREE_SIDE_EFFECTS (len2))
3857 len = len1;
3858 else if (TREE_CODE (len1) != INTEGER_CST)
3859 len = len2;
3860 else if (TREE_CODE (len2) != INTEGER_CST)
3861 len = len1;
3862 else if (tree_int_cst_lt (len1, len2))
3863 len = len1;
3864 else
3865 len = len2;
3867 /* If both arguments have side effects, we cannot optimize. */
3868 if (len && !TREE_SIDE_EFFECTS (len))
3870 arg3_rtx = expand_normal (len);
3871 result = expand_cmpstrn_or_cmpmem
3872 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3873 arg3_rtx, MIN (arg1_align, arg2_align));
3877 if (result)
3879 /* Return the value in the proper mode for this function. */
3880 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3881 if (GET_MODE (result) == mode)
3882 return result;
3883 if (target == 0)
3884 return convert_to_mode (mode, result, 0);
3885 convert_move (target, result, 0);
3886 return target;
3889 /* Expand the library call ourselves using a stabilized argument
3890 list to avoid re-evaluating the function's arguments twice. */
3891 fndecl = get_callee_fndecl (exp);
3892 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3893 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3894 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3895 return expand_call (fn, target, target == const0_rtx);
3897 return NULL_RTX;
3900 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3901 NULL_RTX if we failed and the caller should emit a normal call, otherwise try to get
3902 the result in TARGET, if convenient. */
3904 static rtx
3905 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3906 ATTRIBUTE_UNUSED machine_mode mode)
3908 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3910 if (!validate_arglist (exp,
3911 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3912 return NULL_RTX;
3914 /* If c_strlen can determine an expression for one of the string
3915 lengths, and it doesn't have side effects, then emit cmpstrnsi
3916 using length MIN(strlen(string)+1, arg3). */
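/* For example, in a call like strncmp (s, "abc", n) the constant argument
   gives a length of 3, so the comparison below uses MIN (3 + 1, n): at most
   four bytes need to be inspected no matter how large n is.  (s and n are
   illustrative.)  */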
3917 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3918 if (cmpstrn_icode != CODE_FOR_nothing)
3920 tree len, len1, len2, len3;
3921 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3922 rtx result;
3923 tree fndecl, fn;
3924 tree arg1 = CALL_EXPR_ARG (exp, 0);
3925 tree arg2 = CALL_EXPR_ARG (exp, 1);
3926 tree arg3 = CALL_EXPR_ARG (exp, 2);
3928 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3929 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3931 len1 = c_strlen (arg1, 1);
3932 len2 = c_strlen (arg2, 1);
3934 if (len1)
3935 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3936 if (len2)
3937 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3939 len3 = fold_convert_loc (loc, sizetype, arg3);
3941 /* If we don't have a constant length for the first, use the length
3942 of the second, if we know it. If neither string is constant length,
3943 use the given length argument. We don't require a constant for
3944 this case; some cost analysis could be done if both are available
3945 but neither is constant. For now, assume they're equally cheap,
3946 unless one has side effects. If both strings have constant lengths,
3947 use the smaller. */
3949 if (!len1 && !len2)
3950 len = len3;
3951 else if (!len1)
3952 len = len2;
3953 else if (!len2)
3954 len = len1;
3955 else if (TREE_SIDE_EFFECTS (len1))
3956 len = len2;
3957 else if (TREE_SIDE_EFFECTS (len2))
3958 len = len1;
3959 else if (TREE_CODE (len1) != INTEGER_CST)
3960 len = len2;
3961 else if (TREE_CODE (len2) != INTEGER_CST)
3962 len = len1;
3963 else if (tree_int_cst_lt (len1, len2))
3964 len = len1;
3965 else
3966 len = len2;
3968 /* If we are not using the given length, we must incorporate it here.
3969 The actual new length parameter will be MIN(len,arg3) in this case. */
3970 if (len != len3)
3971 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
3972 arg1_rtx = get_memory_rtx (arg1, len);
3973 arg2_rtx = get_memory_rtx (arg2, len);
3974 arg3_rtx = expand_normal (len);
3975 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3976 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3977 MIN (arg1_align, arg2_align));
3978 if (result)
3980 /* Return the value in the proper mode for this function. */
3981 mode = TYPE_MODE (TREE_TYPE (exp));
3982 if (GET_MODE (result) == mode)
3983 return result;
3984 if (target == 0)
3985 return convert_to_mode (mode, result, 0);
3986 convert_move (target, result, 0);
3987 return target;
3990 /* Expand the library call ourselves using a stabilized argument
3991 list to avoid re-evaluating the function's arguments twice. */
3992 fndecl = get_callee_fndecl (exp);
3993 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3994 arg1, arg2, len);
3995 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3996 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3997 return expand_call (fn, target, target == const0_rtx);
3999 return NULL_RTX;
4002 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4003 if that's convenient. */
4006 expand_builtin_saveregs (void)
4008 rtx val;
4009 rtx_insn *seq;
4011 /* Don't do __builtin_saveregs more than once in a function.
4012 Save the result of the first call and reuse it. */
4013 if (saveregs_value != 0)
4014 return saveregs_value;
4016 /* When this function is called, it means that registers must be
4017 saved on entry to this function. So we migrate the call to the
4018 first insn of this function. */
4020 start_sequence ();
4022 /* Do whatever the machine needs done in this case. */
4023 val = targetm.calls.expand_builtin_saveregs ();
4025 seq = get_insns ();
4026 end_sequence ();
4028 saveregs_value = val;
4030 /* Put the insns after the NOTE that starts the function. If this
4031 is inside a start_sequence, make the outer-level insn chain current, so
4032 the code is placed at the start of the function. */
4033 push_topmost_sequence ();
4034 emit_insn_after (seq, entry_of_function ());
4035 pop_topmost_sequence ();
4037 return val;
4040 /* Expand a call to __builtin_next_arg. */
4042 static rtx
4043 expand_builtin_next_arg (void)
4045 /* Checking arguments is already done in fold_builtin_next_arg
4046 that must be called before this function. */
4047 return expand_binop (ptr_mode, add_optab,
4048 crtl->args.internal_arg_pointer,
4049 crtl->args.arg_offset_rtx,
4050 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4053 /* Make it easier for the backends by protecting the valist argument
4054 from multiple evaluations. */
4056 static tree
4057 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4059 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4061 /* The current way of determining the type of valist is completely
4062 bogus. We should have the information on the va builtin instead. */
4063 if (!vatype)
4064 vatype = targetm.fn_abi_va_list (cfun->decl);
4066 if (TREE_CODE (vatype) == ARRAY_TYPE)
4068 if (TREE_SIDE_EFFECTS (valist))
4069 valist = save_expr (valist);
4071 /* For this case, the backends will be expecting a pointer to
4072 vatype, but it's possible we've actually been given an array
4073 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4074 So fix it. */
4075 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4077 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4078 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4081 else
4083 tree pt = build_pointer_type (vatype);
4085 if (! needs_lvalue)
4087 if (! TREE_SIDE_EFFECTS (valist))
4088 return valist;
4090 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4091 TREE_SIDE_EFFECTS (valist) = 1;
4094 if (TREE_SIDE_EFFECTS (valist))
4095 valist = save_expr (valist);
4096 valist = fold_build2_loc (loc, MEM_REF,
4097 vatype, valist, build_int_cst (pt, 0));
4100 return valist;
4103 /* The "standard" definition of va_list is void*. */
4105 tree
4106 std_build_builtin_va_list (void)
4108 return ptr_type_node;
4111 /* The "standard" abi va_list is va_list_type_node. */
4113 tree
4114 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4116 return va_list_type_node;
4119 /* The "standard" type of va_list is va_list_type_node. */
4121 tree
4122 std_canonical_va_list_type (tree type)
4124 tree wtype, htype;
4126 wtype = va_list_type_node;
4127 htype = type;
4129 if (TREE_CODE (wtype) == ARRAY_TYPE)
4131 /* If va_list is an array type, the argument may have decayed
4132 to a pointer type, e.g. by being passed to another function.
4133 In that case, unwrap both types so that we can compare the
4134 underlying records. */
4135 if (TREE_CODE (htype) == ARRAY_TYPE
4136 || POINTER_TYPE_P (htype))
4138 wtype = TREE_TYPE (wtype);
4139 htype = TREE_TYPE (htype);
4142 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4143 return va_list_type_node;
4145 return NULL_TREE;
4148 /* The "standard" implementation of va_start: just assign `nextarg' to
4149 the variable. */
4151 void
4152 std_expand_builtin_va_start (tree valist, rtx nextarg)
4154 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4155 convert_move (va_r, nextarg, 0);
4157 /* We do not have any valid bounds for the pointer, so
4158 just store zero bounds for it. */
4159 if (chkp_function_instrumented_p (current_function_decl))
4160 chkp_expand_bounds_reset_for_mem (valist,
4161 make_tree (TREE_TYPE (valist),
4162 nextarg));
4165 /* Expand EXP, a call to __builtin_va_start. */
4167 static rtx
4168 expand_builtin_va_start (tree exp)
4170 rtx nextarg;
4171 tree valist;
4172 location_t loc = EXPR_LOCATION (exp);
4174 if (call_expr_nargs (exp) < 2)
4176 error_at (loc, "too few arguments to function %<va_start%>");
4177 return const0_rtx;
4180 if (fold_builtin_next_arg (exp, true))
4181 return const0_rtx;
4183 nextarg = expand_builtin_next_arg ();
4184 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4186 if (targetm.expand_builtin_va_start)
4187 targetm.expand_builtin_va_start (valist, nextarg);
4188 else
4189 std_expand_builtin_va_start (valist, nextarg);
4191 return const0_rtx;
4194 /* Expand EXP, a call to __builtin_va_end. */
4196 static rtx
4197 expand_builtin_va_end (tree exp)
4199 tree valist = CALL_EXPR_ARG (exp, 0);
4201 /* Evaluate for side effects, if needed. I hate macros that don't
4202 do that. */
4203 if (TREE_SIDE_EFFECTS (valist))
4204 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4206 return const0_rtx;
4209 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4210 builtin rather than just as an assignment in stdarg.h because of the
4211 nastiness of array-type va_list types. */
4213 static rtx
4214 expand_builtin_va_copy (tree exp)
4216 tree dst, src, t;
4217 location_t loc = EXPR_LOCATION (exp);
4219 dst = CALL_EXPR_ARG (exp, 0);
4220 src = CALL_EXPR_ARG (exp, 1);
4222 dst = stabilize_va_list_loc (loc, dst, 1);
4223 src = stabilize_va_list_loc (loc, src, 0);
4225 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4227 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4229 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4230 TREE_SIDE_EFFECTS (t) = 1;
4231 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4233 else
4235 rtx dstb, srcb, size;
4237 /* Evaluate to pointers. */
4238 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4239 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4240 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4241 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4243 dstb = convert_memory_address (Pmode, dstb);
4244 srcb = convert_memory_address (Pmode, srcb);
4246 /* "Dereference" to BLKmode memories. */
4247 dstb = gen_rtx_MEM (BLKmode, dstb);
4248 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4249 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4250 srcb = gen_rtx_MEM (BLKmode, srcb);
4251 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4252 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4254 /* Copy. */
4255 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4258 return const0_rtx;
4261 /* Expand a call to one of the builtin functions __builtin_frame_address or
4262 __builtin_return_address. */
4264 static rtx
4265 expand_builtin_frame_address (tree fndecl, tree exp)
4267 /* The argument must be a nonnegative integer constant.
4268 It counts the number of frames to scan up the stack.
4269 The value is either the frame pointer value or the return
4270 address saved in that frame. */
4271 if (call_expr_nargs (exp) == 0)
4272 /* Warning about missing arg was already issued. */
4273 return const0_rtx;
4274 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4276 error ("invalid argument to %qD", fndecl);
4277 return const0_rtx;
4279 else
4281 /* Number of frames to scan up the stack. */
4282 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4284 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4286 /* Some ports cannot access arbitrary stack frames. */
4287 if (tem == NULL)
4289 warning (0, "unsupported argument to %qD", fndecl);
4290 return const0_rtx;
4293 if (count)
4295 /* Warn since no effort is made to ensure that any frame
4296 beyond the current one exists or can be safely reached. */
4297 warning (OPT_Wframe_address, "calling %qD with "
4298 "a nonzero argument is unsafe", fndecl);
4301 /* For __builtin_frame_address, return what we've got. */
4302 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4303 return tem;
4305 if (!REG_P (tem)
4306 && ! CONSTANT_P (tem))
4307 tem = copy_addr_to_reg (tem);
4308 return tem;
4312 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4313 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4314 is the same as for allocate_dynamic_stack_space. */
4316 static rtx
4317 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4319 rtx op0;
4320 rtx result;
4321 bool valid_arglist;
4322 unsigned int align;
4323 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4324 == BUILT_IN_ALLOCA_WITH_ALIGN);
4326 valid_arglist
4327 = (alloca_with_align
4328 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4329 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4331 if (!valid_arglist)
4332 return NULL_RTX;
4334 /* Compute the argument. */
4335 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4337 /* Compute the alignment. */
4338 align = (alloca_with_align
4339 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4340 : BIGGEST_ALIGNMENT);
4342 /* Allocate the desired space. */
4343 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4344 result = convert_memory_address (ptr_mode, result);
4346 return result;
4349 /* Expand a call to bswap builtin in EXP.
4350 Return NULL_RTX if a normal call should be emitted rather than expanding the
4351 function in-line. If convenient, the result should be placed in TARGET.
4352 SUBTARGET may be used as the target for computing one of EXP's operands. */
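/* As a concrete illustration, __builtin_bswap32 (0x12345678) evaluates to
   0x78563412; this expander essentially maps such calls onto the target's
   bswap_optab pattern.  */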
4354 static rtx
4355 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4356 rtx subtarget)
4358 tree arg;
4359 rtx op0;
4361 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4362 return NULL_RTX;
4364 arg = CALL_EXPR_ARG (exp, 0);
4365 op0 = expand_expr (arg,
4366 subtarget && GET_MODE (subtarget) == target_mode
4367 ? subtarget : NULL_RTX,
4368 target_mode, EXPAND_NORMAL);
4369 if (GET_MODE (op0) != target_mode)
4370 op0 = convert_to_mode (target_mode, op0, 1);
4372 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4374 gcc_assert (target);
4376 return convert_to_mode (target_mode, target, 1);
4379 /* Expand a call to a unary builtin in EXP.
4380 Return NULL_RTX if a normal call should be emitted rather than expanding the
4381 function in-line. If convenient, the result should be placed in TARGET.
4382 SUBTARGET may be used as the target for computing one of EXP's operands. */
4384 static rtx
4385 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4386 rtx subtarget, optab op_optab)
4388 rtx op0;
4390 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4391 return NULL_RTX;
4393 /* Compute the argument. */
4394 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4395 (subtarget
4396 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4397 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4398 VOIDmode, EXPAND_NORMAL);
4399 /* Compute op, into TARGET if possible.
4400 Set TARGET to wherever the result comes back. */
4401 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4402 op_optab, op0, target, op_optab != clrsb_optab);
4403 gcc_assert (target);
4405 return convert_to_mode (target_mode, target, 0);
4408 /* Expand a call to __builtin_expect. We just return our argument
4409 as the builtin_expect semantics should already have been applied by
4410 the tree branch prediction pass. */
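/* A typical source-level use looks roughly like

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   and by the time we get here the hint has been consumed, so the expansion
   simply returns the value of the first argument.  (ptr and handle_error
   are illustrative names.)  */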
4412 static rtx
4413 expand_builtin_expect (tree exp, rtx target)
4415 tree arg;
4417 if (call_expr_nargs (exp) < 2)
4418 return const0_rtx;
4419 arg = CALL_EXPR_ARG (exp, 0);
4421 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4422 /* When guessing was done, the hints should be already stripped away. */
4423 gcc_assert (!flag_guess_branch_prob
4424 || optimize == 0 || seen_error ());
4425 return target;
4428 /* Expand a call to __builtin_assume_aligned. We just return our first
4429 argument as the builtin_assume_aligned semantic should've been already
4430 executed by CCP. */
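/* For illustration, a call such as

     q = __builtin_assume_aligned (p, 16);

   simply evaluates to p at this point; the 16-byte alignment guarantee has
   already been propagated by CCP.  (p and q are illustrative.)  */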
4432 static rtx
4433 expand_builtin_assume_aligned (tree exp, rtx target)
4435 if (call_expr_nargs (exp) < 2)
4436 return const0_rtx;
4437 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4438 EXPAND_NORMAL);
4439 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4440 && (call_expr_nargs (exp) < 3
4441 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4442 return target;
4445 void
4446 expand_builtin_trap (void)
4448 if (targetm.have_trap ())
4450 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4451 /* For trap insns when not accumulating outgoing args force
4452 REG_ARGS_SIZE note to prevent crossjumping of calls with
4453 different args sizes. */
4454 if (!ACCUMULATE_OUTGOING_ARGS)
4455 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4457 else
4459 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4460 tree call_expr = build_call_expr (fn, 0);
4461 expand_call (call_expr, NULL_RTX, false);
4464 emit_barrier ();
4467 /* Expand a call to __builtin_unreachable. We do nothing except emit
4468 a barrier saying that control flow will not pass here.
4470 It is the responsibility of the program being compiled to ensure
4471 that control flow never reaches __builtin_unreachable. */
4472 static void
4473 expand_builtin_unreachable (void)
4475 emit_barrier ();
4478 /* Expand EXP, a call to fabs, fabsf or fabsl.
4479 Return NULL_RTX if a normal call should be emitted rather than expanding
4480 the function inline. If convenient, the result should be placed
4481 in TARGET. SUBTARGET may be used as the target for computing
4482 the operand. */
4484 static rtx
4485 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4487 machine_mode mode;
4488 tree arg;
4489 rtx op0;
4491 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4492 return NULL_RTX;
4494 arg = CALL_EXPR_ARG (exp, 0);
4495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4496 mode = TYPE_MODE (TREE_TYPE (arg));
4497 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4498 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4501 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4502 Return NULL_RTX if a normal call should be emitted rather than expanding the
4503 function inline. If convenient, the result should be placed in TARGET.
4504 SUBTARGET may be used as the target for computing the operand. */
4506 static rtx
4507 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4509 rtx op0, op1;
4510 tree arg;
4512 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4513 return NULL_RTX;
4515 arg = CALL_EXPR_ARG (exp, 0);
4516 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4518 arg = CALL_EXPR_ARG (exp, 1);
4519 op1 = expand_normal (arg);
4521 return expand_copysign (op0, op1, target);
4524 /* Expand a call to __builtin___clear_cache. */
4526 static rtx
4527 expand_builtin___clear_cache (tree exp)
4529 if (!targetm.code_for_clear_cache)
4531 #ifdef CLEAR_INSN_CACHE
4532 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4533 does something. Just do the default expansion to a call to
4534 __clear_cache(). */
4535 return NULL_RTX;
4536 #else
4537 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4538 does nothing. There is no need to call it. Do nothing. */
4539 return const0_rtx;
4540 #endif /* CLEAR_INSN_CACHE */
4543 /* We have a "clear_cache" insn, and it will handle everything. */
4544 tree begin, end;
4545 rtx begin_rtx, end_rtx;
4547 /* We must not expand to a library call. If we did, any
4548 fallback library function in libgcc that might contain a call to
4549 __builtin___clear_cache() would recurse infinitely. */
4550 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4552 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4553 return const0_rtx;
4556 if (targetm.have_clear_cache ())
4558 struct expand_operand ops[2];
4560 begin = CALL_EXPR_ARG (exp, 0);
4561 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4563 end = CALL_EXPR_ARG (exp, 1);
4564 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4566 create_address_operand (&ops[0], begin_rtx);
4567 create_address_operand (&ops[1], end_rtx);
4568 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4569 return const0_rtx;
4571 return const0_rtx;
4574 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4576 static rtx
4577 round_trampoline_addr (rtx tramp)
4579 rtx temp, addend, mask;
4581 /* If we don't need too much alignment, we'll have been guaranteed
4582 proper alignment by get_trampoline_type. */
4583 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4584 return tramp;
4586 /* Round address up to desired boundary. */
4587 temp = gen_reg_rtx (Pmode);
4588 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4589 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4591 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4592 temp, 0, OPTAB_LIB_WIDEN);
4593 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4594 temp, 0, OPTAB_LIB_WIDEN);
4596 return tramp;
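/* As a worked example of the rounding above: with an alignment of 16 bytes,
   an address of 0x1003 becomes (0x1003 + 15) & -16 == 0x1010.  (Numbers are
   illustrative only.)  */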
4599 static rtx
4600 expand_builtin_init_trampoline (tree exp, bool onstack)
4602 tree t_tramp, t_func, t_chain;
4603 rtx m_tramp, r_tramp, r_chain, tmp;
4605 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4606 POINTER_TYPE, VOID_TYPE))
4607 return NULL_RTX;
4609 t_tramp = CALL_EXPR_ARG (exp, 0);
4610 t_func = CALL_EXPR_ARG (exp, 1);
4611 t_chain = CALL_EXPR_ARG (exp, 2);
4613 r_tramp = expand_normal (t_tramp);
4614 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4615 MEM_NOTRAP_P (m_tramp) = 1;
4617 /* If ONSTACK, the TRAMP argument should be the address of a field
4618 within the local function's FRAME decl. Either way, let's see if
4619 we can fill in the MEM_ATTRs for this memory. */
4620 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4621 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4623 /* Creator of a heap trampoline is responsible for making sure the
4624 address is aligned to at least STACK_BOUNDARY. Normally malloc
4625 will ensure this anyhow. */
4626 tmp = round_trampoline_addr (r_tramp);
4627 if (tmp != r_tramp)
4629 m_tramp = change_address (m_tramp, BLKmode, tmp);
4630 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4631 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4634 /* The FUNC argument should be the address of the nested function.
4635 Extract the actual function decl to pass to the hook. */
4636 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4637 t_func = TREE_OPERAND (t_func, 0);
4638 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4640 r_chain = expand_normal (t_chain);
4642 /* Generate insns to initialize the trampoline. */
4643 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4645 if (onstack)
4647 trampolines_created = 1;
4649 if (targetm.calls.custom_function_descriptors != 0)
4650 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4651 "trampoline generated for nested function %qD", t_func);
4654 return const0_rtx;
4657 static rtx
4658 expand_builtin_adjust_trampoline (tree exp)
4660 rtx tramp;
4662 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4663 return NULL_RTX;
4665 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4666 tramp = round_trampoline_addr (tramp);
4667 if (targetm.calls.trampoline_adjust_address)
4668 tramp = targetm.calls.trampoline_adjust_address (tramp);
4670 return tramp;
4673 /* Expand a call to the builtin descriptor initialization routine.
4674 A descriptor is made up of a couple of pointers to the static
4675 chain and the code entry in this order. */
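/* Schematically, the layout written below is

     descr[0] = static chain value
     descr[1] = code entry point

   with each slot POINTER_SIZE bits wide, matching the two emit_move_insn
   calls in the body.  (Sketch only.)  */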
4677 static rtx
4678 expand_builtin_init_descriptor (tree exp)
4680 tree t_descr, t_func, t_chain;
4681 rtx m_descr, r_descr, r_func, r_chain;
4683 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
4684 VOID_TYPE))
4685 return NULL_RTX;
4687 t_descr = CALL_EXPR_ARG (exp, 0);
4688 t_func = CALL_EXPR_ARG (exp, 1);
4689 t_chain = CALL_EXPR_ARG (exp, 2);
4691 r_descr = expand_normal (t_descr);
4692 m_descr = gen_rtx_MEM (BLKmode, r_descr);
4693 MEM_NOTRAP_P (m_descr) = 1;
4695 r_func = expand_normal (t_func);
4696 r_chain = expand_normal (t_chain);
4698 /* Generate insns to initialize the descriptor. */
4699 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
4700 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
4701 POINTER_SIZE / BITS_PER_UNIT), r_func);
4703 return const0_rtx;
4706 /* Expand a call to the builtin descriptor adjustment routine. */
4708 static rtx
4709 expand_builtin_adjust_descriptor (tree exp)
4711 rtx tramp;
4713 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4714 return NULL_RTX;
4716 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4718 /* Unalign the descriptor to allow runtime identification. */
4719 tramp = plus_constant (ptr_mode, tramp,
4720 targetm.calls.custom_function_descriptors);
4722 return force_operand (tramp, NULL_RTX);
4725 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4726 function. The function first checks whether the back end provides
4727 an insn to implement signbit for the respective mode. If not, it
4728 checks whether the floating point format of the value is such that
4729 the sign bit can be extracted. If that is not the case, error out.
4730 EXP is the expression that is a call to the builtin function; if
4731 convenient, the result should be placed in TARGET. */
4732 static rtx
4733 expand_builtin_signbit (tree exp, rtx target)
4735 const struct real_format *fmt;
4736 machine_mode fmode, imode, rmode;
4737 tree arg;
4738 int word, bitpos;
4739 enum insn_code icode;
4740 rtx temp;
4741 location_t loc = EXPR_LOCATION (exp);
4743 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4744 return NULL_RTX;
4746 arg = CALL_EXPR_ARG (exp, 0);
4747 fmode = TYPE_MODE (TREE_TYPE (arg));
4748 rmode = TYPE_MODE (TREE_TYPE (exp));
4749 fmt = REAL_MODE_FORMAT (fmode);
4751 arg = builtin_save_expr (arg);
4753 /* Expand the argument yielding a RTX expression. */
4754 temp = expand_normal (arg);
4756 /* Check if the back end provides an insn that handles signbit for the
4757 argument's mode. */
4758 icode = optab_handler (signbit_optab, fmode);
4759 if (icode != CODE_FOR_nothing)
4761 rtx_insn *last = get_last_insn ();
4762 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4763 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4764 return target;
4765 delete_insns_since (last);
4768 /* For floating point formats without a sign bit, implement signbit
4769 as "ARG < 0.0". */
4770 bitpos = fmt->signbit_ro;
4771 if (bitpos < 0)
4773 /* But we can't do this if the format supports signed zero. */
4774 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4776 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4777 build_real (TREE_TYPE (arg), dconst0));
4778 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4781 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4783 imode = int_mode_for_mode (fmode);
4784 gcc_assert (imode != BLKmode);
4785 temp = gen_lowpart (imode, temp);
4787 else
4789 imode = word_mode;
4790 /* Handle targets with different FP word orders. */
4791 if (FLOAT_WORDS_BIG_ENDIAN)
4792 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4793 else
4794 word = bitpos / BITS_PER_WORD;
4795 temp = operand_subword_force (temp, word, fmode);
4796 bitpos = bitpos % BITS_PER_WORD;
4799 /* Force the intermediate word_mode (or narrower) result into a
4800 register. This avoids attempting to create paradoxical SUBREGs
4801 of floating point modes below. */
4802 temp = force_reg (imode, temp);
4804 /* If the bitpos is within the "result mode" lowpart, the operation
4805 can be implemented with a single bitwise AND. Otherwise, we need
4806 a right shift and an AND. */
4808 if (bitpos < GET_MODE_BITSIZE (rmode))
4810 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4812 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4813 temp = gen_lowpart (rmode, temp);
4814 temp = expand_binop (rmode, and_optab, temp,
4815 immed_wide_int_const (mask, rmode),
4816 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4818 else
4820 /* Perform a logical right shift to place the signbit in the least
4821 significant bit, then truncate the result to the desired mode
4822 and mask just this bit. */
4823 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4824 temp = gen_lowpart (rmode, temp);
4825 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4826 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4829 return temp;
4832 /* Expand fork or exec calls. TARGET is the desired target of the
4833 call. EXP is the call. FN is the
4834 identifier of the actual function. IGNORE is nonzero if the
4835 value is to be ignored. */
4837 static rtx
4838 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4840 tree id, decl;
4841 tree call;
4843 /* If we are not profiling, just call the function. */
4844 if (!profile_arc_flag)
4845 return NULL_RTX;
4847 /* Otherwise call the wrapper. This should be equivalent for the rest of
4848 the compiler, so the code does not diverge, and the wrapper may run the
4849 code necessary for keeping the profiling sane. */
4851 switch (DECL_FUNCTION_CODE (fn))
4853 case BUILT_IN_FORK:
4854 id = get_identifier ("__gcov_fork");
4855 break;
4857 case BUILT_IN_EXECL:
4858 id = get_identifier ("__gcov_execl");
4859 break;
4861 case BUILT_IN_EXECV:
4862 id = get_identifier ("__gcov_execv");
4863 break;
4865 case BUILT_IN_EXECLP:
4866 id = get_identifier ("__gcov_execlp");
4867 break;
4869 case BUILT_IN_EXECLE:
4870 id = get_identifier ("__gcov_execle");
4871 break;
4873 case BUILT_IN_EXECVP:
4874 id = get_identifier ("__gcov_execvp");
4875 break;
4877 case BUILT_IN_EXECVE:
4878 id = get_identifier ("__gcov_execve");
4879 break;
4881 default:
4882 gcc_unreachable ();
4885 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4886 FUNCTION_DECL, id, TREE_TYPE (fn));
4887 DECL_EXTERNAL (decl) = 1;
4888 TREE_PUBLIC (decl) = 1;
4889 DECL_ARTIFICIAL (decl) = 1;
4890 TREE_NOTHROW (decl) = 1;
4891 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4892 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4893 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4894 return expand_call (call, target, ignore);
4899 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4900 the pointer in these functions is void*, the tree optimizers may remove
4901 casts. The mode computed in expand_builtin isn't reliable either, due
4902 to __sync_bool_compare_and_swap.
4904 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4905 group of builtins. This gives us log2 of the mode size. */
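/* For example, assuming BITS_PER_UNIT is 8 as on common targets,
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so the
   request below is for a mode of 8 << 2 == 32 bits, i.e. the 4-byte integer
   mode.  (Illustrative; relies on the _1/_2/_4/_8/_16 variants being
   consecutive.)  */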
4907 static inline machine_mode
4908 get_builtin_sync_mode (int fcode_diff)
4910 /* The size is not negotiable, so ask not to get BLKmode in return
4911 if the target indicates that a smaller size would be better. */
4912 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4915 /* Expand the memory expression LOC and return the appropriate memory operand
4916 for the builtin_sync operations. */
4918 static rtx
4919 get_builtin_sync_mem (tree loc, machine_mode mode)
4921 rtx addr, mem;
4923 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4924 addr = convert_memory_address (Pmode, addr);
4926 /* Note that we explicitly do not want any alias information for this
4927 memory, so that we kill all other live memories. Otherwise we don't
4928 satisfy the full barrier semantics of the intrinsic. */
4929 mem = validize_mem (gen_rtx_MEM (mode, addr));
4931 /* The alignment needs to be at least that of the mode. */
4932 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4933 get_pointer_alignment (loc)));
4934 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4935 MEM_VOLATILE_P (mem) = 1;
4937 return mem;
4940 /* Make sure an argument is in the right mode.
4941 EXP is the tree argument.
4942 MODE is the mode it should be in. */
4944 static rtx
4945 expand_expr_force_mode (tree exp, machine_mode mode)
4947 rtx val;
4948 machine_mode old_mode;
4950 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4951 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4952 of CONST_INTs, where we know the old_mode only from the call argument. */
4954 old_mode = GET_MODE (val);
4955 if (old_mode == VOIDmode)
4956 old_mode = TYPE_MODE (TREE_TYPE (exp));
4957 val = convert_modes (mode, old_mode, val, 1);
4958 return val;
4962 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4963 EXP is the CALL_EXPR. CODE is the rtx code
4964 that corresponds to the arithmetic or logical operation from the name;
4965 an exception here is that NOT actually means NAND. TARGET is an optional
4966 place for us to store the results; AFTER is true if this is the
4967 xxx_and_fetch form. */
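/* For instance, with an int x, both of

     oldv = __sync_fetch_and_add (&x, 1);    returns the value before
     newv = __sync_add_and_fetch (&x, 1);    returns the value after

   funnel through this function with CODE == PLUS; only AFTER differs.
   (oldv, newv and x are illustrative names.)  */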
4969 static rtx
4970 expand_builtin_sync_operation (machine_mode mode, tree exp,
4971 enum rtx_code code, bool after,
4972 rtx target)
4974 rtx val, mem;
4975 location_t loc = EXPR_LOCATION (exp);
4977 if (code == NOT && warn_sync_nand)
4979 tree fndecl = get_callee_fndecl (exp);
4980 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4982 static bool warned_f_a_n, warned_n_a_f;
4984 switch (fcode)
4986 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4987 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4988 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4989 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4990 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4991 if (warned_f_a_n)
4992 break;
4994 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4995 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4996 warned_f_a_n = true;
4997 break;
4999 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5000 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5001 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5002 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5003 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5004 if (warned_n_a_f)
5005 break;
5007 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5008 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5009 warned_n_a_f = true;
5010 break;
5012 default:
5013 gcc_unreachable ();
5017 /* Expand the operands. */
5018 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5019 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5021 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5022 after);
5025 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5026 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5027 true if this is the boolean form. TARGET is a place for us to store the
5028 results; this is NOT optional if IS_BOOL is true. */
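/* Illustrative uses of the two forms handled here:

     ok   = __sync_bool_compare_and_swap (&x, oldv, newv);
     prev = __sync_val_compare_and_swap  (&x, oldv, newv);

   The first wants only a success flag, the second the prior value; as noted
   above, TARGET must be supplied in the boolean case.  (x, oldv, newv, ok
   and prev are illustrative.)  */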
5030 static rtx
5031 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5032 bool is_bool, rtx target)
5034 rtx old_val, new_val, mem;
5035 rtx *pbool, *poval;
5037 /* Expand the operands. */
5038 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5039 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5040 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5042 pbool = poval = NULL;
5043 if (target != const0_rtx)
5045 if (is_bool)
5046 pbool = &target;
5047 else
5048 poval = &target;
5050 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5051 false, MEMMODEL_SYNC_SEQ_CST,
5052 MEMMODEL_SYNC_SEQ_CST))
5053 return NULL_RTX;
5055 return target;
5058 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5059 general form is actually an atomic exchange, and some targets only
5060 support a reduced form with the second argument being a constant 1.
5061 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5062 the results. */
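/* The classic use is a simple spinlock, roughly:

     while (__sync_lock_test_and_set (&lock, 1))
       ;
     ... critical section ...
     __sync_lock_release (&lock);

   hence the note above about targets that only support storing the
   constant 1.  (lock is an illustrative variable.)  */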
5064 static rtx
5065 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5066 rtx target)
5068 rtx val, mem;
5070 /* Expand the operands. */
5071 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5072 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5074 return expand_sync_lock_test_and_set (target, mem, val);
5077 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5079 static void
5080 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5082 rtx mem;
5084 /* Expand the operands. */
5085 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5087 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5090 /* Given an integer representing an ``enum memmodel'', verify its
5091 correctness and return the memory model enum. */
5093 static enum memmodel
5094 get_memmodel (tree exp)
5096 rtx op;
5097 unsigned HOST_WIDE_INT val;
5098 source_location loc
5099 = expansion_point_location_if_in_system_header (input_location);
5101 /* If the parameter is not a constant, it's a run time value so we'll just
5102 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5103 if (TREE_CODE (exp) != INTEGER_CST)
5104 return MEMMODEL_SEQ_CST;
5106 op = expand_normal (exp);
5108 val = INTVAL (op);
5109 if (targetm.memmodel_check)
5110 val = targetm.memmodel_check (val);
5111 else if (val & ~MEMMODEL_MASK)
5113 warning_at (loc, OPT_Winvalid_memory_model,
5114 "unknown architecture specifier in memory model to builtin");
5115 return MEMMODEL_SEQ_CST;
5118 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5119 if (memmodel_base (val) >= MEMMODEL_LAST)
5121 warning_at (loc, OPT_Winvalid_memory_model,
5122 "invalid memory model argument to builtin");
5123 return MEMMODEL_SEQ_CST;
5126 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5127 be conservative and promote consume to acquire. */
5128 if (val == MEMMODEL_CONSUME)
5129 val = MEMMODEL_ACQUIRE;
5131 return (enum memmodel) val;
5134 /* Expand the __atomic_exchange intrinsic:
5135 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5136 EXP is the CALL_EXPR.
5137 TARGET is an optional place for us to store the results. */
5139 static rtx
5140 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5142 rtx val, mem;
5143 enum memmodel model;
5145 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5147 if (!flag_inline_atomics)
5148 return NULL_RTX;
5150 /* Expand the operands. */
5151 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5152 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5154 return expand_atomic_exchange (target, mem, val, model);
5157 /* Expand the __atomic_compare_exchange intrinsic:
5158 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5159 TYPE desired, BOOL weak,
5160 enum memmodel success,
5161 enum memmodel failure)
5162 EXP is the CALL_EXPR.
5163 TARGET is an optional place for us to store the results. */
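/* A rough user-level picture of what is being expanded here:

     ok = __atomic_compare_exchange_n (&x, &expected, desired,
                                       false, success_mm, failure_mm);

   On failure, expected is updated with the value actually seen in x, which
   is what the conditional store back to EXPECT below implements.  (All
   identifiers in the snippet are illustrative.)  */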
5165 static rtx
5166 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5167 rtx target)
5169 rtx expect, desired, mem, oldval;
5170 rtx_code_label *label;
5171 enum memmodel success, failure;
5172 tree weak;
5173 bool is_weak;
5174 source_location loc
5175 = expansion_point_location_if_in_system_header (input_location);
5177 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5178 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5180 if (failure > success)
5182 warning_at (loc, OPT_Winvalid_memory_model,
5183 "failure memory model cannot be stronger than success "
5184 "memory model for %<__atomic_compare_exchange%>");
5185 success = MEMMODEL_SEQ_CST;
5188 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5190 warning_at (loc, OPT_Winvalid_memory_model,
5191 "invalid failure memory model for "
5192 "%<__atomic_compare_exchange%>");
5193 failure = MEMMODEL_SEQ_CST;
5194 success = MEMMODEL_SEQ_CST;
5198 if (!flag_inline_atomics)
5199 return NULL_RTX;
5201 /* Expand the operands. */
5202 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5204 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5205 expect = convert_memory_address (Pmode, expect);
5206 expect = gen_rtx_MEM (mode, expect);
5207 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5209 weak = CALL_EXPR_ARG (exp, 3);
5210 is_weak = false;
5211 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5212 is_weak = true;
5214 if (target == const0_rtx)
5215 target = NULL;
5217 /* Lest the rtl backend create a race condition with an improper store
5218 to memory, always create a new pseudo for OLDVAL. */
5219 oldval = NULL;
5221 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5222 is_weak, success, failure))
5223 return NULL_RTX;
5225 /* Conditionally store back to EXPECT, lest we create a race condition
5226 with an improper store to memory. */
5227 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5228 the normal case where EXPECT is totally private, i.e. a register. At
5229 which point the store can be unconditional. */
5230 label = gen_label_rtx ();
5231 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5232 GET_MODE (target), 1, label);
5233 emit_move_insn (expect, oldval);
5234 emit_label (label);
5236 return target;
5239 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5240 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5241 call. The weak parameter must be dropped to match the expected parameter
5242 list and the expected argument changed from value to pointer to memory
5243 slot. */
5245 static void
5246 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5248 unsigned int z;
5249 vec<tree, va_gc> *vec;
5251 vec_alloc (vec, 5);
5252 vec->quick_push (gimple_call_arg (call, 0));
5253 tree expected = gimple_call_arg (call, 1);
5254 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5255 TREE_TYPE (expected));
5256 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5257 if (expd != x)
5258 emit_move_insn (x, expd);
5259 tree v = make_tree (TREE_TYPE (expected), x);
5260 vec->quick_push (build1 (ADDR_EXPR,
5261 build_pointer_type (TREE_TYPE (expected)), v));
5262 vec->quick_push (gimple_call_arg (call, 2));
5263 /* Skip the boolean weak parameter. */
5264 for (z = 4; z < 6; z++)
5265 vec->quick_push (gimple_call_arg (call, z));
5266 built_in_function fncode
5267 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5268 + exact_log2 (GET_MODE_SIZE (mode)));
5269 tree fndecl = builtin_decl_explicit (fncode);
5270 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5271 fndecl);
5272 tree exp = build_call_vec (boolean_type_node, fn, vec);
5273 tree lhs = gimple_call_lhs (call);
5274 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5275 if (lhs)
5277 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5278 if (GET_MODE (boolret) != mode)
5279 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5280 x = force_reg (mode, x);
5281 write_complex_part (target, boolret, true);
5282 write_complex_part (target, x, false);
5286 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5288 void
5289 expand_ifn_atomic_compare_exchange (gcall *call)
5291 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5292 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5293 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5294 rtx expect, desired, mem, oldval, boolret;
5295 enum memmodel success, failure;
5296 tree lhs;
5297 bool is_weak;
5298 source_location loc
5299 = expansion_point_location_if_in_system_header (gimple_location (call));
5301 success = get_memmodel (gimple_call_arg (call, 4));
5302 failure = get_memmodel (gimple_call_arg (call, 5));
5304 if (failure > success)
5306 warning_at (loc, OPT_Winvalid_memory_model,
5307 "failure memory model cannot be stronger than success "
5308 "memory model for %<__atomic_compare_exchange%>");
5309 success = MEMMODEL_SEQ_CST;
5312 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5314 warning_at (loc, OPT_Winvalid_memory_model,
5315 "invalid failure memory model for "
5316 "%<__atomic_compare_exchange%>");
5317 failure = MEMMODEL_SEQ_CST;
5318 success = MEMMODEL_SEQ_CST;
5321 if (!flag_inline_atomics)
5323 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5324 return;
5327 /* Expand the operands. */
5328 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5330 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5331 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5333 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5335 boolret = NULL;
5336 oldval = NULL;
5338 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5339 is_weak, success, failure))
5341 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5342 return;
5345 lhs = gimple_call_lhs (call);
5346 if (lhs)
5348 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5349 if (GET_MODE (boolret) != mode)
5350 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5351 write_complex_part (target, boolret, true);
5352 write_complex_part (target, oldval, false);
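/* Usage sketch (editorial, not GCC source; variable names are hypothetical):
   the source-level builtin that reaches this expander looks like

       int expected = old;
       bool ok = __atomic_compare_exchange_n (&shared, &expected, newval,
                                              0, __ATOMIC_ACQ_REL,
                                              __ATOMIC_ACQUIRE);

   The failure model may not be stronger than the success model, nor may it
   be a release or acquire-release model; both misuses are diagnosed above
   and downgraded to __ATOMIC_SEQ_CST before expansion.  */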
5356 /* Expand the __atomic_load intrinsic:
5357 TYPE __atomic_load (TYPE *object, enum memmodel)
5358 EXP is the CALL_EXPR.
5359 TARGET is an optional place for us to store the results. */
5361 static rtx
5362 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5364 rtx mem;
5365 enum memmodel model;
5367 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5368 if (is_mm_release (model) || is_mm_acq_rel (model))
5370 source_location loc
5371 = expansion_point_location_if_in_system_header (input_location);
5372 warning_at (loc, OPT_Winvalid_memory_model,
5373 "invalid memory model for %<__atomic_load%>");
5374 model = MEMMODEL_SEQ_CST;
5377 if (!flag_inline_atomics)
5378 return NULL_RTX;
5380 /* Expand the operand. */
5381 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5383 return expand_atomic_load (target, mem, model);
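/* Usage sketch (editorial; SHARED is a hypothetical variable):

       int v = __atomic_load_n (&shared, __ATOMIC_ACQUIRE);

   Relaxed, consume, acquire and seq_cst are the meaningful models for a
   load; release and acq_rel are diagnosed above and treated as seq_cst.  */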
5387 /* Expand the __atomic_store intrinsic:
5388 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5389 EXP is the CALL_EXPR.
5390 The call returns no value.  */
5392 static rtx
5393 expand_builtin_atomic_store (machine_mode mode, tree exp)
5395 rtx mem, val;
5396 enum memmodel model;
5398 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5399 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5400 || is_mm_release (model)))
5402 source_location loc
5403 = expansion_point_location_if_in_system_header (input_location);
5404 warning_at (loc, OPT_Winvalid_memory_model,
5405 "invalid memory model for %<__atomic_store%>");
5406 model = MEMMODEL_SEQ_CST;
5409 if (!flag_inline_atomics)
5410 return NULL_RTX;
5412 /* Expand the operands. */
5413 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5414 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5416 return expand_atomic_store (mem, val, model, false);
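/* Usage sketch (editorial; SHARED is a hypothetical variable):

       __atomic_store_n (&shared, 42, __ATOMIC_RELEASE);

   Only relaxed, release and seq_cst are valid for a store, as checked
   above; any other model is diagnosed and treated as seq_cst.  */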
5419 /* Expand the __atomic_fetch_XXX intrinsic:
5420 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5421 EXP is the CALL_EXPR.
5422 TARGET is an optional place for us to store the results.
5423 CODE is the operation being performed: PLUS, MINUS, IOR, XOR, AND, or NOT.
5424 FETCH_AFTER is true if returning the result of the operation.
5425 FETCH_AFTER is false if returning the value before the operation.
5426 IGNORE is true if the result is not used.
5427 EXT_CALL is the correct builtin for an external call if this cannot be
5428 resolved to an instruction sequence. */
5430 static rtx
5431 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5432 enum rtx_code code, bool fetch_after,
5433 bool ignore, enum built_in_function ext_call)
5435 rtx val, mem, ret;
5436 enum memmodel model;
5437 tree fndecl;
5438 tree addr;
5440 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5442 /* Expand the operands. */
5443 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5444 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5446 /* Only try generating instructions if inlining is turned on. */
5447 if (flag_inline_atomics)
5449 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5450 if (ret)
5451 return ret;
5454 /* Return if a different routine isn't needed for the library call. */
5455 if (ext_call == BUILT_IN_NONE)
5456 return NULL_RTX;
5458 /* Change the call to the specified function. */
5459 fndecl = get_callee_fndecl (exp);
5460 addr = CALL_EXPR_FN (exp);
5461 STRIP_NOPS (addr);
5463 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5464 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5466 /* Expand the call here so we can emit trailing code. */
5467 ret = expand_call (exp, target, ignore);
5469 /* Replace the original function just in case it matters. */
5470 TREE_OPERAND (addr, 0) = fndecl;
5472 /* Then issue the arithmetic correction to return the right result. */
5473 if (!ignore)
5475 if (code == NOT)
5477 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5478 OPTAB_LIB_WIDEN);
5479 ret = expand_simple_unop (mode, NOT, ret, target, true);
5481 else
5482 ret = expand_simple_binop (mode, code, ret, val, target, true,
5483 OPTAB_LIB_WIDEN);
5485 return ret;
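/* Editorial note on the arithmetic fixup above: when the operation has to be
   routed to the "fetch before" library entry point (EXT_CALL), the returned
   old value is corrected to the "fetch after" result, e.g.

       add_fetch (p, v)   ==  fetch_add (p, v) + v
       nand_fetch (p, v)  ==  ~(fetch_nand (p, v) & v)

   which is exactly the trailing expand_simple_binop/expand_simple_unop
   sequence (NAND being the one case needing AND followed by NOT).  */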
5488 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5490 void
5491 expand_ifn_atomic_bit_test_and (gcall *call)
5493 tree ptr = gimple_call_arg (call, 0);
5494 tree bit = gimple_call_arg (call, 1);
5495 tree flag = gimple_call_arg (call, 2);
5496 tree lhs = gimple_call_lhs (call);
5497 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5498 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5499 enum rtx_code code;
5500 optab optab;
5501 struct expand_operand ops[5];
5503 gcc_assert (flag_inline_atomics);
5505 if (gimple_call_num_args (call) == 4)
5506 model = get_memmodel (gimple_call_arg (call, 3));
5508 rtx mem = get_builtin_sync_mem (ptr, mode);
5509 rtx val = expand_expr_force_mode (bit, mode);
5511 switch (gimple_call_internal_fn (call))
5513 case IFN_ATOMIC_BIT_TEST_AND_SET:
5514 code = IOR;
5515 optab = atomic_bit_test_and_set_optab;
5516 break;
5517 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5518 code = XOR;
5519 optab = atomic_bit_test_and_complement_optab;
5520 break;
5521 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5522 code = AND;
5523 optab = atomic_bit_test_and_reset_optab;
5524 break;
5525 default:
5526 gcc_unreachable ();
5529 if (lhs == NULL_TREE)
5531 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5532 val, NULL_RTX, true, OPTAB_DIRECT);
5533 if (code == AND)
5534 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5535 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5536 return;
5539 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5540 enum insn_code icode = direct_optab_handler (optab, mode);
5541 gcc_assert (icode != CODE_FOR_nothing);
5542 create_output_operand (&ops[0], target, mode);
5543 create_fixed_operand (&ops[1], mem);
5544 create_convert_operand_to (&ops[2], val, mode, true);
5545 create_integer_operand (&ops[3], model);
5546 create_integer_operand (&ops[4], integer_onep (flag));
5547 if (maybe_expand_insn (icode, 5, ops))
5548 return;
5550 rtx bitval = val;
5551 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5552 val, NULL_RTX, true, OPTAB_DIRECT);
5553 rtx maskval = val;
5554 if (code == AND)
5555 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5556 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5557 code, model, false);
5558 if (integer_onep (flag))
5560 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5561 NULL_RTX, true, OPTAB_DIRECT);
5562 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5563 true, OPTAB_DIRECT);
5565 else
5566 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5567 OPTAB_DIRECT);
5568 if (result != target)
5569 emit_move_insn (target, result);
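/* Editorial sketch (assumed source pattern, not taken from this file): the
   GIMPLE folded into IFN_ATOMIC_BIT_TEST_AND_SET comes from code of roughly
   the form

       unsigned old = __atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST);
       bool was_set = (old >> bit) & 1;

   When the target lacks a direct atomic_bit_test_and_* pattern, the fallback
   above re-expands that same fetch-op / shift / mask sequence.  */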
5572 /* Expand an atomic clear operation.
5573 void __atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5576 static rtx
5577 expand_builtin_atomic_clear (tree exp)
5579 machine_mode mode;
5580 rtx mem, ret;
5581 enum memmodel model;
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5589 source_location loc
5590 = expansion_point_location_if_in_system_header (input_location);
5591 warning_at (loc, OPT_Winvalid_memory_model,
5592 "invalid memory model for %<__atomic_store%>");
5593 model = MEMMODEL_SEQ_CST;
5596 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5597 Failing that, a store is issued by __atomic_store. The only way this can
5598 fail is if the bool type is larger than a word size. Unlikely, but
5599 handle it anyway for completeness. Assume a single threaded model since
5600 there is no atomic support in this case, and no barriers are required. */
5601 ret = expand_atomic_store (mem, const0_rtx, model, true);
5602 if (!ret)
5603 emit_move_insn (mem, const0_rtx);
5604 return const0_rtx;
5607 /* Expand an atomic test_and_set operation.
5608 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5609 EXP is the call expression. */
5611 static rtx
5612 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5614 rtx mem;
5615 enum memmodel model;
5616 machine_mode mode;
5618 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5619 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5620 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5622 return expand_atomic_test_and_set (target, mem, model);
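/* Usage sketch (editorial; LOCK is a hypothetical char flag):

       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;
       ... critical section ...
       __atomic_clear (&lock, __ATOMIC_RELEASE);

   test_and_set atomically sets the flag and returns its previous value;
   clear is expanded just above as an atomic store of zero.  */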
5626 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5627 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5629 static tree
5630 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5632 int size;
5633 machine_mode mode;
5634 unsigned int mode_align, type_align;
5636 if (TREE_CODE (arg0) != INTEGER_CST)
5637 return NULL_TREE;
5639 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5640 mode = mode_for_size (size, MODE_INT, 0);
5641 mode_align = GET_MODE_ALIGNMENT (mode);
5643 if (TREE_CODE (arg1) == INTEGER_CST)
5645 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5647 /* Either this argument is null, or it's a fake pointer encoding
5648 the alignment of the object. */
5649 val = least_bit_hwi (val);
5650 val *= BITS_PER_UNIT;
5652 if (val == 0 || mode_align < val)
5653 type_align = mode_align;
5654 else
5655 type_align = val;
5657 else
5659 tree ttype = TREE_TYPE (arg1);
5661 /* This function is usually invoked and folded immediately by the front
5662 end before anything else has a chance to look at it. The pointer
5663 parameter at this point is usually cast to a void *, so check for that
5664 and look past the cast. */
5665 if (CONVERT_EXPR_P (arg1)
5666 && POINTER_TYPE_P (ttype)
5667 && VOID_TYPE_P (TREE_TYPE (ttype))
5668 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5669 arg1 = TREE_OPERAND (arg1, 0);
5671 ttype = TREE_TYPE (arg1);
5672 gcc_assert (POINTER_TYPE_P (ttype));
5674 /* Get the underlying type of the object. */
5675 ttype = TREE_TYPE (ttype);
5676 type_align = TYPE_ALIGN (ttype);
5679 /* If the object has smaller alignment, the lock free routines cannot
5680 be used. */
5681 if (type_align < mode_align)
5682 return boolean_false_node;
5684 /* Check if a compare_and_swap pattern exists for the mode which represents
5685 the required size. The pattern is not allowed to fail, so the existence
5686 of the pattern indicates support is present. */
5687 if (can_compare_and_swap_p (mode, true))
5688 return boolean_true_node;
5689 else
5690 return boolean_false_node;
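/* Usage sketch (editorial): this folder is what allows

       enum { int_lf = __atomic_always_lock_free (sizeof (int), 0) };

   to collapse to a compile-time constant.  A constant second argument is
   treated as a fake pointer whose least significant set bit encodes the
   alignment of the object, as noted above.  */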
5693 /* Return true if the parameters to call EXP represent an object which will
5694 always generate lock free instructions. The first argument represents the
5695 size of the object, and the second parameter is a pointer to the object
5696 itself. If NULL is passed for the object, then the result is based on
5697 typical alignment for an object of the specified size. Otherwise return
5698 false. */
5700 static rtx
5701 expand_builtin_atomic_always_lock_free (tree exp)
5703 tree size;
5704 tree arg0 = CALL_EXPR_ARG (exp, 0);
5705 tree arg1 = CALL_EXPR_ARG (exp, 1);
5707 if (TREE_CODE (arg0) != INTEGER_CST)
5709 error ("non-constant argument 1 to __atomic_always_lock_free");
5710 return const0_rtx;
5713 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5714 if (size == boolean_true_node)
5715 return const1_rtx;
5716 return const0_rtx;
5719 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5720 is lock free on this architecture. */
5722 static tree
5723 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5725 if (!flag_inline_atomics)
5726 return NULL_TREE;
5728 /* If it isn't always lock free, don't generate a result. */
5729 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5730 return boolean_true_node;
5732 return NULL_TREE;
5735 /* Return one if it can be determined that the object described by the
5736 parameters to call EXP is lock free on this architecture.  The first
5737 argument is the size of the object, and the second is a pointer to the
5738 object itself.  If NULL is passed for the object, the result is based
5739 on typical alignment for an object of the specified size.  If nothing
5740 can be determined at compile time, return NULL_RTX.  */
5742 static rtx
5743 expand_builtin_atomic_is_lock_free (tree exp)
5745 tree size;
5746 tree arg0 = CALL_EXPR_ARG (exp, 0);
5747 tree arg1 = CALL_EXPR_ARG (exp, 1);
5749 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5751 error ("non-integer argument 1 to __atomic_is_lock_free");
5752 return NULL_RTX;
5755 if (!flag_inline_atomics)
5756 return NULL_RTX;
5758 /* If the value is known at compile time, return the RTX for it. */
5759 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5760 if (size == boolean_true_node)
5761 return const1_rtx;
5763 return NULL_RTX;
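/* Usage sketch (editorial; OBJ is a hypothetical variable):

       bool lf = __atomic_is_lock_free (sizeof (obj), &obj);

   If the answer cannot be proven true at compile time, NULL_RTX is returned
   here and the call falls through to the runtime __atomic_is_lock_free in
   libatomic.  */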
5766 /* Expand the __atomic_thread_fence intrinsic:
5767 void __atomic_thread_fence (enum memmodel)
5768 EXP is the CALL_EXPR. */
5770 static void
5771 expand_builtin_atomic_thread_fence (tree exp)
5773 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5774 expand_mem_thread_fence (model);
5777 /* Expand the __atomic_signal_fence intrinsic:
5778 void __atomic_signal_fence (enum memmodel)
5779 EXP is the CALL_EXPR. */
5781 static void
5782 expand_builtin_atomic_signal_fence (tree exp)
5784 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5785 expand_mem_signal_fence (model);
5788 /* Expand the __sync_synchronize intrinsic. */
5790 static void
5791 expand_builtin_sync_synchronize (void)
5793 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
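/* Usage sketch (editorial):

       __atomic_thread_fence (__ATOMIC_RELEASE);    inter-thread fence
       __atomic_signal_fence (__ATOMIC_SEQ_CST);    fence against a signal
                                                    handler on the same thread
       __sync_synchronize ();                       legacy full barrier

   The legacy form is expanded as a MEMMODEL_SYNC_SEQ_CST thread fence.  */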
5796 static rtx
5797 expand_builtin_thread_pointer (tree exp, rtx target)
5799 enum insn_code icode;
5800 if (!validate_arglist (exp, VOID_TYPE))
5801 return const0_rtx;
5802 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5803 if (icode != CODE_FOR_nothing)
5805 struct expand_operand op;
5806 /* If the target is not suitable then create a new target. */
5807 if (target == NULL_RTX
5808 || !REG_P (target)
5809 || GET_MODE (target) != Pmode)
5810 target = gen_reg_rtx (Pmode);
5811 create_output_operand (&op, target, Pmode);
5812 expand_insn (icode, 1, &op);
5813 return target;
5815 error ("__builtin_thread_pointer is not supported on this target");
5816 return const0_rtx;
5819 static void
5820 expand_builtin_set_thread_pointer (tree exp)
5822 enum insn_code icode;
5823 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5824 return;
5825 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5826 if (icode != CODE_FOR_nothing)
5828 struct expand_operand op;
5829 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5830 Pmode, EXPAND_NORMAL);
5831 create_input_operand (&op, val, Pmode);
5832 expand_insn (icode, 1, &op);
5833 return;
5835 error ("__builtin_set_thread_pointer is not supported on this target");
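/* Usage sketch (editorial): on targets providing the corresponding optabs,

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);

   read and write the thread pointer; otherwise the errors above are
   emitted.  */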
5839 /* Emit code to restore the current value of stack. */
5841 static void
5842 expand_stack_restore (tree var)
5844 rtx_insn *prev;
5845 rtx sa = expand_normal (var);
5847 sa = convert_memory_address (Pmode, sa);
5849 prev = get_last_insn ();
5850 emit_stack_restore (SAVE_BLOCK, sa);
5852 record_new_stack_level ();
5854 fixup_args_size_notes (prev, get_last_insn (), 0);
5857 /* Emit code to save the current value of stack. */
5859 static rtx
5860 expand_stack_save (void)
5862 rtx ret = NULL_RTX;
5864 emit_stack_save (SAVE_BLOCK, &ret);
5865 return ret;
5869 /* Expand an expression EXP that calls a built-in function,
5870 with result going to TARGET if that's convenient
5871 (and in mode MODE if that's convenient).
5872 SUBTARGET may be used as the target for computing one of EXP's operands.
5873 IGNORE is nonzero if the value is to be ignored. */
5876 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5877 int ignore)
5879 tree fndecl = get_callee_fndecl (exp);
5880 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5881 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5882 int flags;
5884 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5885 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5887 /* When ASan is enabled, we don't want to expand some memory/string
5888 builtins and rely on libsanitizer's hooks. This allows us to avoid
5889 redundant checks and be sure, that possible overflow will be detected
5890 by ASan. */
5892 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5893 return expand_call (exp, target, ignore);
5895 /* When not optimizing, generate calls to library functions for a certain
5896 set of builtins. */
5897 if (!optimize
5898 && !called_as_built_in (fndecl)
5899 && fcode != BUILT_IN_FORK
5900 && fcode != BUILT_IN_EXECL
5901 && fcode != BUILT_IN_EXECV
5902 && fcode != BUILT_IN_EXECLP
5903 && fcode != BUILT_IN_EXECLE
5904 && fcode != BUILT_IN_EXECVP
5905 && fcode != BUILT_IN_EXECVE
5906 && fcode != BUILT_IN_ALLOCA
5907 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5908 && fcode != BUILT_IN_FREE
5909 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5910 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5912 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5913 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5915 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5916 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5917 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5918 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5919 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5920 && fcode != BUILT_IN_CHKP_BNDRET)
5921 return expand_call (exp, target, ignore);
5923 /* The built-in function expanders test for target == const0_rtx
5924 to determine whether the function's result will be ignored. */
5925 if (ignore)
5926 target = const0_rtx;
5928 /* If the result of a pure or const built-in function is ignored, and
5929 none of its arguments are volatile, we can avoid expanding the
5930 built-in call and just evaluate the arguments for side-effects. */
5931 if (target == const0_rtx
5932 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5933 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5935 bool volatilep = false;
5936 tree arg;
5937 call_expr_arg_iterator iter;
5939 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5940 if (TREE_THIS_VOLATILE (arg))
5942 volatilep = true;
5943 break;
5946 if (! volatilep)
5948 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5949 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5950 return const0_rtx;
5954 /* expand_builtin_with_bounds is supposed to be used for
5955 instrumented builtin calls. */
5956 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5958 switch (fcode)
5960 CASE_FLT_FN (BUILT_IN_FABS):
5961 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5962 case BUILT_IN_FABSD32:
5963 case BUILT_IN_FABSD64:
5964 case BUILT_IN_FABSD128:
5965 target = expand_builtin_fabs (exp, target, subtarget);
5966 if (target)
5967 return target;
5968 break;
5970 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5971 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5972 target = expand_builtin_copysign (exp, target, subtarget);
5973 if (target)
5974 return target;
5975 break;
5977 /* Just do a normal library call if we were unable to fold
5978 the values. */
5979 CASE_FLT_FN (BUILT_IN_CABS):
5980 break;
5982 CASE_FLT_FN (BUILT_IN_FMA):
5983 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5984 if (target)
5985 return target;
5986 break;
5988 CASE_FLT_FN (BUILT_IN_ILOGB):
5989 if (! flag_unsafe_math_optimizations)
5990 break;
5991 gcc_fallthrough ();
5992 CASE_FLT_FN (BUILT_IN_ISINF):
5993 CASE_FLT_FN (BUILT_IN_FINITE):
5994 case BUILT_IN_ISFINITE:
5995 case BUILT_IN_ISNORMAL:
5996 target = expand_builtin_interclass_mathfn (exp, target);
5997 if (target)
5998 return target;
5999 break;
6001 CASE_FLT_FN (BUILT_IN_ICEIL):
6002 CASE_FLT_FN (BUILT_IN_LCEIL):
6003 CASE_FLT_FN (BUILT_IN_LLCEIL):
6004 CASE_FLT_FN (BUILT_IN_LFLOOR):
6005 CASE_FLT_FN (BUILT_IN_IFLOOR):
6006 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6007 target = expand_builtin_int_roundingfn (exp, target);
6008 if (target)
6009 return target;
6010 break;
6012 CASE_FLT_FN (BUILT_IN_IRINT):
6013 CASE_FLT_FN (BUILT_IN_LRINT):
6014 CASE_FLT_FN (BUILT_IN_LLRINT):
6015 CASE_FLT_FN (BUILT_IN_IROUND):
6016 CASE_FLT_FN (BUILT_IN_LROUND):
6017 CASE_FLT_FN (BUILT_IN_LLROUND):
6018 target = expand_builtin_int_roundingfn_2 (exp, target);
6019 if (target)
6020 return target;
6021 break;
6023 CASE_FLT_FN (BUILT_IN_POWI):
6024 target = expand_builtin_powi (exp, target);
6025 if (target)
6026 return target;
6027 break;
6029 CASE_FLT_FN (BUILT_IN_CEXPI):
6030 target = expand_builtin_cexpi (exp, target);
6031 gcc_assert (target);
6032 return target;
6034 CASE_FLT_FN (BUILT_IN_SIN):
6035 CASE_FLT_FN (BUILT_IN_COS):
6036 if (! flag_unsafe_math_optimizations)
6037 break;
6038 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6039 if (target)
6040 return target;
6041 break;
6043 CASE_FLT_FN (BUILT_IN_SINCOS):
6044 if (! flag_unsafe_math_optimizations)
6045 break;
6046 target = expand_builtin_sincos (exp);
6047 if (target)
6048 return target;
6049 break;
6051 case BUILT_IN_APPLY_ARGS:
6052 return expand_builtin_apply_args ();
6054 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6055 FUNCTION with a copy of the parameters described by
6056 ARGUMENTS, and ARGSIZE. It returns a block of memory
6057 allocated on the stack into which is stored all the registers
6058 that might possibly be used for returning the result of a
6059 function. ARGUMENTS is the value returned by
6060 __builtin_apply_args. ARGSIZE is the number of bytes of
6061 arguments that must be copied. ??? How should this value be
6062 computed? We'll also need a safe worst case value for varargs
6063 functions. */
6064 case BUILT_IN_APPLY:
6065 if (!validate_arglist (exp, POINTER_TYPE,
6066 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6067 && !validate_arglist (exp, REFERENCE_TYPE,
6068 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6069 return const0_rtx;
6070 else
6072 rtx ops[3];
6074 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6075 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6076 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6078 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6081 /* __builtin_return (RESULT) causes the function to return the
6082 value described by RESULT. RESULT is address of the block of
6083 memory returned by __builtin_apply. */
6084 case BUILT_IN_RETURN:
6085 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6086 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6087 return const0_rtx;
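/* Usage sketch (editorial; CALLEE and the 64-byte bound are hypothetical):
   a call-forwarding wrapper built on the three builtins handled above looks
   like

       void *args = __builtin_apply_args ();
       void *res  = __builtin_apply ((void (*)()) callee, args, 64);
       __builtin_return (res);

   The last argument to __builtin_apply is a caller-supplied bound on the
   bytes of stack arguments to copy; as the ??? note says, there is no
   portable way to compute it.  */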
6089 case BUILT_IN_SAVEREGS:
6090 return expand_builtin_saveregs ();
6092 case BUILT_IN_VA_ARG_PACK:
6093 /* All valid uses of __builtin_va_arg_pack () are removed during
6094 inlining. */
6095 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6096 return const0_rtx;
6098 case BUILT_IN_VA_ARG_PACK_LEN:
6099 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6100 inlining. */
6101 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6102 return const0_rtx;
6104 /* Return the address of the first anonymous stack arg. */
6105 case BUILT_IN_NEXT_ARG:
6106 if (fold_builtin_next_arg (exp, false))
6107 return const0_rtx;
6108 return expand_builtin_next_arg ();
6110 case BUILT_IN_CLEAR_CACHE:
6111 target = expand_builtin___clear_cache (exp);
6112 if (target)
6113 return target;
6114 break;
6116 case BUILT_IN_CLASSIFY_TYPE:
6117 return expand_builtin_classify_type (exp);
6119 case BUILT_IN_CONSTANT_P:
6120 return const0_rtx;
6122 case BUILT_IN_FRAME_ADDRESS:
6123 case BUILT_IN_RETURN_ADDRESS:
6124 return expand_builtin_frame_address (fndecl, exp);
6126 /* Returns the address of the area where the structure is returned.
6127 0 otherwise. */
6128 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6129 if (call_expr_nargs (exp) != 0
6130 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6131 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6132 return const0_rtx;
6133 else
6134 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6136 case BUILT_IN_ALLOCA:
6137 case BUILT_IN_ALLOCA_WITH_ALIGN:
6138 /* If the allocation stems from the declaration of a variable-sized
6139 object, it cannot accumulate. */
6140 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6141 if (target)
6142 return target;
6143 break;
6145 case BUILT_IN_STACK_SAVE:
6146 return expand_stack_save ();
6148 case BUILT_IN_STACK_RESTORE:
6149 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6150 return const0_rtx;
6152 case BUILT_IN_BSWAP16:
6153 case BUILT_IN_BSWAP32:
6154 case BUILT_IN_BSWAP64:
6155 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6156 if (target)
6157 return target;
6158 break;
6160 CASE_INT_FN (BUILT_IN_FFS):
6161 target = expand_builtin_unop (target_mode, exp, target,
6162 subtarget, ffs_optab);
6163 if (target)
6164 return target;
6165 break;
6167 CASE_INT_FN (BUILT_IN_CLZ):
6168 target = expand_builtin_unop (target_mode, exp, target,
6169 subtarget, clz_optab);
6170 if (target)
6171 return target;
6172 break;
6174 CASE_INT_FN (BUILT_IN_CTZ):
6175 target = expand_builtin_unop (target_mode, exp, target,
6176 subtarget, ctz_optab);
6177 if (target)
6178 return target;
6179 break;
6181 CASE_INT_FN (BUILT_IN_CLRSB):
6182 target = expand_builtin_unop (target_mode, exp, target,
6183 subtarget, clrsb_optab);
6184 if (target)
6185 return target;
6186 break;
6188 CASE_INT_FN (BUILT_IN_POPCOUNT):
6189 target = expand_builtin_unop (target_mode, exp, target,
6190 subtarget, popcount_optab);
6191 if (target)
6192 return target;
6193 break;
6195 CASE_INT_FN (BUILT_IN_PARITY):
6196 target = expand_builtin_unop (target_mode, exp, target,
6197 subtarget, parity_optab);
6198 if (target)
6199 return target;
6200 break;
6202 case BUILT_IN_STRLEN:
6203 target = expand_builtin_strlen (exp, target, target_mode);
6204 if (target)
6205 return target;
6206 break;
6208 case BUILT_IN_STRCPY:
6209 target = expand_builtin_strcpy (exp, target);
6210 if (target)
6211 return target;
6212 break;
6214 case BUILT_IN_STRNCPY:
6215 target = expand_builtin_strncpy (exp, target);
6216 if (target)
6217 return target;
6218 break;
6220 case BUILT_IN_STPCPY:
6221 target = expand_builtin_stpcpy (exp, target, mode);
6222 if (target)
6223 return target;
6224 break;
6226 case BUILT_IN_MEMCPY:
6227 target = expand_builtin_memcpy (exp, target);
6228 if (target)
6229 return target;
6230 break;
6232 case BUILT_IN_MEMPCPY:
6233 target = expand_builtin_mempcpy (exp, target, mode);
6234 if (target)
6235 return target;
6236 break;
6238 case BUILT_IN_MEMSET:
6239 target = expand_builtin_memset (exp, target, mode);
6240 if (target)
6241 return target;
6242 break;
6244 case BUILT_IN_BZERO:
6245 target = expand_builtin_bzero (exp);
6246 if (target)
6247 return target;
6248 break;
6250 case BUILT_IN_STRCMP:
6251 target = expand_builtin_strcmp (exp, target);
6252 if (target)
6253 return target;
6254 break;
6256 case BUILT_IN_STRNCMP:
6257 target = expand_builtin_strncmp (exp, target, mode);
6258 if (target)
6259 return target;
6260 break;
6262 case BUILT_IN_BCMP:
6263 case BUILT_IN_MEMCMP:
6264 case BUILT_IN_MEMCMP_EQ:
6265 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6266 if (target)
6267 return target;
6268 if (fcode == BUILT_IN_MEMCMP_EQ)
6270 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6271 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6273 break;
6275 case BUILT_IN_SETJMP:
6276 /* This should have been lowered to the builtins below. */
6277 gcc_unreachable ();
6279 case BUILT_IN_SETJMP_SETUP:
6280 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6281 and the receiver label. */
6282 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6284 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6285 VOIDmode, EXPAND_NORMAL);
6286 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6287 rtx_insn *label_r = label_rtx (label);
6289 /* This is copied from the handling of non-local gotos. */
6290 expand_builtin_setjmp_setup (buf_addr, label_r);
6291 nonlocal_goto_handler_labels
6292 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6293 nonlocal_goto_handler_labels);
6294 /* ??? Do not let expand_label treat us as such since we would
6295 not want to be both on the list of non-local labels and on
6296 the list of forced labels. */
6297 FORCED_LABEL (label) = 0;
6298 return const0_rtx;
6300 break;
6302 case BUILT_IN_SETJMP_RECEIVER:
6303 /* __builtin_setjmp_receiver is passed the receiver label. */
6304 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6306 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6307 rtx_insn *label_r = label_rtx (label);
6309 expand_builtin_setjmp_receiver (label_r);
6310 return const0_rtx;
6312 break;
6314 /* __builtin_longjmp is passed a pointer to an array of five words.
6315 It's similar to the C library longjmp function but works with
6316 __builtin_setjmp above. */
6317 case BUILT_IN_LONGJMP:
6318 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6320 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6321 VOIDmode, EXPAND_NORMAL);
6322 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6324 if (value != const1_rtx)
6326 error ("%<__builtin_longjmp%> second argument must be 1");
6327 return const0_rtx;
6330 expand_builtin_longjmp (buf_addr, value);
6331 return const0_rtx;
6333 break;
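/* Usage sketch (editorial): the builtin setjmp/longjmp pair handled above
   uses a five-word buffer and a fixed return value of 1:

       intptr_t buf[5];
       if (__builtin_setjmp (buf) == 0)
         __builtin_longjmp (buf, 1);

   Passing anything other than the constant 1 as the second argument is
   rejected just above.  */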
6335 case BUILT_IN_NONLOCAL_GOTO:
6336 target = expand_builtin_nonlocal_goto (exp);
6337 if (target)
6338 return target;
6339 break;
6341 /* This updates the setjmp buffer that is its argument with the value
6342 of the current stack pointer. */
6343 case BUILT_IN_UPDATE_SETJMP_BUF:
6344 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6346 rtx buf_addr
6347 = expand_normal (CALL_EXPR_ARG (exp, 0));
6349 expand_builtin_update_setjmp_buf (buf_addr);
6350 return const0_rtx;
6352 break;
6354 case BUILT_IN_TRAP:
6355 expand_builtin_trap ();
6356 return const0_rtx;
6358 case BUILT_IN_UNREACHABLE:
6359 expand_builtin_unreachable ();
6360 return const0_rtx;
6362 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6363 case BUILT_IN_SIGNBITD32:
6364 case BUILT_IN_SIGNBITD64:
6365 case BUILT_IN_SIGNBITD128:
6366 target = expand_builtin_signbit (exp, target);
6367 if (target)
6368 return target;
6369 break;
6371 /* Various hooks for the DWARF 2 __throw routine. */
6372 case BUILT_IN_UNWIND_INIT:
6373 expand_builtin_unwind_init ();
6374 return const0_rtx;
6375 case BUILT_IN_DWARF_CFA:
6376 return virtual_cfa_rtx;
6377 #ifdef DWARF2_UNWIND_INFO
6378 case BUILT_IN_DWARF_SP_COLUMN:
6379 return expand_builtin_dwarf_sp_column ();
6380 case BUILT_IN_INIT_DWARF_REG_SIZES:
6381 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6382 return const0_rtx;
6383 #endif
6384 case BUILT_IN_FROB_RETURN_ADDR:
6385 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6386 case BUILT_IN_EXTRACT_RETURN_ADDR:
6387 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6388 case BUILT_IN_EH_RETURN:
6389 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6390 CALL_EXPR_ARG (exp, 1));
6391 return const0_rtx;
6392 case BUILT_IN_EH_RETURN_DATA_REGNO:
6393 return expand_builtin_eh_return_data_regno (exp);
6394 case BUILT_IN_EXTEND_POINTER:
6395 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6396 case BUILT_IN_EH_POINTER:
6397 return expand_builtin_eh_pointer (exp);
6398 case BUILT_IN_EH_FILTER:
6399 return expand_builtin_eh_filter (exp);
6400 case BUILT_IN_EH_COPY_VALUES:
6401 return expand_builtin_eh_copy_values (exp);
6403 case BUILT_IN_VA_START:
6404 return expand_builtin_va_start (exp);
6405 case BUILT_IN_VA_END:
6406 return expand_builtin_va_end (exp);
6407 case BUILT_IN_VA_COPY:
6408 return expand_builtin_va_copy (exp);
6409 case BUILT_IN_EXPECT:
6410 return expand_builtin_expect (exp, target);
6411 case BUILT_IN_ASSUME_ALIGNED:
6412 return expand_builtin_assume_aligned (exp, target);
6413 case BUILT_IN_PREFETCH:
6414 expand_builtin_prefetch (exp);
6415 return const0_rtx;
6417 case BUILT_IN_INIT_TRAMPOLINE:
6418 return expand_builtin_init_trampoline (exp, true);
6419 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6420 return expand_builtin_init_trampoline (exp, false);
6421 case BUILT_IN_ADJUST_TRAMPOLINE:
6422 return expand_builtin_adjust_trampoline (exp);
6424 case BUILT_IN_INIT_DESCRIPTOR:
6425 return expand_builtin_init_descriptor (exp);
6426 case BUILT_IN_ADJUST_DESCRIPTOR:
6427 return expand_builtin_adjust_descriptor (exp);
6429 case BUILT_IN_FORK:
6430 case BUILT_IN_EXECL:
6431 case BUILT_IN_EXECV:
6432 case BUILT_IN_EXECLP:
6433 case BUILT_IN_EXECLE:
6434 case BUILT_IN_EXECVP:
6435 case BUILT_IN_EXECVE:
6436 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6437 if (target)
6438 return target;
6439 break;
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6444 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6445 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6447 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6448 if (target)
6449 return target;
6450 break;
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6455 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6456 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6458 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6464 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6466 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6467 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6469 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6475 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6477 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6478 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6480 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6488 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6489 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6490 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6491 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6502 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6503 if (target)
6504 return target;
6505 break;
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6510 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6511 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6513 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6514 if (target)
6515 return target;
6516 break;
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6521 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6522 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6524 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6525 if (target)
6526 return target;
6527 break;
6529 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6530 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6532 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6533 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6534 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6535 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6536 if (target)
6537 return target;
6538 break;
6540 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6541 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6543 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6544 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6546 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6547 if (target)
6548 return target;
6549 break;
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6554 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6555 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6557 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6568 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6569 if (target)
6570 return target;
6571 break;
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6578 if (mode == VOIDmode)
6579 mode = TYPE_MODE (boolean_type_node);
6580 if (!target || !register_operand (target, mode))
6581 target = gen_reg_rtx (mode);
6583 mode = get_builtin_sync_mode
6584 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6585 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6586 if (target)
6587 return target;
6588 break;
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6595 mode = get_builtin_sync_mode
6596 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6597 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6598 if (target)
6599 return target;
6600 break;
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6608 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6609 if (target)
6610 return target;
6611 break;
6613 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6614 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6616 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6617 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6619 expand_builtin_sync_lock_release (mode, exp);
6620 return const0_rtx;
6622 case BUILT_IN_SYNC_SYNCHRONIZE:
6623 expand_builtin_sync_synchronize ();
6624 return const0_rtx;
6626 case BUILT_IN_ATOMIC_EXCHANGE_1:
6627 case BUILT_IN_ATOMIC_EXCHANGE_2:
6628 case BUILT_IN_ATOMIC_EXCHANGE_4:
6629 case BUILT_IN_ATOMIC_EXCHANGE_8:
6630 case BUILT_IN_ATOMIC_EXCHANGE_16:
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6632 target = expand_builtin_atomic_exchange (mode, exp, target);
6633 if (target)
6634 return target;
6635 break;
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6643 unsigned int nargs, z;
6644 vec<tree, va_gc> *vec;
6646 mode =
6647 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6648 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6649 if (target)
6650 return target;
6652 /* If this is turned into an external library call, the weak parameter
6653 must be dropped to match the expected parameter list. */
6654 nargs = call_expr_nargs (exp);
6655 vec_alloc (vec, nargs - 1);
6656 for (z = 0; z < 3; z++)
6657 vec->quick_push (CALL_EXPR_ARG (exp, z));
6658 /* Skip the boolean weak parameter. */
6659 for (z = 4; z < 6; z++)
6660 vec->quick_push (CALL_EXPR_ARG (exp, z));
6661 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6662 break;
6665 case BUILT_IN_ATOMIC_LOAD_1:
6666 case BUILT_IN_ATOMIC_LOAD_2:
6667 case BUILT_IN_ATOMIC_LOAD_4:
6668 case BUILT_IN_ATOMIC_LOAD_8:
6669 case BUILT_IN_ATOMIC_LOAD_16:
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6671 target = expand_builtin_atomic_load (mode, exp, target);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_ATOMIC_STORE_1:
6677 case BUILT_IN_ATOMIC_STORE_2:
6678 case BUILT_IN_ATOMIC_STORE_4:
6679 case BUILT_IN_ATOMIC_STORE_8:
6680 case BUILT_IN_ATOMIC_STORE_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6682 target = expand_builtin_atomic_store (mode, exp);
6683 if (target)
6684 return const0_rtx;
6685 break;
6687 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6688 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6690 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6691 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6693 enum built_in_function lib;
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6695 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6696 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6698 ignore, lib);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6704 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6706 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6707 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6709 enum built_in_function lib;
6710 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6711 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6712 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6714 ignore, lib);
6715 if (target)
6716 return target;
6717 break;
6719 case BUILT_IN_ATOMIC_AND_FETCH_1:
6720 case BUILT_IN_ATOMIC_AND_FETCH_2:
6721 case BUILT_IN_ATOMIC_AND_FETCH_4:
6722 case BUILT_IN_ATOMIC_AND_FETCH_8:
6723 case BUILT_IN_ATOMIC_AND_FETCH_16:
6725 enum built_in_function lib;
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6727 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6728 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6729 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6730 ignore, lib);
6731 if (target)
6732 return target;
6733 break;
6735 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6736 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6738 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6739 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6741 enum built_in_function lib;
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6743 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6744 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6745 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6746 ignore, lib);
6747 if (target)
6748 return target;
6749 break;
6751 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6752 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6754 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6755 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6757 enum built_in_function lib;
6758 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6759 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6760 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6762 ignore, lib);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_OR_FETCH_1:
6768 case BUILT_IN_ATOMIC_OR_FETCH_2:
6769 case BUILT_IN_ATOMIC_OR_FETCH_4:
6770 case BUILT_IN_ATOMIC_OR_FETCH_8:
6771 case BUILT_IN_ATOMIC_OR_FETCH_16:
6773 enum built_in_function lib;
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6775 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6776 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6777 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6778 ignore, lib);
6779 if (target)
6780 return target;
6781 break;
6783 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6784 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6786 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6787 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6790 ignore, BUILT_IN_NONE);
6791 if (target)
6792 return target;
6793 break;
6795 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6796 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6798 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6799 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6801 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6802 ignore, BUILT_IN_NONE);
6803 if (target)
6804 return target;
6805 break;
6807 case BUILT_IN_ATOMIC_FETCH_AND_1:
6808 case BUILT_IN_ATOMIC_FETCH_AND_2:
6809 case BUILT_IN_ATOMIC_FETCH_AND_4:
6810 case BUILT_IN_ATOMIC_FETCH_AND_8:
6811 case BUILT_IN_ATOMIC_FETCH_AND_16:
6812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6813 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6814 ignore, BUILT_IN_NONE);
6815 if (target)
6816 return target;
6817 break;
6819 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6820 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6822 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6823 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6825 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6826 ignore, BUILT_IN_NONE);
6827 if (target)
6828 return target;
6829 break;
6831 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6832 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6834 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6835 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6837 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6838 ignore, BUILT_IN_NONE);
6839 if (target)
6840 return target;
6841 break;
6843 case BUILT_IN_ATOMIC_FETCH_OR_1:
6844 case BUILT_IN_ATOMIC_FETCH_OR_2:
6845 case BUILT_IN_ATOMIC_FETCH_OR_4:
6846 case BUILT_IN_ATOMIC_FETCH_OR_8:
6847 case BUILT_IN_ATOMIC_FETCH_OR_16:
6848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6849 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6850 ignore, BUILT_IN_NONE);
6851 if (target)
6852 return target;
6853 break;
6855 case BUILT_IN_ATOMIC_TEST_AND_SET:
6856 return expand_builtin_atomic_test_and_set (exp, target);
6858 case BUILT_IN_ATOMIC_CLEAR:
6859 return expand_builtin_atomic_clear (exp);
6861 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6862 return expand_builtin_atomic_always_lock_free (exp);
6864 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6865 target = expand_builtin_atomic_is_lock_free (exp);
6866 if (target)
6867 return target;
6868 break;
6870 case BUILT_IN_ATOMIC_THREAD_FENCE:
6871 expand_builtin_atomic_thread_fence (exp);
6872 return const0_rtx;
6874 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6875 expand_builtin_atomic_signal_fence (exp);
6876 return const0_rtx;
6878 case BUILT_IN_OBJECT_SIZE:
6879 return expand_builtin_object_size (exp);
6881 case BUILT_IN_MEMCPY_CHK:
6882 case BUILT_IN_MEMPCPY_CHK:
6883 case BUILT_IN_MEMMOVE_CHK:
6884 case BUILT_IN_MEMSET_CHK:
6885 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6886 if (target)
6887 return target;
6888 break;
6890 case BUILT_IN_STRCPY_CHK:
6891 case BUILT_IN_STPCPY_CHK:
6892 case BUILT_IN_STRNCPY_CHK:
6893 case BUILT_IN_STPNCPY_CHK:
6894 case BUILT_IN_STRCAT_CHK:
6895 case BUILT_IN_STRNCAT_CHK:
6896 case BUILT_IN_SNPRINTF_CHK:
6897 case BUILT_IN_VSNPRINTF_CHK:
6898 maybe_emit_chk_warning (exp, fcode);
6899 break;
6901 case BUILT_IN_SPRINTF_CHK:
6902 case BUILT_IN_VSPRINTF_CHK:
6903 maybe_emit_sprintf_chk_warning (exp, fcode);
6904 break;
6906 case BUILT_IN_FREE:
6907 if (warn_free_nonheap_object)
6908 maybe_emit_free_warning (exp);
6909 break;
6911 case BUILT_IN_THREAD_POINTER:
6912 return expand_builtin_thread_pointer (exp, target);
6914 case BUILT_IN_SET_THREAD_POINTER:
6915 expand_builtin_set_thread_pointer (exp);
6916 return const0_rtx;
6918 case BUILT_IN_CILK_DETACH:
6919 expand_builtin_cilk_detach (exp);
6920 return const0_rtx;
6922 case BUILT_IN_CILK_POP_FRAME:
6923 expand_builtin_cilk_pop_frame (exp);
6924 return const0_rtx;
6926 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6927 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6928 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6929 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6930 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6931 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6932 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6933 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6934 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6935 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6936 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6937 /* We allow user CHKP builtins if Pointer Bounds
6938 Checker is off. */
6939 if (!chkp_function_instrumented_p (current_function_decl))
6941 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6942 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6943 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6944 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6945 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6946 return expand_normal (CALL_EXPR_ARG (exp, 0));
6947 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6948 return expand_normal (size_zero_node);
6949 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6950 return expand_normal (size_int (-1));
6951 else
6952 return const0_rtx;
6954 /* FALLTHROUGH */
6956 case BUILT_IN_CHKP_BNDMK:
6957 case BUILT_IN_CHKP_BNDSTX:
6958 case BUILT_IN_CHKP_BNDCL:
6959 case BUILT_IN_CHKP_BNDCU:
6960 case BUILT_IN_CHKP_BNDLDX:
6961 case BUILT_IN_CHKP_BNDRET:
6962 case BUILT_IN_CHKP_INTERSECT:
6963 case BUILT_IN_CHKP_NARROW:
6964 case BUILT_IN_CHKP_EXTRACT_LOWER:
6965 case BUILT_IN_CHKP_EXTRACT_UPPER:
6966 /* Software implementation of Pointer Bounds Checker is NYI.
6967 Target support is required. */
6968 error ("Your target platform does not support -fcheck-pointer-bounds");
6969 break;
6971 case BUILT_IN_ACC_ON_DEVICE:
6972 /* Do library call, if we failed to expand the builtin when
6973 folding. */
6974 break;
6976 default: /* just do library call, if unknown builtin */
6977 break;
6980 /* The switch statement above can drop through to cause the function
6981 to be called normally. */
6982 return expand_call (exp, target, ignore);
6985 /* Similar to expand_builtin but is used for instrumented calls. */
6988 expand_builtin_with_bounds (tree exp, rtx target,
6989 rtx subtarget ATTRIBUTE_UNUSED,
6990 machine_mode mode, int ignore)
6992 tree fndecl = get_callee_fndecl (exp);
6993 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6995 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6997 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6998 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7000 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7001 && fcode < END_CHKP_BUILTINS);
7003 switch (fcode)
7005 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7006 target = expand_builtin_memcpy_with_bounds (exp, target);
7007 if (target)
7008 return target;
7009 break;
7011 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7012 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7013 if (target)
7014 return target;
7015 break;
7017 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7018 target = expand_builtin_memset_with_bounds (exp, target, mode);
7019 if (target)
7020 return target;
7021 break;
7023 default:
7024 break;
7027 /* The switch statement above can drop through to cause the function
7028 to be called normally. */
7029 return expand_call (exp, target, ignore);
7032 /* Determine whether a tree node represents a call to a built-in
7033 function. If the tree T is a call to a built-in function with
7034 the right number of arguments of the appropriate types, return
7035 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7036 Otherwise the return value is END_BUILTINS. */
7038 enum built_in_function
7039 builtin_mathfn_code (const_tree t)
7041 const_tree fndecl, arg, parmlist;
7042 const_tree argtype, parmtype;
7043 const_call_expr_arg_iterator iter;
7045 if (TREE_CODE (t) != CALL_EXPR
7046 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7047 return END_BUILTINS;
7049 fndecl = get_callee_fndecl (t);
7050 if (fndecl == NULL_TREE
7051 || TREE_CODE (fndecl) != FUNCTION_DECL
7052 || ! DECL_BUILT_IN (fndecl)
7053 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7054 return END_BUILTINS;
7056 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7057 init_const_call_expr_arg_iterator (t, &iter);
7058 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7060 /* If a function doesn't take a variable number of arguments,
7061 the last element in the list will have type `void'. */
7062 parmtype = TREE_VALUE (parmlist);
7063 if (VOID_TYPE_P (parmtype))
7065 if (more_const_call_expr_args_p (&iter))
7066 return END_BUILTINS;
7067 return DECL_FUNCTION_CODE (fndecl);
7070 if (! more_const_call_expr_args_p (&iter))
7071 return END_BUILTINS;
7073 arg = next_const_call_expr_arg (&iter);
7074 argtype = TREE_TYPE (arg);
7076 if (SCALAR_FLOAT_TYPE_P (parmtype))
7078 if (! SCALAR_FLOAT_TYPE_P (argtype))
7079 return END_BUILTINS;
7081 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7083 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7084 return END_BUILTINS;
7086 else if (POINTER_TYPE_P (parmtype))
7088 if (! POINTER_TYPE_P (argtype))
7089 return END_BUILTINS;
7091 else if (INTEGRAL_TYPE_P (parmtype))
7093 if (! INTEGRAL_TYPE_P (argtype))
7094 return END_BUILTINS;
7096 else
7097 return END_BUILTINS;
7100 /* Variable-length argument list. */
7101 return DECL_FUNCTION_CODE (fndecl);
7104 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7105 evaluate to a constant. */
7107 static tree
7108 fold_builtin_constant_p (tree arg)
7110 /* We return 1 for a numeric type that's known to be a constant
7111 value at compile-time or for an aggregate type that's a
7112 literal constant. */
7113 STRIP_NOPS (arg);
7115 /* If we know this is a constant, return the constant 1. */
7116 if (CONSTANT_CLASS_P (arg)
7117 || (TREE_CODE (arg) == CONSTRUCTOR
7118 && TREE_CONSTANT (arg)))
7119 return integer_one_node;
7120 if (TREE_CODE (arg) == ADDR_EXPR)
7122 tree op = TREE_OPERAND (arg, 0);
7123 if (TREE_CODE (op) == STRING_CST
7124 || (TREE_CODE (op) == ARRAY_REF
7125 && integer_zerop (TREE_OPERAND (op, 1))
7126 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7127 return integer_one_node;
7130 /* If this expression has side effects, show we don't know it to be a
7131 constant. Likewise if it's a pointer or aggregate type since in
7132 those cases we only want literals, since those are only optimized
7133 when generating RTL, not later.
7134 And finally, if we are compiling an initializer, not code, we
7135 need to return a definite result now; there's not going to be any
7136 more optimization done. */
7137 if (TREE_SIDE_EFFECTS (arg)
7138 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7139 || POINTER_TYPE_P (TREE_TYPE (arg))
7140 || cfun == 0
7141 || folding_initializer
7142 || force_folding_builtin_constant_p)
7143 return integer_zero_node;
7145 return NULL_TREE;
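/* Editor's sketch, not from the original source: how the fold above behaves
   at the user level.  Constants and addresses of string literals fold to 1
   right away; anything else is left for later passes, and when not
   optimizing it ends up as 0 (see fold_builtin_1 further down).  The
   function name below is illustrative only.  */
static int
constant_p_sketch (int x)
{
  int a = __builtin_constant_p (3 * 7);   /* literal arithmetic: folds to 1 */
  int b = __builtin_constant_p ("abc");   /* address of a string literal: 1 */
  int c = __builtin_constant_p (x);       /* unknown here; 0 if not optimizing */
  return a + b + c;
}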
7148 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7149 return it as a truthvalue. */
7151 static tree
7152 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7153 tree predictor)
7155 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7157 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7158 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7159 ret_type = TREE_TYPE (TREE_TYPE (fn));
7160 pred_type = TREE_VALUE (arg_types);
7161 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7163 pred = fold_convert_loc (loc, pred_type, pred);
7164 expected = fold_convert_loc (loc, expected_type, expected);
7165 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7166 predictor);
7168 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7169 build_int_cst (ret_type, 0));
7172 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7173 NULL_TREE if no simplification is possible. */
7175 tree
7176 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7178 tree inner, fndecl, inner_arg0;
7179 enum tree_code code;
7181 /* Distribute the expected value over short-circuiting operators.
7182 See through the cast from truthvalue_type_node to long. */
7183 inner_arg0 = arg0;
7184 while (CONVERT_EXPR_P (inner_arg0)
7185 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7186 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7187 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7189 /* If this is a builtin_expect within a builtin_expect keep the
7190 inner one. See through a comparison against a constant. It
7191 might have been added to create a truthvalue. */
7192 inner = inner_arg0;
7194 if (COMPARISON_CLASS_P (inner)
7195 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7196 inner = TREE_OPERAND (inner, 0);
7198 if (TREE_CODE (inner) == CALL_EXPR
7199 && (fndecl = get_callee_fndecl (inner))
7200 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7201 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7202 return arg0;
7204 inner = inner_arg0;
7205 code = TREE_CODE (inner);
7206 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7208 tree op0 = TREE_OPERAND (inner, 0);
7209 tree op1 = TREE_OPERAND (inner, 1);
7211 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7212 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7213 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7215 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7218 /* If the argument isn't invariant then there's nothing else we can do. */
7219 if (!TREE_CONSTANT (inner_arg0))
7220 return NULL_TREE;
7222 /* If we expect that a comparison against the argument will fold to
7223 a constant, return the constant. In practice, this means a true
7224 constant or the address of a non-weak symbol. */
7225 inner = inner_arg0;
7226 STRIP_NOPS (inner);
7227 if (TREE_CODE (inner) == ADDR_EXPR)
7231 inner = TREE_OPERAND (inner, 0);
7233 while (TREE_CODE (inner) == COMPONENT_REF
7234 || TREE_CODE (inner) == ARRAY_REF);
7235 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7236 return NULL_TREE;
7239 /* Otherwise, ARG0 already has the proper type for the return value. */
7240 return arg0;
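/* Editor's sketch, not from the original source: the distribution over
   short-circuit operators performed by fold_builtin_expect, written out by
   hand.  The function name is illustrative only.  */
static int
expect_sketch (int a, int b)
{
  /* __builtin_expect (a && b, 1) is folded into roughly
       (__builtin_expect ((long) a, 1) != 0)
       && (__builtin_expect ((long) b, 1) != 0)
     so each arm of the short-circuit carries the hint.  */
  return __builtin_expect (a && b, 1);
}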
7243 /* Fold a call to __builtin_classify_type with argument ARG. */
7245 static tree
7246 fold_builtin_classify_type (tree arg)
7248 if (arg == 0)
7249 return build_int_cst (integer_type_node, no_type_class);
7251 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7254 /* Fold a call to __builtin_strlen with argument ARG. */
7256 static tree
7257 fold_builtin_strlen (location_t loc, tree type, tree arg)
7259 if (!validate_arg (arg, POINTER_TYPE))
7260 return NULL_TREE;
7261 else
7263 tree len = c_strlen (arg, 0);
7265 if (len)
7266 return fold_convert_loc (loc, type, len);
7268 return NULL_TREE;
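/* Editor's sketch, not from the original source: with a constant string
   argument, c_strlen lets the call fold to an integer constant.  */
#include <stddef.h>
static size_t
strlen_sketch (void)
{
  return __builtin_strlen ("abc");   /* folds to the constant 3 */
}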
7272 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7274 static tree
7275 fold_builtin_inf (location_t loc, tree type, int warn)
7277 REAL_VALUE_TYPE real;
7279 /* __builtin_inff is intended to be usable to define INFINITY on all
7280 targets. If an infinity is not available, INFINITY expands "to a
7281 positive constant of type float that overflows at translation
7282 time", footnote "In this case, using INFINITY will violate the
7283 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7284 Thus we pedwarn to ensure this constraint violation is
7285 diagnosed. */
7286 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7287 pedwarn (loc, 0, "target format does not support infinity");
7289 real_inf (&real);
7290 return build_real (type, real);
7293 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7294 NULL_TREE if no simplification can be made. */
7296 static tree
7297 fold_builtin_sincos (location_t loc,
7298 tree arg0, tree arg1, tree arg2)
7300 tree type;
7301 tree fndecl, call = NULL_TREE;
7303 if (!validate_arg (arg0, REAL_TYPE)
7304 || !validate_arg (arg1, POINTER_TYPE)
7305 || !validate_arg (arg2, POINTER_TYPE))
7306 return NULL_TREE;
7308 type = TREE_TYPE (arg0);
7310 /* Calculate the result when the argument is a constant. */
7311 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7312 if (fn == END_BUILTINS)
7313 return NULL_TREE;
7315 /* Canonicalize sincos to cexpi. */
7316 if (TREE_CODE (arg0) == REAL_CST)
7318 tree complex_type = build_complex_type (type);
7319 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7321 if (!call)
7323 if (!targetm.libc_has_function (function_c99_math_complex)
7324 || !builtin_decl_implicit_p (fn))
7325 return NULL_TREE;
7326 fndecl = builtin_decl_explicit (fn);
7327 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7328 call = builtin_save_expr (call);
7331 return build2 (COMPOUND_EXPR, void_type_node,
7332 build2 (MODIFY_EXPR, void_type_node,
7333 build_fold_indirect_ref_loc (loc, arg1),
7334 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7335 build2 (MODIFY_EXPR, void_type_node,
7336 build_fold_indirect_ref_loc (loc, arg2),
7337 fold_build1_loc (loc, REALPART_EXPR, type, call)));
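/* Editor's sketch, not from the original source: the shape of the sincos
   canonicalization above in portable C99.  GCC's internal cexpi (x) computes
   cos (x) + i*sin (x); cexp (I * x) from <complex.h> is used here as a
   stand-in for illustration.  */
#include <complex.h>
static void
sincos_sketch (double x, double *sinp, double *cosp)
{
  double _Complex t = cexp (I * x);   /* one call replaces sin + cos */
  *sinp = cimag (t);                  /* imaginary part -> sine      */
  *cosp = creal (t);                  /* real part -> cosine         */
}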
7340 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7341 Return NULL_TREE if no simplification can be made. */
7343 static tree
7344 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7346 if (!validate_arg (arg1, POINTER_TYPE)
7347 || !validate_arg (arg2, POINTER_TYPE)
7348 || !validate_arg (len, INTEGER_TYPE))
7349 return NULL_TREE;
7351 /* If the LEN parameter is zero, return zero. */
7352 if (integer_zerop (len))
7353 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7354 arg1, arg2);
7356 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7357 if (operand_equal_p (arg1, arg2, 0))
7358 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7360 /* If len parameter is one, return an expression corresponding to
7361 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7362 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7364 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7365 tree cst_uchar_ptr_node
7366 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7368 tree ind1
7369 = fold_convert_loc (loc, integer_type_node,
7370 build1 (INDIRECT_REF, cst_uchar_node,
7371 fold_convert_loc (loc,
7372 cst_uchar_ptr_node,
7373 arg1)));
7374 tree ind2
7375 = fold_convert_loc (loc, integer_type_node,
7376 build1 (INDIRECT_REF, cst_uchar_node,
7377 fold_convert_loc (loc,
7378 cst_uchar_ptr_node,
7379 arg2)));
7380 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7383 return NULL_TREE;
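/* Editor's sketch, not from the original source: the LEN == 1 case above
   reduces memcmp to a difference of the first bytes.  */
static int
memcmp1_sketch (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}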
7386 /* Fold a call to builtin isascii with argument ARG. */
7388 static tree
7389 fold_builtin_isascii (location_t loc, tree arg)
7391 if (!validate_arg (arg, INTEGER_TYPE))
7392 return NULL_TREE;
7393 else
7395 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7396 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7397 build_int_cst (integer_type_node,
7398 ~ (unsigned HOST_WIDE_INT) 0x7f));
7399 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7400 arg, integer_zero_node);
7404 /* Fold a call to builtin toascii with argument ARG. */
7406 static tree
7407 fold_builtin_toascii (location_t loc, tree arg)
7409 if (!validate_arg (arg, INTEGER_TYPE))
7410 return NULL_TREE;
7412 /* Transform toascii(c) -> (c & 0x7f). */
7413 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7414 build_int_cst (integer_type_node, 0x7f));
7417 /* Fold a call to builtin isdigit with argument ARG. */
7419 static tree
7420 fold_builtin_isdigit (location_t loc, tree arg)
7422 if (!validate_arg (arg, INTEGER_TYPE))
7423 return NULL_TREE;
7424 else
7426 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7427 /* According to the C standard, isdigit is unaffected by locale.
7428 However, it definitely is affected by the target character set. */
7429 unsigned HOST_WIDE_INT target_digit0
7430 = lang_hooks.to_target_charset ('0');
7432 if (target_digit0 == 0)
7433 return NULL_TREE;
7435 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7436 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7437 build_int_cst (unsigned_type_node, target_digit0));
7438 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7439 build_int_cst (unsigned_type_node, 9));
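/* Editor's sketch, not from the original source: the three ctype folds above
   spelled as plain C, assuming a target character set (such as ASCII or
   EBCDIC) whose digits are contiguous starting at '0'.  */
static int
ctype_folds_sketch (int c)
{
  int is_ascii = (c & ~0x7f) == 0;          /* isascii (c) */
  int to_ascii = c & 0x7f;                  /* toascii (c) */
  int is_digit = (unsigned) c - '0' <= 9;   /* isdigit (c) */
  return is_ascii + to_ascii + is_digit;
}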
7443 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7445 static tree
7446 fold_builtin_fabs (location_t loc, tree arg, tree type)
7448 if (!validate_arg (arg, REAL_TYPE))
7449 return NULL_TREE;
7451 arg = fold_convert_loc (loc, type, arg);
7452 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7455 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7457 static tree
7458 fold_builtin_abs (location_t loc, tree arg, tree type)
7460 if (!validate_arg (arg, INTEGER_TYPE))
7461 return NULL_TREE;
7463 arg = fold_convert_loc (loc, type, arg);
7464 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7467 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7469 static tree
7470 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7472 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7473 if (validate_arg (arg0, REAL_TYPE)
7474 && validate_arg (arg1, REAL_TYPE)
7475 && validate_arg (arg2, REAL_TYPE)
7476 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7477 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7479 return NULL_TREE;
7482 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7484 static tree
7485 fold_builtin_carg (location_t loc, tree arg, tree type)
7487 if (validate_arg (arg, COMPLEX_TYPE)
7488 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7490 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7492 if (atan2_fn)
7494 tree new_arg = builtin_save_expr (arg);
7495 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7496 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7497 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7501 return NULL_TREE;
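/* Editor's sketch, not from the original source: the carg fold above spelled
   with the library functions it relies on.  */
#include <math.h>
#include <complex.h>
static double
carg_sketch (double _Complex z)
{
  return atan2 (cimag (z), creal (z));   /* carg (z) -> atan2 (imag, real) */
}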
7504 /* Fold a call to builtin frexp, we can assume the base is 2. */
7506 static tree
7507 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7509 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7510 return NULL_TREE;
7512 STRIP_NOPS (arg0);
7514 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7515 return NULL_TREE;
7517 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7519 /* Proceed if a valid pointer type was passed in. */
7520 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7522 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7523 tree frac, exp;
7525 switch (value->cl)
7527 case rvc_zero:
7528 /* For +-0, return (*exp = 0, +-0). */
7529 exp = integer_zero_node;
7530 frac = arg0;
7531 break;
7532 case rvc_nan:
7533 case rvc_inf:
7534 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7535 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7536 case rvc_normal:
7538 /* Since the frexp function always expects base 2, and in
7539 GCC normalized significands are already in the range
7540 [0.5, 1.0), we have exactly what frexp wants. */
7541 REAL_VALUE_TYPE frac_rvt = *value;
7542 SET_REAL_EXP (&frac_rvt, 0);
7543 frac = build_real (rettype, frac_rvt);
7544 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7546 break;
7547 default:
7548 gcc_unreachable ();
7551 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7552 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7553 TREE_SIDE_EFFECTS (arg1) = 1;
7554 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7557 return NULL_TREE;
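/* Editor's sketch, not from the original source: constant folding of frexp
   in base 2.  Since 6.0 == 0.75 * 2**3, frexp (6.0, &e) folds to the pair
   (e = 3, 0.75).  */
#include <math.h>
static double
frexp_sketch (void)
{
  int e;
  double m = frexp (6.0, &e);   /* m == 0.75, e == 3 */
  return m + e;
}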
7560 /* Fold a call to builtin modf. */
7562 static tree
7563 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7565 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7566 return NULL_TREE;
7568 STRIP_NOPS (arg0);
7570 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7571 return NULL_TREE;
7573 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7575 /* Proceed if a valid pointer type was passed in. */
7576 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7578 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7579 REAL_VALUE_TYPE trunc, frac;
7581 switch (value->cl)
7583 case rvc_nan:
7584 case rvc_zero:
7585 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7586 trunc = frac = *value;
7587 break;
7588 case rvc_inf:
7589 /* For +-Inf, return (*arg1 = arg0, +-0). */
7590 frac = dconst0;
7591 frac.sign = value->sign;
7592 trunc = *value;
7593 break;
7594 case rvc_normal:
7595 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7596 real_trunc (&trunc, VOIDmode, value);
7597 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7598 /* If the original number was negative and already
7599 integral, then the fractional part is -0.0. */
7600 if (value->sign && frac.cl == rvc_zero)
7601 frac.sign = value->sign;
7602 break;
7605 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7606 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7607 build_real (rettype, trunc));
7608 TREE_SIDE_EFFECTS (arg1) = 1;
7609 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7610 build_real (rettype, frac));
7613 return NULL_TREE;
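/* Editor's sketch, not from the original source: constant folding of modf,
   including the sign rule for negative integral inputs.  */
#include <math.h>
static double
modf_sketch (void)
{
  double ip;
  double f1 = modf (2.5, &ip);    /* ip == 2.0, f1 == 0.5   */
  double f2 = modf (-2.0, &ip);   /* ip == -2.0, f2 == -0.0 */
  return f1 + f2;
}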
7616 /* Given a location LOC, an interclass builtin function decl FNDECL
7617 and its single argument ARG, return a folded expression computing
7618 the same, or NULL_TREE if we either couldn't or didn't want to fold
7619 (the latter happens if there's an RTL instruction available). */
7621 static tree
7622 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7624 machine_mode mode;
7626 if (!validate_arg (arg, REAL_TYPE))
7627 return NULL_TREE;
7629 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7630 return NULL_TREE;
7632 mode = TYPE_MODE (TREE_TYPE (arg));
7634 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7636 /* If there is no optab, try generic code. */
7637 switch (DECL_FUNCTION_CODE (fndecl))
7639 tree result;
7641 CASE_FLT_FN (BUILT_IN_ISINF):
7643 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7644 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7645 tree type = TREE_TYPE (arg);
7646 REAL_VALUE_TYPE r;
7647 char buf[128];
7649 if (is_ibm_extended)
7651 /* NaN and Inf are encoded in the high-order double value
7652 only. The low-order value is not significant. */
7653 type = double_type_node;
7654 mode = DFmode;
7655 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7657 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7658 real_from_string (&r, buf);
7659 result = build_call_expr (isgr_fn, 2,
7660 fold_build1_loc (loc, ABS_EXPR, type, arg),
7661 build_real (type, r));
7662 return result;
7664 CASE_FLT_FN (BUILT_IN_FINITE):
7665 case BUILT_IN_ISFINITE:
7667 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7668 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7669 tree type = TREE_TYPE (arg);
7670 REAL_VALUE_TYPE r;
7671 char buf[128];
7673 if (is_ibm_extended)
7675 /* NaN and Inf are encoded in the high-order double value
7676 only. The low-order value is not significant. */
7677 type = double_type_node;
7678 mode = DFmode;
7679 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7681 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7682 real_from_string (&r, buf);
7683 result = build_call_expr (isle_fn, 2,
7684 fold_build1_loc (loc, ABS_EXPR, type, arg),
7685 build_real (type, r));
7686 /*result = fold_build2_loc (loc, UNGT_EXPR,
7687 TREE_TYPE (TREE_TYPE (fndecl)),
7688 fold_build1_loc (loc, ABS_EXPR, type, arg),
7689 build_real (type, r));
7690 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7691 TREE_TYPE (TREE_TYPE (fndecl)),
7692 result);*/
7693 return result;
7695 case BUILT_IN_ISNORMAL:
7697 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7698 islessequal(fabs(x),DBL_MAX). */
7699 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7700 tree type = TREE_TYPE (arg);
7701 tree orig_arg, max_exp, min_exp;
7702 machine_mode orig_mode = mode;
7703 REAL_VALUE_TYPE rmax, rmin;
7704 char buf[128];
7706 orig_arg = arg = builtin_save_expr (arg);
7707 if (is_ibm_extended)
7709 /* Use double to test the normal range of IBM extended
7710 precision. Emin for IBM extended precision is
7711 different to emin for IEEE double, being 53 higher
7712 since the low double exponent is at least 53 lower
7713 than the high double exponent. */
7714 type = double_type_node;
7715 mode = DFmode;
7716 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7718 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7720 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7721 real_from_string (&rmax, buf);
7722 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7723 real_from_string (&rmin, buf);
7724 max_exp = build_real (type, rmax);
7725 min_exp = build_real (type, rmin);
7727 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7728 if (is_ibm_extended)
7730 /* Testing the high end of the range is done just using
7731 the high double, using the same test as isfinite().
7732 For the subnormal end of the range we first test the
7733 high double, then if its magnitude is equal to the
7734 limit of 0x1p-969, we test whether the low double is
7735 non-zero and opposite sign to the high double. */
7736 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7737 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7738 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7739 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7740 arg, min_exp);
7741 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7742 complex_double_type_node, orig_arg);
7743 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7744 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7745 tree zero = build_real (type, dconst0);
7746 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7747 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7748 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7749 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7750 fold_build3 (COND_EXPR,
7751 integer_type_node,
7752 hilt, logt, lolt));
7753 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7754 eq_min, ok_lo);
7755 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7756 gt_min, eq_min);
7758 else
7760 tree const isge_fn
7761 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7762 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7764 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7765 max_exp, min_exp);
7766 return result;
7768 default:
7769 break;
7772 return NULL_TREE;
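/* Editor's sketch, not from the original source: the generic expansions
   above for a plain IEEE double (the IBM extended-precision special cases
   are omitted).  DBL_MAX and DBL_MIN stand in for the mode's largest and
   smallest normal values.  */
#include <math.h>
#include <float.h>
static int
interclass_sketch (double x)
{
  int is_inf = isgreater (fabs (x), DBL_MAX);                      /* isinf */
  int is_finite = islessequal (fabs (x), DBL_MAX);                 /* isfinite */
  int is_normal = isgreaterequal (fabs (x), DBL_MIN) & is_finite;  /* isnormal */
  return is_inf + is_finite + is_normal;
}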
7775 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7776 ARG is the argument for the call. */
7778 static tree
7779 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7781 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7783 if (!validate_arg (arg, REAL_TYPE))
7784 return NULL_TREE;
7786 switch (builtin_index)
7788 case BUILT_IN_ISINF:
7789 if (!HONOR_INFINITIES (arg))
7790 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7792 return NULL_TREE;
7794 case BUILT_IN_ISINF_SIGN:
7796 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7797 /* In a boolean context, GCC will fold the inner COND_EXPR to
7798 1. So e.g. "if (isinf_sign(x))" would be folded to just
7799 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7800 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7801 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7802 tree tmp = NULL_TREE;
7804 arg = builtin_save_expr (arg);
7806 if (signbit_fn && isinf_fn)
7808 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7809 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7811 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7812 signbit_call, integer_zero_node);
7813 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7814 isinf_call, integer_zero_node);
7816 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7817 integer_minus_one_node, integer_one_node);
7818 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7819 isinf_call, tmp,
7820 integer_zero_node);
7823 return tmp;
7826 case BUILT_IN_ISFINITE:
7827 if (!HONOR_NANS (arg)
7828 && !HONOR_INFINITIES (arg))
7829 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7831 return NULL_TREE;
7833 case BUILT_IN_ISNAN:
7834 if (!HONOR_NANS (arg))
7835 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7838 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7839 if (is_ibm_extended)
7841 /* NaN and Inf are encoded in the high-order double value
7842 only. The low-order value is not significant. */
7843 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7846 arg = builtin_save_expr (arg);
7847 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7849 default:
7850 gcc_unreachable ();
7854 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7855 This builtin will generate code to return the appropriate floating
7856 point classification depending on the value of the floating point
7857 number passed in. The possible return values must be supplied as
7858 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7859 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7860 one floating-point argument, which is "type generic". */
7862 static tree
7863 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7865 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7866 arg, type, res, tmp;
7867 machine_mode mode;
7868 REAL_VALUE_TYPE r;
7869 char buf[128];
7871 /* Verify the required arguments in the original call. */
7872 if (nargs != 6
7873 || !validate_arg (args[0], INTEGER_TYPE)
7874 || !validate_arg (args[1], INTEGER_TYPE)
7875 || !validate_arg (args[2], INTEGER_TYPE)
7876 || !validate_arg (args[3], INTEGER_TYPE)
7877 || !validate_arg (args[4], INTEGER_TYPE)
7878 || !validate_arg (args[5], REAL_TYPE))
7879 return NULL_TREE;
7881 fp_nan = args[0];
7882 fp_infinite = args[1];
7883 fp_normal = args[2];
7884 fp_subnormal = args[3];
7885 fp_zero = args[4];
7886 arg = args[5];
7887 type = TREE_TYPE (arg);
7888 mode = TYPE_MODE (type);
7889 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7891 /* fpclassify(x) ->
7892 isnan(x) ? FP_NAN :
7893 (fabs(x) == Inf ? FP_INFINITE :
7894 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7895 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7897 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7898 build_real (type, dconst0));
7899 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7900 tmp, fp_zero, fp_subnormal);
7902 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7903 real_from_string (&r, buf);
7904 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7905 arg, build_real (type, r));
7906 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7908 if (HONOR_INFINITIES (mode))
7910 real_inf (&r);
7911 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7912 build_real (type, r));
7913 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7914 fp_infinite, res);
7917 if (HONOR_NANS (mode))
7919 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7920 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7923 return res;
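/* Editor's sketch, not from the original source: the nested conditional that
   the fpclassify fold builds, for a plain IEEE double.  DBL_MIN plays the
   role of the smallest normal value 0x1p(emin-1).  */
#include <math.h>
#include <float.h>
static int
fpclassify_sketch (double x)
{
  double ax = fabs (x);
  return isnan (x)        ? FP_NAN
         : ax == INFINITY ? FP_INFINITE
         : ax >= DBL_MIN  ? FP_NORMAL
         : ax == 0.0      ? FP_ZERO
                          : FP_SUBNORMAL;
}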
7926 /* Fold a call to an unordered comparison function such as
7927 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7928 being called and ARG0 and ARG1 are the arguments for the call.
7929 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7930 the opposite of the desired result. UNORDERED_CODE is used
7931 for modes that can hold NaNs and ORDERED_CODE is used for
7932 the rest. */
7934 static tree
7935 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7936 enum tree_code unordered_code,
7937 enum tree_code ordered_code)
7939 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7940 enum tree_code code;
7941 tree type0, type1;
7942 enum tree_code code0, code1;
7943 tree cmp_type = NULL_TREE;
7945 type0 = TREE_TYPE (arg0);
7946 type1 = TREE_TYPE (arg1);
7948 code0 = TREE_CODE (type0);
7949 code1 = TREE_CODE (type1);
7951 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7952 /* Choose the wider of two real types. */
7953 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7954 ? type0 : type1;
7955 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7956 cmp_type = type0;
7957 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7958 cmp_type = type1;
7960 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7961 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7963 if (unordered_code == UNORDERED_EXPR)
7965 if (!HONOR_NANS (arg0))
7966 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7967 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7970 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7971 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7972 fold_build2_loc (loc, code, type, arg0, arg1));
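/* Editor's sketch, not from the original source: the "negate the opposite
   comparison" scheme above, shown for isgreater.  When NaNs need not be
   honored (e.g. under -ffinite-math-only) the ordered code LE_EXPR is used,
   so the whole call collapses to a negated plain comparison.  */
static int
isgreater_finite_sketch (double x, double y)
{
  /* Valid only under the no-NaNs assumption; with NaNs honored the fold
     emits ! (x UNLE y) instead.  */
  return !(x <= y);
}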
7975 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7976 arithmetic if it can never overflow, or into internal functions that
7977 return both the result of the arithmetic and an overflow flag in
7978 a complex integer result, or some other check for overflow.
7979 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7980 checking part of that. */
7982 static tree
7983 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7984 tree arg0, tree arg1, tree arg2)
7986 enum internal_fn ifn = IFN_LAST;
7987 /* The code of the expression corresponding to the type-generic
7988 built-in, or ERROR_MARK for the type-specific ones. */
7989 enum tree_code opcode = ERROR_MARK;
7990 bool ovf_only = false;
7992 switch (fcode)
7994 case BUILT_IN_ADD_OVERFLOW_P:
7995 ovf_only = true;
7996 /* FALLTHRU */
7997 case BUILT_IN_ADD_OVERFLOW:
7998 opcode = PLUS_EXPR;
7999 /* FALLTHRU */
8000 case BUILT_IN_SADD_OVERFLOW:
8001 case BUILT_IN_SADDL_OVERFLOW:
8002 case BUILT_IN_SADDLL_OVERFLOW:
8003 case BUILT_IN_UADD_OVERFLOW:
8004 case BUILT_IN_UADDL_OVERFLOW:
8005 case BUILT_IN_UADDLL_OVERFLOW:
8006 ifn = IFN_ADD_OVERFLOW;
8007 break;
8008 case BUILT_IN_SUB_OVERFLOW_P:
8009 ovf_only = true;
8010 /* FALLTHRU */
8011 case BUILT_IN_SUB_OVERFLOW:
8012 opcode = MINUS_EXPR;
8013 /* FALLTHRU */
8014 case BUILT_IN_SSUB_OVERFLOW:
8015 case BUILT_IN_SSUBL_OVERFLOW:
8016 case BUILT_IN_SSUBLL_OVERFLOW:
8017 case BUILT_IN_USUB_OVERFLOW:
8018 case BUILT_IN_USUBL_OVERFLOW:
8019 case BUILT_IN_USUBLL_OVERFLOW:
8020 ifn = IFN_SUB_OVERFLOW;
8021 break;
8022 case BUILT_IN_MUL_OVERFLOW_P:
8023 ovf_only = true;
8024 /* FALLTHRU */
8025 case BUILT_IN_MUL_OVERFLOW:
8026 opcode = MULT_EXPR;
8027 /* FALLTHRU */
8028 case BUILT_IN_SMUL_OVERFLOW:
8029 case BUILT_IN_SMULL_OVERFLOW:
8030 case BUILT_IN_SMULLL_OVERFLOW:
8031 case BUILT_IN_UMUL_OVERFLOW:
8032 case BUILT_IN_UMULL_OVERFLOW:
8033 case BUILT_IN_UMULLL_OVERFLOW:
8034 ifn = IFN_MUL_OVERFLOW;
8035 break;
8036 default:
8037 gcc_unreachable ();
8040 /* For the "generic" overloads, the first two arguments can have different
8041 types and the last argument determines the target type to use to check
8042 for overflow. The arguments of the other overloads all have the same
8043 type. */
8044 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8046 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8047 arguments are constant, attempt to fold the built-in call into a constant
8048 expression indicating whether or not it detected an overflow. */
8049 if (ovf_only
8050 && TREE_CODE (arg0) == INTEGER_CST
8051 && TREE_CODE (arg1) == INTEGER_CST)
8052 /* Perform the computation in the target type and check for overflow. */
8053 return omit_one_operand_loc (loc, boolean_type_node,
8054 arith_overflowed_p (opcode, type, arg0, arg1)
8055 ? boolean_true_node : boolean_false_node,
8056 arg2);
8058 tree ctype = build_complex_type (type);
8059 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8060 2, arg0, arg1);
8061 tree tgt = save_expr (call);
8062 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8063 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8064 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8066 if (ovf_only)
8067 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8069 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8070 tree store
8071 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8072 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
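/* Editor's sketch, not from the original source: what the folds above give
   the user.  The internal function returns a complex integer whose real
   part is the wrapped result and whose imaginary part is the overflow flag,
   and __builtin_*_overflow_p with constant operands folds to a constant
   outright.  */
#include <limits.h>
#include <stdbool.h>
static bool
overflow_sketch (int a, int b, int *res)
{
  bool known = __builtin_add_overflow_p (INT_MAX, 1, (int) 0);   /* folds to true */
  bool ovf = __builtin_add_overflow (a, b, res);                 /* wrapped result in *res */
  return known | ovf;
}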
8075 /* Fold a call to __builtin_FILE to a constant string. */
8077 static inline tree
8078 fold_builtin_FILE (location_t loc)
8080 if (const char *fname = LOCATION_FILE (loc))
8081 return build_string_literal (strlen (fname) + 1, fname);
8083 return build_string_literal (1, "");
8086 /* Fold a call to __builtin_FUNCTION to a constant string. */
8088 static inline tree
8089 fold_builtin_FUNCTION ()
8091 if (current_function_decl)
8093 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8094 return build_string_literal (strlen (name) + 1, name);
8097 return build_string_literal (1, "");
8100 /* Fold a call to __builtin_LINE to an integer constant. */
8102 static inline tree
8103 fold_builtin_LINE (location_t loc, tree type)
8105 return build_int_cst (type, LOCATION_LINE (loc));
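/* Editor's sketch, not from the original source: the three location builtins
   above fold directly to constants at the call site.  */
static void
location_sketch (void)
{
  const char *file = __builtin_FILE ();       /* e.g. "builtins.c"       */
  const char *func = __builtin_FUNCTION ();   /* "location_sketch" here  */
  int line = __builtin_LINE ();               /* the integer line number */
  (void) file; (void) func; (void) line;
}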
8108 /* Fold a call to built-in function FNDECL with 0 arguments.
8109 This function returns NULL_TREE if no simplification was possible. */
8111 static tree
8112 fold_builtin_0 (location_t loc, tree fndecl)
8114 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8115 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8116 switch (fcode)
8118 case BUILT_IN_FILE:
8119 return fold_builtin_FILE (loc);
8121 case BUILT_IN_FUNCTION:
8122 return fold_builtin_FUNCTION ();
8124 case BUILT_IN_LINE:
8125 return fold_builtin_LINE (loc, type);
8127 CASE_FLT_FN (BUILT_IN_INF):
8128 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8129 case BUILT_IN_INFD32:
8130 case BUILT_IN_INFD64:
8131 case BUILT_IN_INFD128:
8132 return fold_builtin_inf (loc, type, true);
8134 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8135 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8136 return fold_builtin_inf (loc, type, false);
8138 case BUILT_IN_CLASSIFY_TYPE:
8139 return fold_builtin_classify_type (NULL_TREE);
8141 default:
8142 break;
8144 return NULL_TREE;
8147 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8148 This function returns NULL_TREE if no simplification was possible. */
8150 static tree
8151 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8153 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8154 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8156 if (TREE_CODE (arg0) == ERROR_MARK)
8157 return NULL_TREE;
8159 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8160 return ret;
8162 switch (fcode)
8164 case BUILT_IN_CONSTANT_P:
8166 tree val = fold_builtin_constant_p (arg0);
8168 /* Gimplification will pull the CALL_EXPR for the builtin out of
8169 an if condition. When not optimizing, we'll not CSE it back.
8170 To avoid link-error type regressions, return false now. */
8171 if (!val && !optimize)
8172 val = integer_zero_node;
8174 return val;
8177 case BUILT_IN_CLASSIFY_TYPE:
8178 return fold_builtin_classify_type (arg0);
8180 case BUILT_IN_STRLEN:
8181 return fold_builtin_strlen (loc, type, arg0);
8183 CASE_FLT_FN (BUILT_IN_FABS):
8184 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8185 case BUILT_IN_FABSD32:
8186 case BUILT_IN_FABSD64:
8187 case BUILT_IN_FABSD128:
8188 return fold_builtin_fabs (loc, arg0, type);
8190 case BUILT_IN_ABS:
8191 case BUILT_IN_LABS:
8192 case BUILT_IN_LLABS:
8193 case BUILT_IN_IMAXABS:
8194 return fold_builtin_abs (loc, arg0, type);
8196 CASE_FLT_FN (BUILT_IN_CONJ):
8197 if (validate_arg (arg0, COMPLEX_TYPE)
8198 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8199 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8200 break;
8202 CASE_FLT_FN (BUILT_IN_CREAL):
8203 if (validate_arg (arg0, COMPLEX_TYPE)
8204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8205 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8206 break;
8208 CASE_FLT_FN (BUILT_IN_CIMAG):
8209 if (validate_arg (arg0, COMPLEX_TYPE)
8210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8211 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8212 break;
8214 CASE_FLT_FN (BUILT_IN_CARG):
8215 return fold_builtin_carg (loc, arg0, type);
8217 case BUILT_IN_ISASCII:
8218 return fold_builtin_isascii (loc, arg0);
8220 case BUILT_IN_TOASCII:
8221 return fold_builtin_toascii (loc, arg0);
8223 case BUILT_IN_ISDIGIT:
8224 return fold_builtin_isdigit (loc, arg0);
8226 CASE_FLT_FN (BUILT_IN_FINITE):
8227 case BUILT_IN_FINITED32:
8228 case BUILT_IN_FINITED64:
8229 case BUILT_IN_FINITED128:
8230 case BUILT_IN_ISFINITE:
8232 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8233 if (ret)
8234 return ret;
8235 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8238 CASE_FLT_FN (BUILT_IN_ISINF):
8239 case BUILT_IN_ISINFD32:
8240 case BUILT_IN_ISINFD64:
8241 case BUILT_IN_ISINFD128:
8243 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8244 if (ret)
8245 return ret;
8246 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8249 case BUILT_IN_ISNORMAL:
8250 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8252 case BUILT_IN_ISINF_SIGN:
8253 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8255 CASE_FLT_FN (BUILT_IN_ISNAN):
8256 case BUILT_IN_ISNAND32:
8257 case BUILT_IN_ISNAND64:
8258 case BUILT_IN_ISNAND128:
8259 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8261 case BUILT_IN_FREE:
8262 if (integer_zerop (arg0))
8263 return build_empty_stmt (loc);
8264 break;
8266 default:
8267 break;
8270 return NULL_TREE;
8274 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8275 This function returns NULL_TREE if no simplification was possible. */
8277 static tree
8278 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8280 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8281 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8283 if (TREE_CODE (arg0) == ERROR_MARK
8284 || TREE_CODE (arg1) == ERROR_MARK)
8285 return NULL_TREE;
8287 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8288 return ret;
8290 switch (fcode)
8292 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8293 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8294 if (validate_arg (arg0, REAL_TYPE)
8295 && validate_arg (arg1, POINTER_TYPE))
8296 return do_mpfr_lgamma_r (arg0, arg1, type);
8297 break;
8299 CASE_FLT_FN (BUILT_IN_FREXP):
8300 return fold_builtin_frexp (loc, arg0, arg1, type);
8302 CASE_FLT_FN (BUILT_IN_MODF):
8303 return fold_builtin_modf (loc, arg0, arg1, type);
8305 case BUILT_IN_STRSPN:
8306 return fold_builtin_strspn (loc, arg0, arg1);
8308 case BUILT_IN_STRCSPN:
8309 return fold_builtin_strcspn (loc, arg0, arg1);
8311 case BUILT_IN_STRPBRK:
8312 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8314 case BUILT_IN_EXPECT:
8315 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8317 case BUILT_IN_ISGREATER:
8318 return fold_builtin_unordered_cmp (loc, fndecl,
8319 arg0, arg1, UNLE_EXPR, LE_EXPR);
8320 case BUILT_IN_ISGREATEREQUAL:
8321 return fold_builtin_unordered_cmp (loc, fndecl,
8322 arg0, arg1, UNLT_EXPR, LT_EXPR);
8323 case BUILT_IN_ISLESS:
8324 return fold_builtin_unordered_cmp (loc, fndecl,
8325 arg0, arg1, UNGE_EXPR, GE_EXPR);
8326 case BUILT_IN_ISLESSEQUAL:
8327 return fold_builtin_unordered_cmp (loc, fndecl,
8328 arg0, arg1, UNGT_EXPR, GT_EXPR);
8329 case BUILT_IN_ISLESSGREATER:
8330 return fold_builtin_unordered_cmp (loc, fndecl,
8331 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8332 case BUILT_IN_ISUNORDERED:
8333 return fold_builtin_unordered_cmp (loc, fndecl,
8334 arg0, arg1, UNORDERED_EXPR,
8335 NOP_EXPR);
8337 /* We do the folding for va_start in the expander. */
8338 case BUILT_IN_VA_START:
8339 break;
8341 case BUILT_IN_OBJECT_SIZE:
8342 return fold_builtin_object_size (arg0, arg1);
8344 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8345 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8347 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8348 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8350 default:
8351 break;
8353 return NULL_TREE;
8356 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8357 and ARG2.
8358 This function returns NULL_TREE if no simplification was possible. */
8360 static tree
8361 fold_builtin_3 (location_t loc, tree fndecl,
8362 tree arg0, tree arg1, tree arg2)
8364 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8365 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8367 if (TREE_CODE (arg0) == ERROR_MARK
8368 || TREE_CODE (arg1) == ERROR_MARK
8369 || TREE_CODE (arg2) == ERROR_MARK)
8370 return NULL_TREE;
8372 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8373 arg0, arg1, arg2))
8374 return ret;
8376 switch (fcode)
8379 CASE_FLT_FN (BUILT_IN_SINCOS):
8380 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8382 CASE_FLT_FN (BUILT_IN_FMA):
8383 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8385 CASE_FLT_FN (BUILT_IN_REMQUO):
8386 if (validate_arg (arg0, REAL_TYPE)
8387 && validate_arg (arg1, REAL_TYPE)
8388 && validate_arg (arg2, POINTER_TYPE))
8389 return do_mpfr_remquo (arg0, arg1, arg2);
8390 break;
8392 case BUILT_IN_BCMP:
8393 case BUILT_IN_MEMCMP:
8394 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8396 case BUILT_IN_EXPECT:
8397 return fold_builtin_expect (loc, arg0, arg1, arg2);
8399 case BUILT_IN_ADD_OVERFLOW:
8400 case BUILT_IN_SUB_OVERFLOW:
8401 case BUILT_IN_MUL_OVERFLOW:
8402 case BUILT_IN_ADD_OVERFLOW_P:
8403 case BUILT_IN_SUB_OVERFLOW_P:
8404 case BUILT_IN_MUL_OVERFLOW_P:
8405 case BUILT_IN_SADD_OVERFLOW:
8406 case BUILT_IN_SADDL_OVERFLOW:
8407 case BUILT_IN_SADDLL_OVERFLOW:
8408 case BUILT_IN_SSUB_OVERFLOW:
8409 case BUILT_IN_SSUBL_OVERFLOW:
8410 case BUILT_IN_SSUBLL_OVERFLOW:
8411 case BUILT_IN_SMUL_OVERFLOW:
8412 case BUILT_IN_SMULL_OVERFLOW:
8413 case BUILT_IN_SMULLL_OVERFLOW:
8414 case BUILT_IN_UADD_OVERFLOW:
8415 case BUILT_IN_UADDL_OVERFLOW:
8416 case BUILT_IN_UADDLL_OVERFLOW:
8417 case BUILT_IN_USUB_OVERFLOW:
8418 case BUILT_IN_USUBL_OVERFLOW:
8419 case BUILT_IN_USUBLL_OVERFLOW:
8420 case BUILT_IN_UMUL_OVERFLOW:
8421 case BUILT_IN_UMULL_OVERFLOW:
8422 case BUILT_IN_UMULLL_OVERFLOW:
8423 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8425 default:
8426 break;
8428 return NULL_TREE;
8431 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8432 arguments. IGNORE is true if the result of the
8433 function call is ignored. This function returns NULL_TREE if no
8434 simplification was possible. */
8436 tree
8437 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8439 tree ret = NULL_TREE;
8441 switch (nargs)
8443 case 0:
8444 ret = fold_builtin_0 (loc, fndecl);
8445 break;
8446 case 1:
8447 ret = fold_builtin_1 (loc, fndecl, args[0]);
8448 break;
8449 case 2:
8450 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8451 break;
8452 case 3:
8453 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8454 break;
8455 default:
8456 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8457 break;
8459 if (ret)
8461 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8462 SET_EXPR_LOCATION (ret, loc);
8463 TREE_NO_WARNING (ret) = 1;
8464 return ret;
8466 return NULL_TREE;
8469 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8470 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8471 of arguments in ARGS to be omitted. OLDNARGS is the number of
8472 elements in ARGS. */
8474 static tree
8475 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8476 int skip, tree fndecl, int n, va_list newargs)
8478 int nargs = oldnargs - skip + n;
8479 tree *buffer;
8481 if (n > 0)
8483 int i, j;
8485 buffer = XALLOCAVEC (tree, nargs);
8486 for (i = 0; i < n; i++)
8487 buffer[i] = va_arg (newargs, tree);
8488 for (j = skip; j < oldnargs; j++, i++)
8489 buffer[i] = args[j];
8491 else
8492 buffer = args + skip;
8494 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8497 /* Return true if FNDECL shouldn't be folded right now.
8498 If a built-in function has an inline attribute always_inline
8499 wrapper, defer folding it until after always_inline functions have
8500 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8501 might not be performed. */
8503 bool
8504 avoid_folding_inline_builtin (tree fndecl)
8506 return (DECL_DECLARED_INLINE_P (fndecl)
8507 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8508 && cfun
8509 && !cfun->always_inline_functions_inlined
8510 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8513 /* A wrapper function for builtin folding that prevents warnings for
8514 "statement without effect" and the like, caused by removing the
8515 call node earlier than the warning is generated. */
8517 tree
8518 fold_call_expr (location_t loc, tree exp, bool ignore)
8520 tree ret = NULL_TREE;
8521 tree fndecl = get_callee_fndecl (exp);
8522 if (fndecl
8523 && TREE_CODE (fndecl) == FUNCTION_DECL
8524 && DECL_BUILT_IN (fndecl)
8525 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8526 yet. Defer folding until we see all the arguments
8527 (after inlining). */
8528 && !CALL_EXPR_VA_ARG_PACK (exp))
8530 int nargs = call_expr_nargs (exp);
8532 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8533 instead last argument is __builtin_va_arg_pack (). Defer folding
8534 even in that case, until arguments are finalized. */
8535 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8537 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8538 if (fndecl2
8539 && TREE_CODE (fndecl2) == FUNCTION_DECL
8540 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8541 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8542 return NULL_TREE;
8545 if (avoid_folding_inline_builtin (fndecl))
8546 return NULL_TREE;
8548 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8549 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8550 CALL_EXPR_ARGP (exp), ignore);
8551 else
8553 tree *args = CALL_EXPR_ARGP (exp);
8554 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8555 if (ret)
8556 return ret;
8559 return NULL_TREE;
8562 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8563 N arguments are passed in the array ARGARRAY. Return a folded
8564 expression or NULL_TREE if no simplification was possible. */
8566 tree
8567 fold_builtin_call_array (location_t loc, tree,
8568 tree fn,
8569 int n,
8570 tree *argarray)
8572 if (TREE_CODE (fn) != ADDR_EXPR)
8573 return NULL_TREE;
8575 tree fndecl = TREE_OPERAND (fn, 0);
8576 if (TREE_CODE (fndecl) == FUNCTION_DECL
8577 && DECL_BUILT_IN (fndecl))
8579 /* If last argument is __builtin_va_arg_pack (), arguments to this
8580 function are not finalized yet. Defer folding until they are. */
8581 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8583 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8584 if (fndecl2
8585 && TREE_CODE (fndecl2) == FUNCTION_DECL
8586 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8587 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8588 return NULL_TREE;
8590 if (avoid_folding_inline_builtin (fndecl))
8591 return NULL_TREE;
8592 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8593 return targetm.fold_builtin (fndecl, n, argarray, false);
8594 else
8595 return fold_builtin_n (loc, fndecl, argarray, n, false);
8598 return NULL_TREE;
8601 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8602 along with N new arguments specified as the "..." parameters. SKIP
8603 is the number of arguments in EXP to be omitted. This function is used
8604 to do varargs-to-varargs transformations. */
8606 static tree
8607 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8609 va_list ap;
8610 tree t;
8612 va_start (ap, n);
8613 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8614 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8615 va_end (ap);
8617 return t;
8620 /* Validate a single argument ARG against a tree code CODE representing
8621 a type. */
8623 static bool
8624 validate_arg (const_tree arg, enum tree_code code)
8626 if (!arg)
8627 return false;
8628 else if (code == POINTER_TYPE)
8629 return POINTER_TYPE_P (TREE_TYPE (arg));
8630 else if (code == INTEGER_TYPE)
8631 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8632 return code == TREE_CODE (TREE_TYPE (arg));
8635 /* This function validates the types of a function call argument list
8636 against a specified list of tree_codes. If the last specifier is a 0,
8637 that represents an ellipsis, otherwise the last specifier must be a
8638 VOID_TYPE.
8640 This is the GIMPLE version of validate_arglist. Eventually we want to
8641 completely convert builtins.c to work from GIMPLEs and the tree based
8642 validate_arglist will then be removed. */
8644 bool
8645 validate_gimple_arglist (const gcall *call, ...)
8647 enum tree_code code;
8648 bool res = 0;
8649 va_list ap;
8650 const_tree arg;
8651 size_t i;
8653 va_start (ap, call);
8654 i = 0;
8658 code = (enum tree_code) va_arg (ap, int);
8659 switch (code)
8661 case 0:
8662 /* This signifies an ellipsis; any further arguments are all ok. */
8663 res = true;
8664 goto end;
8665 case VOID_TYPE:
8666 /* This signifies an endlink; if no arguments remain, return
8667 true, otherwise return false. */
8668 res = (i == gimple_call_num_args (call));
8669 goto end;
8670 default:
8671 /* If no parameters remain or the parameter's code does not
8672 match the specified code, return false. Otherwise continue
8673 checking any remaining arguments. */
8674 arg = gimple_call_arg (call, i++);
8675 if (!validate_arg (arg, code))
8676 goto end;
8677 break;
8680 while (1);
8682 /* We need gotos here since we can only have one VA_CLOSE in a
8683 function. */
8684 end: ;
8685 va_end (ap);
8687 return res;
8690 /* Default target-specific builtin expander that does nothing. */
8692 rtx
8693 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8694 rtx target ATTRIBUTE_UNUSED,
8695 rtx subtarget ATTRIBUTE_UNUSED,
8696 machine_mode mode ATTRIBUTE_UNUSED,
8697 int ignore ATTRIBUTE_UNUSED)
8699 return NULL_RTX;
8702 /* Returns true if EXP represents data that would potentially reside
8703 in a readonly section. */
8705 bool
8706 readonly_data_expr (tree exp)
8708 STRIP_NOPS (exp);
8710 if (TREE_CODE (exp) != ADDR_EXPR)
8711 return false;
8713 exp = get_base_address (TREE_OPERAND (exp, 0));
8714 if (!exp)
8715 return false;
8717 /* Make sure we call decl_readonly_section only for trees it
8718 can handle (since it returns true for everything it doesn't
8719 understand). */
8720 if (TREE_CODE (exp) == STRING_CST
8721 || TREE_CODE (exp) == CONSTRUCTOR
8722 || (VAR_P (exp) && TREE_STATIC (exp)))
8723 return decl_readonly_section (exp, 0);
8724 else
8725 return false;
8728 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8729 to the call, and TYPE is its return type.
8731 Return NULL_TREE if no simplification was possible, otherwise return the
8732 simplified form of the call as a tree.
8734 The simplified form may be a constant or other expression which
8735 computes the same value, but in a more efficient manner (including
8736 calls to other builtin functions).
8738 The call may contain arguments which need to be evaluated, but
8739 which are not useful to determine the result of the call. In
8740 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8741 COMPOUND_EXPR will be an argument which must be evaluated.
8742 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8743 COMPOUND_EXPR in the chain will contain the tree for the simplified
8744 form of the builtin function call. */
8746 static tree
8747 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8749 if (!validate_arg (s1, POINTER_TYPE)
8750 || !validate_arg (s2, POINTER_TYPE))
8751 return NULL_TREE;
8752 else
8754 tree fn;
8755 const char *p1, *p2;
8757 p2 = c_getstr (s2);
8758 if (p2 == NULL)
8759 return NULL_TREE;
8761 p1 = c_getstr (s1);
8762 if (p1 != NULL)
8764 const char *r = strpbrk (p1, p2);
8765 tree tem;
8767 if (r == NULL)
8768 return build_int_cst (TREE_TYPE (s1), 0);
8770 /* Return an offset into the constant string argument. */
8771 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8772 return fold_convert_loc (loc, type, tem);
8775 if (p2[0] == '\0')
8776 /* strpbrk(x, "") == NULL.
8777 Evaluate and ignore s1 in case it had side-effects. */
8778 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8780 if (p2[1] != '\0')
8781 return NULL_TREE; /* Really call strpbrk. */
8783 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8784 if (!fn)
8785 return NULL_TREE;
8787 /* New argument list transforming strpbrk(s1, s2) to
8788 strchr(s1, s2[0]). */
8789 return build_call_expr_loc (loc, fn, 2, s1,
8790 build_int_cst (integer_type_node, p2[0]));
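/* Editor's sketch, not from the original source: the three strpbrk folds
   above.  */
#include <string.h>
static char *
strpbrk_sketch (char *s)
{
  char *a = strpbrk ("hello", "lo");   /* both constant: folds to "hello" + 2 */
  char *b = strpbrk (s, "");           /* empty set: folds to NULL            */
  (void) a; (void) b;
  return strpbrk (s, "x");             /* one-char set: becomes strchr (s, 'x') */
}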
8794 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8795 to the call.
8797 Return NULL_TREE if no simplification was possible, otherwise return the
8798 simplified form of the call as a tree.
8800 The simplified form may be a constant or other expression which
8801 computes the same value, but in a more efficient manner (including
8802 calls to other builtin functions).
8804 The call may contain arguments which need to be evaluated, but
8805 which are not useful to determine the result of the call. In
8806 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8807 COMPOUND_EXPR will be an argument which must be evaluated.
8808 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8809 COMPOUND_EXPR in the chain will contain the tree for the simplified
8810 form of the builtin function call. */
8812 static tree
8813 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8815 if (!validate_arg (s1, POINTER_TYPE)
8816 || !validate_arg (s2, POINTER_TYPE))
8817 return NULL_TREE;
8818 else
8820 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8822 /* If either argument is "", the result is zero. */
8823 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8824 /* Evaluate and ignore both arguments in case either one has
8825 side-effects. */
8826 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8827 s1, s2);
8828 return NULL_TREE;
8832 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8833 to the call.
8835 Return NULL_TREE if no simplification was possible, otherwise return the
8836 simplified form of the call as a tree.
8838 The simplified form may be a constant or other expression which
8839 computes the same value, but in a more efficient manner (including
8840 calls to other builtin functions).
8842 The call may contain arguments which need to be evaluated, but
8843 which are not useful to determine the result of the call. In
8844 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8845 COMPOUND_EXPR will be an argument which must be evaluated.
8846 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8847 COMPOUND_EXPR in the chain will contain the tree for the simplified
8848 form of the builtin function call. */
8850 static tree
8851 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8853 if (!validate_arg (s1, POINTER_TYPE)
8854 || !validate_arg (s2, POINTER_TYPE))
8855 return NULL_TREE;
8856 else
8858 /* If the first argument is "", the result is zero. */
8859 const char *p1 = c_getstr (s1);
8860 if (p1 && *p1 == '\0')
8862 /* Evaluate and ignore argument s2 in case it has
8863 side-effects. */
8864 return omit_one_operand_loc (loc, size_type_node,
8865 size_zero_node, s2);
8868 /* If the second argument is "", return __builtin_strlen(s1). */
8869 const char *p2 = c_getstr (s2);
8870 if (p2 && *p2 == '\0')
8872 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8874 /* If the replacement _DECL isn't initialized, don't do the
8875 transformation. */
8876 if (!fn)
8877 return NULL_TREE;
8879 return build_call_expr_loc (loc, fn, 1, s1);
8881 return NULL_TREE;
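/* Editor's sketch, not from the original source: the strspn/strcspn folds
   above.  */
#include <string.h>
#include <stddef.h>
static size_t
span_sketch (const char *s, const char *set)
{
  size_t a = strspn ("", set);    /* empty first string: folds to 0        */
  size_t b = strcspn ("", set);   /* likewise folds to 0                   */
  size_t c = strcspn (s, "");     /* empty reject set: becomes strlen (s)  */
  return a + b + c;
}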
8885 /* Fold the next_arg or va_start call EXP. Returns true if an error was
8886 produced, false otherwise. This is done so that we don't output the error
8887 or warning twice or three times. */
8889 bool
8890 fold_builtin_next_arg (tree exp, bool va_start_p)
8892 tree fntype = TREE_TYPE (current_function_decl);
8893 int nargs = call_expr_nargs (exp);
8894 tree arg;
8895 /* There is a good chance the current input_location points inside the
8896 definition of the va_start macro (perhaps on the token for
8897 builtin) in a system header, so warnings will not be emitted.
8898 Use the location in real source code. */
8899 source_location current_location =
8900 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8901 NULL);
8903 if (!stdarg_p (fntype))
8905 error ("%<va_start%> used in function with fixed args");
8906 return true;
8909 if (va_start_p)
8911 if (va_start_p && (nargs != 2))
8913 error ("wrong number of arguments to function %<va_start%>");
8914 return true;
8916 arg = CALL_EXPR_ARG (exp, 1);
8918 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8919 once we have checked the arguments and, if needed, issued a warning. */
8920 else
8922 if (nargs == 0)
8924 /* Evidently an out of date version of <stdarg.h>; can't validate
8925 va_start's second argument, but can still work as intended. */
8926 warning_at (current_location,
8927 OPT_Wvarargs,
8928 "%<__builtin_next_arg%> called without an argument");
8929 return true;
8931 else if (nargs > 1)
8933 error ("wrong number of arguments to function %<__builtin_next_arg%>");
8934 return true;
8936 arg = CALL_EXPR_ARG (exp, 0);
8939 if (TREE_CODE (arg) == SSA_NAME)
8940 arg = SSA_NAME_VAR (arg);
8942 /* We destructively modify the call to be __builtin_va_start (ap, 0)
8943 or __builtin_next_arg (0) the first time we see it, after checking
8944 the arguments and if needed issuing a warning. */
8945 if (!integer_zerop (arg))
8947 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8949 /* Strip off all nops for the sake of the comparison. This
8950 is not quite the same as STRIP_NOPS. It does more.
8951 We must also strip off INDIRECT_EXPR for C++ reference
8952 parameters. */
8953 while (CONVERT_EXPR_P (arg)
8954 || TREE_CODE (arg) == INDIRECT_REF)
8955 arg = TREE_OPERAND (arg, 0);
8956 if (arg != last_parm)
8958 /* FIXME: Sometimes with the tree optimizers we can end up with
8959 something other than the last argument even though the user used the
8960 last argument. We just warn and set the arg to be the last
8961 argument so that we will not get wrong code because of
8962 it. */
8963 warning_at (current_location,
8964 OPT_Wvarargs,
8965 "second parameter of %<va_start%> not last named argument");
8968 /* Undefined by C99 7.15.1.4p4 (va_start):
8969 "If the parameter parmN is declared with the register storage
8970 class, with a function or array type, or with a type that is
8971 not compatible with the type that results after application of
8972 the default argument promotions, the behavior is undefined."
8974 else if (DECL_REGISTER (arg))
8976 warning_at (current_location,
8977 OPT_Wvarargs,
8978 "undefined behavior when second parameter of "
8979 "%<va_start%> is declared with %<register%> storage");
8982 /* We want to verify the second parameter just once before the tree
8983 optimizers are run and then avoid keeping it in the tree,
8984 as otherwise we could warn even for correct code like:
8985 void foo (int i, ...)
8986 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
8987 if (va_start_p)
8988 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
8989 else
8990 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
8992 return false;
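/* Illustrative (hypothetical) functions exercising the checks above:

     void fixed (int a, int b)                    // no "..." in the prototype
     { __builtin_va_list ap; __builtin_va_start (ap, b); }
     // error: va_start used in function with fixed args

     void notlast (int a, int b, ...)
     { __builtin_va_list ap; __builtin_va_start (ap, a); }
     // -Wvarargs: second parameter of va_start not last named argument

     void ok (int i, ...)
     { __builtin_va_list ap; __builtin_va_start (ap, i); __builtin_va_end (ap); }

   After this single check the checked operand is replaced with 0, so later
   transformations of I (e.g. "i++") cannot retrigger the warning.  */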
8996 /* Expand a call EXP to __builtin_object_size. */
8998 static rtx
8999 expand_builtin_object_size (tree exp)
9001 tree ost;
9002 int object_size_type;
9003 tree fndecl = get_callee_fndecl (exp);
9005 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9007 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9008 exp, fndecl);
9009 expand_builtin_trap ();
9010 return const0_rtx;
9013 ost = CALL_EXPR_ARG (exp, 1);
9014 STRIP_NOPS (ost);
9016 if (TREE_CODE (ost) != INTEGER_CST
9017 || tree_int_cst_sgn (ost) < 0
9018 || compare_tree_int (ost, 3) > 0)
9020 error ("%Klast argument of %D is not integer constant between 0 and 3",
9021 exp, fndecl);
9022 expand_builtin_trap ();
9023 return const0_rtx;
9026 object_size_type = tree_to_shwi (ost);
9028 return object_size_type < 2 ? constm1_rtx : const0_rtx;
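/* If a __builtin_object_size call survives to expansion, earlier passes
   could not compute a size, so the result is the "unknown" value.  A sketch:

     #include <stddef.h>

     size_t g0 (void *p) { return __builtin_object_size (p, 0); }  // (size_t) -1
     size_t g2 (void *p) { return __builtin_object_size (p, 2); }  // (size_t) 0

   A non-constant or out-of-range second argument is diagnosed above and the
   call expands to a trap.  */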
9031 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9032 FCODE is the BUILT_IN_* to use.
9033 Return NULL_RTX if we failed; the caller should emit a normal call,
9034 otherwise try to get the result in TARGET, if convenient (and in
9035 mode MODE if that's convenient). */
9037 static rtx
9038 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9039 enum built_in_function fcode)
9041 tree dest, src, len, size;
9043 if (!validate_arglist (exp,
9044 POINTER_TYPE,
9045 fcode == BUILT_IN_MEMSET_CHK
9046 ? INTEGER_TYPE : POINTER_TYPE,
9047 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9048 return NULL_RTX;
9050 dest = CALL_EXPR_ARG (exp, 0);
9051 src = CALL_EXPR_ARG (exp, 1);
9052 len = CALL_EXPR_ARG (exp, 2);
9053 size = CALL_EXPR_ARG (exp, 3);
9055 if (! tree_fits_uhwi_p (size))
9056 return NULL_RTX;
9058 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9060 tree fn;
9062 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9064 warning_at (tree_nonartificial_location (exp),
9065 0, "%Kcall to %D will always overflow destination buffer",
9066 exp, get_callee_fndecl (exp));
9067 return NULL_RTX;
9070 fn = NULL_TREE;
9071 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9072 mem{cpy,pcpy,move,set} is available. */
9073 switch (fcode)
9075 case BUILT_IN_MEMCPY_CHK:
9076 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9077 break;
9078 case BUILT_IN_MEMPCPY_CHK:
9079 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9080 break;
9081 case BUILT_IN_MEMMOVE_CHK:
9082 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9083 break;
9084 case BUILT_IN_MEMSET_CHK:
9085 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9086 break;
9087 default:
9088 break;
9091 if (! fn)
9092 return NULL_RTX;
9094 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9095 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9096 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9097 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9099 else if (fcode == BUILT_IN_MEMSET_CHK)
9100 return NULL_RTX;
9101 else
9103 unsigned int dest_align = get_pointer_alignment (dest);
9105 /* If DEST is not a pointer type, call the normal function. */
9106 if (dest_align == 0)
9107 return NULL_RTX;
9109 /* If SRC and DEST are the same (and not volatile), do nothing. */
9110 if (operand_equal_p (src, dest, 0))
9112 tree expr;
9114 if (fcode != BUILT_IN_MEMPCPY_CHK)
9116 /* Evaluate and ignore LEN in case it has side-effects. */
9117 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9118 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9121 expr = fold_build_pointer_plus (dest, len);
9122 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9125 /* __memmove_chk special case. */
9126 if (fcode == BUILT_IN_MEMMOVE_CHK)
9128 unsigned int src_align = get_pointer_alignment (src);
9130 if (src_align == 0)
9131 return NULL_RTX;
9133 /* If src is categorized for a readonly section we can use
9134 normal __memcpy_chk. */
9135 if (readonly_data_expr (src))
9137 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9138 if (!fn)
9139 return NULL_RTX;
9140 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9141 dest, src, len, size);
9142 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9143 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9144 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9147 return NULL_RTX;
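/* Source-level sketch of the cases handled by expand_builtin_memory_chk,
   using the GNU fortified spelling __builtin___memcpy_chk (an assumption;
   the other _chk variants behave alike here):

     char buf[8];

     void copy4 (const char *src)
     {
       // Known length <= known size: rewritten to a plain memcpy call.
       __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
     }

     void copy16 (const char *src)
     {
       // Known length > known size: warned as a certain overflow and the
       // checked library call is emitted unchanged.
       __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
     }
*/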
9151 /* Emit warning if a buffer overflow is detected at compile time. */
9153 static void
9154 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9156 int is_strlen = 0;
9157 tree len, size;
9158 location_t loc = tree_nonartificial_location (exp);
9160 switch (fcode)
9162 case BUILT_IN_STRCPY_CHK:
9163 case BUILT_IN_STPCPY_CHK:
9164 /* For __strcat_chk the warning will be emitted only if overflowing
9165 by at least strlen (dest) + 1 bytes. */
9166 case BUILT_IN_STRCAT_CHK:
9167 len = CALL_EXPR_ARG (exp, 1);
9168 size = CALL_EXPR_ARG (exp, 2);
9169 is_strlen = 1;
9170 break;
9171 case BUILT_IN_STRNCAT_CHK:
9172 case BUILT_IN_STRNCPY_CHK:
9173 case BUILT_IN_STPNCPY_CHK:
9174 len = CALL_EXPR_ARG (exp, 2);
9175 size = CALL_EXPR_ARG (exp, 3);
9176 break;
9177 case BUILT_IN_SNPRINTF_CHK:
9178 case BUILT_IN_VSNPRINTF_CHK:
9179 len = CALL_EXPR_ARG (exp, 1);
9180 size = CALL_EXPR_ARG (exp, 3);
9181 break;
9182 default:
9183 gcc_unreachable ();
9186 if (!len || !size)
9187 return;
9189 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9190 return;
9192 if (is_strlen)
9194 len = c_strlen (len, 1);
9195 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9196 return;
9198 else if (fcode == BUILT_IN_STRNCAT_CHK)
9200 tree src = CALL_EXPR_ARG (exp, 1);
9201 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9202 return;
9203 src = c_strlen (src, 1);
9204 if (! src || ! tree_fits_uhwi_p (src))
9206 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9207 exp, get_callee_fndecl (exp));
9208 return;
9210 else if (tree_int_cst_lt (src, size))
9211 return;
9213 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9214 return;
9216 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9217 exp, get_callee_fndecl (exp));
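/* Example diagnosed by maybe_emit_chk_warning (a sketch; __strcpy_chk is the
   glibc fortification entry point):

     char dst[4];

     void f (void)
     {
       __builtin___strcpy_chk (dst, "too long", __builtin_object_size (dst, 0));
     }

   The source length (8) and the destination size (4, not (size_t) -1) are
   both known, so "call to ... will always overflow destination buffer" is
   reported at the location of the user's call.  */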
9220 /* Emit warning if a buffer overflow is detected at compile time
9221 in __sprintf_chk/__vsprintf_chk calls. */
9223 static void
9224 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9226 tree size, len, fmt;
9227 const char *fmt_str;
9228 int nargs = call_expr_nargs (exp);
9230 /* Verify the required arguments in the original call. */
9232 if (nargs < 4)
9233 return;
9234 size = CALL_EXPR_ARG (exp, 2);
9235 fmt = CALL_EXPR_ARG (exp, 3);
9237 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9238 return;
9240 /* Check whether the format is a literal string constant. */
9241 fmt_str = c_getstr (fmt);
9242 if (fmt_str == NULL)
9243 return;
9245 if (!init_target_chars ())
9246 return;
9248 /* If the format doesn't contain % args or %%, we know its size. */
9249 if (strchr (fmt_str, target_percent) == 0)
9250 len = build_int_cstu (size_type_node, strlen (fmt_str));
9251 /* If the format is "%s" and the first ... argument is a string literal,
9252 we know its size too. */
9253 else if (fcode == BUILT_IN_SPRINTF_CHK
9254 && strcmp (fmt_str, target_percent_s) == 0)
9256 tree arg;
9258 if (nargs < 5)
9259 return;
9260 arg = CALL_EXPR_ARG (exp, 4);
9261 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9262 return;
9264 len = c_strlen (arg, 1);
9265 if (!len || ! tree_fits_uhwi_p (len))
9266 return;
9268 else
9269 return;
9271 if (! tree_int_cst_lt (len, size))
9272 warning_at (tree_nonartificial_location (exp),
9273 0, "%Kcall to %D will always overflow destination buffer",
9274 exp, get_callee_fndecl (exp));
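/* Example diagnosed by maybe_emit_sprintf_chk_warning (a sketch):

     char dst[4];

     void f (void)
     {
       __builtin___sprintf_chk (dst, 0, __builtin_object_size (dst, 0),
                                "%s", "too long");
     }

   The format is a literal "%s" and the first variadic argument is a string
   literal, so the output length (8) is known and compared with the object
   size (4).  Formats containing other % directives are left alone.  */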
9277 /* Emit warning if a free is called with address of a variable. */
9279 static void
9280 maybe_emit_free_warning (tree exp)
9282 tree arg = CALL_EXPR_ARG (exp, 0);
9284 STRIP_NOPS (arg);
9285 if (TREE_CODE (arg) != ADDR_EXPR)
9286 return;
9288 arg = get_base_address (TREE_OPERAND (arg, 0));
9289 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9290 return;
9292 if (SSA_VAR_P (arg))
9293 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9294 "%Kattempt to free a non-heap object %qD", exp, arg);
9295 else
9296 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9297 "%Kattempt to free a non-heap object", exp);
9300 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9301 if possible. */
9303 static tree
9304 fold_builtin_object_size (tree ptr, tree ost)
9306 unsigned HOST_WIDE_INT bytes;
9307 int object_size_type;
9309 if (!validate_arg (ptr, POINTER_TYPE)
9310 || !validate_arg (ost, INTEGER_TYPE))
9311 return NULL_TREE;
9313 STRIP_NOPS (ost);
9315 if (TREE_CODE (ost) != INTEGER_CST
9316 || tree_int_cst_sgn (ost) < 0
9317 || compare_tree_int (ost, 3) > 0)
9318 return NULL_TREE;
9320 object_size_type = tree_to_shwi (ost);
9322 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9323 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9324 and (size_t) 0 for types 2 and 3. */
9325 if (TREE_SIDE_EFFECTS (ptr))
9326 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9328 if (TREE_CODE (ptr) == ADDR_EXPR)
9330 compute_builtin_object_size (ptr, object_size_type, &bytes);
9331 if (wi::fits_to_tree_p (bytes, size_type_node))
9332 return build_int_cstu (size_type_node, bytes);
9334 else if (TREE_CODE (ptr) == SSA_NAME)
9336 /* If the object size is not known yet, delay folding until
9337 later. Maybe subsequent passes will help determine
9338 it. */
9339 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9340 && wi::fits_to_tree_p (bytes, size_type_node))
9341 return build_int_cstu (size_type_node, bytes);
9344 return NULL_TREE;
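/* Source-level sketch of the folding above:

     #include <stddef.h>

     extern void *get (void);
     char buf[32];

     size_t k1 (void) { return __builtin_object_size (&buf, 0); }    // folds to 32
     size_t k2 (void) { return __builtin_object_size (get (), 0); }  // (size_t) -1: side effects

   SSA_NAME pointers whose size is not known yet are deliberately left
   unfolded so that later object-size passes can still resolve them.  */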
9347 /* Builtins with folding operations that operate on "..." arguments
9348 need special handling; we need to store the arguments in a convenient
9349 data structure before attempting any folding. Fortunately there are
9350 only a few builtins that fall into this category. FNDECL is the
9351 function, ARGS is the array of NARGS arguments from the call. */
9353 static tree
9354 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9357 tree ret = NULL_TREE;
9359 switch (fcode)
9361 case BUILT_IN_FPCLASSIFY:
9362 ret = fold_builtin_fpclassify (loc, args, nargs);
9363 break;
9365 default:
9366 break;
9368 if (ret)
9370 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9371 SET_EXPR_LOCATION (ret, loc);
9372 TREE_NO_WARNING (ret) = 1;
9373 return ret;
9375 return NULL_TREE;
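/* __builtin_fpclassify is the only varargs builtin folded here.  A sketch
   using glibc's FP_* values (an assumption; the classification constants are
   whatever <math.h> passes as the first five arguments):

     int k (void)
     {
       // FP_NAN=0, FP_INFINITE=1, FP_NORMAL=4, FP_SUBNORMAL=3, FP_ZERO=2
       return __builtin_fpclassify (0, 1, 4, 3, 2, 1.0);   // folds to 4
     }

   The NOP_EXPR wrapper added above keeps the folded constant from triggering
   spurious warnings at the original call site.  */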
9378 /* Initialize format string characters in the target charset. */
9380 bool
9381 init_target_chars (void)
9383 static bool init;
9384 if (!init)
9386 target_newline = lang_hooks.to_target_charset ('\n');
9387 target_percent = lang_hooks.to_target_charset ('%');
9388 target_c = lang_hooks.to_target_charset ('c');
9389 target_s = lang_hooks.to_target_charset ('s');
9390 if (target_newline == 0 || target_percent == 0 || target_c == 0
9391 || target_s == 0)
9392 return false;
9394 target_percent_c[0] = target_percent;
9395 target_percent_c[1] = target_c;
9396 target_percent_c[2] = '\0';
9398 target_percent_s[0] = target_percent;
9399 target_percent_s[1] = target_s;
9400 target_percent_s[2] = '\0';
9402 target_percent_s_newline[0] = target_percent;
9403 target_percent_s_newline[1] = target_s;
9404 target_percent_s_newline[2] = target_newline;
9405 target_percent_s_newline[3] = '\0';
9407 init = true;
9409 return true;
9412 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9413 and no overflow/underflow occurred. INEXACT is true if M was not
9414 exactly calculated. TYPE is the tree type for the result. This
9415 function assumes that the MPFR flags were cleared and M was then
9416 calculated before entry, so that any flag set in the meantime can
9417 be checked here. Return NULL_TREE if any checks fail. */
9419 static tree
9420 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9422 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9423 overflow/underflow occurred. If -frounding-math, proceed iff the
9424 result of calling FUNC was exact. */
9425 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9426 && (!flag_rounding_math || !inexact))
9428 REAL_VALUE_TYPE rr;
9430 real_from_mpfr (&rr, m, type, GMP_RNDN);
9431 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
9432 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9433 but the mpfr_t is not, then we underflowed in the
9434 conversion. */
9435 if (real_isfinite (&rr)
9436 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9438 REAL_VALUE_TYPE rmode;
9440 real_convert (&rmode, TYPE_MODE (type), &rr);
9441 /* Proceed iff the specified mode can hold the value. */
9442 if (real_identical (&rmode, &rr))
9443 return build_real (type, rmode);
9446 return NULL_TREE;
9449 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9450 number and no overflow/underflow occurred. INEXACT is true if M
9451 was not exactly calculated. TYPE is the tree type for the result.
9452 This function assumes that the MPFR flags were cleared and M was
9453 then calculated before entry, so that any flag set in the meantime
9454 can be checked here. Return NULL_TREE if any checks fail; if
9455 FORCE_CONVERT is true, bypass the checks. */
9457 static tree
9458 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9460 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9461 overflow/underflow occurred. If -frounding-math, proceed iff the
9462 result of calling FUNC was exact. */
9463 if (force_convert
9464 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9465 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9466 && (!flag_rounding_math || !inexact)))
9468 REAL_VALUE_TYPE re, im;
9470 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9471 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9472 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
9473 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9474 but the mpfr_t is not, then we underflowed in the
9475 conversion. */
9476 if (force_convert
9477 || (real_isfinite (&re) && real_isfinite (&im)
9478 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9479 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9481 REAL_VALUE_TYPE re_mode, im_mode;
9483 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9484 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9485 /* Proceed iff the specified mode can hold the value. */
9486 if (force_convert
9487 || (real_identical (&re_mode, &re)
9488 && real_identical (&im_mode, &im)))
9489 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9490 build_real (TREE_TYPE (type), im_mode));
9493 return NULL_TREE;
9496 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9497 the value pointed to by ARG_QUO and return the result. The type is taken
9498 from the type of ARG0 and is used for setting the precision of the
9499 calculation and results. */
9501 static tree
9502 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9504 tree const type = TREE_TYPE (arg0);
9505 tree result = NULL_TREE;
9507 STRIP_NOPS (arg0);
9508 STRIP_NOPS (arg1);
9510 /* To proceed, MPFR must exactly represent the target floating point
9511 format, which only happens when the target base equals two. */
9512 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9513 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9514 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9516 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9517 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9519 if (real_isfinite (ra0) && real_isfinite (ra1))
9521 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9522 const int prec = fmt->p;
9523 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9524 tree result_rem;
9525 long integer_quo;
9526 mpfr_t m0, m1;
9528 mpfr_inits2 (prec, m0, m1, NULL);
9529 mpfr_from_real (m0, ra0, GMP_RNDN);
9530 mpfr_from_real (m1, ra1, GMP_RNDN);
9531 mpfr_clear_flags ();
9532 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9533 /* Remquo is independent of the rounding mode, so pass
9534 inexact=0 to do_mpfr_ckconv(). */
9535 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9536 mpfr_clears (m0, m1, NULL);
9537 if (result_rem)
9539 /* MPFR calculates quo in the host's long so it may
9540 return more bits in quo than the target int can hold
9541 if sizeof(host long) > sizeof(target int). This can
9542 happen even for native compilers in LP64 mode. In
9543 these cases, reduce the quo value modulo the largest
9544 number that the target int can hold while leaving one
9545 bit for the sign. */
9546 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9547 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9549 /* Dereference the quo pointer argument. */
9550 arg_quo = build_fold_indirect_ref (arg_quo);
9551 /* Proceed iff a valid pointer type was passed in. */
9552 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9554 /* Set the value. */
9555 tree result_quo
9556 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9557 build_int_cst (TREE_TYPE (arg_quo),
9558 integer_quo));
9559 TREE_SIDE_EFFECTS (result_quo) = 1;
9560 /* Combine the quo assignment with the rem. */
9561 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9562 result_quo, result_rem));
9567 return result;
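/* Source-level sketch of the remquo folding above:

     #include <math.h>

     double r (int *q)
     {
       return remquo (10.0, 3.0, q);   // may fold to 1.0 with *q set to 3
     }

   Both arguments must be REAL_CSTs and the target format must use base two;
   the quotient is reduced modulo 2^(INT_TYPE_SIZE - 1) because MPFR computes
   it in a host 'long'.  */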
9570 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9571 resulting value as a tree with type TYPE. The mpfr precision is
9572 set to the precision of TYPE. We assume that this mpfr function
9573 returns zero if the result could be calculated exactly within the
9574 requested precision. In addition, the integer pointer represented
9575 by ARG_SG will be dereferenced and set to the appropriate signgam
9576 (-1,1) value. */
9578 static tree
9579 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9581 tree result = NULL_TREE;
9583 STRIP_NOPS (arg);
9585 /* To proceed, MPFR must exactly represent the target floating point
9586 format, which only happens when the target base equals two. Also
9587 verify ARG is a constant and that ARG_SG is an int pointer. */
9588 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9589 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9590 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9591 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9593 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9595 /* In addition to NaN and Inf, the argument cannot be zero or a
9596 negative integer. */
9597 if (real_isfinite (ra)
9598 && ra->cl != rvc_zero
9599 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9601 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9602 const int prec = fmt->p;
9603 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9604 int inexact, sg;
9605 mpfr_t m;
9606 tree result_lg;
9608 mpfr_init2 (m, prec);
9609 mpfr_from_real (m, ra, GMP_RNDN);
9610 mpfr_clear_flags ();
9611 inexact = mpfr_lgamma (m, &sg, m, rnd);
9612 result_lg = do_mpfr_ckconv (m, type, inexact);
9613 mpfr_clear (m);
9614 if (result_lg)
9616 tree result_sg;
9618 /* Dereference the arg_sg pointer argument. */
9619 arg_sg = build_fold_indirect_ref (arg_sg);
9620 /* Assign the signgam value into *arg_sg. */
9621 result_sg = fold_build2 (MODIFY_EXPR,
9622 TREE_TYPE (arg_sg), arg_sg,
9623 build_int_cst (TREE_TYPE (arg_sg), sg));
9624 TREE_SIDE_EFFECTS (result_sg) = 1;
9625 /* Combine the signgam assignment with the lgamma result. */
9626 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9627 result_sg, result_lg));
9632 return result;
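/* Source-level sketch of the lgamma_r folding above (lgamma_r is the POSIX
   reentrant variant that returns signgam through the pointer):

     #include <math.h>

     double lg (int *sg)
     {
       return lgamma_r (0.5, sg);   // may fold to ~0.5724 (log of sqrt(pi)), *sg = 1
     }

   Zero and negative-integer arguments are rejected above because lgamma has
   poles there.  */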
9635 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9636 mpc function FUNC on it and return the resulting value as a tree
9637 with type TYPE. The mpfr precision is set to the precision of
9638 TYPE. We assume that function FUNC returns zero if the result
9639 could be calculated exactly within the requested precision. If
9640 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9641 in the arguments and/or results. */
9643 tree
9644 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9645 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9647 tree result = NULL_TREE;
9649 STRIP_NOPS (arg0);
9650 STRIP_NOPS (arg1);
9652 /* To proceed, MPFR must exactly represent the target floating point
9653 format, which only happens when the target base equals two. */
9654 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9655 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9656 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9657 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9658 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9660 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9661 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9662 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9663 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9665 if (do_nonfinite
9666 || (real_isfinite (re0) && real_isfinite (im0)
9667 && real_isfinite (re1) && real_isfinite (im1)))
9669 const struct real_format *const fmt =
9670 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9671 const int prec = fmt->p;
9672 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9673 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9674 int inexact;
9675 mpc_t m0, m1;
9677 mpc_init2 (m0, prec);
9678 mpc_init2 (m1, prec);
9679 mpfr_from_real (mpc_realref (m0), re0, rnd);
9680 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9681 mpfr_from_real (mpc_realref (m1), re1, rnd);
9682 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9683 mpfr_clear_flags ();
9684 inexact = func (m0, m0, m1, crnd);
9685 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9686 mpc_clear (m0);
9687 mpc_clear (m1);
9691 return result;
9694 /* A wrapper function for builtin folding that prevents warnings for
9695 "statement without effect" and the like, caused by removing the
9696 call node before the warning is generated. */
9698 tree
9699 fold_call_stmt (gcall *stmt, bool ignore)
9701 tree ret = NULL_TREE;
9702 tree fndecl = gimple_call_fndecl (stmt);
9703 location_t loc = gimple_location (stmt);
9704 if (fndecl
9705 && TREE_CODE (fndecl) == FUNCTION_DECL
9706 && DECL_BUILT_IN (fndecl)
9707 && !gimple_call_va_arg_pack_p (stmt))
9709 int nargs = gimple_call_num_args (stmt);
9710 tree *args = (nargs > 0
9711 ? gimple_call_arg_ptr (stmt, 0)
9712 : &error_mark_node);
9714 if (avoid_folding_inline_builtin (fndecl))
9715 return NULL_TREE;
9716 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9718 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9720 else
9722 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9723 if (ret)
9725 /* Propagate location information from original call to
9726 expansion of builtin. Otherwise things like
9727 maybe_emit_chk_warning, that operate on the expansion
9728 of a builtin, will use the wrong location information. */
9729 if (gimple_has_location (stmt))
9731 tree realret = ret;
9732 if (TREE_CODE (ret) == NOP_EXPR)
9733 realret = TREE_OPERAND (ret, 0);
9734 if (CAN_HAVE_LOCATION_P (realret)
9735 && !EXPR_HAS_LOCATION (realret))
9736 SET_EXPR_LOCATION (realret, loc);
9737 return realret;
9739 return ret;
9743 return NULL_TREE;
9746 /* Look up the function in builtin_decl that corresponds to DECL
9747 and set ASMSPEC as its user assembler name. DECL must be a
9748 function decl that declares a builtin. */
9750 void
9751 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9753 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9754 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9755 && asmspec != 0);
9757 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9758 set_user_assembler_name (builtin, asmspec);
9760 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9761 && INT_TYPE_SIZE < BITS_PER_WORD)
9763 set_user_assembler_libfunc ("ffs", asmspec);
9764 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9765 "ffs");
9769 /* Return true if DECL is a builtin that expands to a constant or similarly
9770 simple code. */
9771 bool
9772 is_simple_builtin (tree decl)
9774 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9775 switch (DECL_FUNCTION_CODE (decl))
9777 /* Builtins that expand to constants. */
9778 case BUILT_IN_CONSTANT_P:
9779 case BUILT_IN_EXPECT:
9780 case BUILT_IN_OBJECT_SIZE:
9781 case BUILT_IN_UNREACHABLE:
9782 /* Simple register moves or loads from stack. */
9783 case BUILT_IN_ASSUME_ALIGNED:
9784 case BUILT_IN_RETURN_ADDRESS:
9785 case BUILT_IN_EXTRACT_RETURN_ADDR:
9786 case BUILT_IN_FROB_RETURN_ADDR:
9787 case BUILT_IN_RETURN:
9788 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9789 case BUILT_IN_FRAME_ADDRESS:
9790 case BUILT_IN_VA_END:
9791 case BUILT_IN_STACK_SAVE:
9792 case BUILT_IN_STACK_RESTORE:
9793 /* Exception state returns or moves registers around. */
9794 case BUILT_IN_EH_FILTER:
9795 case BUILT_IN_EH_POINTER:
9796 case BUILT_IN_EH_COPY_VALUES:
9797 return true;
9799 default:
9800 return false;
9803 return false;
9806 /* Return true if DECL is a builtin that is not expensive, i.e., one that
9807 will most probably be expanded inline into reasonably simple code. This
9808 is a superset of is_simple_builtin. */
9809 bool
9810 is_inexpensive_builtin (tree decl)
9812 if (!decl)
9813 return false;
9814 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9815 return true;
9816 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9817 switch (DECL_FUNCTION_CODE (decl))
9819 case BUILT_IN_ABS:
9820 case BUILT_IN_ALLOCA:
9821 case BUILT_IN_ALLOCA_WITH_ALIGN:
9822 case BUILT_IN_BSWAP16:
9823 case BUILT_IN_BSWAP32:
9824 case BUILT_IN_BSWAP64:
9825 case BUILT_IN_CLZ:
9826 case BUILT_IN_CLZIMAX:
9827 case BUILT_IN_CLZL:
9828 case BUILT_IN_CLZLL:
9829 case BUILT_IN_CTZ:
9830 case BUILT_IN_CTZIMAX:
9831 case BUILT_IN_CTZL:
9832 case BUILT_IN_CTZLL:
9833 case BUILT_IN_FFS:
9834 case BUILT_IN_FFSIMAX:
9835 case BUILT_IN_FFSL:
9836 case BUILT_IN_FFSLL:
9837 case BUILT_IN_IMAXABS:
9838 case BUILT_IN_FINITE:
9839 case BUILT_IN_FINITEF:
9840 case BUILT_IN_FINITEL:
9841 case BUILT_IN_FINITED32:
9842 case BUILT_IN_FINITED64:
9843 case BUILT_IN_FINITED128:
9844 case BUILT_IN_FPCLASSIFY:
9845 case BUILT_IN_ISFINITE:
9846 case BUILT_IN_ISINF_SIGN:
9847 case BUILT_IN_ISINF:
9848 case BUILT_IN_ISINFF:
9849 case BUILT_IN_ISINFL:
9850 case BUILT_IN_ISINFD32:
9851 case BUILT_IN_ISINFD64:
9852 case BUILT_IN_ISINFD128:
9853 case BUILT_IN_ISNAN:
9854 case BUILT_IN_ISNANF:
9855 case BUILT_IN_ISNANL:
9856 case BUILT_IN_ISNAND32:
9857 case BUILT_IN_ISNAND64:
9858 case BUILT_IN_ISNAND128:
9859 case BUILT_IN_ISNORMAL:
9860 case BUILT_IN_ISGREATER:
9861 case BUILT_IN_ISGREATEREQUAL:
9862 case BUILT_IN_ISLESS:
9863 case BUILT_IN_ISLESSEQUAL:
9864 case BUILT_IN_ISLESSGREATER:
9865 case BUILT_IN_ISUNORDERED:
9866 case BUILT_IN_VA_ARG_PACK:
9867 case BUILT_IN_VA_ARG_PACK_LEN:
9868 case BUILT_IN_VA_COPY:
9869 case BUILT_IN_TRAP:
9870 case BUILT_IN_SAVEREGS:
9871 case BUILT_IN_POPCOUNTL:
9872 case BUILT_IN_POPCOUNTLL:
9873 case BUILT_IN_POPCOUNTIMAX:
9874 case BUILT_IN_POPCOUNT:
9875 case BUILT_IN_PARITYL:
9876 case BUILT_IN_PARITYLL:
9877 case BUILT_IN_PARITYIMAX:
9878 case BUILT_IN_PARITY:
9879 case BUILT_IN_LABS:
9880 case BUILT_IN_LLABS:
9881 case BUILT_IN_PREFETCH:
9882 case BUILT_IN_ACC_ON_DEVICE:
9883 return true;
9885 default:
9886 return is_simple_builtin (decl);
9889 return false;
9892 /* Return true if T is a constant and the value cast to a target char
9893 can be represented by a host char.
9894 Store the cast char constant in *P if so. */
9896 bool
9897 target_char_cst_p (tree t, char *p)
9899 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
9900 return false;
9902 *p = (char)tree_to_uhwi (t);
9903 return true;