* g++.dg/debug/dwarf2/ref-3.C: XFAIL AIX.
[official-gcc.git] / gcc / builtins.c
blobcc711a0f8430de41f77d1ff2b10abc9560a1025a
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
84 #include "builtins.def"
87 /* Setup an array of builtin_info_type, make sure each element decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
152 static tree fold_builtin_isascii (location_t, tree);
153 static tree fold_builtin_toascii (location_t, tree);
154 static tree fold_builtin_isdigit (location_t, tree);
155 static tree fold_builtin_fabs (location_t, tree, tree);
156 static tree fold_builtin_abs (location_t, tree, tree);
157 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
158 enum tree_code);
159 static tree fold_builtin_0 (location_t, tree);
160 static tree fold_builtin_1 (location_t, tree, tree);
161 static tree fold_builtin_2 (location_t, tree, tree, tree);
162 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_varargs (location_t, tree, tree*, int);
165 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
166 static tree fold_builtin_strstr (location_t, tree, tree, tree);
167 static tree fold_builtin_strspn (location_t, tree, tree);
168 static tree fold_builtin_strcspn (location_t, tree, tree);
170 static rtx expand_builtin_object_size (tree);
171 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
172 enum built_in_function);
173 static void maybe_emit_chk_warning (tree, enum built_in_function);
174 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
175 static void maybe_emit_free_warning (tree);
176 static tree fold_builtin_object_size (tree, tree);
178 unsigned HOST_WIDE_INT target_newline;
179 unsigned HOST_WIDE_INT target_percent;
180 static unsigned HOST_WIDE_INT target_c;
181 static unsigned HOST_WIDE_INT target_s;
182 char target_percent_c[3];
183 char target_percent_s[3];
184 char target_percent_s_newline[4];
185 static tree do_mpfr_remquo (tree, tree, tree);
186 static tree do_mpfr_lgamma_r (tree, tree, tree);
187 static void expand_builtin_sync_synchronize (void);
189 /* Return true if NAME starts with __builtin_ or __sync_. */
191 static bool
192 is_builtin_name (const char *name)
194 if (strncmp (name, "__builtin_", 10) == 0)
195 return true;
196 if (strncmp (name, "__sync_", 7) == 0)
197 return true;
198 if (strncmp (name, "__atomic_", 9) == 0)
199 return true;
200 if (flag_cilkplus
201 && (!strcmp (name, "__cilkrts_detach")
202 || !strcmp (name, "__cilkrts_pop_frame")))
203 return true;
204 return false;
208 /* Return true if DECL is a function symbol representing a built-in. */
210 bool
211 is_builtin_fn (tree decl)
213 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
216 /* Return true if NODE should be considered for inline expansion regardless
217 of the optimization level. This means whenever a function is invoked with
218 its "internal" name, which normally contains the prefix "__builtin". */
220 bool
221 called_as_built_in (tree node)
223 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
224 we want the name used to call the function, not the name it
225 will have. */
226 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
227 return is_builtin_name (name);
230 /* Compute values M and N such that M divides (address of EXP - N) and such
231 that N < M. If these numbers can be determined, store M in alignp and N in
232 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
233 *alignp and any bit-offset to *bitposp.
235 Note that the address (and thus the alignment) computed here is based
236 on the address to which a symbol resolves, whereas DECL_ALIGN is based
237 on the address at which an object is actually located. These two
238 addresses are not always the same. For example, on ARM targets,
239 the address &foo of a Thumb function foo() has the lowest bit set,
240 whereas foo() itself starts on an even address.
242 If ADDR_P is true we are taking the address of the memory reference EXP
243 and thus cannot rely on the access taking place. */
245 static bool
246 get_object_alignment_2 (tree exp, unsigned int *alignp,
247 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
249 HOST_WIDE_INT bitsize, bitpos;
250 tree offset;
251 machine_mode mode;
252 int unsignedp, reversep, volatilep;
253 unsigned int align = BITS_PER_UNIT;
254 bool known_alignment = false;
256 /* Get the innermost object and the constant (bitpos) and possibly
257 variable (offset) offset of the access. */
258 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
259 &unsignedp, &reversep, &volatilep);
261 /* Extract alignment information from the innermost object and
262 possibly adjust bitpos and offset. */
263 if (TREE_CODE (exp) == FUNCTION_DECL)
265 /* Function addresses can encode extra information besides their
266 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
267 allows the low bit to be used as a virtual bit, we know
268 that the address itself must be at least 2-byte aligned. */
269 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
270 align = 2 * BITS_PER_UNIT;
272 else if (TREE_CODE (exp) == LABEL_DECL)
274 else if (TREE_CODE (exp) == CONST_DECL)
276 /* The alignment of a CONST_DECL is determined by its initializer. */
277 exp = DECL_INITIAL (exp);
278 align = TYPE_ALIGN (TREE_TYPE (exp));
279 if (CONSTANT_CLASS_P (exp))
280 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
282 known_alignment = true;
284 else if (DECL_P (exp))
286 align = DECL_ALIGN (exp);
287 known_alignment = true;
289 else if (TREE_CODE (exp) == INDIRECT_REF
290 || TREE_CODE (exp) == MEM_REF
291 || TREE_CODE (exp) == TARGET_MEM_REF)
293 tree addr = TREE_OPERAND (exp, 0);
294 unsigned ptr_align;
295 unsigned HOST_WIDE_INT ptr_bitpos;
296 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
298 /* If the address is explicitely aligned, handle that. */
299 if (TREE_CODE (addr) == BIT_AND_EXPR
300 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
302 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
303 ptr_bitmask *= BITS_PER_UNIT;
304 align = least_bit_hwi (ptr_bitmask);
305 addr = TREE_OPERAND (addr, 0);
308 known_alignment
309 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
310 align = MAX (ptr_align, align);
312 /* Re-apply explicit alignment to the bitpos. */
313 ptr_bitpos &= ptr_bitmask;
315 /* The alignment of the pointer operand in a TARGET_MEM_REF
316 has to take the variable offset parts into account. */
317 if (TREE_CODE (exp) == TARGET_MEM_REF)
319 if (TMR_INDEX (exp))
321 unsigned HOST_WIDE_INT step = 1;
322 if (TMR_STEP (exp))
323 step = TREE_INT_CST_LOW (TMR_STEP (exp));
324 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
326 if (TMR_INDEX2 (exp))
327 align = BITS_PER_UNIT;
328 known_alignment = false;
331 /* When EXP is an actual memory reference then we can use
332 TYPE_ALIGN of a pointer indirection to derive alignment.
333 Do so only if get_pointer_alignment_1 did not reveal absolute
334 alignment knowledge and if using that alignment would
335 improve the situation. */
336 if (!addr_p && !known_alignment
337 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
338 align = TYPE_ALIGN (TREE_TYPE (exp));
339 else
341 /* Else adjust bitpos accordingly. */
342 bitpos += ptr_bitpos;
343 if (TREE_CODE (exp) == MEM_REF
344 || TREE_CODE (exp) == TARGET_MEM_REF)
345 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
348 else if (TREE_CODE (exp) == STRING_CST)
350 /* STRING_CST are the only constant objects we allow to be not
351 wrapped inside a CONST_DECL. */
352 align = TYPE_ALIGN (TREE_TYPE (exp));
353 if (CONSTANT_CLASS_P (exp))
354 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
356 known_alignment = true;
359 /* If there is a non-constant offset part extract the maximum
360 alignment that can prevail. */
361 if (offset)
363 unsigned int trailing_zeros = tree_ctz (offset);
364 if (trailing_zeros < HOST_BITS_PER_INT)
366 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
367 if (inner)
368 align = MIN (align, inner);
372 *alignp = align;
373 *bitposp = bitpos & (*alignp - 1);
374 return known_alignment;
377 /* For a memory reference expression EXP compute values M and N such that M
378 divides (&EXP - N) and such that N < M. If these numbers can be determined,
379 store M in alignp and N in *BITPOSP and return true. Otherwise return false
380 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
382 bool
383 get_object_alignment_1 (tree exp, unsigned int *alignp,
384 unsigned HOST_WIDE_INT *bitposp)
386 return get_object_alignment_2 (exp, alignp, bitposp, false);
389 /* Return the alignment in bits of EXP, an object. */
391 unsigned int
392 get_object_alignment (tree exp)
394 unsigned HOST_WIDE_INT bitpos = 0;
395 unsigned int align;
397 get_object_alignment_1 (exp, &align, &bitpos);
399 /* align and bitpos now specify known low bits of the pointer.
400 ptr & (align - 1) == bitpos. */
402 if (bitpos != 0)
403 align = least_bit_hwi (bitpos);
404 return align;
407 /* For a pointer valued expression EXP compute values M and N such that M
408 divides (EXP - N) and such that N < M. If these numbers can be determined,
409 store M in alignp and N in *BITPOSP and return true. Return false if
410 the results are just a conservative approximation.
412 If EXP is not a pointer, false is returned too. */
414 bool
415 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
416 unsigned HOST_WIDE_INT *bitposp)
418 STRIP_NOPS (exp);
420 if (TREE_CODE (exp) == ADDR_EXPR)
421 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
422 alignp, bitposp, true);
423 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
425 unsigned int align;
426 unsigned HOST_WIDE_INT bitpos;
427 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
428 &align, &bitpos);
429 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
430 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
431 else
433 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
434 if (trailing_zeros < HOST_BITS_PER_INT)
436 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
437 if (inner)
438 align = MIN (align, inner);
441 *alignp = align;
442 *bitposp = bitpos & (align - 1);
443 return res;
445 else if (TREE_CODE (exp) == SSA_NAME
446 && POINTER_TYPE_P (TREE_TYPE (exp)))
448 unsigned int ptr_align, ptr_misalign;
449 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
451 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
453 *bitposp = ptr_misalign * BITS_PER_UNIT;
454 *alignp = ptr_align * BITS_PER_UNIT;
455 /* Make sure to return a sensible alignment when the multiplication
456 by BITS_PER_UNIT overflowed. */
457 if (*alignp == 0)
458 *alignp = 1u << (HOST_BITS_PER_INT - 1);
459 /* We cannot really tell whether this result is an approximation. */
460 return false;
462 else
464 *bitposp = 0;
465 *alignp = BITS_PER_UNIT;
466 return false;
469 else if (TREE_CODE (exp) == INTEGER_CST)
471 *alignp = BIGGEST_ALIGNMENT;
472 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
473 & (BIGGEST_ALIGNMENT - 1));
474 return true;
477 *bitposp = 0;
478 *alignp = BITS_PER_UNIT;
479 return false;
482 /* Return the alignment in bits of EXP, a pointer valued expression.
483 The alignment returned is, by default, the alignment of the thing that
484 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
486 Otherwise, look at the expression to see if we can do better, i.e., if the
487 expression is actually pointing at an object whose alignment is tighter. */
489 unsigned int
490 get_pointer_alignment (tree exp)
492 unsigned HOST_WIDE_INT bitpos = 0;
493 unsigned int align;
495 get_pointer_alignment_1 (exp, &align, &bitpos);
497 /* align and bitpos now specify known low bits of the pointer.
498 ptr & (align - 1) == bitpos. */
500 if (bitpos != 0)
501 align = least_bit_hwi (bitpos);
503 return align;
506 /* Return the number of non-zero elements in the sequence
507 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
508 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
510 static unsigned
511 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
513 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
515 unsigned n;
517 if (eltsize == 1)
519 /* Optimize the common case of plain char. */
520 for (n = 0; n < maxelts; n++)
522 const char *elt = (const char*) ptr + n;
523 if (!*elt)
524 break;
527 else
529 for (n = 0; n < maxelts; n++)
531 const char *elt = (const char*) ptr + n * eltsize;
532 if (!memcmp (elt, "\0\0\0\0", eltsize))
533 break;
536 return n;
539 /* Compute the length of a null-terminated character string or wide
540 character string handling character sizes of 1, 2, and 4 bytes.
541 TREE_STRING_LENGTH is not the right way because it evaluates to
542 the size of the character array in bytes (as opposed to characters)
543 and because it can contain a zero byte in the middle.
545 ONLY_VALUE should be nonzero if the result is not going to be emitted
546 into the instruction stream and zero if it is going to be expanded.
547 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
548 is returned, otherwise NULL, since
549 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
550 evaluate the side-effects.
552 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
553 accesses. Note that this implies the result is not going to be emitted
554 into the instruction stream.
556 The value returned is of type `ssizetype'.
558 Unfortunately, string_constant can't access the values of const char
559 arrays with initializers, so neither can we do so here. */
561 tree
562 c_strlen (tree src, int only_value)
564 STRIP_NOPS (src);
565 if (TREE_CODE (src) == COND_EXPR
566 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
568 tree len1, len2;
570 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
571 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
572 if (tree_int_cst_equal (len1, len2))
573 return len1;
576 if (TREE_CODE (src) == COMPOUND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
578 return c_strlen (TREE_OPERAND (src, 1), only_value);
580 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
582 /* Offset from the beginning of the string in bytes. */
583 tree byteoff;
584 src = string_constant (src, &byteoff);
585 if (src == 0)
586 return NULL_TREE;
588 /* Determine the size of the string element. */
589 unsigned eltsize
590 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
592 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
593 length of SRC. */
594 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
596 /* PTR can point to the byte representation of any string type, including
597 char* and wchar_t*. */
598 const char *ptr = TREE_STRING_POINTER (src);
600 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 if (string_length (ptr, eltsize, maxelts) < maxelts)
607 /* Return when an embedded null character is found. */
608 return NULL_TREE;
611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
618 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
621 /* Offset from the beginning of the string in elements. */
622 HOST_WIDE_INT eltoff;
624 /* We have a known offset into the string. Start searching there for
625 a null character if we can represent it as a single HOST_WIDE_INT. */
626 if (byteoff == 0)
627 eltoff = 0;
628 else if (! tree_fits_shwi_p (byteoff))
629 eltoff = -1;
630 else
631 eltoff = tree_to_shwi (byteoff) / eltsize;
633 /* If the offset is known to be out of bounds, warn, and call strlen at
634 runtime. */
635 if (eltoff < 0 || eltoff > maxelts)
637 /* Suppress multiple warnings for propagated constant strings. */
638 if (only_value != 2
639 && !TREE_NO_WARNING (src))
641 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
642 eltoff);
643 TREE_NO_WARNING (src) = 1;
645 return NULL_TREE;
648 /* Use strlen to search for the first zero byte. Since any strings
649 constructed with build_string will have nulls appended, we win even
650 if we get handed something like (char[4])"abcd".
652 Since ELTOFF is our starting index into the string, no further
653 calculation is needed. */
654 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
655 maxelts - eltoff);
657 return ssize_int (len);
660 /* Return a constant integer corresponding to target reading
661 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
663 static rtx
664 c_readstr (const char *str, machine_mode mode)
666 HOST_WIDE_INT ch;
667 unsigned int i, j;
668 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
670 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
671 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
672 / HOST_BITS_PER_WIDE_INT;
674 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
675 for (i = 0; i < len; i++)
676 tmp[i] = 0;
678 ch = 1;
679 for (i = 0; i < GET_MODE_SIZE (mode); i++)
681 j = i;
682 if (WORDS_BIG_ENDIAN)
683 j = GET_MODE_SIZE (mode) - i - 1;
684 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
685 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
686 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
687 j *= BITS_PER_UNIT;
689 if (ch)
690 ch = (unsigned char) str[i];
691 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
694 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
695 return immed_wide_int_const (c, mode);
698 /* Cast a target constant CST to target CHAR and if that value fits into
699 host char type, return zero and put that value into variable pointed to by
700 P. */
702 static int
703 target_char_cast (tree cst, char *p)
705 unsigned HOST_WIDE_INT val, hostval;
707 if (TREE_CODE (cst) != INTEGER_CST
708 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
709 return 1;
711 /* Do not care if it fits or not right here. */
712 val = TREE_INT_CST_LOW (cst);
714 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
715 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
717 hostval = val;
718 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
719 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
721 if (val != hostval)
722 return 1;
724 *p = hostval;
725 return 0;
728 /* Similar to save_expr, but assumes that arbitrary code is not executed
729 in between the multiple evaluations. In particular, we assume that a
730 non-addressable local variable will not be modified. */
732 static tree
733 builtin_save_expr (tree exp)
735 if (TREE_CODE (exp) == SSA_NAME
736 || (TREE_ADDRESSABLE (exp) == 0
737 && (TREE_CODE (exp) == PARM_DECL
738 || (VAR_P (exp) && !TREE_STATIC (exp)))))
739 return exp;
741 return save_expr (exp);
744 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
745 times to get the address of either a higher stack frame, or a return
746 address located within it (depending on FNDECL_CODE). */
748 static rtx
749 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
751 int i;
752 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
753 if (tem == NULL_RTX)
755 /* For a zero count with __builtin_return_address, we don't care what
756 frame address we return, because target-specific definitions will
757 override us. Therefore frame pointer elimination is OK, and using
758 the soft frame pointer is OK.
760 For a nonzero count, or a zero count with __builtin_frame_address,
761 we require a stable offset from the current frame pointer to the
762 previous one, so we must use the hard frame pointer, and
763 we must disable frame pointer elimination. */
764 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
765 tem = frame_pointer_rtx;
766 else
768 tem = hard_frame_pointer_rtx;
770 /* Tell reload not to eliminate the frame pointer. */
771 crtl->accesses_prior_frames = 1;
775 if (count > 0)
776 SETUP_FRAME_ADDRESSES ();
778 /* On the SPARC, the return address is not in the frame, it is in a
779 register. There is no way to access it off of the current frame
780 pointer, but it can be accessed off the previous frame pointer by
781 reading the value from the register window save area. */
782 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
783 count--;
785 /* Scan back COUNT frames to the specified frame. */
786 for (i = 0; i < count; i++)
788 /* Assume the dynamic chain pointer is in the word that the
789 frame address points to, unless otherwise specified. */
790 tem = DYNAMIC_CHAIN_ADDRESS (tem);
791 tem = memory_address (Pmode, tem);
792 tem = gen_frame_mem (Pmode, tem);
793 tem = copy_to_reg (tem);
796 /* For __builtin_frame_address, return what we've got. But, on
797 the SPARC for example, we may have to add a bias. */
798 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
799 return FRAME_ADDR_RTX (tem);
801 /* For __builtin_return_address, get the return address from that frame. */
802 #ifdef RETURN_ADDR_RTX
803 tem = RETURN_ADDR_RTX (count, tem);
804 #else
805 tem = memory_address (Pmode,
806 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
807 tem = gen_frame_mem (Pmode, tem);
808 #endif
809 return tem;
812 /* Alias set used for setjmp buffer. */
813 static alias_set_type setjmp_alias_set = -1;
815 /* Construct the leading half of a __builtin_setjmp call. Control will
816 return to RECEIVER_LABEL. This is also called directly by the SJLJ
817 exception handling code. */
819 void
820 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
822 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
823 rtx stack_save;
824 rtx mem;
826 if (setjmp_alias_set == -1)
827 setjmp_alias_set = new_alias_set ();
829 buf_addr = convert_memory_address (Pmode, buf_addr);
831 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
833 /* We store the frame pointer and the address of receiver_label in
834 the buffer and use the rest of it for the stack save area, which
835 is machine-dependent. */
837 mem = gen_rtx_MEM (Pmode, buf_addr);
838 set_mem_alias_set (mem, setjmp_alias_set);
839 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
841 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
842 GET_MODE_SIZE (Pmode))),
843 set_mem_alias_set (mem, setjmp_alias_set);
845 emit_move_insn (validize_mem (mem),
846 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
848 stack_save = gen_rtx_MEM (sa_mode,
849 plus_constant (Pmode, buf_addr,
850 2 * GET_MODE_SIZE (Pmode)));
851 set_mem_alias_set (stack_save, setjmp_alias_set);
852 emit_stack_save (SAVE_NONLOCAL, &stack_save);
854 /* If there is further processing to do, do it. */
855 if (targetm.have_builtin_setjmp_setup ())
856 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
858 /* We have a nonlocal label. */
859 cfun->has_nonlocal_label = 1;
862 /* Construct the trailing part of a __builtin_setjmp call. This is
863 also called directly by the SJLJ exception handling code.
864 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
866 void
867 expand_builtin_setjmp_receiver (rtx receiver_label)
869 rtx chain;
871 /* Mark the FP as used when we get here, so we have to make sure it's
872 marked as used by this function. */
873 emit_use (hard_frame_pointer_rtx);
875 /* Mark the static chain as clobbered here so life information
876 doesn't get messed up for it. */
877 chain = targetm.calls.static_chain (current_function_decl, true);
878 if (chain && REG_P (chain))
879 emit_clobber (chain);
881 /* Now put in the code to restore the frame pointer, and argument
882 pointer, if needed. */
883 if (! targetm.have_nonlocal_goto ())
885 /* First adjust our frame pointer to its actual value. It was
886 previously set to the start of the virtual area corresponding to
887 the stacked variables when we branched here and now needs to be
888 adjusted to the actual hardware fp value.
890 Assignments to virtual registers are converted by
891 instantiate_virtual_regs into the corresponding assignment
892 to the underlying register (fp in this case) that makes
893 the original assignment true.
894 So the following insn will actually be decrementing fp by
895 STARTING_FRAME_OFFSET. */
896 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
898 /* Restoring the frame pointer also modifies the hard frame pointer.
899 Mark it used (so that the previous assignment remains live once
900 the frame pointer is eliminated) and clobbered (to represent the
901 implicit update from the assignment). */
902 emit_use (hard_frame_pointer_rtx);
903 emit_clobber (hard_frame_pointer_rtx);
906 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
908 /* If the argument pointer can be eliminated in favor of the
909 frame pointer, we don't need to restore it. We assume here
910 that if such an elimination is present, it can always be used.
911 This is the case on all known machines; if we don't make this
912 assumption, we do unnecessary saving on many machines. */
913 size_t i;
914 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
916 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
917 if (elim_regs[i].from == ARG_POINTER_REGNUM
918 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
919 break;
921 if (i == ARRAY_SIZE (elim_regs))
923 /* Now restore our arg pointer from the address at which it
924 was saved in our stack frame. */
925 emit_move_insn (crtl->args.internal_arg_pointer,
926 copy_to_reg (get_arg_pointer_save_area ()));
930 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
931 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
932 else if (targetm.have_nonlocal_goto_receiver ())
933 emit_insn (targetm.gen_nonlocal_goto_receiver ());
934 else
935 { /* Nothing */ }
937 /* We must not allow the code we just generated to be reordered by
938 scheduling. Specifically, the update of the frame pointer must
939 happen immediately, not later. */
940 emit_insn (gen_blockage ());
943 /* __builtin_longjmp is passed a pointer to an array of five words (not
944 all will be used on all machines). It operates similarly to the C
945 library function of the same name, but is more efficient. Much of
946 the code below is copied from the handling of non-local gotos. */
948 static void
949 expand_builtin_longjmp (rtx buf_addr, rtx value)
951 rtx fp, lab, stack;
952 rtx_insn *insn, *last;
953 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
955 /* DRAP is needed for stack realign if longjmp is expanded to current
956 function */
957 if (SUPPORTS_STACK_ALIGNMENT)
958 crtl->need_drap = true;
960 if (setjmp_alias_set == -1)
961 setjmp_alias_set = new_alias_set ();
963 buf_addr = convert_memory_address (Pmode, buf_addr);
965 buf_addr = force_reg (Pmode, buf_addr);
967 /* We require that the user must pass a second argument of 1, because
968 that is what builtin_setjmp will return. */
969 gcc_assert (value == const1_rtx);
971 last = get_last_insn ();
972 if (targetm.have_builtin_longjmp ())
973 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
974 else
976 fp = gen_rtx_MEM (Pmode, buf_addr);
977 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
978 GET_MODE_SIZE (Pmode)));
980 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
981 2 * GET_MODE_SIZE (Pmode)));
982 set_mem_alias_set (fp, setjmp_alias_set);
983 set_mem_alias_set (lab, setjmp_alias_set);
984 set_mem_alias_set (stack, setjmp_alias_set);
986 /* Pick up FP, label, and SP from the block and jump. This code is
987 from expand_goto in stmt.c; see there for detailed comments. */
988 if (targetm.have_nonlocal_goto ())
989 /* We have to pass a value to the nonlocal_goto pattern that will
990 get copied into the static_chain pointer, but it does not matter
991 what that value is, because builtin_setjmp does not use it. */
992 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
993 else
995 lab = copy_to_reg (lab);
997 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
998 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1000 emit_move_insn (hard_frame_pointer_rtx, fp);
1001 emit_stack_restore (SAVE_NONLOCAL, stack);
1003 emit_use (hard_frame_pointer_rtx);
1004 emit_use (stack_pointer_rtx);
1005 emit_indirect_jump (lab);
1009 /* Search backwards and mark the jump insn as a non-local goto.
1010 Note that this precludes the use of __builtin_longjmp to a
1011 __builtin_setjmp target in the same function. However, we've
1012 already cautioned the user that these functions are for
1013 internal exception handling use only. */
1014 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1016 gcc_assert (insn != last);
1018 if (JUMP_P (insn))
1020 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1021 break;
1023 else if (CALL_P (insn))
1024 break;
1028 static inline bool
1029 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1031 return (iter->i < iter->n);
1034 /* This function validates the types of a function call argument list
1035 against a specified list of tree_codes. If the last specifier is a 0,
1036 that represents an ellipses, otherwise the last specifier must be a
1037 VOID_TYPE. */
1039 static bool
1040 validate_arglist (const_tree callexpr, ...)
1042 enum tree_code code;
1043 bool res = 0;
1044 va_list ap;
1045 const_call_expr_arg_iterator iter;
1046 const_tree arg;
1048 va_start (ap, callexpr);
1049 init_const_call_expr_arg_iterator (callexpr, &iter);
1053 code = (enum tree_code) va_arg (ap, int);
1054 switch (code)
1056 case 0:
1057 /* This signifies an ellipses, any further arguments are all ok. */
1058 res = true;
1059 goto end;
1060 case VOID_TYPE:
1061 /* This signifies an endlink, if no arguments remain, return
1062 true, otherwise return false. */
1063 res = !more_const_call_expr_args_p (&iter);
1064 goto end;
1065 default:
1066 /* If no parameters remain or the parameter's code does not
1067 match the specified code, return false. Otherwise continue
1068 checking any remaining arguments. */
1069 arg = next_const_call_expr_arg (&iter);
1070 if (!validate_arg (arg, code))
1071 goto end;
1072 break;
1075 while (1);
1077 /* We need gotos here since we can only have one VA_CLOSE in a
1078 function. */
1079 end: ;
1080 va_end (ap);
1082 return res;
1085 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1086 and the address of the save area. */
1088 static rtx
1089 expand_builtin_nonlocal_goto (tree exp)
1091 tree t_label, t_save_area;
1092 rtx r_label, r_save_area, r_fp, r_sp;
1093 rtx_insn *insn;
1095 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1096 return NULL_RTX;
1098 t_label = CALL_EXPR_ARG (exp, 0);
1099 t_save_area = CALL_EXPR_ARG (exp, 1);
1101 r_label = expand_normal (t_label);
1102 r_label = convert_memory_address (Pmode, r_label);
1103 r_save_area = expand_normal (t_save_area);
1104 r_save_area = convert_memory_address (Pmode, r_save_area);
1105 /* Copy the address of the save location to a register just in case it was
1106 based on the frame pointer. */
1107 r_save_area = copy_to_reg (r_save_area);
1108 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1109 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1110 plus_constant (Pmode, r_save_area,
1111 GET_MODE_SIZE (Pmode)));
1113 crtl->has_nonlocal_goto = 1;
1115 /* ??? We no longer need to pass the static chain value, afaik. */
1116 if (targetm.have_nonlocal_goto ())
1117 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1118 else
1120 r_label = copy_to_reg (r_label);
1122 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1123 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1125 /* Restore frame pointer for containing function. */
1126 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1127 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1129 /* USE of hard_frame_pointer_rtx added for consistency;
1130 not clear if really needed. */
1131 emit_use (hard_frame_pointer_rtx);
1132 emit_use (stack_pointer_rtx);
1134 /* If the architecture is using a GP register, we must
1135 conservatively assume that the target function makes use of it.
1136 The prologue of functions with nonlocal gotos must therefore
1137 initialize the GP register to the appropriate value, and we
1138 must then make sure that this value is live at the point
1139 of the jump. (Note that this doesn't necessarily apply
1140 to targets with a nonlocal_goto pattern; they are free
1141 to implement it in their own way. Note also that this is
1142 a no-op if the GP register is a global invariant.) */
1143 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1144 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1145 emit_use (pic_offset_table_rtx);
1147 emit_indirect_jump (r_label);
1150 /* Search backwards to the jump insn and mark it as a
1151 non-local goto. */
1152 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1154 if (JUMP_P (insn))
1156 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1157 break;
1159 else if (CALL_P (insn))
1160 break;
1163 return const0_rtx;
1166 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1167 (not all will be used on all machines) that was passed to __builtin_setjmp.
1168 It updates the stack pointer in that block to the current value. This is
1169 also called directly by the SJLJ exception handling code. */
1171 void
1172 expand_builtin_update_setjmp_buf (rtx buf_addr)
1174 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1175 rtx stack_save
1176 = gen_rtx_MEM (sa_mode,
1177 memory_address
1178 (sa_mode,
1179 plus_constant (Pmode, buf_addr,
1180 2 * GET_MODE_SIZE (Pmode))));
1182 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1185 /* Expand a call to __builtin_prefetch. For a target that does not support
1186 data prefetch, evaluate the memory address argument in case it has side
1187 effects. */
1189 static void
1190 expand_builtin_prefetch (tree exp)
1192 tree arg0, arg1, arg2;
1193 int nargs;
1194 rtx op0, op1, op2;
1196 if (!validate_arglist (exp, POINTER_TYPE, 0))
1197 return;
1199 arg0 = CALL_EXPR_ARG (exp, 0);
1201 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1202 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1203 locality). */
1204 nargs = call_expr_nargs (exp);
1205 if (nargs > 1)
1206 arg1 = CALL_EXPR_ARG (exp, 1);
1207 else
1208 arg1 = integer_zero_node;
1209 if (nargs > 2)
1210 arg2 = CALL_EXPR_ARG (exp, 2);
1211 else
1212 arg2 = integer_three_node;
1214 /* Argument 0 is an address. */
1215 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1217 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1218 if (TREE_CODE (arg1) != INTEGER_CST)
1220 error ("second argument to %<__builtin_prefetch%> must be a constant");
1221 arg1 = integer_zero_node;
1223 op1 = expand_normal (arg1);
1224 /* Argument 1 must be either zero or one. */
1225 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1227 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1228 " using zero");
1229 op1 = const0_rtx;
1232 /* Argument 2 (locality) must be a compile-time constant int. */
1233 if (TREE_CODE (arg2) != INTEGER_CST)
1235 error ("third argument to %<__builtin_prefetch%> must be a constant");
1236 arg2 = integer_zero_node;
1238 op2 = expand_normal (arg2);
1239 /* Argument 2 must be 0, 1, 2, or 3. */
1240 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1242 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1243 op2 = const0_rtx;
1246 if (targetm.have_prefetch ())
1248 struct expand_operand ops[3];
1250 create_address_operand (&ops[0], op0);
1251 create_integer_operand (&ops[1], INTVAL (op1));
1252 create_integer_operand (&ops[2], INTVAL (op2));
1253 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1254 return;
1257 /* Don't do anything with direct references to volatile memory, but
1258 generate code to handle other side effects. */
1259 if (!MEM_P (op0) && side_effects_p (op0))
1260 emit_insn (op0);
1263 /* Get a MEM rtx for expression EXP which is the address of an operand
1264 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1265 the maximum length of the block of memory that might be accessed or
1266 NULL if unknown. */
1268 static rtx
1269 get_memory_rtx (tree exp, tree len)
1271 tree orig_exp = exp;
1272 rtx addr, mem;
1274 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1275 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1276 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1277 exp = TREE_OPERAND (exp, 0);
1279 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1280 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1282 /* Get an expression we can use to find the attributes to assign to MEM.
1283 First remove any nops. */
1284 while (CONVERT_EXPR_P (exp)
1285 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1286 exp = TREE_OPERAND (exp, 0);
1288 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1289 (as builtin stringops may alias with anything). */
1290 exp = fold_build2 (MEM_REF,
1291 build_array_type (char_type_node,
1292 build_range_type (sizetype,
1293 size_one_node, len)),
1294 exp, build_int_cst (ptr_type_node, 0));
1296 /* If the MEM_REF has no acceptable address, try to get the base object
1297 from the original address we got, and build an all-aliasing
1298 unknown-sized access to that one. */
1299 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1300 set_mem_attributes (mem, exp, 0);
1301 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1302 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1303 0))))
1305 exp = build_fold_addr_expr (exp);
1306 exp = fold_build2 (MEM_REF,
1307 build_array_type (char_type_node,
1308 build_range_type (sizetype,
1309 size_zero_node,
1310 NULL)),
1311 exp, build_int_cst (ptr_type_node, 0));
1312 set_mem_attributes (mem, exp, 0);
1314 set_mem_alias_set (mem, 0);
1315 return mem;
1318 /* Built-in functions to perform an untyped call and return. */
1320 #define apply_args_mode \
1321 (this_target_builtins->x_apply_args_mode)
1322 #define apply_result_mode \
1323 (this_target_builtins->x_apply_result_mode)
1325 /* Return the size required for the block returned by __builtin_apply_args,
1326 and initialize apply_args_mode. */
1328 static int
1329 apply_args_size (void)
1331 static int size = -1;
1332 int align;
1333 unsigned int regno;
1334 machine_mode mode;
1336 /* The values computed by this function never change. */
1337 if (size < 0)
1339 /* The first value is the incoming arg-pointer. */
1340 size = GET_MODE_SIZE (Pmode);
1342 /* The second value is the structure value address unless this is
1343 passed as an "invisible" first argument. */
1344 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1345 size += GET_MODE_SIZE (Pmode);
1347 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1348 if (FUNCTION_ARG_REGNO_P (regno))
1350 mode = targetm.calls.get_raw_arg_mode (regno);
1352 gcc_assert (mode != VOIDmode);
1354 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1355 if (size % align != 0)
1356 size = CEIL (size, align) * align;
1357 size += GET_MODE_SIZE (mode);
1358 apply_args_mode[regno] = mode;
1360 else
1362 apply_args_mode[regno] = VOIDmode;
1365 return size;
1368 /* Return the size required for the block returned by __builtin_apply,
1369 and initialize apply_result_mode. */
1371 static int
1372 apply_result_size (void)
1374 static int size = -1;
1375 int align, regno;
1376 machine_mode mode;
1378 /* The values computed by this function never change. */
1379 if (size < 0)
1381 size = 0;
1383 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1384 if (targetm.calls.function_value_regno_p (regno))
1386 mode = targetm.calls.get_raw_result_mode (regno);
1388 gcc_assert (mode != VOIDmode);
1390 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1391 if (size % align != 0)
1392 size = CEIL (size, align) * align;
1393 size += GET_MODE_SIZE (mode);
1394 apply_result_mode[regno] = mode;
1396 else
1397 apply_result_mode[regno] = VOIDmode;
1399 /* Allow targets that use untyped_call and untyped_return to override
1400 the size so that machine-specific information can be stored here. */
1401 #ifdef APPLY_RESULT_SIZE
1402 size = APPLY_RESULT_SIZE;
1403 #endif
1405 return size;
1408 /* Create a vector describing the result block RESULT. If SAVEP is true,
1409 the result block is used to save the values; otherwise it is used to
1410 restore the values. */
1412 static rtx
1413 result_vector (int savep, rtx result)
1415 int regno, size, align, nelts;
1416 machine_mode mode;
1417 rtx reg, mem;
1418 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1420 size = nelts = 0;
1421 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1422 if ((mode = apply_result_mode[regno]) != VOIDmode)
1424 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1425 if (size % align != 0)
1426 size = CEIL (size, align) * align;
1427 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1428 mem = adjust_address (result, mode, size);
1429 savevec[nelts++] = (savep
1430 ? gen_rtx_SET (mem, reg)
1431 : gen_rtx_SET (reg, mem));
1432 size += GET_MODE_SIZE (mode);
1434 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1437 /* Save the state required to perform an untyped call with the same
1438 arguments as were passed to the current function. */
1440 static rtx
1441 expand_builtin_apply_args_1 (void)
1443 rtx registers, tem;
1444 int size, align, regno;
1445 machine_mode mode;
1446 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1448 /* Create a block where the arg-pointer, structure value address,
1449 and argument registers can be saved. */
1450 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1452 /* Walk past the arg-pointer and structure value address. */
1453 size = GET_MODE_SIZE (Pmode);
1454 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1455 size += GET_MODE_SIZE (Pmode);
1457 /* Save each register used in calling a function to the block. */
1458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1459 if ((mode = apply_args_mode[regno]) != VOIDmode)
1461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1462 if (size % align != 0)
1463 size = CEIL (size, align) * align;
1465 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1467 emit_move_insn (adjust_address (registers, mode, size), tem);
1468 size += GET_MODE_SIZE (mode);
1471 /* Save the arg pointer to the block. */
1472 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1473 /* We need the pointer as the caller actually passed them to us, not
1474 as we might have pretended they were passed. Make sure it's a valid
1475 operand, as emit_move_insn isn't expected to handle a PLUS. */
1476 if (STACK_GROWS_DOWNWARD)
1478 = force_operand (plus_constant (Pmode, tem,
1479 crtl->args.pretend_args_size),
1480 NULL_RTX);
1481 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1483 size = GET_MODE_SIZE (Pmode);
1485 /* Save the structure value address unless this is passed as an
1486 "invisible" first argument. */
1487 if (struct_incoming_value)
1489 emit_move_insn (adjust_address (registers, Pmode, size),
1490 copy_to_reg (struct_incoming_value));
1491 size += GET_MODE_SIZE (Pmode);
1494 /* Return the address of the block. */
1495 return copy_addr_to_reg (XEXP (registers, 0));
1498 /* __builtin_apply_args returns block of memory allocated on
1499 the stack into which is stored the arg pointer, structure
1500 value address, static chain, and all the registers that might
1501 possibly be used in performing a function call. The code is
1502 moved to the start of the function so the incoming values are
1503 saved. */
1505 static rtx
1506 expand_builtin_apply_args (void)
1508 /* Don't do __builtin_apply_args more than once in a function.
1509 Save the result of the first call and reuse it. */
1510 if (apply_args_value != 0)
1511 return apply_args_value;
1513 /* When this function is called, it means that registers must be
1514 saved on entry to this function. So we migrate the
1515 call to the first insn of this function. */
1516 rtx temp;
1518 start_sequence ();
1519 temp = expand_builtin_apply_args_1 ();
1520 rtx_insn *seq = get_insns ();
1521 end_sequence ();
1523 apply_args_value = temp;
1525 /* Put the insns after the NOTE that starts the function.
1526 If this is inside a start_sequence, make the outer-level insn
1527 chain current, so the code is placed at the start of the
1528 function. If internal_arg_pointer is a non-virtual pseudo,
1529 it needs to be placed after the function that initializes
1530 that pseudo. */
1531 push_topmost_sequence ();
1532 if (REG_P (crtl->args.internal_arg_pointer)
1533 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1534 emit_insn_before (seq, parm_birth_insn);
1535 else
1536 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1537 pop_topmost_sequence ();
1538 return temp;
1542 /* Perform an untyped call and save the state required to perform an
1543 untyped return of whatever value was returned by the given function. */
1545 static rtx
1546 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1548 int size, align, regno;
1549 machine_mode mode;
1550 rtx incoming_args, result, reg, dest, src;
1551 rtx_call_insn *call_insn;
1552 rtx old_stack_level = 0;
1553 rtx call_fusage = 0;
1554 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1556 arguments = convert_memory_address (Pmode, arguments);
1558 /* Create a block where the return registers can be saved. */
1559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1561 /* Fetch the arg pointer from the ARGUMENTS block. */
1562 incoming_args = gen_reg_rtx (Pmode);
1563 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1564 if (!STACK_GROWS_DOWNWARD)
1565 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1566 incoming_args, 0, OPTAB_LIB_WIDEN);
1568 /* Push a new argument block and copy the arguments. Do not allow
1569 the (potential) memcpy call below to interfere with our stack
1570 manipulations. */
1571 do_pending_stack_adjust ();
1572 NO_DEFER_POP;
1574 /* Save the stack with nonlocal if available. */
1575 if (targetm.have_save_stack_nonlocal ())
1576 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1577 else
1578 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1580 /* Allocate a block of memory onto the stack and copy the memory
1581 arguments to the outgoing arguments address. We can pass TRUE
1582 as the 4th argument because we just saved the stack pointer
1583 and will restore it right after the call. */
1584 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1586 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1587 may have already set current_function_calls_alloca to true.
1588 current_function_calls_alloca won't be set if argsize is zero,
1589 so we have to guarantee need_drap is true here. */
1590 if (SUPPORTS_STACK_ALIGNMENT)
1591 crtl->need_drap = true;
1593 dest = virtual_outgoing_args_rtx;
1594 if (!STACK_GROWS_DOWNWARD)
1596 if (CONST_INT_P (argsize))
1597 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1598 else
1599 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1601 dest = gen_rtx_MEM (BLKmode, dest);
1602 set_mem_align (dest, PARM_BOUNDARY);
1603 src = gen_rtx_MEM (BLKmode, incoming_args);
1604 set_mem_align (src, PARM_BOUNDARY);
1605 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1607 /* Refer to the argument block. */
1608 apply_args_size ();
1609 arguments = gen_rtx_MEM (BLKmode, arguments);
1610 set_mem_align (arguments, PARM_BOUNDARY);
1612 /* Walk past the arg-pointer and structure value address. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1615 size += GET_MODE_SIZE (Pmode);
1617 /* Restore each of the registers previously saved. Make USE insns
1618 for each of these registers for use in making the call. */
1619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1620 if ((mode = apply_args_mode[regno]) != VOIDmode)
1622 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1623 if (size % align != 0)
1624 size = CEIL (size, align) * align;
1625 reg = gen_rtx_REG (mode, regno);
1626 emit_move_insn (reg, adjust_address (arguments, mode, size));
1627 use_reg (&call_fusage, reg);
1628 size += GET_MODE_SIZE (mode);
1631 /* Restore the structure value address unless this is passed as an
1632 "invisible" first argument. */
1633 size = GET_MODE_SIZE (Pmode);
1634 if (struct_value)
1636 rtx value = gen_reg_rtx (Pmode);
1637 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1638 emit_move_insn (struct_value, value);
1639 if (REG_P (struct_value))
1640 use_reg (&call_fusage, struct_value);
1641 size += GET_MODE_SIZE (Pmode);
1644 /* All arguments and registers used for the call are set up by now! */
1645 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1647 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1648 and we don't want to load it into a register as an optimization,
1649 because prepare_call_address already did it if it should be done. */
1650 if (GET_CODE (function) != SYMBOL_REF)
1651 function = memory_address (FUNCTION_MODE, function);
1653 /* Generate the actual call instruction and save the return value. */
1654 if (targetm.have_untyped_call ())
1656 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1657 emit_call_insn (targetm.gen_untyped_call (mem, result,
1658 result_vector (1, result)));
1660 else if (targetm.have_call_value ())
1662 rtx valreg = 0;
1664 /* Locate the unique return register. It is not possible to
1665 express a call that sets more than one return register using
1666 call_value; use untyped_call for that. In fact, untyped_call
1667 only needs to save the return registers in the given block. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_result_mode[regno]) != VOIDmode)
1671 gcc_assert (!valreg); /* have_untyped_call required. */
1673 valreg = gen_rtx_REG (mode, regno);
1676 emit_insn (targetm.gen_call_value (valreg,
1677 gen_rtx_MEM (FUNCTION_MODE, function),
1678 const0_rtx, NULL_RTX, const0_rtx));
1680 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1682 else
1683 gcc_unreachable ();
1685 /* Find the CALL insn we just emitted, and attach the register usage
1686 information. */
1687 call_insn = last_call_insn ();
1688 add_function_usage_to (call_insn, call_fusage);
1690 /* Restore the stack. */
1691 if (targetm.have_save_stack_nonlocal ())
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1695 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1697 OK_DEFER_POP;
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
1704 /* Perform an untyped return. */
1706 static void
1707 expand_builtin_return (rtx result)
1709 int size, align, regno;
1710 machine_mode mode;
1711 rtx reg;
1712 rtx_insn *call_fusage = 0;
1714 result = convert_memory_address (Pmode, result);
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1719 if (targetm.have_untyped_return ())
1721 rtx vector = result_vector (0, result);
1722 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1723 emit_barrier ();
1724 return;
1727 /* Restore the return value and note that each value is used. */
1728 size = 0;
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1738 push_to_sequence (call_fusage);
1739 emit_use (reg);
1740 call_fusage = get_insns ();
1741 end_sequence ();
1742 size += GET_MODE_SIZE (mode);
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1748 /* Return whatever values were restored by jumping directly to the end
1749 of the function. */
1750 expand_naked_return ();
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1755 static enum type_class
1756 type_to_class (tree type)
1758 switch (TREE_CODE (type))
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1772 case UNION_TYPE:
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1781 /* Expand a call EXP to __builtin_classify_type. */
1783 static rtx
1784 expand_builtin_classify_type (tree exp)
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
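/* For example, given the type_to_class mapping above, a call such as

     int k = __builtin_classify_type (3.14);

   has a double argument and therefore expands to the constant
   real_type_class, while a call with no arguments yields no_type_class.  */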
1791 /* This helper macro, meant to be used in mathfn_built_in below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(MATHFN) \
1796 CASE_CFN_##MATHFN: \
1797 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1798 fcodel = BUILT_IN_##MATHFN##L ; break;
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(MATHFN) \
1801 case CFN_BUILT_IN_##MATHFN##_R: \
1802 case CFN_BUILT_IN_##MATHFN##F_R: \
1803 case CFN_BUILT_IN_##MATHFN##L_R: \
1804 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1805 fcodel = BUILT_IN_##MATHFN##L_R ; break;
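/* As an illustration, CASE_MATHFN (SIN) expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single macro use covers the double, float and long double
   variants of each math builtin.  */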
1807 /* Return a function equivalent to FN but operating on floating-point
1808 values of type TYPE, or END_BUILTINS if no such function exists.
1809 This is purely an operation on function codes; it does not guarantee
1810 that the target actually has an implementation of the function. */
1812 static built_in_function
1813 mathfn_built_in_2 (tree type, combined_fn fn)
1815 built_in_function fcode, fcodef, fcodel;
1817 switch (fn)
1819 CASE_MATHFN (ACOS)
1820 CASE_MATHFN (ACOSH)
1821 CASE_MATHFN (ASIN)
1822 CASE_MATHFN (ASINH)
1823 CASE_MATHFN (ATAN)
1824 CASE_MATHFN (ATAN2)
1825 CASE_MATHFN (ATANH)
1826 CASE_MATHFN (CBRT)
1827 CASE_MATHFN (CEIL)
1828 CASE_MATHFN (CEXPI)
1829 CASE_MATHFN (COPYSIGN)
1830 CASE_MATHFN (COS)
1831 CASE_MATHFN (COSH)
1832 CASE_MATHFN (DREM)
1833 CASE_MATHFN (ERF)
1834 CASE_MATHFN (ERFC)
1835 CASE_MATHFN (EXP)
1836 CASE_MATHFN (EXP10)
1837 CASE_MATHFN (EXP2)
1838 CASE_MATHFN (EXPM1)
1839 CASE_MATHFN (FABS)
1840 CASE_MATHFN (FDIM)
1841 CASE_MATHFN (FLOOR)
1842 CASE_MATHFN (FMA)
1843 CASE_MATHFN (FMAX)
1844 CASE_MATHFN (FMIN)
1845 CASE_MATHFN (FMOD)
1846 CASE_MATHFN (FREXP)
1847 CASE_MATHFN (GAMMA)
1848 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1849 CASE_MATHFN (HUGE_VAL)
1850 CASE_MATHFN (HYPOT)
1851 CASE_MATHFN (ILOGB)
1852 CASE_MATHFN (ICEIL)
1853 CASE_MATHFN (IFLOOR)
1854 CASE_MATHFN (INF)
1855 CASE_MATHFN (IRINT)
1856 CASE_MATHFN (IROUND)
1857 CASE_MATHFN (ISINF)
1858 CASE_MATHFN (J0)
1859 CASE_MATHFN (J1)
1860 CASE_MATHFN (JN)
1861 CASE_MATHFN (LCEIL)
1862 CASE_MATHFN (LDEXP)
1863 CASE_MATHFN (LFLOOR)
1864 CASE_MATHFN (LGAMMA)
1865 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1866 CASE_MATHFN (LLCEIL)
1867 CASE_MATHFN (LLFLOOR)
1868 CASE_MATHFN (LLRINT)
1869 CASE_MATHFN (LLROUND)
1870 CASE_MATHFN (LOG)
1871 CASE_MATHFN (LOG10)
1872 CASE_MATHFN (LOG1P)
1873 CASE_MATHFN (LOG2)
1874 CASE_MATHFN (LOGB)
1875 CASE_MATHFN (LRINT)
1876 CASE_MATHFN (LROUND)
1877 CASE_MATHFN (MODF)
1878 CASE_MATHFN (NAN)
1879 CASE_MATHFN (NANS)
1880 CASE_MATHFN (NEARBYINT)
1881 CASE_MATHFN (NEXTAFTER)
1882 CASE_MATHFN (NEXTTOWARD)
1883 CASE_MATHFN (POW)
1884 CASE_MATHFN (POWI)
1885 CASE_MATHFN (POW10)
1886 CASE_MATHFN (REMAINDER)
1887 CASE_MATHFN (REMQUO)
1888 CASE_MATHFN (RINT)
1889 CASE_MATHFN (ROUND)
1890 CASE_MATHFN (SCALB)
1891 CASE_MATHFN (SCALBLN)
1892 CASE_MATHFN (SCALBN)
1893 CASE_MATHFN (SIGNBIT)
1894 CASE_MATHFN (SIGNIFICAND)
1895 CASE_MATHFN (SIN)
1896 CASE_MATHFN (SINCOS)
1897 CASE_MATHFN (SINH)
1898 CASE_MATHFN (SQRT)
1899 CASE_MATHFN (TAN)
1900 CASE_MATHFN (TANH)
1901 CASE_MATHFN (TGAMMA)
1902 CASE_MATHFN (TRUNC)
1903 CASE_MATHFN (Y0)
1904 CASE_MATHFN (Y1)
1905 CASE_MATHFN (YN)
1907 default:
1908 return END_BUILTINS;
1911 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1912 return fcode;
1913 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1914 return fcodef;
1915 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1916 return fcodel;
1917 else
1918 return END_BUILTINS;
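/* A worked example of the selection above: mathfn_built_in_2
   (float_type_node, CFN_SQRT) picks fcodef and so returns BUILT_IN_SQRTF,
   while a type whose main variant is none of double, float or long double
   (a target-specific floating type, say) yields END_BUILTINS.  */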
1921 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1922 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1923 otherwise use the explicit declaration. If we can't do the conversion,
1924 return null. */
1926 static tree
1927 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1929 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1930 if (fcode2 == END_BUILTINS)
1931 return NULL_TREE;
1933 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1934 return NULL_TREE;
1936 return builtin_decl_explicit (fcode2);
1939 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1941 tree
1942 mathfn_built_in (tree type, combined_fn fn)
1944 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1947 /* Like mathfn_built_in_1, but take a built_in_function and
1948 always use the implicit builtin declarations. */
1950 tree
1951 mathfn_built_in (tree type, enum built_in_function fn)
1953 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1956 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1957 return its code, otherwise return IFN_LAST. Note that this function
1958 only tests whether the function is defined in internal-fn.def, not whether
1959 it is actually available on the target. */
1961 internal_fn
1962 associated_internal_fn (tree fndecl)
1964 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1965 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1966 switch (DECL_FUNCTION_CODE (fndecl))
1968 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1969 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1970 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1971 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1972 #include "internal-fn.def"
1974 CASE_FLT_FN (BUILT_IN_POW10):
1975 return IFN_EXP10;
1977 CASE_FLT_FN (BUILT_IN_DREM):
1978 return IFN_REMAINDER;
1980 CASE_FLT_FN (BUILT_IN_SCALBN):
1981 CASE_FLT_FN (BUILT_IN_SCALBLN):
1982 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1983 return IFN_LDEXP;
1984 return IFN_LAST;
1986 default:
1987 return IFN_LAST;
1991 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1992 on the current target by a call to an internal function, return the
1993 code of that internal function, otherwise return IFN_LAST. The caller
1994 is responsible for ensuring that any side-effects of the built-in
1995 call are dealt with correctly. E.g. if CALL sets errno, the caller
1996 must decide that the errno result isn't needed or make it available
1997 in some other way. */
1999 internal_fn
2000 replacement_internal_fn (gcall *call)
2002 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2004 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2005 if (ifn != IFN_LAST)
2007 tree_pair types = direct_internal_fn_types (ifn, call);
2008 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2009 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2010 return ifn;
2013 return IFN_LAST;
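/* A sketch of the intended use, assuming the target supports a direct
   square-root internal function for the call's mode: for a GIMPLE statement

     _2 = __builtin_sqrtf (_1);

   replacement_internal_fn returns IFN_SQRT, and the caller may then rewrite
   the statement to call the internal function once it has decided that the
   errno side-effect of the library sqrtf is not needed.  */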
2016 /* Expand a call to the builtin trinary math functions (fma).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2021 operands. */
2023 static rtx
2024 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, op2, result;
2028 rtx_insn *insns;
2029 tree fndecl = get_callee_fndecl (exp);
2030 tree arg0, arg1, arg2;
2031 machine_mode mode;
2033 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2034 return NULL_RTX;
2036 arg0 = CALL_EXPR_ARG (exp, 0);
2037 arg1 = CALL_EXPR_ARG (exp, 1);
2038 arg2 = CALL_EXPR_ARG (exp, 2);
2040 switch (DECL_FUNCTION_CODE (fndecl))
2042 CASE_FLT_FN (BUILT_IN_FMA):
2043 builtin_optab = fma_optab; break;
2044 default:
2045 gcc_unreachable ();
2048 /* Make a suitable register to place result in. */
2049 mode = TYPE_MODE (TREE_TYPE (exp));
2051 /* Before working hard, check whether the instruction is available. */
2052 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2053 return NULL_RTX;
2055 result = gen_reg_rtx (mode);
2057 /* Always stabilize the argument list. */
2058 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2059 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2060 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2062 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2063 op1 = expand_normal (arg1);
2064 op2 = expand_normal (arg2);
2066 start_sequence ();
2068 /* Compute into RESULT.
2069 Set RESULT to wherever the result comes back. */
2070 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2071 result, 0);
2073 /* If we were unable to expand via the builtin, stop the sequence
2074 (without outputting the insns) and call the library function
2075 with the stabilized argument list. */
2076 if (result == 0)
2078 end_sequence ();
2079 return expand_call (exp, target, target == const0_rtx);
2082 /* Output the entire sequence. */
2083 insns = get_insns ();
2084 end_sequence ();
2085 emit_insn (insns);
2087 return result;
2090 /* Expand a call to the builtin sin and cos math functions.
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2095 operands. */
2097 static rtx
2098 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2100 optab builtin_optab;
2101 rtx op0;
2102 rtx_insn *insns;
2103 tree fndecl = get_callee_fndecl (exp);
2104 machine_mode mode;
2105 tree arg;
2107 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2108 return NULL_RTX;
2110 arg = CALL_EXPR_ARG (exp, 0);
2112 switch (DECL_FUNCTION_CODE (fndecl))
2114 CASE_FLT_FN (BUILT_IN_SIN):
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 builtin_optab = sincos_optab; break;
2117 default:
2118 gcc_unreachable ();
2121 /* Make a suitable register to place result in. */
2122 mode = TYPE_MODE (TREE_TYPE (exp));
2124 /* Check if the sincos insn is available; otherwise fall back
2125 to the sin or cos insn. */
2126 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2127 switch (DECL_FUNCTION_CODE (fndecl))
2129 CASE_FLT_FN (BUILT_IN_SIN):
2130 builtin_optab = sin_optab; break;
2131 CASE_FLT_FN (BUILT_IN_COS):
2132 builtin_optab = cos_optab; break;
2133 default:
2134 gcc_unreachable ();
2137 /* Before working hard, check whether the instruction is available. */
2138 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2140 rtx result = gen_reg_rtx (mode);
2142 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2143 need to expand the argument again. This way, we will not perform
2144 side-effects more than once. */
2145 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2147 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2149 start_sequence ();
2151 /* Compute into RESULT.
2152 Set RESULT to wherever the result comes back. */
2153 if (builtin_optab == sincos_optab)
2155 int ok;
2157 switch (DECL_FUNCTION_CODE (fndecl))
2159 CASE_FLT_FN (BUILT_IN_SIN):
2160 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2161 break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2164 break;
2165 default:
2166 gcc_unreachable ();
2168 gcc_assert (ok);
2170 else
2171 result = expand_unop (mode, builtin_optab, op0, result, 0);
2173 if (result != 0)
2175 /* Output the entire sequence. */
2176 insns = get_insns ();
2177 end_sequence ();
2178 emit_insn (insns);
2179 return result;
2182 /* If we were unable to expand via the builtin, stop the sequence
2183 (without outputting the insns) and call the library function
2184 with the stabilized argument list. */
2185 end_sequence ();
2188 return expand_call (exp, target, target == const0_rtx);
2191 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2192 return an RTL instruction code that implements the functionality.
2193 If that isn't possible or available return CODE_FOR_nothing. */
2195 static enum insn_code
2196 interclass_mathfn_icode (tree arg, tree fndecl)
2198 bool errno_set = false;
2199 optab builtin_optab = unknown_optab;
2200 machine_mode mode;
2202 switch (DECL_FUNCTION_CODE (fndecl))
2204 CASE_FLT_FN (BUILT_IN_ILOGB):
2205 errno_set = true; builtin_optab = ilogb_optab; break;
2206 CASE_FLT_FN (BUILT_IN_ISINF):
2207 builtin_optab = isinf_optab; break;
2208 case BUILT_IN_ISNORMAL:
2209 case BUILT_IN_ISFINITE:
2210 CASE_FLT_FN (BUILT_IN_FINITE):
2211 case BUILT_IN_FINITED32:
2212 case BUILT_IN_FINITED64:
2213 case BUILT_IN_FINITED128:
2214 case BUILT_IN_ISINFD32:
2215 case BUILT_IN_ISINFD64:
2216 case BUILT_IN_ISINFD128:
2217 /* These builtins have no optabs (yet). */
2218 break;
2219 default:
2220 gcc_unreachable ();
2223 /* There's no easy way to detect the case we need to set EDOM. */
2224 if (flag_errno_math && errno_set)
2225 return CODE_FOR_nothing;
2227 /* Optab mode depends on the mode of the input argument. */
2228 mode = TYPE_MODE (TREE_TYPE (arg));
2230 if (builtin_optab)
2231 return optab_handler (builtin_optab, mode);
2232 return CODE_FOR_nothing;
2235 /* Expand a call to one of the builtin math functions that operate on
2236 a floating-point argument and output an integer result (ilogb, isinf,
2237 isnan, etc.).
2238 Return 0 if a normal call should be emitted rather than expanding the
2239 function in-line. EXP is the expression that is a call to the builtin
2240 function; if convenient, the result should be placed in TARGET. */
2242 static rtx
2243 expand_builtin_interclass_mathfn (tree exp, rtx target)
2245 enum insn_code icode = CODE_FOR_nothing;
2246 rtx op0;
2247 tree fndecl = get_callee_fndecl (exp);
2248 machine_mode mode;
2249 tree arg;
2251 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2252 return NULL_RTX;
2254 arg = CALL_EXPR_ARG (exp, 0);
2255 icode = interclass_mathfn_icode (arg, fndecl);
2256 mode = TYPE_MODE (TREE_TYPE (arg));
2258 if (icode != CODE_FOR_nothing)
2260 struct expand_operand ops[1];
2261 rtx_insn *last = get_last_insn ();
2262 tree orig_arg = arg;
2264 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2265 need to expand the argument again. This way, we will not perform
2266 side-effects more than once. */
2267 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2269 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2271 if (mode != GET_MODE (op0))
2272 op0 = convert_to_mode (mode, op0, 0);
2274 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2275 if (maybe_legitimize_operands (icode, 0, 1, ops)
2276 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2277 return ops[0].value;
2279 delete_insns_since (last);
2280 CALL_EXPR_ARG (exp, 0) = orig_arg;
2283 return NULL_RTX;
2286 /* Expand a call to the builtin sincos math function.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function. */
2291 static rtx
2292 expand_builtin_sincos (tree exp)
2294 rtx op0, op1, op2, target1, target2;
2295 machine_mode mode;
2296 tree arg, sinp, cosp;
2297 int result;
2298 location_t loc = EXPR_LOCATION (exp);
2299 tree alias_type, alias_off;
2301 if (!validate_arglist (exp, REAL_TYPE,
2302 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2303 return NULL_RTX;
2305 arg = CALL_EXPR_ARG (exp, 0);
2306 sinp = CALL_EXPR_ARG (exp, 1);
2307 cosp = CALL_EXPR_ARG (exp, 2);
2309 /* Make a suitable register to place result in. */
2310 mode = TYPE_MODE (TREE_TYPE (arg));
2312 /* Check if sincos insn is available, otherwise emit the call. */
2313 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2314 return NULL_RTX;
2316 target1 = gen_reg_rtx (mode);
2317 target2 = gen_reg_rtx (mode);
2319 op0 = expand_normal (arg);
2320 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2321 alias_off = build_int_cst (alias_type, 0);
2322 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2323 sinp, alias_off));
2324 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2325 cosp, alias_off));
2327 /* Compute into target1 and target2.
2328 Set TARGET to wherever the result comes back. */
2329 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2330 gcc_assert (result);
2332 /* Move target1 and target2 to the memory locations indicated
2333 by op1 and op2. */
2334 emit_move_insn (op1, target1);
2335 emit_move_insn (op2, target2);
2337 return const0_rtx;
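/* Put differently: when the sincos optab exists for the argument's mode, a
   source-level call such as

     sincos (x, &s, &c);

   becomes a single two-output instruction whose results are then stored
   through the two pointer arguments; returning NULL_RTX above instead tells
   the caller to emit an ordinary library call.  */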
2340 /* Expand a call to the internal cexpi builtin to the sincos math function.
2341 EXP is the expression that is a call to the builtin function; if convenient,
2342 the result should be placed in TARGET. */
2344 static rtx
2345 expand_builtin_cexpi (tree exp, rtx target)
2347 tree fndecl = get_callee_fndecl (exp);
2348 tree arg, type;
2349 machine_mode mode;
2350 rtx op0, op1, op2;
2351 location_t loc = EXPR_LOCATION (exp);
2353 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2354 return NULL_RTX;
2356 arg = CALL_EXPR_ARG (exp, 0);
2357 type = TREE_TYPE (arg);
2358 mode = TYPE_MODE (TREE_TYPE (arg));
2360 /* Try expanding via a sincos optab, falling back to emitting a libcall
2361 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2362 is only generated from sincos or cexp, or if we have either of them. */
2363 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2365 op1 = gen_reg_rtx (mode);
2366 op2 = gen_reg_rtx (mode);
2368 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2370 /* Compute into op1 and op2. */
2371 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2373 else if (targetm.libc_has_function (function_sincos))
2375 tree call, fn = NULL_TREE;
2376 tree top1, top2;
2377 rtx op1a, op2a;
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2385 else
2386 gcc_unreachable ();
2388 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2389 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2390 op1a = copy_addr_to_reg (XEXP (op1, 0));
2391 op2a = copy_addr_to_reg (XEXP (op2, 0));
2392 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2393 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2395 /* Make sure not to fold the sincos call again. */
2396 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2397 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2398 call, 3, arg, top1, top2));
2400 else
2402 tree call, fn = NULL_TREE, narg;
2403 tree ctype = build_complex_type (type);
2405 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2406 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2408 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2410 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2411 else
2412 gcc_unreachable ();
2414 /* If we don't have a decl for cexp, create one. This is the
2415 friendliest fallback if the user calls __builtin_cexpi
2416 without full target C99 function support. */
2417 if (fn == NULL_TREE)
2419 tree fntype;
2420 const char *name = NULL;
2422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2423 name = "cexpf";
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2425 name = "cexp";
2426 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2427 name = "cexpl";
2429 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2430 fn = build_fn_decl (name, fntype);
2433 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2434 build_real (type, dconst0), arg);
2436 /* Make sure not to fold the cexp call again. */
2437 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2438 return expand_expr (build_call_nary (ctype, call, 1, narg),
2439 target, VOIDmode, EXPAND_NORMAL);
2442 /* Now build the proper return type. */
2443 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2444 make_tree (TREE_TYPE (arg), op2),
2445 make_tree (TREE_TYPE (arg), op1)),
2446 target, VOIDmode, EXPAND_NORMAL);
2449 /* Conveniently construct a function call expression. FNDECL names the
2450 function to be called, N is the number of arguments, and the "..."
2451 parameters are the argument expressions. Unlike build_call_expr,
2452 this doesn't fold the call, so it will always return a CALL_EXPR. */
2454 static tree
2455 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2457 va_list ap;
2458 tree fntype = TREE_TYPE (fndecl);
2459 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2461 va_start (ap, n);
2462 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2463 va_end (ap);
2464 SET_EXPR_LOCATION (fn, loc);
2465 return fn;
2468 /* Expand a call to one of the builtin rounding functions gcc defines
2469 as an extension (lfloor and lceil). As these are gcc extensions we
2470 do not need to worry about setting errno to EDOM.
2471 If expanding via optab fails, lower expression to (int)(floor(x)).
2472 EXP is the expression that is a call to the builtin function;
2473 if convenient, the result should be placed in TARGET. */
2475 static rtx
2476 expand_builtin_int_roundingfn (tree exp, rtx target)
2478 convert_optab builtin_optab;
2479 rtx op0, tmp;
2480 rtx_insn *insns;
2481 tree fndecl = get_callee_fndecl (exp);
2482 enum built_in_function fallback_fn;
2483 tree fallback_fndecl;
2484 machine_mode mode;
2485 tree arg;
2487 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2488 gcc_unreachable ();
2490 arg = CALL_EXPR_ARG (exp, 0);
2492 switch (DECL_FUNCTION_CODE (fndecl))
2494 CASE_FLT_FN (BUILT_IN_ICEIL):
2495 CASE_FLT_FN (BUILT_IN_LCEIL):
2496 CASE_FLT_FN (BUILT_IN_LLCEIL):
2497 builtin_optab = lceil_optab;
2498 fallback_fn = BUILT_IN_CEIL;
2499 break;
2501 CASE_FLT_FN (BUILT_IN_IFLOOR):
2502 CASE_FLT_FN (BUILT_IN_LFLOOR):
2503 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2504 builtin_optab = lfloor_optab;
2505 fallback_fn = BUILT_IN_FLOOR;
2506 break;
2508 default:
2509 gcc_unreachable ();
2512 /* Make a suitable register to place result in. */
2513 mode = TYPE_MODE (TREE_TYPE (exp));
2515 target = gen_reg_rtx (mode);
2517 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2518 need to expand the argument again. This way, we will not perform
2519 side-effects more than once. */
2520 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2522 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2524 start_sequence ();
2526 /* Compute into TARGET. */
2527 if (expand_sfix_optab (target, op0, builtin_optab))
2529 /* Output the entire sequence. */
2530 insns = get_insns ();
2531 end_sequence ();
2532 emit_insn (insns);
2533 return target;
2536 /* If we were unable to expand via the builtin, stop the sequence
2537 (without outputting the insns). */
2538 end_sequence ();
2540 /* Fall back to floating point rounding optab. */
2541 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2543 /* For non-C99 targets we may end up without a fallback fndecl here
2544 if the user called __builtin_lfloor directly. In this case emit
2545 a call to the floor/ceil variants nevertheless. This should result
2546 in the best user experience for targets lacking full C99 support. */
2547 if (fallback_fndecl == NULL_TREE)
2549 tree fntype;
2550 const char *name = NULL;
2552 switch (DECL_FUNCTION_CODE (fndecl))
2554 case BUILT_IN_ICEIL:
2555 case BUILT_IN_LCEIL:
2556 case BUILT_IN_LLCEIL:
2557 name = "ceil";
2558 break;
2559 case BUILT_IN_ICEILF:
2560 case BUILT_IN_LCEILF:
2561 case BUILT_IN_LLCEILF:
2562 name = "ceilf";
2563 break;
2564 case BUILT_IN_ICEILL:
2565 case BUILT_IN_LCEILL:
2566 case BUILT_IN_LLCEILL:
2567 name = "ceill";
2568 break;
2569 case BUILT_IN_IFLOOR:
2570 case BUILT_IN_LFLOOR:
2571 case BUILT_IN_LLFLOOR:
2572 name = "floor";
2573 break;
2574 case BUILT_IN_IFLOORF:
2575 case BUILT_IN_LFLOORF:
2576 case BUILT_IN_LLFLOORF:
2577 name = "floorf";
2578 break;
2579 case BUILT_IN_IFLOORL:
2580 case BUILT_IN_LFLOORL:
2581 case BUILT_IN_LLFLOORL:
2582 name = "floorl";
2583 break;
2584 default:
2585 gcc_unreachable ();
2588 fntype = build_function_type_list (TREE_TYPE (arg),
2589 TREE_TYPE (arg), NULL_TREE);
2590 fallback_fndecl = build_fn_decl (name, fntype);
2593 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2595 tmp = expand_normal (exp);
2596 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2598 /* Truncate the result of the floating-point optab to an integer
2599 via expand_fix (). */
2600 target = gen_reg_rtx (mode);
2601 expand_fix (target, tmp, 0);
2603 return target;
2606 /* Expand a call to one of the builtin math functions doing integer
2607 conversion (lrint).
2608 Return 0 if a normal call should be emitted rather than expanding the
2609 function in-line. EXP is the expression that is a call to the builtin
2610 function; if convenient, the result should be placed in TARGET. */
2612 static rtx
2613 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2615 convert_optab builtin_optab;
2616 rtx op0;
2617 rtx_insn *insns;
2618 tree fndecl = get_callee_fndecl (exp);
2619 tree arg;
2620 machine_mode mode;
2621 enum built_in_function fallback_fn = BUILT_IN_NONE;
2623 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2624 gcc_unreachable ();
2626 arg = CALL_EXPR_ARG (exp, 0);
2628 switch (DECL_FUNCTION_CODE (fndecl))
2630 CASE_FLT_FN (BUILT_IN_IRINT):
2631 fallback_fn = BUILT_IN_LRINT;
2632 gcc_fallthrough ();
2633 CASE_FLT_FN (BUILT_IN_LRINT):
2634 CASE_FLT_FN (BUILT_IN_LLRINT):
2635 builtin_optab = lrint_optab;
2636 break;
2638 CASE_FLT_FN (BUILT_IN_IROUND):
2639 fallback_fn = BUILT_IN_LROUND;
2640 gcc_fallthrough ();
2641 CASE_FLT_FN (BUILT_IN_LROUND):
2642 CASE_FLT_FN (BUILT_IN_LLROUND):
2643 builtin_optab = lround_optab;
2644 break;
2646 default:
2647 gcc_unreachable ();
2650 /* There's no easy way to detect the case we need to set EDOM. */
2651 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2652 return NULL_RTX;
2654 /* Make a suitable register to place result in. */
2655 mode = TYPE_MODE (TREE_TYPE (exp));
2657 /* There's no easy way to detect the case we need to set EDOM. */
2658 if (!flag_errno_math)
2660 rtx result = gen_reg_rtx (mode);
2662 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2663 need to expand the argument again. This way, we will not perform
2664 side-effects more than once. */
2665 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2667 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2669 start_sequence ();
2671 if (expand_sfix_optab (result, op0, builtin_optab))
2673 /* Output the entire sequence. */
2674 insns = get_insns ();
2675 end_sequence ();
2676 emit_insn (insns);
2677 return result;
2680 /* If we were unable to expand via the builtin, stop the sequence
2681 (without outputting the insns) and call the library function
2682 with the stabilized argument list. */
2683 end_sequence ();
2686 if (fallback_fn != BUILT_IN_NONE)
2688 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2689 targets, (int) round (x) should never be transformed into
2690 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2691 a call to lround in the hope that the target provides at least some
2692 C99 functions. This should result in the best user experience for
2693 targets lacking full C99 support. */
2694 tree fallback_fndecl = mathfn_built_in_1
2695 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2697 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2698 fallback_fndecl, 1, arg);
2700 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2701 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2702 return convert_to_mode (mode, target, 0);
2705 return expand_call (exp, target, target == const0_rtx);
2708 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2709 a normal call should be emitted rather than expanding the function
2710 in-line. EXP is the expression that is a call to the builtin
2711 function; if convenient, the result should be placed in TARGET. */
2713 static rtx
2714 expand_builtin_powi (tree exp, rtx target)
2716 tree arg0, arg1;
2717 rtx op0, op1;
2718 machine_mode mode;
2719 machine_mode mode2;
2721 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2722 return NULL_RTX;
2724 arg0 = CALL_EXPR_ARG (exp, 0);
2725 arg1 = CALL_EXPR_ARG (exp, 1);
2726 mode = TYPE_MODE (TREE_TYPE (exp));
2728 /* Emit a libcall to libgcc. */
2730 /* Mode of the 2nd argument must match that of an int. */
2731 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2733 if (target == NULL_RTX)
2734 target = gen_reg_rtx (mode);
2736 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2737 if (GET_MODE (op0) != mode)
2738 op0 = convert_to_mode (mode, op0, 0);
2739 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2740 if (GET_MODE (op1) != mode2)
2741 op1 = convert_to_mode (mode2, op1, 0);
2743 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2744 target, LCT_CONST, mode, 2,
2745 op0, mode, op1, mode2);
2747 return target;
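/* No open-coded expansion is attempted here: __builtin_powi always becomes
   a libcall through the powi_optab libfunc (in a typical libgcc build these
   are the __powisf2/__powidf2 family), with the exponent converted to the
   mode of an int as required above.  */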
2750 /* Expand expression EXP, which is a call to the strlen builtin. Return
2751 NULL_RTX if we failed; the caller should emit a normal call; otherwise
2752 try to get the result in TARGET, if convenient. */
2754 static rtx
2755 expand_builtin_strlen (tree exp, rtx target,
2756 machine_mode target_mode)
2758 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2759 return NULL_RTX;
2760 else
2762 struct expand_operand ops[4];
2763 rtx pat;
2764 tree len;
2765 tree src = CALL_EXPR_ARG (exp, 0);
2766 rtx src_reg;
2767 rtx_insn *before_strlen;
2768 machine_mode insn_mode = target_mode;
2769 enum insn_code icode = CODE_FOR_nothing;
2770 unsigned int align;
2772 /* If the length can be computed at compile-time, return it. */
2773 len = c_strlen (src, 0);
2774 if (len)
2775 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2777 /* If the length can be computed at compile-time and is a constant
2778 integer, but there are side-effects in src, evaluate
2779 src for side-effects, then return len.
2780 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2781 can be optimized into: i++; x = 3; */
2782 len = c_strlen (src, 1);
2783 if (len && TREE_CODE (len) == INTEGER_CST)
2785 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2786 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2789 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2791 /* If SRC is not a pointer type, don't do this operation inline. */
2792 if (align == 0)
2793 return NULL_RTX;
2795 /* Bail out if we can't compute strlen in the right mode. */
2796 while (insn_mode != VOIDmode)
2798 icode = optab_handler (strlen_optab, insn_mode);
2799 if (icode != CODE_FOR_nothing)
2800 break;
2802 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2804 if (insn_mode == VOIDmode)
2805 return NULL_RTX;
2807 /* Make a place to hold the source address. We will not expand
2808 the actual source until we are sure that the expansion will
2809 not fail -- there are trees that cannot be expanded twice. */
2810 src_reg = gen_reg_rtx (Pmode);
2812 /* Mark the beginning of the strlen sequence so we can emit the
2813 source operand later. */
2814 before_strlen = get_last_insn ();
2816 create_output_operand (&ops[0], target, insn_mode);
2817 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2818 create_integer_operand (&ops[2], 0);
2819 create_integer_operand (&ops[3], align);
2820 if (!maybe_expand_insn (icode, 4, ops))
2821 return NULL_RTX;
2823 /* Now that we are assured of success, expand the source. */
2824 start_sequence ();
2825 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2826 if (pat != src_reg)
2828 #ifdef POINTERS_EXTEND_UNSIGNED
2829 if (GET_MODE (pat) != Pmode)
2830 pat = convert_to_mode (Pmode, pat,
2831 POINTERS_EXTEND_UNSIGNED);
2832 #endif
2833 emit_move_insn (src_reg, pat);
2835 pat = get_insns ();
2836 end_sequence ();
2838 if (before_strlen)
2839 emit_insn_after (pat, before_strlen);
2840 else
2841 emit_insn_before (pat, get_insns ());
2843 /* Return the value in the proper mode for this function. */
2844 if (GET_MODE (ops[0].value) == target_mode)
2845 target = ops[0].value;
2846 else if (target != 0)
2847 convert_move (target, ops[0].value, 0);
2848 else
2849 target = convert_to_mode (target_mode, ops[0].value, 0);
2851 return target;
2855 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2856 bytes from constant string DATA + OFFSET and return it as target
2857 constant. */
2859 static rtx
2860 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2861 machine_mode mode)
2863 const char *str = (const char *) data;
2865 gcc_assert (offset >= 0
2866 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2867 <= strlen (str) + 1));
2869 return c_readstr (str + offset, mode);
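/* For instance, with DATA pointing at "hello", a four-byte MODE and an
   OFFSET of 1, c_readstr builds a constant holding the bytes of "ello"
   arranged in the target's byte order.  */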
2872 /* LEN specifies the length of the block for the memcpy/memset operation.
2873 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2874 In some cases we can make a very likely guess at the maximum size, which
2875 we then put into PROBABLE_MAX_SIZE. */
2877 static void
2878 determine_block_size (tree len, rtx len_rtx,
2879 unsigned HOST_WIDE_INT *min_size,
2880 unsigned HOST_WIDE_INT *max_size,
2881 unsigned HOST_WIDE_INT *probable_max_size)
2883 if (CONST_INT_P (len_rtx))
2885 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2886 return;
2888 else
2890 wide_int min, max;
2891 enum value_range_type range_type = VR_UNDEFINED;
2893 /* Determine bounds from the type. */
2894 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2895 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2896 else
2897 *min_size = 0;
2898 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2899 *probable_max_size = *max_size
2900 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2901 else
2902 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2904 if (TREE_CODE (len) == SSA_NAME)
2905 range_type = get_range_info (len, &min, &max);
2906 if (range_type == VR_RANGE)
2908 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2909 *min_size = min.to_uhwi ();
2910 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2911 *probable_max_size = *max_size = max.to_uhwi ();
2913 else if (range_type == VR_ANTI_RANGE)
2915 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2916 if (min == 0)
2918 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2919 *min_size = max.to_uhwi () + 1;
2921 /* Code like
2923 int n;
2924 if (n < 100)
2925 memcpy (a, b, n)
2927 produces an anti-range allowing negative values of N. We can still
2928 use that information and guess that N is not negative. */
2930 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2931 *probable_max_size = min.to_uhwi () - 1;
2934 gcc_checking_assert (*max_size <=
2935 (unsigned HOST_WIDE_INT)
2936 GET_MODE_MASK (GET_MODE (len_rtx)));
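/* A worked example of the anti-range handling above: if VRP recorded LEN
   with the anti-range ~[0, 3], LEN must be at least 4, so MIN_SIZE is
   raised to 4. If instead the anti-range excludes a huge upper block, as
   the "if (n < 100)" pattern does, PROBABLE_MAX_SIZE is lowered to the
   recorded minimum minus one (99 in that example) while MAX_SIZE stays
   conservative.  */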
2939 /* Helper function to do the actual work for expand_builtin_memcpy. */
2941 static rtx
2942 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2944 const char *src_str;
2945 unsigned int src_align = get_pointer_alignment (src);
2946 unsigned int dest_align = get_pointer_alignment (dest);
2947 rtx dest_mem, src_mem, dest_addr, len_rtx;
2948 HOST_WIDE_INT expected_size = -1;
2949 unsigned int expected_align = 0;
2950 unsigned HOST_WIDE_INT min_size;
2951 unsigned HOST_WIDE_INT max_size;
2952 unsigned HOST_WIDE_INT probable_max_size;
2954 /* If DEST is not a pointer type, call the normal function. */
2955 if (dest_align == 0)
2956 return NULL_RTX;
2958 /* Likewise, if SRC is not a pointer type, don't do this
2959 operation in-line. */
2960 if (src_align == 0)
2961 return NULL_RTX;
2963 if (currently_expanding_gimple_stmt)
2964 stringop_block_profile (currently_expanding_gimple_stmt,
2965 &expected_align, &expected_size);
2967 if (expected_align < dest_align)
2968 expected_align = dest_align;
2969 dest_mem = get_memory_rtx (dest, len);
2970 set_mem_align (dest_mem, dest_align);
2971 len_rtx = expand_normal (len);
2972 determine_block_size (len, len_rtx, &min_size, &max_size,
2973 &probable_max_size);
2974 src_str = c_getstr (src);
2976 /* If SRC is a string constant and block move would be done
2977 by pieces, we can avoid loading the string from memory
2978 and only store the computed constants. */
2979 if (src_str
2980 && CONST_INT_P (len_rtx)
2981 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2982 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2983 CONST_CAST (char *, src_str),
2984 dest_align, false))
2986 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2987 builtin_memcpy_read_str,
2988 CONST_CAST (char *, src_str),
2989 dest_align, false, 0);
2990 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2991 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2992 return dest_mem;
2995 src_mem = get_memory_rtx (src, len);
2996 set_mem_align (src_mem, src_align);
2998 /* Copy word part most expediently. */
2999 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3000 CALL_EXPR_TAILCALL (exp)
3001 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3002 expected_align, expected_size,
3003 min_size, max_size, probable_max_size);
3005 if (dest_addr == 0)
3007 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3008 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3011 return dest_addr;
3014 /* Expand a call EXP to the memcpy builtin.
3015 Return NULL_RTX if we failed; the caller should emit a normal call;
3016 otherwise try to get the result in TARGET, if convenient (and in
3017 mode MODE if that's convenient). */
3019 static rtx
3020 expand_builtin_memcpy (tree exp, rtx target)
3022 if (!validate_arglist (exp,
3023 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3024 return NULL_RTX;
3025 else
3027 tree dest = CALL_EXPR_ARG (exp, 0);
3028 tree src = CALL_EXPR_ARG (exp, 1);
3029 tree len = CALL_EXPR_ARG (exp, 2);
3030 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3034 /* Expand an instrumented call EXP to the memcpy builtin.
3035 Return NULL_RTX if we failed; the caller should emit a normal call;
3036 otherwise try to get the result in TARGET, if convenient (and in
3037 mode MODE if that's convenient). */
3039 static rtx
3040 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3042 if (!validate_arglist (exp,
3043 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3044 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3045 INTEGER_TYPE, VOID_TYPE))
3046 return NULL_RTX;
3047 else
3049 tree dest = CALL_EXPR_ARG (exp, 0);
3050 tree src = CALL_EXPR_ARG (exp, 2);
3051 tree len = CALL_EXPR_ARG (exp, 4);
3052 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3054 /* Return src bounds with the result. */
3055 if (res)
3057 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3058 expand_normal (CALL_EXPR_ARG (exp, 1)));
3059 res = chkp_join_splitted_slot (res, bnd);
3061 return res;
3065 /* Expand a call EXP to the mempcpy builtin.
3066 Return NULL_RTX if we failed; the caller should emit a normal call,
3067 otherwise try to get the result in TARGET, if convenient (and in
3068 mode MODE if that's convenient). If ENDP is 0 return the
3069 destination pointer, if ENDP is 1 return the end pointer ala
3070 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3071 stpcpy. */
3073 static rtx
3074 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3076 if (!validate_arglist (exp,
3077 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3078 return NULL_RTX;
3079 else
3081 tree dest = CALL_EXPR_ARG (exp, 0);
3082 tree src = CALL_EXPR_ARG (exp, 1);
3083 tree len = CALL_EXPR_ARG (exp, 2);
3084 return expand_builtin_mempcpy_args (dest, src, len,
3085 target, mode, /*endp=*/ 1,
3086 exp);
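/* Note the ENDP convention shared by these helpers: with the call above,
   ENDP == 1 makes the expansion return DEST + LEN (the mempcpy result);
   ENDP == 0 would return DEST itself, and ENDP == 2 would return the end
   pointer minus one, i.e. a pointer to the terminating NUL as stpcpy
   requires.  */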
3090 /* Expand an instrumented call EXP to the mempcpy builtin.
3091 Return NULL_RTX if we failed; the caller should emit a normal call;
3092 otherwise try to get the result in TARGET, if convenient (and in
3093 mode MODE if that's convenient). */
3095 static rtx
3096 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3098 if (!validate_arglist (exp,
3099 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3100 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3101 INTEGER_TYPE, VOID_TYPE))
3102 return NULL_RTX;
3103 else
3105 tree dest = CALL_EXPR_ARG (exp, 0);
3106 tree src = CALL_EXPR_ARG (exp, 2);
3107 tree len = CALL_EXPR_ARG (exp, 4);
3108 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3109 mode, 1, exp);
3111 /* Return src bounds with the result. */
3112 if (res)
3114 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3115 expand_normal (CALL_EXPR_ARG (exp, 1)));
3116 res = chkp_join_splitted_slot (res, bnd);
3118 return res;
3122 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3123 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3124 so that this can also be called without constructing an actual CALL_EXPR.
3125 The other arguments and return value are the same as for
3126 expand_builtin_mempcpy. */
3128 static rtx
3129 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3130 rtx target, machine_mode mode, int endp,
3131 tree orig_exp)
3133 tree fndecl = get_callee_fndecl (orig_exp);
3135 /* If return value is ignored, transform mempcpy into memcpy. */
3136 if (target == const0_rtx
3137 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3138 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3140 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3141 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3142 dest, src, len);
3143 return expand_expr (result, target, mode, EXPAND_NORMAL);
3145 else if (target == const0_rtx
3146 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3148 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3149 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3150 dest, src, len);
3151 return expand_expr (result, target, mode, EXPAND_NORMAL);
3153 else
3155 const char *src_str;
3156 unsigned int src_align = get_pointer_alignment (src);
3157 unsigned int dest_align = get_pointer_alignment (dest);
3158 rtx dest_mem, src_mem, len_rtx;
3160 /* If either SRC or DEST is not a pointer type, don't do this
3161 operation in-line. */
3162 if (dest_align == 0 || src_align == 0)
3163 return NULL_RTX;
3165 /* If LEN is not constant, call the normal function. */
3166 if (! tree_fits_uhwi_p (len))
3167 return NULL_RTX;
3169 len_rtx = expand_normal (len);
3170 src_str = c_getstr (src);
3172 /* If SRC is a string constant and block move would be done
3173 by pieces, we can avoid loading the string from memory
3174 and only store the computed constants. */
3175 if (src_str
3176 && CONST_INT_P (len_rtx)
3177 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3178 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3179 CONST_CAST (char *, src_str),
3180 dest_align, false))
3182 dest_mem = get_memory_rtx (dest, len);
3183 set_mem_align (dest_mem, dest_align);
3184 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3185 builtin_memcpy_read_str,
3186 CONST_CAST (char *, src_str),
3187 dest_align, false, endp);
3188 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3189 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3190 return dest_mem;
3193 if (CONST_INT_P (len_rtx)
3194 && can_move_by_pieces (INTVAL (len_rtx),
3195 MIN (dest_align, src_align)))
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 src_mem = get_memory_rtx (src, len);
3200 set_mem_align (src_mem, src_align);
3201 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3202 MIN (dest_align, src_align), endp);
3203 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3204 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3205 return dest_mem;
3208 return NULL_RTX;
3212 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3213 we failed; the caller should emit a normal call; otherwise try to
3214 get the result in TARGET, if convenient. If ENDP is 0 return the
3215 destination pointer, if ENDP is 1 return the end pointer ala
3216 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3217 stpcpy. */
3219 static rtx
3220 expand_movstr (tree dest, tree src, rtx target, int endp)
3222 struct expand_operand ops[3];
3223 rtx dest_mem;
3224 rtx src_mem;
3226 if (!targetm.have_movstr ())
3227 return NULL_RTX;
3229 dest_mem = get_memory_rtx (dest, NULL);
3230 src_mem = get_memory_rtx (src, NULL);
3231 if (!endp)
3233 target = force_reg (Pmode, XEXP (dest_mem, 0));
3234 dest_mem = replace_equiv_address (dest_mem, target);
3237 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3238 create_fixed_operand (&ops[1], dest_mem);
3239 create_fixed_operand (&ops[2], src_mem);
3240 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3241 return NULL_RTX;
3243 if (endp && target != const0_rtx)
3245 target = ops[0].value;
3246 /* movstr is supposed to set end to the address of the NUL
3247 terminator. If the caller requested a mempcpy-like return value,
3248 adjust it. */
3249 if (endp == 1)
3251 rtx tem = plus_constant (GET_MODE (target),
3252 gen_lowpart (GET_MODE (target), target), 1);
3253 emit_move_insn (target, force_operand (tem, NULL_RTX));
3256 return target;
3259 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3260 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3261 try to get the result in TARGET, if convenient (and in mode MODE if that's
3262 convenient). */
3264 static rtx
3265 expand_builtin_strcpy (tree exp, rtx target)
3267 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3269 tree dest = CALL_EXPR_ARG (exp, 0);
3270 tree src = CALL_EXPR_ARG (exp, 1);
3271 return expand_builtin_strcpy_args (dest, src, target);
3273 return NULL_RTX;
3276 /* Helper function to do the actual work for expand_builtin_strcpy. The
3277 arguments to the builtin_strcpy call DEST and SRC are broken out
3278 so that this can also be called without constructing an actual CALL_EXPR.
3279 The other arguments and return value are the same as for
3280 expand_builtin_strcpy. */
3282 static rtx
3283 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3285 return expand_movstr (dest, src, target, /*endp=*/0);
3288 /* Expand a call EXP to the stpcpy builtin.
3289 Return NULL_RTX if we failed; the caller should emit a normal call;
3290 otherwise try to get the result in TARGET, if convenient (and in
3291 mode MODE if that's convenient). */
3293 static rtx
3294 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3296 tree dst, src;
3297 location_t loc = EXPR_LOCATION (exp);
3299 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3300 return NULL_RTX;
3302 dst = CALL_EXPR_ARG (exp, 0);
3303 src = CALL_EXPR_ARG (exp, 1);
3305 /* If return value is ignored, transform stpcpy into strcpy. */
3306 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3308 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3309 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3310 return expand_expr (result, target, mode, EXPAND_NORMAL);
3312 else
3314 tree len, lenp1;
3315 rtx ret;
3317 /* Ensure we get an actual string whose length can be evaluated at
3318 compile-time, not an expression containing a string. This is
3319 because the latter will potentially produce pessimized code
3320 when used to produce the return value. */
3321 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3322 return expand_movstr (dst, src, target, /*endp=*/2);
3324 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3325 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3326 target, mode, /*endp=*/2,
3327 exp);
3329 if (ret)
3330 return ret;
3332 if (TREE_CODE (len) == INTEGER_CST)
3334 rtx len_rtx = expand_normal (len);
3336 if (CONST_INT_P (len_rtx))
3338 ret = expand_builtin_strcpy_args (dst, src, target);
3340 if (ret)
3342 if (! target)
3344 if (mode != VOIDmode)
3345 target = gen_reg_rtx (mode);
3346 else
3347 target = gen_reg_rtx (GET_MODE (ret));
3349 if (GET_MODE (target) != GET_MODE (ret))
3350 ret = gen_lowpart (GET_MODE (target), ret);
3352 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3353 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3354 gcc_assert (ret);
3356 return target;
3361 return expand_movstr (dst, src, target, /*endp=*/2);
3365 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3366 bytes from constant string DATA + OFFSET and return it as target
3367 constant. */
3369 static rtx
3370 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3371 machine_mode mode)
3373 const char *str = (const char *) data;
3375 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3376 return const0_rtx;
3378 return c_readstr (str + offset, mode);
3381 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3382 NULL_RTX if we failed; the caller should emit a normal call. */
3384 static rtx
3385 expand_builtin_strncpy (tree exp, rtx target)
3387 location_t loc = EXPR_LOCATION (exp);
3389 if (validate_arglist (exp,
3390 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3392 tree dest = CALL_EXPR_ARG (exp, 0);
3393 tree src = CALL_EXPR_ARG (exp, 1);
3394 tree len = CALL_EXPR_ARG (exp, 2);
3395 tree slen = c_strlen (src, 1);
3397 /* We must be passed a constant len and src parameter. */
3398 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3399 return NULL_RTX;
3401 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3403 /* We're required to pad with trailing zeros if the requested
3404 len is greater than strlen(s2)+1. In that case try to
3405 use store_by_pieces; if that fails, punt. */
3406 if (tree_int_cst_lt (slen, len))
3408 unsigned int dest_align = get_pointer_alignment (dest);
3409 const char *p = c_getstr (src);
3410 rtx dest_mem;
3412 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3413 || !can_store_by_pieces (tree_to_uhwi (len),
3414 builtin_strncpy_read_str,
3415 CONST_CAST (char *, p),
3416 dest_align, false))
3417 return NULL_RTX;
3419 dest_mem = get_memory_rtx (dest, len);
3420 store_by_pieces (dest_mem, tree_to_uhwi (len),
3421 builtin_strncpy_read_str,
3422 CONST_CAST (char *, p), dest_align, false, 0);
3423 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3424 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3425 return dest_mem;
3428 return NULL_RTX;
3431 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3432 bytes from constant string DATA + OFFSET and return it as target
3433 constant. */
3435 static rtx
3436 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3437 machine_mode mode)
3439 const char *c = (const char *) data;
3440 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3442 memset (p, *c, GET_MODE_SIZE (mode));
3444 return c_readstr (p, mode);
3447 /* Callback routine for store_by_pieces. Return the RTL of a register
3448 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3449 char value given in the RTL register data. For example, if mode is
3450 4 bytes wide, return the RTL for 0x01010101*data. */
3452 static rtx
3453 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3454 machine_mode mode)
3456 rtx target, coeff;
3457 size_t size;
3458 char *p;
3460 size = GET_MODE_SIZE (mode);
3461 if (size == 1)
3462 return (rtx) data;
3464 p = XALLOCAVEC (char, size);
3465 memset (p, 1, size);
3466 coeff = c_readstr (p, mode);
3468 target = convert_to_mode (mode, (rtx) data, 1);
3469 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3470 return force_reg (mode, target);
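/* The multiplication by the 0x01...01 coefficient is what replicates the
   byte: for DATA holding 0x2a and a four-byte MODE, 0x01010101 * 0x2a is
   0x2a2a2a2a, exactly the pattern memset would store.  */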
3473 /* Expand expression EXP, which is a call to the memset builtin. Return
3474 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3475 try to get the result in TARGET, if convenient (and in mode MODE if that's
3476 convenient). */
3478 static rtx
3479 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3481 if (!validate_arglist (exp,
3482 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3483 return NULL_RTX;
3484 else
3486 tree dest = CALL_EXPR_ARG (exp, 0);
3487 tree val = CALL_EXPR_ARG (exp, 1);
3488 tree len = CALL_EXPR_ARG (exp, 2);
3489 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3493 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3494 Return NULL_RTX if we failed; the caller should emit a normal call; otherwise
3495 try to get the result in TARGET, if convenient (and in mode MODE if that's
3496 convenient). */
3498 static rtx
3499 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3501 if (!validate_arglist (exp,
3502 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3503 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3504 return NULL_RTX;
3505 else
3507 tree dest = CALL_EXPR_ARG (exp, 0);
3508 tree val = CALL_EXPR_ARG (exp, 2);
3509 tree len = CALL_EXPR_ARG (exp, 3);
3510 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3512 /* Return src bounds with the result. */
3513 if (res)
3515 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3516 expand_normal (CALL_EXPR_ARG (exp, 1)));
3517 res = chkp_join_splitted_slot (res, bnd);
3519 return res;
3523 /* Helper function to do the actual work for expand_builtin_memset. The
3524 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_memset. */
3529 static rtx
3530 expand_builtin_memset_args (tree dest, tree val, tree len,
3531 rtx target, machine_mode mode, tree orig_exp)
3533 tree fndecl, fn;
3534 enum built_in_function fcode;
3535 machine_mode val_mode;
3536 char c;
3537 unsigned int dest_align;
3538 rtx dest_mem, dest_addr, len_rtx;
3539 HOST_WIDE_INT expected_size = -1;
3540 unsigned int expected_align = 0;
3541 unsigned HOST_WIDE_INT min_size;
3542 unsigned HOST_WIDE_INT max_size;
3543 unsigned HOST_WIDE_INT probable_max_size;
3545 dest_align = get_pointer_alignment (dest);
3547 /* If DEST is not a pointer type, don't do this operation in-line. */
3548 if (dest_align == 0)
3549 return NULL_RTX;
3551 if (currently_expanding_gimple_stmt)
3552 stringop_block_profile (currently_expanding_gimple_stmt,
3553 &expected_align, &expected_size);
3555 if (expected_align < dest_align)
3556 expected_align = dest_align;
3558 /* If the LEN parameter is zero, return DEST. */
3559 if (integer_zerop (len))
3561 /* Evaluate and ignore VAL in case it has side-effects. */
3562 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3563 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3566 /* Stabilize the arguments in case we fail. */
3567 dest = builtin_save_expr (dest);
3568 val = builtin_save_expr (val);
3569 len = builtin_save_expr (len);
3571 len_rtx = expand_normal (len);
3572 determine_block_size (len, len_rtx, &min_size, &max_size,
3573 &probable_max_size);
3574 dest_mem = get_memory_rtx (dest, len);
3575 val_mode = TYPE_MODE (unsigned_char_type_node);
3577 if (TREE_CODE (val) != INTEGER_CST)
3579 rtx val_rtx;
3581 val_rtx = expand_normal (val);
3582 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3584 /* Assume that we can memset by pieces if we can store
3585 * the coefficients by pieces (in the required modes).
3586 * We can't pass builtin_memset_gen_str as that emits RTL. */
3587 c = 1;
3588 if (tree_fits_uhwi_p (len)
3589 && can_store_by_pieces (tree_to_uhwi (len),
3590 builtin_memset_read_str, &c, dest_align,
3591 true))
3593 val_rtx = force_reg (val_mode, val_rtx);
3594 store_by_pieces (dest_mem, tree_to_uhwi (len),
3595 builtin_memset_gen_str, val_rtx, dest_align,
3596 true, 0);
3598 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3599 dest_align, expected_align,
3600 expected_size, min_size, max_size,
3601 probable_max_size))
3602 goto do_libcall;
3604 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3605 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3606 return dest_mem;
3609 if (target_char_cast (val, &c))
3610 goto do_libcall;
3612 if (c)
3614 if (tree_fits_uhwi_p (len)
3615 && can_store_by_pieces (tree_to_uhwi (len),
3616 builtin_memset_read_str, &c, dest_align,
3617 true))
3618 store_by_pieces (dest_mem, tree_to_uhwi (len),
3619 builtin_memset_read_str, &c, dest_align, true, 0);
3620 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3621 gen_int_mode (c, val_mode),
3622 dest_align, expected_align,
3623 expected_size, min_size, max_size,
3624 probable_max_size))
3625 goto do_libcall;
3627 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3628 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3629 return dest_mem;
3632 set_mem_align (dest_mem, dest_align);
3633 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3634 CALL_EXPR_TAILCALL (orig_exp)
3635 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3636 expected_align, expected_size,
3637 min_size, max_size,
3638 probable_max_size);
3640 if (dest_addr == 0)
3642 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3643 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3646 return dest_addr;
3648 do_libcall:
3649 fndecl = get_callee_fndecl (orig_exp);
3650 fcode = DECL_FUNCTION_CODE (fndecl);
3651 if (fcode == BUILT_IN_MEMSET
3652 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3653 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3654 dest, val, len);
3655 else if (fcode == BUILT_IN_BZERO)
3656 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3657 dest, len);
3658 else
3659 gcc_unreachable ();
3660 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3661 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3662 return expand_call (fn, target, target == const0_rtx);
3665 /* Expand expression EXP, which is a call to the bzero builtin. Return
3666 NULL_RTX if we failed; the caller should emit a normal call. */
3668 static rtx
3669 expand_builtin_bzero (tree exp)
3671 tree dest, size;
3672 location_t loc = EXPR_LOCATION (exp);
3674 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3675 return NULL_RTX;
3677 dest = CALL_EXPR_ARG (exp, 0);
3678 size = CALL_EXPR_ARG (exp, 1);
3680 /* New argument list transforming bzero(ptr x, int y) to
3681 memset(ptr x, int 0, size_t y). This is done this way
3682 so that if it isn't expanded inline, we fall back to
3683 calling bzero instead of memset. */
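/* Illustrative example (added): a source-level call bzero (buf, n) is
   treated below as memset (buf, 0, (size_t) n); if the inline expansion
   fails, the emitted library call is still bzero, not memset.  */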
3685 return expand_builtin_memset_args (dest, integer_zero_node,
3686 fold_convert_loc (loc,
3687 size_type_node, size),
3688 const0_rtx, VOIDmode, exp);
3691 /* Try to expand cmpstr operation ICODE with the given operands.
3692 Return the result rtx on success, otherwise return null. */
3694 static rtx
3695 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3696 HOST_WIDE_INT align)
3698 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3700 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3701 target = NULL_RTX;
3703 struct expand_operand ops[4];
3704 create_output_operand (&ops[0], target, insn_mode);
3705 create_fixed_operand (&ops[1], arg1_rtx);
3706 create_fixed_operand (&ops[2], arg2_rtx);
3707 create_integer_operand (&ops[3], align);
3708 if (maybe_expand_insn (icode, 4, ops))
3709 return ops[0].value;
3710 return NULL_RTX;
3713 /* Expand expression EXP, which is a call to the memcmp built-in function.
3714 Return NULL_RTX if we failed and the caller should emit a normal call,
3715 otherwise try to get the result in TARGET, if convenient.
3716 RESULT_EQ is true if we can relax the returned value to be either zero
3717 or nonzero, without caring about the sign. */
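/* Illustrative note (added): RESULT_EQ is typically true when the caller
   only tests the result for equality with zero, e.g. memcmp (a, b, n) == 0,
   so any nonzero value is as good as the signed difference.  */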
3719 static rtx
3720 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3722 if (!validate_arglist (exp,
3723 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724 return NULL_RTX;
3726 tree arg1 = CALL_EXPR_ARG (exp, 0);
3727 tree arg2 = CALL_EXPR_ARG (exp, 1);
3728 tree len = CALL_EXPR_ARG (exp, 2);
3729 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3730 location_t loc = EXPR_LOCATION (exp);
3732 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3733 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3735 /* If we don't have POINTER_TYPE, call the function. */
3736 if (arg1_align == 0 || arg2_align == 0)
3737 return NULL_RTX;
3739 rtx arg1_rtx = get_memory_rtx (arg1, len);
3740 rtx arg2_rtx = get_memory_rtx (arg2, len);
3741 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743 /* Set MEM_SIZE as appropriate. */
3744 if (CONST_INT_P (len_rtx))
3746 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3747 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3750 by_pieces_constfn constfn = NULL;
3752 const char *src_str = c_getstr (arg2);
3753 if (result_eq && src_str == NULL)
3755 src_str = c_getstr (arg1);
3756 if (src_str != NULL)
3757 std::swap (arg1_rtx, arg2_rtx);
3760 /* If SRC is a string constant and block move would be done
3761 by pieces, we can avoid loading the string from memory
3762 and only store the computed constants. */
3763 if (src_str
3764 && CONST_INT_P (len_rtx)
3765 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3766 constfn = builtin_memcpy_read_str;
3768 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3769 TREE_TYPE (len), target,
3770 result_eq, constfn,
3771 CONST_CAST (char *, src_str));
3773 if (result)
3775 /* Return the value in the proper mode for this function. */
3776 if (GET_MODE (result) == mode)
3777 return result;
3779 if (target != 0)
3781 convert_move (target, result, 0);
3782 return target;
3785 return convert_to_mode (mode, result, 0);
3788 return NULL_RTX;
3791 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3792 if we failed; the caller should emit a normal call. Otherwise try to get
3793 the result in TARGET, if convenient. */
3795 static rtx
3796 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3798 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3799 return NULL_RTX;
3801 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3802 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3803 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3805 rtx arg1_rtx, arg2_rtx;
3806 tree fndecl, fn;
3807 tree arg1 = CALL_EXPR_ARG (exp, 0);
3808 tree arg2 = CALL_EXPR_ARG (exp, 1);
3809 rtx result = NULL_RTX;
3811 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3812 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3814 /* If we don't have POINTER_TYPE, call the function. */
3815 if (arg1_align == 0 || arg2_align == 0)
3816 return NULL_RTX;
3818 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3819 arg1 = builtin_save_expr (arg1);
3820 arg2 = builtin_save_expr (arg2);
3822 arg1_rtx = get_memory_rtx (arg1, NULL);
3823 arg2_rtx = get_memory_rtx (arg2, NULL);
3825 /* Try to call cmpstrsi. */
3826 if (cmpstr_icode != CODE_FOR_nothing)
3827 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3828 MIN (arg1_align, arg2_align));
3830 /* Try to determine at least one length and call cmpstrnsi. */
3831 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3833 tree len;
3834 rtx arg3_rtx;
3836 tree len1 = c_strlen (arg1, 1);
3837 tree len2 = c_strlen (arg2, 1);
3839 if (len1)
3840 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3841 if (len2)
3842 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3844 /* If we don't have a constant length for the first, use the length
3845 of the second, if we know it. We don't require a constant for
3846 this case; some cost analysis could be done if both are available
3847 but neither is constant. For now, assume they're equally cheap,
3848 unless one has side effects. If both strings have constant lengths,
3849 use the smaller. */
3851 if (!len1)
3852 len = len2;
3853 else if (!len2)
3854 len = len1;
3855 else if (TREE_SIDE_EFFECTS (len1))
3856 len = len2;
3857 else if (TREE_SIDE_EFFECTS (len2))
3858 len = len1;
3859 else if (TREE_CODE (len1) != INTEGER_CST)
3860 len = len2;
3861 else if (TREE_CODE (len2) != INTEGER_CST)
3862 len = len1;
3863 else if (tree_int_cst_lt (len1, len2))
3864 len = len1;
3865 else
3866 len = len2;
3868 /* If both arguments have side effects, we cannot optimize. */
3869 if (len && !TREE_SIDE_EFFECTS (len))
3871 arg3_rtx = expand_normal (len);
3872 result = expand_cmpstrn_or_cmpmem
3873 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3874 arg3_rtx, MIN (arg1_align, arg2_align));
3878 if (result)
3880 /* Return the value in the proper mode for this function. */
3881 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3882 if (GET_MODE (result) == mode)
3883 return result;
3884 if (target == 0)
3885 return convert_to_mode (mode, result, 0);
3886 convert_move (target, result, 0);
3887 return target;
3890 /* Expand the library call ourselves using a stabilized argument
3891 list to avoid re-evaluating the function's arguments twice. */
3892 fndecl = get_callee_fndecl (exp);
3893 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3894 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3895 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3896 return expand_call (fn, target, target == const0_rtx);
3898 return NULL_RTX;
3901 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3902 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
3903 the result in TARGET, if convenient. */
3905 static rtx
3906 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3907 ATTRIBUTE_UNUSED machine_mode mode)
3909 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3911 if (!validate_arglist (exp,
3912 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3915 /* If c_strlen can determine an expression for one of the string
3916 lengths, and it doesn't have side effects, then emit cmpstrnsi
3917 using length MIN(strlen(string)+1, arg3). */
3918 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3919 if (cmpstrn_icode != CODE_FOR_nothing)
3921 tree len, len1, len2;
3922 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3923 rtx result;
3924 tree fndecl, fn;
3925 tree arg1 = CALL_EXPR_ARG (exp, 0);
3926 tree arg2 = CALL_EXPR_ARG (exp, 1);
3927 tree arg3 = CALL_EXPR_ARG (exp, 2);
3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3932 len1 = c_strlen (arg1, 1);
3933 len2 = c_strlen (arg2, 1);
3935 if (len1)
3936 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3937 if (len2)
3938 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3940 /* If we don't have a constant length for the first, use the length
3941 of the second, if we know it. We don't require a constant for
3942 this case; some cost analysis could be done if both are available
3943 but neither is constant. For now, assume they're equally cheap,
3944 unless one has side effects. If both strings have constant lengths,
3945 use the smaller. */
3947 if (!len1)
3948 len = len2;
3949 else if (!len2)
3950 len = len1;
3951 else if (TREE_SIDE_EFFECTS (len1))
3952 len = len2;
3953 else if (TREE_SIDE_EFFECTS (len2))
3954 len = len1;
3955 else if (TREE_CODE (len1) != INTEGER_CST)
3956 len = len2;
3957 else if (TREE_CODE (len2) != INTEGER_CST)
3958 len = len1;
3959 else if (tree_int_cst_lt (len1, len2))
3960 len = len1;
3961 else
3962 len = len2;
3964 /* If both arguments have side effects, we cannot optimize. */
3965 if (!len || TREE_SIDE_EFFECTS (len))
3966 return NULL_RTX;
3968 /* The actual new length parameter is MIN(len,arg3). */
3969 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3970 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3972 /* If we don't have POINTER_TYPE, call the function. */
3973 if (arg1_align == 0 || arg2_align == 0)
3974 return NULL_RTX;
3976 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3977 arg1 = builtin_save_expr (arg1);
3978 arg2 = builtin_save_expr (arg2);
3979 len = builtin_save_expr (len);
3981 arg1_rtx = get_memory_rtx (arg1, len);
3982 arg2_rtx = get_memory_rtx (arg2, len);
3983 arg3_rtx = expand_normal (len);
3984 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3985 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3986 MIN (arg1_align, arg2_align));
3987 if (result)
3989 /* Return the value in the proper mode for this function. */
3990 mode = TYPE_MODE (TREE_TYPE (exp));
3991 if (GET_MODE (result) == mode)
3992 return result;
3993 if (target == 0)
3994 return convert_to_mode (mode, result, 0);
3995 convert_move (target, result, 0);
3996 return target;
3999 /* Expand the library call ourselves using a stabilized argument
4000 list to avoid re-evaluating the function's arguments twice. */
4001 fndecl = get_callee_fndecl (exp);
4002 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4003 arg1, arg2, len);
4004 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4005 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4006 return expand_call (fn, target, target == const0_rtx);
4008 return NULL_RTX;
4011 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4012 if that's convenient. */
4015 expand_builtin_saveregs (void)
4017 rtx val;
4018 rtx_insn *seq;
4020 /* Don't do __builtin_saveregs more than once in a function.
4021 Save the result of the first call and reuse it. */
4022 if (saveregs_value != 0)
4023 return saveregs_value;
4025 /* When this function is called, it means that registers must be
4026 saved on entry to this function. So we migrate the call to the
4027 first insn of this function. */
4029 start_sequence ();
4031 /* Do whatever the machine needs done in this case. */
4032 val = targetm.calls.expand_builtin_saveregs ();
4034 seq = get_insns ();
4035 end_sequence ();
4037 saveregs_value = val;
4039 /* Put the insns after the NOTE that starts the function. If this
4040 is inside a start_sequence, make the outer-level insn chain current, so
4041 the code is placed at the start of the function. */
4042 push_topmost_sequence ();
4043 emit_insn_after (seq, entry_of_function ());
4044 pop_topmost_sequence ();
4046 return val;
4049 /* Expand a call to __builtin_next_arg. */
4051 static rtx
4052 expand_builtin_next_arg (void)
4054 /* Checking arguments is already done in fold_builtin_next_arg
4055 that must be called before this function. */
4056 return expand_binop (ptr_mode, add_optab,
4057 crtl->args.internal_arg_pointer,
4058 crtl->args.arg_offset_rtx,
4059 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4062 /* Make it easier for the backends by protecting the valist argument
4063 from multiple evaluations. */
4065 static tree
4066 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4068 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4070 /* The current way of determining the type of valist is completely
4071 bogus. We should have the information on the va builtin instead. */
4072 if (!vatype)
4073 vatype = targetm.fn_abi_va_list (cfun->decl);
4075 if (TREE_CODE (vatype) == ARRAY_TYPE)
4077 if (TREE_SIDE_EFFECTS (valist))
4078 valist = save_expr (valist);
4080 /* For this case, the backends will be expecting a pointer to
4081 vatype, but it's possible we've actually been given an array
4082 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4083 So fix it. */
4084 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4086 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4087 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4090 else
4092 tree pt = build_pointer_type (vatype);
4094 if (! needs_lvalue)
4096 if (! TREE_SIDE_EFFECTS (valist))
4097 return valist;
4099 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4100 TREE_SIDE_EFFECTS (valist) = 1;
4103 if (TREE_SIDE_EFFECTS (valist))
4104 valist = save_expr (valist);
4105 valist = fold_build2_loc (loc, MEM_REF,
4106 vatype, valist, build_int_cst (pt, 0));
4109 return valist;
4112 /* The "standard" definition of va_list is void*. */
4114 tree
4115 std_build_builtin_va_list (void)
4117 return ptr_type_node;
4120 /* The "standard" abi va_list is va_list_type_node. */
4122 tree
4123 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4125 return va_list_type_node;
4128 /* The "standard" type of va_list is va_list_type_node. */
4130 tree
4131 std_canonical_va_list_type (tree type)
4133 tree wtype, htype;
4135 wtype = va_list_type_node;
4136 htype = type;
4138 if (TREE_CODE (wtype) == ARRAY_TYPE)
4140 /* If va_list is an array type, the argument may have decayed
4141 to a pointer type, e.g. by being passed to another function.
4142 In that case, unwrap both types so that we can compare the
4143 underlying records. */
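/* Illustrative note (added, assuming an x86-64-style ABI where va_list is
   an array of one struct): a va_list passed to another function decays to
   a pointer, so both WTYPE and HTYPE are unwrapped one level before the
   record types are compared.  */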
4144 if (TREE_CODE (htype) == ARRAY_TYPE
4145 || POINTER_TYPE_P (htype))
4147 wtype = TREE_TYPE (wtype);
4148 htype = TREE_TYPE (htype);
4151 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4152 return va_list_type_node;
4154 return NULL_TREE;
4157 /* The "standard" implementation of va_start: just assign `nextarg' to
4158 the variable. */
4160 void
4161 std_expand_builtin_va_start (tree valist, rtx nextarg)
4163 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4164 convert_move (va_r, nextarg, 0);
4166 /* We do not have any valid bounds for the pointer, so
4167 just store zero bounds for it. */
4168 if (chkp_function_instrumented_p (current_function_decl))
4169 chkp_expand_bounds_reset_for_mem (valist,
4170 make_tree (TREE_TYPE (valist),
4171 nextarg));
4174 /* Expand EXP, a call to __builtin_va_start. */
4176 static rtx
4177 expand_builtin_va_start (tree exp)
4179 rtx nextarg;
4180 tree valist;
4181 location_t loc = EXPR_LOCATION (exp);
4183 if (call_expr_nargs (exp) < 2)
4185 error_at (loc, "too few arguments to function %<va_start%>");
4186 return const0_rtx;
4189 if (fold_builtin_next_arg (exp, true))
4190 return const0_rtx;
4192 nextarg = expand_builtin_next_arg ();
4193 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4195 if (targetm.expand_builtin_va_start)
4196 targetm.expand_builtin_va_start (valist, nextarg);
4197 else
4198 std_expand_builtin_va_start (valist, nextarg);
4200 return const0_rtx;
4203 /* Expand EXP, a call to __builtin_va_end. */
4205 static rtx
4206 expand_builtin_va_end (tree exp)
4208 tree valist = CALL_EXPR_ARG (exp, 0);
4210 /* Evaluate for side effects, if needed. I hate macros that don't
4211 do that. */
4212 if (TREE_SIDE_EFFECTS (valist))
4213 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4215 return const0_rtx;
4218 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4219 builtin rather than just as an assignment in stdarg.h because of the
4220 nastiness of array-type va_list types. */
4222 static rtx
4223 expand_builtin_va_copy (tree exp)
4225 tree dst, src, t;
4226 location_t loc = EXPR_LOCATION (exp);
4228 dst = CALL_EXPR_ARG (exp, 0);
4229 src = CALL_EXPR_ARG (exp, 1);
4231 dst = stabilize_va_list_loc (loc, dst, 1);
4232 src = stabilize_va_list_loc (loc, src, 0);
4234 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4236 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4238 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4239 TREE_SIDE_EFFECTS (t) = 1;
4240 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4242 else
4244 rtx dstb, srcb, size;
4246 /* Evaluate to pointers. */
4247 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4248 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4249 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4250 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4252 dstb = convert_memory_address (Pmode, dstb);
4253 srcb = convert_memory_address (Pmode, srcb);
4255 /* "Dereference" to BLKmode memories. */
4256 dstb = gen_rtx_MEM (BLKmode, dstb);
4257 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4258 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4259 srcb = gen_rtx_MEM (BLKmode, srcb);
4260 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4261 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4263 /* Copy. */
4264 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4267 return const0_rtx;
4270 /* Expand a call to one of the builtin functions __builtin_frame_address or
4271 __builtin_return_address. */
4273 static rtx
4274 expand_builtin_frame_address (tree fndecl, tree exp)
4276 /* The argument must be a nonnegative integer constant.
4277 It counts the number of frames to scan up the stack.
4278 The value is either the frame pointer value or the return
4279 address saved in that frame. */
4280 if (call_expr_nargs (exp) == 0)
4281 /* Warning about missing arg was already issued. */
4282 return const0_rtx;
4283 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4285 error ("invalid argument to %qD", fndecl);
4286 return const0_rtx;
4288 else
4290 /* Number of frames to scan up the stack. */
4291 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4293 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4295 /* Some ports cannot access arbitrary stack frames. */
4296 if (tem == NULL)
4298 warning (0, "unsupported argument to %qD", fndecl);
4299 return const0_rtx;
4302 if (count)
4304 /* Warn since no effort is made to ensure that any frame
4305 beyond the current one exists or can be safely reached. */
4306 warning (OPT_Wframe_address, "calling %qD with "
4307 "a nonzero argument is unsafe", fndecl);
4310 /* For __builtin_frame_address, return what we've got. */
4311 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4312 return tem;
4314 if (!REG_P (tem)
4315 && ! CONSTANT_P (tem))
4316 tem = copy_addr_to_reg (tem);
4317 return tem;
4321 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4322 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4323 is the same as for allocate_dynamic_stack_space. */
4325 static rtx
4326 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4328 rtx op0;
4329 rtx result;
4330 bool valid_arglist;
4331 unsigned int align;
4332 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4333 == BUILT_IN_ALLOCA_WITH_ALIGN);
4335 valid_arglist
4336 = (alloca_with_align
4337 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4338 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4340 if (!valid_arglist)
4341 return NULL_RTX;
4343 /* Compute the argument. */
4344 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4346 /* Compute the alignment. */
4347 align = (alloca_with_align
4348 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4349 : BIGGEST_ALIGNMENT);
4351 /* Allocate the desired space. */
4352 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4353 result = convert_memory_address (ptr_mode, result);
4355 return result;
4358 /* Expand a call to bswap builtin in EXP.
4359 Return NULL_RTX if a normal call should be emitted rather than expanding the
4360 function in-line. If convenient, the result should be placed in TARGET.
4361 SUBTARGET may be used as the target for computing one of EXP's operands. */
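/* Illustrative example (added): __builtin_bswap32 (0x12345678) expands via
   bswap_optab to 0x78563412 -- the four bytes in reversed order.  */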
4363 static rtx
4364 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4365 rtx subtarget)
4367 tree arg;
4368 rtx op0;
4370 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4371 return NULL_RTX;
4373 arg = CALL_EXPR_ARG (exp, 0);
4374 op0 = expand_expr (arg,
4375 subtarget && GET_MODE (subtarget) == target_mode
4376 ? subtarget : NULL_RTX,
4377 target_mode, EXPAND_NORMAL);
4378 if (GET_MODE (op0) != target_mode)
4379 op0 = convert_to_mode (target_mode, op0, 1);
4381 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4383 gcc_assert (target);
4385 return convert_to_mode (target_mode, target, 1);
4388 /* Expand a call to a unary builtin in EXP.
4389 Return NULL_RTX if a normal call should be emitted rather than expanding the
4390 function in-line. If convenient, the result should be placed in TARGET.
4391 SUBTARGET may be used as the target for computing one of EXP's operands. */
4393 static rtx
4394 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4395 rtx subtarget, optab op_optab)
4397 rtx op0;
4399 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4400 return NULL_RTX;
4402 /* Compute the argument. */
4403 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4404 (subtarget
4405 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4406 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4407 VOIDmode, EXPAND_NORMAL);
4408 /* Compute op, into TARGET if possible.
4409 Set TARGET to wherever the result comes back. */
4410 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4411 op_optab, op0, target, op_optab != clrsb_optab);
4412 gcc_assert (target);
4414 return convert_to_mode (target_mode, target, 0);
4417 /* Expand a call to __builtin_expect. We just return our argument
4418 as the builtin_expect semantics should already have been handled by the
4419 tree branch prediction pass. */
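/* Illustrative example (added): by this point "if (__builtin_expect (x, 1))"
   has already had its probability hint consumed by the tree branch
   prediction pass, so the call simply expands to the value of X.  */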
4421 static rtx
4422 expand_builtin_expect (tree exp, rtx target)
4424 tree arg;
4426 if (call_expr_nargs (exp) < 2)
4427 return const0_rtx;
4428 arg = CALL_EXPR_ARG (exp, 0);
4430 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4431 /* When guessing was done, the hints should be already stripped away. */
4432 gcc_assert (!flag_guess_branch_prob
4433 || optimize == 0 || seen_error ());
4434 return target;
4437 /* Expand a call to __builtin_assume_aligned. We just return our first
4438 argument, as the builtin_assume_aligned semantics should already have
4439 been handled by CCP. */
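/* Illustrative example (added): "p = __builtin_assume_aligned (q, 16)"
   expands to just Q here; the 16-byte alignment guarantee was already
   propagated by CCP.  */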
4441 static rtx
4442 expand_builtin_assume_aligned (tree exp, rtx target)
4444 if (call_expr_nargs (exp) < 2)
4445 return const0_rtx;
4446 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4447 EXPAND_NORMAL);
4448 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4449 && (call_expr_nargs (exp) < 3
4450 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4451 return target;
4454 void
4455 expand_builtin_trap (void)
4457 if (targetm.have_trap ())
4459 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4460 /* For trap insns when not accumulating outgoing args force
4461 REG_ARGS_SIZE note to prevent crossjumping of calls with
4462 different args sizes. */
4463 if (!ACCUMULATE_OUTGOING_ARGS)
4464 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4466 else
4468 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4469 tree call_expr = build_call_expr (fn, 0);
4470 expand_call (call_expr, NULL_RTX, false);
4473 emit_barrier ();
4476 /* Expand a call to __builtin_unreachable. We do nothing except emit
4477 a barrier saying that control flow will not pass here.
4479 It is the responsibility of the program being compiled to ensure
4480 that control flow never reaches __builtin_unreachable. */
4481 static void
4482 expand_builtin_unreachable (void)
4484 emit_barrier ();
4487 /* Expand EXP, a call to fabs, fabsf or fabsl.
4488 Return NULL_RTX if a normal call should be emitted rather than expanding
4489 the function inline. If convenient, the result should be placed
4490 in TARGET. SUBTARGET may be used as the target for computing
4491 the operand. */
4493 static rtx
4494 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4496 machine_mode mode;
4497 tree arg;
4498 rtx op0;
4500 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4501 return NULL_RTX;
4503 arg = CALL_EXPR_ARG (exp, 0);
4504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4505 mode = TYPE_MODE (TREE_TYPE (arg));
4506 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4507 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4510 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4511 Return NULL if a normal call should be emitted rather than expanding the
4512 function inline. If convenient, the result should be placed in TARGET.
4513 SUBTARGET may be used as the target for computing the operand. */
4515 static rtx
4516 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4518 rtx op0, op1;
4519 tree arg;
4521 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4522 return NULL_RTX;
4524 arg = CALL_EXPR_ARG (exp, 0);
4525 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4527 arg = CALL_EXPR_ARG (exp, 1);
4528 op1 = expand_normal (arg);
4530 return expand_copysign (op0, op1, target);
4533 /* Expand a call to __builtin___clear_cache. */
4535 static rtx
4536 expand_builtin___clear_cache (tree exp)
4538 if (!targetm.code_for_clear_cache)
4540 #ifdef CLEAR_INSN_CACHE
4541 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4542 does something. Just do the default expansion to a call to
4543 __clear_cache(). */
4544 return NULL_RTX;
4545 #else
4546 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4547 does nothing. There is no need to call it. Do nothing. */
4548 return const0_rtx;
4549 #endif /* CLEAR_INSN_CACHE */
4552 /* We have a "clear_cache" insn, and it will handle everything. */
4553 tree begin, end;
4554 rtx begin_rtx, end_rtx;
4556 /* We must not expand to a library call. If we did, any
4557 fallback library function in libgcc that might contain a call to
4558 __builtin___clear_cache() would recurse infinitely. */
4559 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4561 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4562 return const0_rtx;
4565 if (targetm.have_clear_cache ())
4567 struct expand_operand ops[2];
4569 begin = CALL_EXPR_ARG (exp, 0);
4570 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4572 end = CALL_EXPR_ARG (exp, 1);
4573 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4575 create_address_operand (&ops[0], begin_rtx);
4576 create_address_operand (&ops[1], end_rtx);
4577 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4578 return const0_rtx;
4580 return const0_rtx;
4583 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
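/* Illustrative example (added): with TRAMPOLINE_ALIGNMENT of 64 bits
   (8 bytes), an address such as 0x1003 is rounded up below via
   (addr + 7) & -8, giving 0x1008.  */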
4585 static rtx
4586 round_trampoline_addr (rtx tramp)
4588 rtx temp, addend, mask;
4590 /* If we don't need too much alignment, we'll have been guaranteed
4591 proper alignment by get_trampoline_type. */
4592 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4593 return tramp;
4595 /* Round address up to desired boundary. */
4596 temp = gen_reg_rtx (Pmode);
4597 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4598 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4600 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4601 temp, 0, OPTAB_LIB_WIDEN);
4602 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4603 temp, 0, OPTAB_LIB_WIDEN);
4605 return tramp;
4608 static rtx
4609 expand_builtin_init_trampoline (tree exp, bool onstack)
4611 tree t_tramp, t_func, t_chain;
4612 rtx m_tramp, r_tramp, r_chain, tmp;
4614 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4615 POINTER_TYPE, VOID_TYPE))
4616 return NULL_RTX;
4618 t_tramp = CALL_EXPR_ARG (exp, 0);
4619 t_func = CALL_EXPR_ARG (exp, 1);
4620 t_chain = CALL_EXPR_ARG (exp, 2);
4622 r_tramp = expand_normal (t_tramp);
4623 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4624 MEM_NOTRAP_P (m_tramp) = 1;
4626 /* If ONSTACK, the TRAMP argument should be the address of a field
4627 within the local function's FRAME decl. Either way, let's see if
4628 we can fill in the MEM_ATTRs for this memory. */
4629 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4630 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4632 /* Creator of a heap trampoline is responsible for making sure the
4633 address is aligned to at least STACK_BOUNDARY. Normally malloc
4634 will ensure this anyhow. */
4635 tmp = round_trampoline_addr (r_tramp);
4636 if (tmp != r_tramp)
4638 m_tramp = change_address (m_tramp, BLKmode, tmp);
4639 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4640 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4643 /* The FUNC argument should be the address of the nested function.
4644 Extract the actual function decl to pass to the hook. */
4645 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4646 t_func = TREE_OPERAND (t_func, 0);
4647 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4649 r_chain = expand_normal (t_chain);
4651 /* Generate insns to initialize the trampoline. */
4652 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4654 if (onstack)
4656 trampolines_created = 1;
4658 if (targetm.calls.custom_function_descriptors != 0)
4659 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4660 "trampoline generated for nested function %qD", t_func);
4663 return const0_rtx;
4666 static rtx
4667 expand_builtin_adjust_trampoline (tree exp)
4669 rtx tramp;
4671 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4672 return NULL_RTX;
4674 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4675 tramp = round_trampoline_addr (tramp);
4676 if (targetm.calls.trampoline_adjust_address)
4677 tramp = targetm.calls.trampoline_adjust_address (tramp);
4679 return tramp;
4682 /* Expand a call to the builtin descriptor initialization routine.
4683 A descriptor is made up of a couple of pointers to the static
4684 chain and the code entry in this order. */
4686 static rtx
4687 expand_builtin_init_descriptor (tree exp)
4689 tree t_descr, t_func, t_chain;
4690 rtx m_descr, r_descr, r_func, r_chain;
4692 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
4693 VOID_TYPE))
4694 return NULL_RTX;
4696 t_descr = CALL_EXPR_ARG (exp, 0);
4697 t_func = CALL_EXPR_ARG (exp, 1);
4698 t_chain = CALL_EXPR_ARG (exp, 2);
4700 r_descr = expand_normal (t_descr);
4701 m_descr = gen_rtx_MEM (BLKmode, r_descr);
4702 MEM_NOTRAP_P (m_descr) = 1;
4704 r_func = expand_normal (t_func);
4705 r_chain = expand_normal (t_chain);
4707 /* Generate insns to initialize the descriptor. */
4708 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
4709 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
4710 POINTER_SIZE / BITS_PER_UNIT), r_func);
4712 return const0_rtx;
4715 /* Expand a call to the builtin descriptor adjustment routine. */
4717 static rtx
4718 expand_builtin_adjust_descriptor (tree exp)
4720 rtx tramp;
4722 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4723 return NULL_RTX;
4725 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4727 /* Unalign the descriptor to allow runtime identification. */
4728 tramp = plus_constant (ptr_mode, tramp,
4729 targetm.calls.custom_function_descriptors);
4731 return force_operand (tramp, NULL_RTX);
4734 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4735 function. The function first checks whether the back end provides
4736 an insn to implement signbit for the respective mode. If not, it
4737 checks whether the floating point format of the value is such that
4738 the sign bit can be extracted. If that is not the case, error out.
4739 EXP is the expression that is a call to the builtin function; if
4740 convenient, the result should be placed in TARGET. */
4741 static rtx
4742 expand_builtin_signbit (tree exp, rtx target)
4744 const struct real_format *fmt;
4745 machine_mode fmode, imode, rmode;
4746 tree arg;
4747 int word, bitpos;
4748 enum insn_code icode;
4749 rtx temp;
4750 location_t loc = EXPR_LOCATION (exp);
4752 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4753 return NULL_RTX;
4755 arg = CALL_EXPR_ARG (exp, 0);
4756 fmode = TYPE_MODE (TREE_TYPE (arg));
4757 rmode = TYPE_MODE (TREE_TYPE (exp));
4758 fmt = REAL_MODE_FORMAT (fmode);
4760 arg = builtin_save_expr (arg);
4762 /* Expand the argument yielding a RTX expression. */
4763 temp = expand_normal (arg);
4765 /* Check if the back end provides an insn that handles signbit for the
4766 argument's mode. */
4767 icode = optab_handler (signbit_optab, fmode);
4768 if (icode != CODE_FOR_nothing)
4770 rtx_insn *last = get_last_insn ();
4771 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4772 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4773 return target;
4774 delete_insns_since (last);
4777 /* For floating point formats without a sign bit, implement signbit
4778 as "ARG < 0.0". */
4779 bitpos = fmt->signbit_ro;
4780 if (bitpos < 0)
4782 /* But we can't do this if the format supports signed zero. */
4783 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4785 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4786 build_real (TREE_TYPE (arg), dconst0));
4787 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4790 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4792 imode = int_mode_for_mode (fmode);
4793 gcc_assert (imode != BLKmode);
4794 temp = gen_lowpart (imode, temp);
4796 else
4798 imode = word_mode;
4799 /* Handle targets with different FP word orders. */
4800 if (FLOAT_WORDS_BIG_ENDIAN)
4801 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4802 else
4803 word = bitpos / BITS_PER_WORD;
4804 temp = operand_subword_force (temp, word, fmode);
4805 bitpos = bitpos % BITS_PER_WORD;
4808 /* Force the intermediate word_mode (or narrower) result into a
4809 register. This avoids attempting to create paradoxical SUBREGs
4810 of floating point modes below. */
4811 temp = force_reg (imode, temp);
4813 /* If the bitpos is within the "result mode" lowpart, the operation
4814 can be implemented with a single bitwise AND. Otherwise, we need
4815 a right shift and an AND. */
4817 if (bitpos < GET_MODE_BITSIZE (rmode))
4819 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4821 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4822 temp = gen_lowpart (rmode, temp);
4823 temp = expand_binop (rmode, and_optab, temp,
4824 immed_wide_int_const (mask, rmode),
4825 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4827 else
4829 /* Perform a logical right shift to place the signbit in the least
4830 significant bit, then truncate the result to the desired mode
4831 and mask just this bit. */
4832 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4833 temp = gen_lowpart (rmode, temp);
4834 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4835 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4838 return temp;
4841 /* Expand fork or exec calls. TARGET is the desired target of the
4842 call. EXP is the call. FN is the
4843 identifier of the actual function. IGNORE is nonzero if the
4844 value is to be ignored. */
4846 static rtx
4847 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4849 tree id, decl;
4850 tree call;
4852 /* If we are not profiling, just call the function. */
4853 if (!profile_arc_flag)
4854 return NULL_RTX;
4856 /* Otherwise call the wrapper. This should be equivalent for the rest of
4857 compiler, so the code does not diverge, and the wrapper may run the
4858 code necessary for keeping the profiling sane. */
4860 switch (DECL_FUNCTION_CODE (fn))
4862 case BUILT_IN_FORK:
4863 id = get_identifier ("__gcov_fork");
4864 break;
4866 case BUILT_IN_EXECL:
4867 id = get_identifier ("__gcov_execl");
4868 break;
4870 case BUILT_IN_EXECV:
4871 id = get_identifier ("__gcov_execv");
4872 break;
4874 case BUILT_IN_EXECLP:
4875 id = get_identifier ("__gcov_execlp");
4876 break;
4878 case BUILT_IN_EXECLE:
4879 id = get_identifier ("__gcov_execle");
4880 break;
4882 case BUILT_IN_EXECVP:
4883 id = get_identifier ("__gcov_execvp");
4884 break;
4886 case BUILT_IN_EXECVE:
4887 id = get_identifier ("__gcov_execve");
4888 break;
4890 default:
4891 gcc_unreachable ();
4894 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4895 FUNCTION_DECL, id, TREE_TYPE (fn));
4896 DECL_EXTERNAL (decl) = 1;
4897 TREE_PUBLIC (decl) = 1;
4898 DECL_ARTIFICIAL (decl) = 1;
4899 TREE_NOTHROW (decl) = 1;
4900 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4901 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4902 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4903 return expand_call (call, target, ignore);
4908 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4909 the pointer in these functions is void*, the tree optimizers may remove
4910 casts. The mode computed in expand_builtin isn't reliable either, due
4911 to __sync_bool_compare_and_swap.
4913 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4914 group of builtins. This gives us log2 of the mode size. */
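/* Illustrative example (added): for __sync_fetch_and_add_4, FCODE_DIFF is 2,
   so the size requested below is 8 << 2 = 32 bits, i.e. SImode on most
   targets.  */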
4916 static inline machine_mode
4917 get_builtin_sync_mode (int fcode_diff)
4919 /* The size is not negotiable, so ask not to get BLKmode in return
4920 if the target indicates that a smaller size would be better. */
4921 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4924 /* Expand the memory expression LOC and return the appropriate memory operand
4925 for the builtin_sync operations. */
4927 static rtx
4928 get_builtin_sync_mem (tree loc, machine_mode mode)
4930 rtx addr, mem;
4932 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4933 addr = convert_memory_address (Pmode, addr);
4935 /* Note that we explicitly do not want any alias information for this
4936 memory, so that we kill all other live memories. Otherwise we don't
4937 satisfy the full barrier semantics of the intrinsic. */
4938 mem = validize_mem (gen_rtx_MEM (mode, addr));
4940 /* The alignment needs to be at least that of the mode. */
4941 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4942 get_pointer_alignment (loc)));
4943 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4944 MEM_VOLATILE_P (mem) = 1;
4946 return mem;
4949 /* Make sure an argument is in the right mode.
4950 EXP is the tree argument.
4951 MODE is the mode it should be in. */
4953 static rtx
4954 expand_expr_force_mode (tree exp, machine_mode mode)
4956 rtx val;
4957 machine_mode old_mode;
4959 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4960 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4961 of CONST_INTs, where we know the old_mode only from the call argument. */
4963 old_mode = GET_MODE (val);
4964 if (old_mode == VOIDmode)
4965 old_mode = TYPE_MODE (TREE_TYPE (exp));
4966 val = convert_modes (mode, old_mode, val, 1);
4967 return val;
4971 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4972 EXP is the CALL_EXPR. CODE is the rtx code
4973 that corresponds to the arithmetic or logical operation from the name;
4974 an exception here is that NOT actually means NAND. TARGET is an optional
4975 place for us to store the results; AFTER is true if this is the
4976 fetch_and_xxx form. */
4978 static rtx
4979 expand_builtin_sync_operation (machine_mode mode, tree exp,
4980 enum rtx_code code, bool after,
4981 rtx target)
4983 rtx val, mem;
4984 location_t loc = EXPR_LOCATION (exp);
4986 if (code == NOT && warn_sync_nand)
4988 tree fndecl = get_callee_fndecl (exp);
4989 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4991 static bool warned_f_a_n, warned_n_a_f;
4993 switch (fcode)
4995 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4996 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4997 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4998 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4999 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5000 if (warned_f_a_n)
5001 break;
5003 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5004 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5005 warned_f_a_n = true;
5006 break;
5008 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5009 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5010 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5011 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5012 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5013 if (warned_n_a_f)
5014 break;
5016 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5017 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5018 warned_n_a_f = true;
5019 break;
5021 default:
5022 gcc_unreachable ();
5026 /* Expand the operands. */
5027 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5028 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5030 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5031 after);
5034 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5035 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5036 true if this is the boolean form. TARGET is a place for us to store the
5037 results; this is NOT optional if IS_BOOL is true. */
5039 static rtx
5040 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5041 bool is_bool, rtx target)
5043 rtx old_val, new_val, mem;
5044 rtx *pbool, *poval;
5046 /* Expand the operands. */
5047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5048 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5049 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5051 pbool = poval = NULL;
5052 if (target != const0_rtx)
5054 if (is_bool)
5055 pbool = &target;
5056 else
5057 poval = &target;
5059 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5060 false, MEMMODEL_SYNC_SEQ_CST,
5061 MEMMODEL_SYNC_SEQ_CST))
5062 return NULL_RTX;
5064 return target;
5067 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5068 general form is actually an atomic exchange, and some targets only
5069 support a reduced form with the second argument being a constant 1.
5070 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5071 the results. */
5073 static rtx
5074 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5075 rtx target)
5077 rtx val, mem;
5079 /* Expand the operands. */
5080 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5081 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5083 return expand_sync_lock_test_and_set (target, mem, val);
5086 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5088 static void
5089 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5091 rtx mem;
5093 /* Expand the operands. */
5094 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5096 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5099 /* Given an integer representing an ``enum memmodel'', verify its
5100 correctness and return the memory model enum. */
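/* Illustrative note (added, assuming the usual C11-style __ATOMIC_* values
   0..5 for relaxed, consume, acquire, release, acq_rel and seq_cst): a
   non-constant model argument is conservatively treated as seq_cst below,
   and consume is promoted to acquire.  */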
5102 static enum memmodel
5103 get_memmodel (tree exp)
5105 rtx op;
5106 unsigned HOST_WIDE_INT val;
5107 source_location loc
5108 = expansion_point_location_if_in_system_header (input_location);
5110 /* If the parameter is not a constant, it's a run time value so we'll just
5111 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5112 if (TREE_CODE (exp) != INTEGER_CST)
5113 return MEMMODEL_SEQ_CST;
5115 op = expand_normal (exp);
5117 val = INTVAL (op);
5118 if (targetm.memmodel_check)
5119 val = targetm.memmodel_check (val);
5120 else if (val & ~MEMMODEL_MASK)
5122 warning_at (loc, OPT_Winvalid_memory_model,
5123 "unknown architecture specifier in memory model to builtin");
5124 return MEMMODEL_SEQ_CST;
5127 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5128 if (memmodel_base (val) >= MEMMODEL_LAST)
5130 warning_at (loc, OPT_Winvalid_memory_model,
5131 "invalid memory model argument to builtin");
5132 return MEMMODEL_SEQ_CST;
5135 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5136 be conservative and promote consume to acquire. */
5137 if (val == MEMMODEL_CONSUME)
5138 val = MEMMODEL_ACQUIRE;
5140 return (enum memmodel) val;
5143 /* Expand the __atomic_exchange intrinsic:
5144 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5145 EXP is the CALL_EXPR.
5146 TARGET is an optional place for us to store the results. */
5148 static rtx
5149 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5151 rtx val, mem;
5152 enum memmodel model;
5154 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5156 if (!flag_inline_atomics)
5157 return NULL_RTX;
5159 /* Expand the operands. */
5160 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5161 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5163 return expand_atomic_exchange (target, mem, val, model);
5166 /* Expand the __atomic_compare_exchange intrinsic:
5167 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5168 TYPE desired, BOOL weak,
5169 enum memmodel success,
5170 enum memmodel failure)
5171 EXP is the CALL_EXPR.
5172 TARGET is an optional place for us to store the results. */
5174 static rtx
5175 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5176 rtx target)
5178 rtx expect, desired, mem, oldval;
5179 rtx_code_label *label;
5180 enum memmodel success, failure;
5181 tree weak;
5182 bool is_weak;
5183 source_location loc
5184 = expansion_point_location_if_in_system_header (input_location);
5186 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5187 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5189 if (failure > success)
5191 warning_at (loc, OPT_Winvalid_memory_model,
5192 "failure memory model cannot be stronger than success "
5193 "memory model for %<__atomic_compare_exchange%>");
5194 success = MEMMODEL_SEQ_CST;
5197 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5199 warning_at (loc, OPT_Winvalid_memory_model,
5200 "invalid failure memory model for "
5201 "%<__atomic_compare_exchange%>");
5202 failure = MEMMODEL_SEQ_CST;
5203 success = MEMMODEL_SEQ_CST;
5207 if (!flag_inline_atomics)
5208 return NULL_RTX;
5210 /* Expand the operands. */
5211 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5213 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5214 expect = convert_memory_address (Pmode, expect);
5215 expect = gen_rtx_MEM (mode, expect);
5216 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5218 weak = CALL_EXPR_ARG (exp, 3);
5219 is_weak = false;
5220 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5221 is_weak = true;
5223 if (target == const0_rtx)
5224 target = NULL;
5226 /* Lest the rtl backend create a race condition with an improper store
5227 to memory, always create a new pseudo for OLDVAL. */
5228 oldval = NULL;
5230 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5231 is_weak, success, failure))
5232 return NULL_RTX;
5234 /* Conditionally store back to EXPECT, lest we create a race condition
5235 with an improper store to memory. */
5236 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5237 the normal case where EXPECT is totally private, i.e. a register. At
5238 which point the store can be unconditional. */
5239 label = gen_label_rtx ();
5240 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5241 GET_MODE (target), 1, label);
5242 emit_move_insn (expect, oldval);
5243 emit_label (label);
5245 return target;
5248 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5249 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5250 call. The weak parameter must be dropped to match the expected parameter
5251 list and the expected argument changed from value to pointer to memory
5252 slot. */
5254 static void
5255 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5257 unsigned int z;
5258 vec<tree, va_gc> *vec;
5260 vec_alloc (vec, 5);
5261 vec->quick_push (gimple_call_arg (call, 0));
5262 tree expected = gimple_call_arg (call, 1);
5263 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5264 TREE_TYPE (expected));
5265 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5266 if (expd != x)
5267 emit_move_insn (x, expd);
5268 tree v = make_tree (TREE_TYPE (expected), x);
5269 vec->quick_push (build1 (ADDR_EXPR,
5270 build_pointer_type (TREE_TYPE (expected)), v));
5271 vec->quick_push (gimple_call_arg (call, 2));
5272 /* Skip the boolean weak parameter. */
5273 for (z = 4; z < 6; z++)
5274 vec->quick_push (gimple_call_arg (call, z));
5275 built_in_function fncode
5276 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5277 + exact_log2 (GET_MODE_SIZE (mode)));
5278 tree fndecl = builtin_decl_explicit (fncode);
5279 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5280 fndecl);
5281 tree exp = build_call_vec (boolean_type_node, fn, vec);
5282 tree lhs = gimple_call_lhs (call);
5283 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5284 if (lhs)
5286 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5287 if (GET_MODE (boolret) != mode)
5288 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5289 x = force_reg (mode, x);
5290 write_complex_part (target, boolret, true);
5291 write_complex_part (target, x, false);
5295 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5297 void
5298 expand_ifn_atomic_compare_exchange (gcall *call)
5300 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5301 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5302 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5303 rtx expect, desired, mem, oldval, boolret;
5304 enum memmodel success, failure;
5305 tree lhs;
5306 bool is_weak;
5307 source_location loc
5308 = expansion_point_location_if_in_system_header (gimple_location (call));
5310 success = get_memmodel (gimple_call_arg (call, 4));
5311 failure = get_memmodel (gimple_call_arg (call, 5));
5313 if (failure > success)
5315 warning_at (loc, OPT_Winvalid_memory_model,
5316 "failure memory model cannot be stronger than success "
5317 "memory model for %<__atomic_compare_exchange%>");
5318 success = MEMMODEL_SEQ_CST;
5321 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5323 warning_at (loc, OPT_Winvalid_memory_model,
5324 "invalid failure memory model for "
5325 "%<__atomic_compare_exchange%>");
5326 failure = MEMMODEL_SEQ_CST;
5327 success = MEMMODEL_SEQ_CST;
5330 if (!flag_inline_atomics)
5332 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5333 return;
5336 /* Expand the operands. */
5337 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5339 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5340 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5342 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5344 boolret = NULL;
5345 oldval = NULL;
5347 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5348 is_weak, success, failure))
5350 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5351 return;
5354 lhs = gimple_call_lhs (call);
5355 if (lhs)
5357 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5358 if (GET_MODE (boolret) != mode)
5359 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5360 write_complex_part (target, boolret, true);
5361 write_complex_part (target, oldval, false);
5365 /* Expand the __atomic_load intrinsic:
5366 TYPE __atomic_load (TYPE *object, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5370 static rtx
5371 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5373 rtx mem;
5374 enum memmodel model;
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5377 if (is_mm_release (model) || is_mm_acq_rel (model))
5379 source_location loc
5380 = expansion_point_location_if_in_system_header (input_location);
5381 warning_at (loc, OPT_Winvalid_memory_model,
5382 "invalid memory model for %<__atomic_load%>");
5383 model = MEMMODEL_SEQ_CST;
5386 if (!flag_inline_atomics)
5387 return NULL_RTX;
5389 /* Expand the operand. */
5390 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5392 return expand_atomic_load (target, mem, model);
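/* Illustrative sketch, not part of the original sources: a source-level use
   of the load builtin expanded above.  Relaxed, acquire and seq_cst orders
   are accepted; release and acquire-release orders trigger the warning above
   and are treated as seq_cst.  The function name is hypothetical.  */
static inline int
example_atomic_load (int *p)
{
  return __atomic_load_n (p, __ATOMIC_ACQUIRE);
}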
5396 /* Expand the __atomic_store intrinsic:
5397 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5398 EXP is the CALL_EXPR.
5399 TARGET is an optional place for us to store the results. */
5401 static rtx
5402 expand_builtin_atomic_store (machine_mode mode, tree exp)
5404 rtx mem, val;
5405 enum memmodel model;
5407 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5408 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5409 || is_mm_release (model)))
5411 source_location loc
5412 = expansion_point_location_if_in_system_header (input_location);
5413 warning_at (loc, OPT_Winvalid_memory_model,
5414 "invalid memory model for %<__atomic_store%>");
5415 model = MEMMODEL_SEQ_CST;
5418 if (!flag_inline_atomics)
5419 return NULL_RTX;
5421 /* Expand the operands. */
5422 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5423 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5425 return expand_atomic_store (mem, val, model, false);
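/* Illustrative sketch, not part of the original sources: a source-level use
   of the store builtin expanded above.  Only relaxed, release and seq_cst
   orders pass the check above.  The function name is hypothetical.  */
static inline void
example_atomic_store (int *p, int v)
{
  __atomic_store_n (p, v, __ATOMIC_RELEASE);
}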
5428 /* Expand the __atomic_fetch_XXX intrinsic:
5429 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5430 EXP is the CALL_EXPR.
5431 TARGET is an optional place for us to store the results.
5432 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT.
5433 FETCH_AFTER is true if returning the result of the operation.
5434 FETCH_AFTER is false if returning the value before the operation.
5435 IGNORE is true if the result is not used.
5436 EXT_CALL is the correct builtin for an external call if this cannot be
5437 resolved to an instruction sequence. */
5439 static rtx
5440 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5441 enum rtx_code code, bool fetch_after,
5442 bool ignore, enum built_in_function ext_call)
5444 rtx val, mem, ret;
5445 enum memmodel model;
5446 tree fndecl;
5447 tree addr;
5449 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5451 /* Expand the operands. */
5452 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5453 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5455 /* Only try generating instructions if inlining is turned on. */
5456 if (flag_inline_atomics)
5458 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5459 if (ret)
5460 return ret;
5463 /* Return if a different routine isn't needed for the library call. */
5464 if (ext_call == BUILT_IN_NONE)
5465 return NULL_RTX;
5467 /* Change the call to the specified function. */
5468 fndecl = get_callee_fndecl (exp);
5469 addr = CALL_EXPR_FN (exp);
5470 STRIP_NOPS (addr);
5472 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5473 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5475 /* Expand the call here so we can emit trailing code. */
5476 ret = expand_call (exp, target, ignore);
5478 /* Replace the original function just in case it matters. */
5479 TREE_OPERAND (addr, 0) = fndecl;
5481 /* Then issue the arithmetic correction to return the right result. */
5482 if (!ignore)
5484 if (code == NOT)
5486 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5487 OPTAB_LIB_WIDEN);
5488 ret = expand_simple_unop (mode, NOT, ret, target, true);
5490 else
5491 ret = expand_simple_binop (mode, code, ret, val, target, true,
5492 OPTAB_LIB_WIDEN);
5494 return ret;
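/* Illustrative sketch, not part of the original sources: the trailing
   arithmetic correction above, written at the source level.  When only the
   fetch-and-op library routine is available, the op-and-fetch result is
   recomputed from the returned old value; NAND is the special case handled
   with AND followed by NOT.  The function name is hypothetical.  */
static inline unsigned int
example_nand_and_fetch (unsigned int *p, unsigned int val)
{
  unsigned int old = __atomic_fetch_nand (p, val, __ATOMIC_SEQ_CST);
  return ~(old & val);	/* the same correction the expander emits */
}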
5497 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5499 void
5500 expand_ifn_atomic_bit_test_and (gcall *call)
5502 tree ptr = gimple_call_arg (call, 0);
5503 tree bit = gimple_call_arg (call, 1);
5504 tree flag = gimple_call_arg (call, 2);
5505 tree lhs = gimple_call_lhs (call);
5506 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5507 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5508 enum rtx_code code;
5509 optab optab;
5510 struct expand_operand ops[5];
5512 gcc_assert (flag_inline_atomics);
5514 if (gimple_call_num_args (call) == 4)
5515 model = get_memmodel (gimple_call_arg (call, 3));
5517 rtx mem = get_builtin_sync_mem (ptr, mode);
5518 rtx val = expand_expr_force_mode (bit, mode);
5520 switch (gimple_call_internal_fn (call))
5522 case IFN_ATOMIC_BIT_TEST_AND_SET:
5523 code = IOR;
5524 optab = atomic_bit_test_and_set_optab;
5525 break;
5526 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5527 code = XOR;
5528 optab = atomic_bit_test_and_complement_optab;
5529 break;
5530 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5531 code = AND;
5532 optab = atomic_bit_test_and_reset_optab;
5533 break;
5534 default:
5535 gcc_unreachable ();
5538 if (lhs == NULL_TREE)
5540 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5541 val, NULL_RTX, true, OPTAB_DIRECT);
5542 if (code == AND)
5543 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5544 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5545 return;
5548 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5549 enum insn_code icode = direct_optab_handler (optab, mode);
5550 gcc_assert (icode != CODE_FOR_nothing);
5551 create_output_operand (&ops[0], target, mode);
5552 create_fixed_operand (&ops[1], mem);
5553 create_convert_operand_to (&ops[2], val, mode, true);
5554 create_integer_operand (&ops[3], model);
5555 create_integer_operand (&ops[4], integer_onep (flag));
5556 if (maybe_expand_insn (icode, 5, ops))
5557 return;
5559 rtx bitval = val;
5560 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5561 val, NULL_RTX, true, OPTAB_DIRECT);
5562 rtx maskval = val;
5563 if (code == AND)
5564 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5565 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5566 code, model, false);
5567 if (integer_onep (flag))
5569 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5570 NULL_RTX, true, OPTAB_DIRECT);
5571 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5572 true, OPTAB_DIRECT);
5574 else
5575 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5576 OPTAB_DIRECT);
5577 if (result != target)
5578 emit_move_insn (target, result);
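/* Illustrative sketch, not part of the original sources: the kind of source
   pattern that is matched into IFN_ATOMIC_BIT_TEST_AND_SET and handled by
   the expander above.  Because only the tested bit of the fetched value is
   used, a direct bit-test-and-set instruction can be emitted when the target
   provides one.  The function name is hypothetical.  */
static inline bool
example_bit_test_and_set (unsigned int *p, unsigned int bit)
{
  unsigned int mask = 1U << bit;
  return (__atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}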
5581 /* Expand an atomic clear operation.
5582 void __atomic_clear (BOOL *obj, enum memmodel)
5583 EXP is the call expression. */
5585 static rtx
5586 expand_builtin_atomic_clear (tree exp)
5588 machine_mode mode;
5589 rtx mem, ret;
5590 enum memmodel model;
5592 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5593 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5594 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5596 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5598 source_location loc
5599 = expansion_point_location_if_in_system_header (input_location);
5600 warning_at (loc, OPT_Winvalid_memory_model,
5601 "invalid memory model for %<__atomic_store%>");
5602 model = MEMMODEL_SEQ_CST;
5605 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5606 Failing both, a plain store is issued below. The only way this can
5607 fail is if the bool type is larger than a word size. Unlikely, but
5608 handle it anyway for completeness. Assume a single threaded model since
5609 there is no atomic support in this case, and no barriers are required. */
5610 ret = expand_atomic_store (mem, const0_rtx, model, true);
5611 if (!ret)
5612 emit_move_insn (mem, const0_rtx);
5613 return const0_rtx;
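/* Illustrative sketch, not part of the original sources: a source-level use
   of the clear builtin expanded above.  Consume, acquire and acquire-release
   orders are rejected by the check above.  The function name is
   hypothetical.  */
static inline void
example_atomic_clear (bool *flag)
{
  __atomic_clear (flag, __ATOMIC_RELEASE);
}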
5616 /* Expand an atomic test_and_set operation.
5617 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5618 EXP is the call expression. */
5620 static rtx
5621 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5623 rtx mem;
5624 enum memmodel model;
5625 machine_mode mode;
5627 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5628 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5631 return expand_atomic_test_and_set (target, mem, model);
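/* Illustrative sketch, not part of the original sources: a source-level use
   of the test-and-set builtin expanded above, here as a minimal spinlock
   acquire.  The function name is hypothetical.  */
static inline void
example_spin_lock (bool *flag)
{
  while (__atomic_test_and_set (flag, __ATOMIC_ACQUIRE))
    ;	/* spin until the previous value was clear */
}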
5635 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5636 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5638 static tree
5639 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5641 int size;
5642 machine_mode mode;
5643 unsigned int mode_align, type_align;
5645 if (TREE_CODE (arg0) != INTEGER_CST)
5646 return NULL_TREE;
5648 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5649 mode = mode_for_size (size, MODE_INT, 0);
5650 mode_align = GET_MODE_ALIGNMENT (mode);
5652 if (TREE_CODE (arg1) == INTEGER_CST)
5654 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5656 /* Either this argument is null, or it's a fake pointer encoding
5657 the alignment of the object. */
5658 val = least_bit_hwi (val);
5659 val *= BITS_PER_UNIT;
5661 if (val == 0 || mode_align < val)
5662 type_align = mode_align;
5663 else
5664 type_align = val;
5666 else
5668 tree ttype = TREE_TYPE (arg1);
5670 /* This function is usually invoked and folded immediately by the front
5671 end before anything else has a chance to look at it. The pointer
5672 parameter at this point is usually cast to a void *, so check for that
5673 and look past the cast. */
5674 if (CONVERT_EXPR_P (arg1)
5675 && POINTER_TYPE_P (ttype)
5676 && VOID_TYPE_P (TREE_TYPE (ttype))
5677 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5678 arg1 = TREE_OPERAND (arg1, 0);
5680 ttype = TREE_TYPE (arg1);
5681 gcc_assert (POINTER_TYPE_P (ttype));
5683 /* Get the underlying type of the object. */
5684 ttype = TREE_TYPE (ttype);
5685 type_align = TYPE_ALIGN (ttype);
5688 /* If the object has smaller alignment, the lock free routines cannot
5689 be used. */
5690 if (type_align < mode_align)
5691 return boolean_false_node;
5693 /* Check if a compare_and_swap pattern exists for the mode which represents
5694 the required size. The pattern is not allowed to fail, so the existence
5695 of the pattern indicates support is present. */
5696 if (can_compare_and_swap_p (mode, true))
5697 return boolean_true_node;
5698 else
5699 return boolean_false_node;
5702 /* Return true if the parameters to call EXP represent an object which will
5703 always generate lock free instructions. The first argument represents the
5704 size of the object, and the second parameter is a pointer to the object
5705 itself. If NULL is passed for the object, then the result is based on
5706 typical alignment for an object of the specified size. Otherwise return
5707 false. */
5709 static rtx
5710 expand_builtin_atomic_always_lock_free (tree exp)
5712 tree size;
5713 tree arg0 = CALL_EXPR_ARG (exp, 0);
5714 tree arg1 = CALL_EXPR_ARG (exp, 1);
5716 if (TREE_CODE (arg0) != INTEGER_CST)
5718 error ("non-constant argument 1 to __atomic_always_lock_free");
5719 return const0_rtx;
5722 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5723 if (size == boolean_true_node)
5724 return const1_rtx;
5725 return const0_rtx;
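/* Illustrative sketch, not part of the original sources: a source-level use
   of the builtin expanded above.  The size must be a compile-time constant;
   the second argument is either a null pointer (use typical alignment for
   the size) or a pointer whose type supplies the alignment.  The function
   name is hypothetical.  */
static inline bool
example_always_lock_free (long long *p)
{
  return __atomic_always_lock_free (sizeof (long long), p)
	 && __atomic_always_lock_free (sizeof (int), 0);
}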
5728 /* Return boolean_true_node if it can be determined that object ARG1 of
5729 size ARG0 is always lock free on this architecture; otherwise return NULL_TREE. */
5731 static tree
5732 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5734 if (!flag_inline_atomics)
5735 return NULL_TREE;
5737 /* If it isn't always lock free, don't generate a result. */
5738 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5739 return boolean_true_node;
5741 return NULL_TREE;
5744 /* Return true if the parameters to call EXP represent an object which will
5745 always generate lock free instructions. The first argument represents the
5746 size of the object, and the second parameter is a pointer to the object
5747 itself. If NULL is passed for the object, then the result is based on
5748 typical alignment for an object of the specified size. Otherwise return
5749 NULL. */
5751 static rtx
5752 expand_builtin_atomic_is_lock_free (tree exp)
5754 tree size;
5755 tree arg0 = CALL_EXPR_ARG (exp, 0);
5756 tree arg1 = CALL_EXPR_ARG (exp, 1);
5758 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5760 error ("non-integer argument 1 to __atomic_is_lock_free");
5761 return NULL_RTX;
5764 if (!flag_inline_atomics)
5765 return NULL_RTX;
5767 /* If the value is known at compile time, return the RTX for it. */
5768 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5769 if (size == boolean_true_node)
5770 return const1_rtx;
5772 return NULL_RTX;
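/* Illustrative sketch, not part of the original sources: a source-level use
   of the builtin expanded above.  Unlike __atomic_always_lock_free, the size
   need not be constant; when the answer is not known at compile time, the
   NULL_RTX return above leaves a call to the libatomic routine.  The
   function name is hypothetical.  */
static inline bool
example_is_lock_free (__SIZE_TYPE__ size, void *obj)
{
  return __atomic_is_lock_free (size, obj);
}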
5775 /* Expand the __atomic_thread_fence intrinsic:
5776 void __atomic_thread_fence (enum memmodel)
5777 EXP is the CALL_EXPR. */
5779 static void
5780 expand_builtin_atomic_thread_fence (tree exp)
5782 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5783 expand_mem_thread_fence (model);
5786 /* Expand the __atomic_signal_fence intrinsic:
5787 void __atomic_signal_fence (enum memmodel)
5788 EXP is the CALL_EXPR. */
5790 static void
5791 expand_builtin_atomic_signal_fence (tree exp)
5793 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5794 expand_mem_signal_fence (model);
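/* Illustrative sketch, not part of the original sources: source-level uses
   of the two fence builtins expanded above.  A thread fence orders memory
   accesses with respect to other threads; a signal fence only constrains the
   compiler, for synchronization with a signal handler running in the same
   thread.  The function name is hypothetical.  */
static inline void
example_fences (void)
{
  __atomic_thread_fence (__ATOMIC_ACQ_REL);
  __atomic_signal_fence (__ATOMIC_SEQ_CST);
}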
5797 /* Expand the __sync_synchronize intrinsic. */
5799 static void
5800 expand_builtin_sync_synchronize (void)
5802 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5805 static rtx
5806 expand_builtin_thread_pointer (tree exp, rtx target)
5808 enum insn_code icode;
5809 if (!validate_arglist (exp, VOID_TYPE))
5810 return const0_rtx;
5811 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5812 if (icode != CODE_FOR_nothing)
5814 struct expand_operand op;
5815 /* If the target is not suitable then create a new target. */
5816 if (target == NULL_RTX
5817 || !REG_P (target)
5818 || GET_MODE (target) != Pmode)
5819 target = gen_reg_rtx (Pmode);
5820 create_output_operand (&op, target, Pmode);
5821 expand_insn (icode, 1, &op);
5822 return target;
5824 error ("__builtin_thread_pointer is not supported on this target");
5825 return const0_rtx;
5828 static void
5829 expand_builtin_set_thread_pointer (tree exp)
5831 enum insn_code icode;
5832 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5833 return;
5834 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5835 if (icode != CODE_FOR_nothing)
5837 struct expand_operand op;
5838 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5839 Pmode, EXPAND_NORMAL);
5840 create_input_operand (&op, val, Pmode);
5841 expand_insn (icode, 1, &op);
5842 return;
5844 error ("__builtin_set_thread_pointer is not supported on this target");
5848 /* Emit code to restore a previously saved value of the stack pointer. */
5850 static void
5851 expand_stack_restore (tree var)
5853 rtx_insn *prev;
5854 rtx sa = expand_normal (var);
5856 sa = convert_memory_address (Pmode, sa);
5858 prev = get_last_insn ();
5859 emit_stack_restore (SAVE_BLOCK, sa);
5861 record_new_stack_level ();
5863 fixup_args_size_notes (prev, get_last_insn (), 0);
5866 /* Emit code to save the current value of the stack pointer. */
5868 static rtx
5869 expand_stack_save (void)
5871 rtx ret = NULL_RTX;
5873 emit_stack_save (SAVE_BLOCK, &ret);
5874 return ret;
5878 /* Expand an expression EXP that calls a built-in function,
5879 with result going to TARGET if that's convenient
5880 (and in mode MODE if that's convenient).
5881 SUBTARGET may be used as the target for computing one of EXP's operands.
5882 IGNORE is nonzero if the value is to be ignored. */
5885 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5886 int ignore)
5888 tree fndecl = get_callee_fndecl (exp);
5889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5890 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5891 int flags;
5893 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5894 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5896 /* When ASan is enabled, we don't want to expand some memory/string
5897 builtins and rely on libsanitizer's hooks. This allows us to avoid
5898 redundant checks and be sure, that possible overflow will be detected
5899 by ASan. */
5901 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5902 return expand_call (exp, target, ignore);
5904 /* When not optimizing, generate calls to library functions for a certain
5905 set of builtins. */
5906 if (!optimize
5907 && !called_as_built_in (fndecl)
5908 && fcode != BUILT_IN_FORK
5909 && fcode != BUILT_IN_EXECL
5910 && fcode != BUILT_IN_EXECV
5911 && fcode != BUILT_IN_EXECLP
5912 && fcode != BUILT_IN_EXECLE
5913 && fcode != BUILT_IN_EXECVP
5914 && fcode != BUILT_IN_EXECVE
5915 && fcode != BUILT_IN_ALLOCA
5916 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5917 && fcode != BUILT_IN_FREE
5918 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5919 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5920 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5921 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5922 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5923 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5924 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5925 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5926 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5927 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5928 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5929 && fcode != BUILT_IN_CHKP_BNDRET)
5930 return expand_call (exp, target, ignore);
5932 /* The built-in function expanders test for target == const0_rtx
5933 to determine whether the function's result will be ignored. */
5934 if (ignore)
5935 target = const0_rtx;
5937 /* If the result of a pure or const built-in function is ignored, and
5938 none of its arguments are volatile, we can avoid expanding the
5939 built-in call and just evaluate the arguments for side-effects. */
5940 if (target == const0_rtx
5941 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5942 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5944 bool volatilep = false;
5945 tree arg;
5946 call_expr_arg_iterator iter;
5948 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5949 if (TREE_THIS_VOLATILE (arg))
5951 volatilep = true;
5952 break;
5955 if (! volatilep)
5957 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5958 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5959 return const0_rtx;
5963 /* expand_builtin_with_bounds is supposed to be used for
5964 instrumented builtin calls. */
5965 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5967 switch (fcode)
5969 CASE_FLT_FN (BUILT_IN_FABS):
5970 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5971 case BUILT_IN_FABSD32:
5972 case BUILT_IN_FABSD64:
5973 case BUILT_IN_FABSD128:
5974 target = expand_builtin_fabs (exp, target, subtarget);
5975 if (target)
5976 return target;
5977 break;
5979 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5980 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5981 target = expand_builtin_copysign (exp, target, subtarget);
5982 if (target)
5983 return target;
5984 break;
5986 /* Just do a normal library call if we were unable to fold
5987 the values. */
5988 CASE_FLT_FN (BUILT_IN_CABS):
5989 break;
5991 CASE_FLT_FN (BUILT_IN_FMA):
5992 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5993 if (target)
5994 return target;
5995 break;
5997 CASE_FLT_FN (BUILT_IN_ILOGB):
5998 if (! flag_unsafe_math_optimizations)
5999 break;
6000 gcc_fallthrough ();
6001 CASE_FLT_FN (BUILT_IN_ISINF):
6002 CASE_FLT_FN (BUILT_IN_FINITE):
6003 case BUILT_IN_ISFINITE:
6004 case BUILT_IN_ISNORMAL:
6005 target = expand_builtin_interclass_mathfn (exp, target);
6006 if (target)
6007 return target;
6008 break;
6010 CASE_FLT_FN (BUILT_IN_ICEIL):
6011 CASE_FLT_FN (BUILT_IN_LCEIL):
6012 CASE_FLT_FN (BUILT_IN_LLCEIL):
6013 CASE_FLT_FN (BUILT_IN_LFLOOR):
6014 CASE_FLT_FN (BUILT_IN_IFLOOR):
6015 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6016 target = expand_builtin_int_roundingfn (exp, target);
6017 if (target)
6018 return target;
6019 break;
6021 CASE_FLT_FN (BUILT_IN_IRINT):
6022 CASE_FLT_FN (BUILT_IN_LRINT):
6023 CASE_FLT_FN (BUILT_IN_LLRINT):
6024 CASE_FLT_FN (BUILT_IN_IROUND):
6025 CASE_FLT_FN (BUILT_IN_LROUND):
6026 CASE_FLT_FN (BUILT_IN_LLROUND):
6027 target = expand_builtin_int_roundingfn_2 (exp, target);
6028 if (target)
6029 return target;
6030 break;
6032 CASE_FLT_FN (BUILT_IN_POWI):
6033 target = expand_builtin_powi (exp, target);
6034 if (target)
6035 return target;
6036 break;
6038 CASE_FLT_FN (BUILT_IN_CEXPI):
6039 target = expand_builtin_cexpi (exp, target);
6040 gcc_assert (target);
6041 return target;
6043 CASE_FLT_FN (BUILT_IN_SIN):
6044 CASE_FLT_FN (BUILT_IN_COS):
6045 if (! flag_unsafe_math_optimizations)
6046 break;
6047 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6052 CASE_FLT_FN (BUILT_IN_SINCOS):
6053 if (! flag_unsafe_math_optimizations)
6054 break;
6055 target = expand_builtin_sincos (exp);
6056 if (target)
6057 return target;
6058 break;
6060 case BUILT_IN_APPLY_ARGS:
6061 return expand_builtin_apply_args ();
6063 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6064 FUNCTION with a copy of the parameters described by
6065 ARGUMENTS, and ARGSIZE. It returns a block of memory
6066 allocated on the stack into which is stored all the registers
6067 that might possibly be used for returning the result of a
6068 function. ARGUMENTS is the value returned by
6069 __builtin_apply_args. ARGSIZE is the number of bytes of
6070 arguments that must be copied. ??? How should this value be
6071 computed? We'll also need a safe worst case value for varargs
6072 functions. */
6073 case BUILT_IN_APPLY:
6074 if (!validate_arglist (exp, POINTER_TYPE,
6075 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6076 && !validate_arglist (exp, REFERENCE_TYPE,
6077 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6078 return const0_rtx;
6079 else
6081 rtx ops[3];
6083 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6084 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6085 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6087 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6090 /* __builtin_return (RESULT) causes the function to return the
6091 value described by RESULT. RESULT is address of the block of
6092 memory returned by __builtin_apply. */
6093 case BUILT_IN_RETURN:
6094 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6095 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6096 return const0_rtx;
6098 case BUILT_IN_SAVEREGS:
6099 return expand_builtin_saveregs ();
6101 case BUILT_IN_VA_ARG_PACK:
6102 /* All valid uses of __builtin_va_arg_pack () are removed during
6103 inlining. */
6104 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6105 return const0_rtx;
6107 case BUILT_IN_VA_ARG_PACK_LEN:
6108 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6109 inlining. */
6110 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6111 return const0_rtx;
6113 /* Return the address of the first anonymous stack arg. */
6114 case BUILT_IN_NEXT_ARG:
6115 if (fold_builtin_next_arg (exp, false))
6116 return const0_rtx;
6117 return expand_builtin_next_arg ();
6119 case BUILT_IN_CLEAR_CACHE:
6120 target = expand_builtin___clear_cache (exp);
6121 if (target)
6122 return target;
6123 break;
6125 case BUILT_IN_CLASSIFY_TYPE:
6126 return expand_builtin_classify_type (exp);
6128 case BUILT_IN_CONSTANT_P:
6129 return const0_rtx;
6131 case BUILT_IN_FRAME_ADDRESS:
6132 case BUILT_IN_RETURN_ADDRESS:
6133 return expand_builtin_frame_address (fndecl, exp);
6135 /* Returns the address of the area where the structure is returned.
6136 0 otherwise. */
6137 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6138 if (call_expr_nargs (exp) != 0
6139 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6140 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6141 return const0_rtx;
6142 else
6143 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6145 case BUILT_IN_ALLOCA:
6146 case BUILT_IN_ALLOCA_WITH_ALIGN:
6147 /* If the allocation stems from the declaration of a variable-sized
6148 object, it cannot accumulate. */
6149 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6150 if (target)
6151 return target;
6152 break;
6154 case BUILT_IN_STACK_SAVE:
6155 return expand_stack_save ();
6157 case BUILT_IN_STACK_RESTORE:
6158 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6159 return const0_rtx;
6161 case BUILT_IN_BSWAP16:
6162 case BUILT_IN_BSWAP32:
6163 case BUILT_IN_BSWAP64:
6164 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6165 if (target)
6166 return target;
6167 break;
6169 CASE_INT_FN (BUILT_IN_FFS):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, ffs_optab);
6172 if (target)
6173 return target;
6174 break;
6176 CASE_INT_FN (BUILT_IN_CLZ):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, clz_optab);
6179 if (target)
6180 return target;
6181 break;
6183 CASE_INT_FN (BUILT_IN_CTZ):
6184 target = expand_builtin_unop (target_mode, exp, target,
6185 subtarget, ctz_optab);
6186 if (target)
6187 return target;
6188 break;
6190 CASE_INT_FN (BUILT_IN_CLRSB):
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, clrsb_optab);
6193 if (target)
6194 return target;
6195 break;
6197 CASE_INT_FN (BUILT_IN_POPCOUNT):
6198 target = expand_builtin_unop (target_mode, exp, target,
6199 subtarget, popcount_optab);
6200 if (target)
6201 return target;
6202 break;
6204 CASE_INT_FN (BUILT_IN_PARITY):
6205 target = expand_builtin_unop (target_mode, exp, target,
6206 subtarget, parity_optab);
6207 if (target)
6208 return target;
6209 break;
6211 case BUILT_IN_STRLEN:
6212 target = expand_builtin_strlen (exp, target, target_mode);
6213 if (target)
6214 return target;
6215 break;
6217 case BUILT_IN_STRCPY:
6218 target = expand_builtin_strcpy (exp, target);
6219 if (target)
6220 return target;
6221 break;
6223 case BUILT_IN_STRNCPY:
6224 target = expand_builtin_strncpy (exp, target);
6225 if (target)
6226 return target;
6227 break;
6229 case BUILT_IN_STPCPY:
6230 target = expand_builtin_stpcpy (exp, target, mode);
6231 if (target)
6232 return target;
6233 break;
6235 case BUILT_IN_MEMCPY:
6236 target = expand_builtin_memcpy (exp, target);
6237 if (target)
6238 return target;
6239 break;
6241 case BUILT_IN_MEMPCPY:
6242 target = expand_builtin_mempcpy (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6247 case BUILT_IN_MEMSET:
6248 target = expand_builtin_memset (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6253 case BUILT_IN_BZERO:
6254 target = expand_builtin_bzero (exp);
6255 if (target)
6256 return target;
6257 break;
6259 case BUILT_IN_STRCMP:
6260 target = expand_builtin_strcmp (exp, target);
6261 if (target)
6262 return target;
6263 break;
6265 case BUILT_IN_STRNCMP:
6266 target = expand_builtin_strncmp (exp, target, mode);
6267 if (target)
6268 return target;
6269 break;
6271 case BUILT_IN_BCMP:
6272 case BUILT_IN_MEMCMP:
6273 case BUILT_IN_MEMCMP_EQ:
6274 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6275 if (target)
6276 return target;
6277 if (fcode == BUILT_IN_MEMCMP_EQ)
6279 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6280 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6282 break;
6284 case BUILT_IN_SETJMP:
6285 /* This should have been lowered to the builtins below. */
6286 gcc_unreachable ();
6288 case BUILT_IN_SETJMP_SETUP:
6289 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6290 and the receiver label. */
6291 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6293 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6294 VOIDmode, EXPAND_NORMAL);
6295 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6296 rtx_insn *label_r = label_rtx (label);
6298 /* This is copied from the handling of non-local gotos. */
6299 expand_builtin_setjmp_setup (buf_addr, label_r);
6300 nonlocal_goto_handler_labels
6301 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6302 nonlocal_goto_handler_labels);
6303 /* ??? Do not let expand_label treat us as such since we would
6304 not want to be both on the list of non-local labels and on
6305 the list of forced labels. */
6306 FORCED_LABEL (label) = 0;
6307 return const0_rtx;
6309 break;
6311 case BUILT_IN_SETJMP_RECEIVER:
6312 /* __builtin_setjmp_receiver is passed the receiver label. */
6313 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6315 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6316 rtx_insn *label_r = label_rtx (label);
6318 expand_builtin_setjmp_receiver (label_r);
6319 return const0_rtx;
6321 break;
6323 /* __builtin_longjmp is passed a pointer to an array of five words.
6324 It's similar to the C library longjmp function but works with
6325 __builtin_setjmp above. */
6326 case BUILT_IN_LONGJMP:
6327 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6329 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6330 VOIDmode, EXPAND_NORMAL);
6331 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6333 if (value != const1_rtx)
6335 error ("%<__builtin_longjmp%> second argument must be 1");
6336 return const0_rtx;
6339 expand_builtin_longjmp (buf_addr, value);
6340 return const0_rtx;
6342 break;
6344 case BUILT_IN_NONLOCAL_GOTO:
6345 target = expand_builtin_nonlocal_goto (exp);
6346 if (target)
6347 return target;
6348 break;
6350 /* This updates the setjmp buffer that is its argument with the value
6351 of the current stack pointer. */
6352 case BUILT_IN_UPDATE_SETJMP_BUF:
6353 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6355 rtx buf_addr
6356 = expand_normal (CALL_EXPR_ARG (exp, 0));
6358 expand_builtin_update_setjmp_buf (buf_addr);
6359 return const0_rtx;
6361 break;
6363 case BUILT_IN_TRAP:
6364 expand_builtin_trap ();
6365 return const0_rtx;
6367 case BUILT_IN_UNREACHABLE:
6368 expand_builtin_unreachable ();
6369 return const0_rtx;
6371 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6372 case BUILT_IN_SIGNBITD32:
6373 case BUILT_IN_SIGNBITD64:
6374 case BUILT_IN_SIGNBITD128:
6375 target = expand_builtin_signbit (exp, target);
6376 if (target)
6377 return target;
6378 break;
6380 /* Various hooks for the DWARF 2 __throw routine. */
6381 case BUILT_IN_UNWIND_INIT:
6382 expand_builtin_unwind_init ();
6383 return const0_rtx;
6384 case BUILT_IN_DWARF_CFA:
6385 return virtual_cfa_rtx;
6386 #ifdef DWARF2_UNWIND_INFO
6387 case BUILT_IN_DWARF_SP_COLUMN:
6388 return expand_builtin_dwarf_sp_column ();
6389 case BUILT_IN_INIT_DWARF_REG_SIZES:
6390 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6391 return const0_rtx;
6392 #endif
6393 case BUILT_IN_FROB_RETURN_ADDR:
6394 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6395 case BUILT_IN_EXTRACT_RETURN_ADDR:
6396 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6397 case BUILT_IN_EH_RETURN:
6398 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6399 CALL_EXPR_ARG (exp, 1));
6400 return const0_rtx;
6401 case BUILT_IN_EH_RETURN_DATA_REGNO:
6402 return expand_builtin_eh_return_data_regno (exp);
6403 case BUILT_IN_EXTEND_POINTER:
6404 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6405 case BUILT_IN_EH_POINTER:
6406 return expand_builtin_eh_pointer (exp);
6407 case BUILT_IN_EH_FILTER:
6408 return expand_builtin_eh_filter (exp);
6409 case BUILT_IN_EH_COPY_VALUES:
6410 return expand_builtin_eh_copy_values (exp);
6412 case BUILT_IN_VA_START:
6413 return expand_builtin_va_start (exp);
6414 case BUILT_IN_VA_END:
6415 return expand_builtin_va_end (exp);
6416 case BUILT_IN_VA_COPY:
6417 return expand_builtin_va_copy (exp);
6418 case BUILT_IN_EXPECT:
6419 return expand_builtin_expect (exp, target);
6420 case BUILT_IN_ASSUME_ALIGNED:
6421 return expand_builtin_assume_aligned (exp, target);
6422 case BUILT_IN_PREFETCH:
6423 expand_builtin_prefetch (exp);
6424 return const0_rtx;
6426 case BUILT_IN_INIT_TRAMPOLINE:
6427 return expand_builtin_init_trampoline (exp, true);
6428 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6429 return expand_builtin_init_trampoline (exp, false);
6430 case BUILT_IN_ADJUST_TRAMPOLINE:
6431 return expand_builtin_adjust_trampoline (exp);
6433 case BUILT_IN_INIT_DESCRIPTOR:
6434 return expand_builtin_init_descriptor (exp);
6435 case BUILT_IN_ADJUST_DESCRIPTOR:
6436 return expand_builtin_adjust_descriptor (exp);
6438 case BUILT_IN_FORK:
6439 case BUILT_IN_EXECL:
6440 case BUILT_IN_EXECV:
6441 case BUILT_IN_EXECLP:
6442 case BUILT_IN_EXECLE:
6443 case BUILT_IN_EXECVP:
6444 case BUILT_IN_EXECVE:
6445 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6446 if (target)
6447 return target;
6448 break;
6450 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6451 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6452 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6453 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6454 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6456 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6457 if (target)
6458 return target;
6459 break;
6461 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6462 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6463 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6464 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6465 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6467 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6473 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6474 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6475 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6476 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6478 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6479 if (target)
6480 return target;
6481 break;
6483 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6484 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6485 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6486 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6487 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6489 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6490 if (target)
6491 return target;
6492 break;
6494 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6495 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6496 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6497 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6498 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6500 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6501 if (target)
6502 return target;
6503 break;
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6509 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6511 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6512 if (target)
6513 return target;
6514 break;
6516 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6517 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6518 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6519 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6520 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6523 if (target)
6524 return target;
6525 break;
6527 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6528 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6529 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6530 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6531 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6533 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6534 if (target)
6535 return target;
6536 break;
6538 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6539 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6540 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6541 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6542 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6545 if (target)
6546 return target;
6547 break;
6549 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6550 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6551 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6552 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6553 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6555 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6556 if (target)
6557 return target;
6558 break;
6560 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6561 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6562 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6563 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6564 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6566 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6567 if (target)
6568 return target;
6569 break;
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6572 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6573 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6575 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6577 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6578 if (target)
6579 return target;
6580 break;
6582 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6583 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6584 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6585 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6586 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6587 if (mode == VOIDmode)
6588 mode = TYPE_MODE (boolean_type_node);
6589 if (!target || !register_operand (target, mode))
6590 target = gen_reg_rtx (mode);
6592 mode = get_builtin_sync_mode
6593 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6594 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6595 if (target)
6596 return target;
6597 break;
6599 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6600 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6601 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6602 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6603 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6604 mode = get_builtin_sync_mode
6605 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6606 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6607 if (target)
6608 return target;
6609 break;
6611 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6615 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6617 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6618 if (target)
6619 return target;
6620 break;
6622 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6623 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6624 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6625 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6626 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6628 expand_builtin_sync_lock_release (mode, exp);
6629 return const0_rtx;
6631 case BUILT_IN_SYNC_SYNCHRONIZE:
6632 expand_builtin_sync_synchronize ();
6633 return const0_rtx;
6635 case BUILT_IN_ATOMIC_EXCHANGE_1:
6636 case BUILT_IN_ATOMIC_EXCHANGE_2:
6637 case BUILT_IN_ATOMIC_EXCHANGE_4:
6638 case BUILT_IN_ATOMIC_EXCHANGE_8:
6639 case BUILT_IN_ATOMIC_EXCHANGE_16:
6640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6641 target = expand_builtin_atomic_exchange (mode, exp, target);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6649 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6650 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6652 unsigned int nargs, z;
6653 vec<tree, va_gc> *vec;
6655 mode =
6656 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6657 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6658 if (target)
6659 return target;
6661 /* If this is turned into an external library call, the weak parameter
6662 must be dropped to match the expected parameter list. */
6663 nargs = call_expr_nargs (exp);
6664 vec_alloc (vec, nargs - 1);
6665 for (z = 0; z < 3; z++)
6666 vec->quick_push (CALL_EXPR_ARG (exp, z));
6667 /* Skip the boolean weak parameter. */
6668 for (z = 4; z < 6; z++)
6669 vec->quick_push (CALL_EXPR_ARG (exp, z));
6670 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6671 break;
6674 case BUILT_IN_ATOMIC_LOAD_1:
6675 case BUILT_IN_ATOMIC_LOAD_2:
6676 case BUILT_IN_ATOMIC_LOAD_4:
6677 case BUILT_IN_ATOMIC_LOAD_8:
6678 case BUILT_IN_ATOMIC_LOAD_16:
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6680 target = expand_builtin_atomic_load (mode, exp, target);
6681 if (target)
6682 return target;
6683 break;
6685 case BUILT_IN_ATOMIC_STORE_1:
6686 case BUILT_IN_ATOMIC_STORE_2:
6687 case BUILT_IN_ATOMIC_STORE_4:
6688 case BUILT_IN_ATOMIC_STORE_8:
6689 case BUILT_IN_ATOMIC_STORE_16:
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6691 target = expand_builtin_atomic_store (mode, exp);
6692 if (target)
6693 return const0_rtx;
6694 break;
6696 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6697 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6698 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6699 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6700 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6702 enum built_in_function lib;
6703 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6704 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6705 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6707 ignore, lib);
6708 if (target)
6709 return target;
6710 break;
6712 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6713 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6714 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6715 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6716 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6718 enum built_in_function lib;
6719 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6720 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6721 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6722 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6723 ignore, lib);
6724 if (target)
6725 return target;
6726 break;
6728 case BUILT_IN_ATOMIC_AND_FETCH_1:
6729 case BUILT_IN_ATOMIC_AND_FETCH_2:
6730 case BUILT_IN_ATOMIC_AND_FETCH_4:
6731 case BUILT_IN_ATOMIC_AND_FETCH_8:
6732 case BUILT_IN_ATOMIC_AND_FETCH_16:
6734 enum built_in_function lib;
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6736 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6737 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6738 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6739 ignore, lib);
6740 if (target)
6741 return target;
6742 break;
6744 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6745 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6746 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6747 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6748 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6750 enum built_in_function lib;
6751 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6752 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6753 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6755 ignore, lib);
6756 if (target)
6757 return target;
6758 break;
6760 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6761 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6762 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6763 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6764 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6766 enum built_in_function lib;
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6768 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6769 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6770 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6771 ignore, lib);
6772 if (target)
6773 return target;
6774 break;
6776 case BUILT_IN_ATOMIC_OR_FETCH_1:
6777 case BUILT_IN_ATOMIC_OR_FETCH_2:
6778 case BUILT_IN_ATOMIC_OR_FETCH_4:
6779 case BUILT_IN_ATOMIC_OR_FETCH_8:
6780 case BUILT_IN_ATOMIC_OR_FETCH_16:
6782 enum built_in_function lib;
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6784 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6785 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6786 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6787 ignore, lib);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6793 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6794 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6795 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6796 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6798 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6799 ignore, BUILT_IN_NONE);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6805 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6806 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6807 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6808 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6810 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6811 ignore, BUILT_IN_NONE);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_ATOMIC_FETCH_AND_1:
6817 case BUILT_IN_ATOMIC_FETCH_AND_2:
6818 case BUILT_IN_ATOMIC_FETCH_AND_4:
6819 case BUILT_IN_ATOMIC_FETCH_AND_8:
6820 case BUILT_IN_ATOMIC_FETCH_AND_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6822 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6823 ignore, BUILT_IN_NONE);
6824 if (target)
6825 return target;
6826 break;
6828 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6829 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6830 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6831 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6832 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6834 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6835 ignore, BUILT_IN_NONE);
6836 if (target)
6837 return target;
6838 break;
6840 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6841 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6842 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6843 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6844 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6846 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6847 ignore, BUILT_IN_NONE);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_ATOMIC_FETCH_OR_1:
6853 case BUILT_IN_ATOMIC_FETCH_OR_2:
6854 case BUILT_IN_ATOMIC_FETCH_OR_4:
6855 case BUILT_IN_ATOMIC_FETCH_OR_8:
6856 case BUILT_IN_ATOMIC_FETCH_OR_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6858 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6859 ignore, BUILT_IN_NONE);
6860 if (target)
6861 return target;
6862 break;
6864 case BUILT_IN_ATOMIC_TEST_AND_SET:
6865 return expand_builtin_atomic_test_and_set (exp, target);
6867 case BUILT_IN_ATOMIC_CLEAR:
6868 return expand_builtin_atomic_clear (exp);
6870 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6871 return expand_builtin_atomic_always_lock_free (exp);
6873 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6874 target = expand_builtin_atomic_is_lock_free (exp);
6875 if (target)
6876 return target;
6877 break;
6879 case BUILT_IN_ATOMIC_THREAD_FENCE:
6880 expand_builtin_atomic_thread_fence (exp);
6881 return const0_rtx;
6883 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6884 expand_builtin_atomic_signal_fence (exp);
6885 return const0_rtx;
6887 case BUILT_IN_OBJECT_SIZE:
6888 return expand_builtin_object_size (exp);
6890 case BUILT_IN_MEMCPY_CHK:
6891 case BUILT_IN_MEMPCPY_CHK:
6892 case BUILT_IN_MEMMOVE_CHK:
6893 case BUILT_IN_MEMSET_CHK:
6894 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6895 if (target)
6896 return target;
6897 break;
6899 case BUILT_IN_STRCPY_CHK:
6900 case BUILT_IN_STPCPY_CHK:
6901 case BUILT_IN_STRNCPY_CHK:
6902 case BUILT_IN_STPNCPY_CHK:
6903 case BUILT_IN_STRCAT_CHK:
6904 case BUILT_IN_STRNCAT_CHK:
6905 case BUILT_IN_SNPRINTF_CHK:
6906 case BUILT_IN_VSNPRINTF_CHK:
6907 maybe_emit_chk_warning (exp, fcode);
6908 break;
6910 case BUILT_IN_SPRINTF_CHK:
6911 case BUILT_IN_VSPRINTF_CHK:
6912 maybe_emit_sprintf_chk_warning (exp, fcode);
6913 break;
6915 case BUILT_IN_FREE:
6916 if (warn_free_nonheap_object)
6917 maybe_emit_free_warning (exp);
6918 break;
6920 case BUILT_IN_THREAD_POINTER:
6921 return expand_builtin_thread_pointer (exp, target);
6923 case BUILT_IN_SET_THREAD_POINTER:
6924 expand_builtin_set_thread_pointer (exp);
6925 return const0_rtx;
6927 case BUILT_IN_CILK_DETACH:
6928 expand_builtin_cilk_detach (exp);
6929 return const0_rtx;
6931 case BUILT_IN_CILK_POP_FRAME:
6932 expand_builtin_cilk_pop_frame (exp);
6933 return const0_rtx;
6935 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6937 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6938 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6939 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6940 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6941 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6942 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6943 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6944 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6945 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6946 /* We allow user CHKP builtins if Pointer Bounds
6947 Checker is off. */
6948 if (!chkp_function_instrumented_p (current_function_decl))
6950 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6951 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6952 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6953 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6954 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6955 return expand_normal (CALL_EXPR_ARG (exp, 0));
6956 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6957 return expand_normal (size_zero_node);
6958 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6959 return expand_normal (size_int (-1));
6960 else
6961 return const0_rtx;
6963 /* FALLTHROUGH */
6965 case BUILT_IN_CHKP_BNDMK:
6966 case BUILT_IN_CHKP_BNDSTX:
6967 case BUILT_IN_CHKP_BNDCL:
6968 case BUILT_IN_CHKP_BNDCU:
6969 case BUILT_IN_CHKP_BNDLDX:
6970 case BUILT_IN_CHKP_BNDRET:
6971 case BUILT_IN_CHKP_INTERSECT:
6972 case BUILT_IN_CHKP_NARROW:
6973 case BUILT_IN_CHKP_EXTRACT_LOWER:
6974 case BUILT_IN_CHKP_EXTRACT_UPPER:
6975 /* Software implementation of Pointer Bounds Checker is NYI.
6976 Target support is required. */
6977 error ("Your target platform does not support -fcheck-pointer-bounds");
6978 break;
6980 case BUILT_IN_ACC_ON_DEVICE:
6981 /* Do library call, if we failed to expand the builtin when
6982 folding. */
6983 break;
6985 default: /* just do library call, if unknown builtin */
6986 break;
6989 /* The switch statement above can drop through to cause the function
6990 to be called normally. */
6991 return expand_call (exp, target, ignore);
6994 /* Similar to expand_builtin but is used for instrumented calls. */
6997 expand_builtin_with_bounds (tree exp, rtx target,
6998 rtx subtarget ATTRIBUTE_UNUSED,
6999 machine_mode mode, int ignore)
7001 tree fndecl = get_callee_fndecl (exp);
7002 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7004 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7006 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7007 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7009 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7010 && fcode < END_CHKP_BUILTINS);
7012 switch (fcode)
7014 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7015 target = expand_builtin_memcpy_with_bounds (exp, target);
7016 if (target)
7017 return target;
7018 break;
7020 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7021 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7027 target = expand_builtin_memset_with_bounds (exp, target, mode);
7028 if (target)
7029 return target;
7030 break;
7032 default:
7033 break;
7036 /* The switch statement above can drop through to cause the function
7037 to be called normally. */
7038 return expand_call (exp, target, ignore);
7041 /* Determine whether a tree node represents a call to a built-in
7042 function. If the tree T is a call to a built-in function with
7043 the right number of arguments of the appropriate types, return
7044 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7045 Otherwise the return value is END_BUILTINS. */
7047 enum built_in_function
7048 builtin_mathfn_code (const_tree t)
7050 const_tree fndecl, arg, parmlist;
7051 const_tree argtype, parmtype;
7052 const_call_expr_arg_iterator iter;
7054 if (TREE_CODE (t) != CALL_EXPR
7055 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7056 return END_BUILTINS;
7058 fndecl = get_callee_fndecl (t);
7059 if (fndecl == NULL_TREE
7060 || TREE_CODE (fndecl) != FUNCTION_DECL
7061 || ! DECL_BUILT_IN (fndecl)
7062 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7063 return END_BUILTINS;
7065 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7066 init_const_call_expr_arg_iterator (t, &iter);
7067 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7069 /* If a function doesn't take a variable number of arguments,
7070 the last element in the list will have type `void'. */
7071 parmtype = TREE_VALUE (parmlist);
7072 if (VOID_TYPE_P (parmtype))
7074 if (more_const_call_expr_args_p (&iter))
7075 return END_BUILTINS;
7076 return DECL_FUNCTION_CODE (fndecl);
7079 if (! more_const_call_expr_args_p (&iter))
7080 return END_BUILTINS;
7082 arg = next_const_call_expr_arg (&iter);
7083 argtype = TREE_TYPE (arg);
7085 if (SCALAR_FLOAT_TYPE_P (parmtype))
7087 if (! SCALAR_FLOAT_TYPE_P (argtype))
7088 return END_BUILTINS;
7090 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7092 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7093 return END_BUILTINS;
7095 else if (POINTER_TYPE_P (parmtype))
7097 if (! POINTER_TYPE_P (argtype))
7098 return END_BUILTINS;
7100 else if (INTEGRAL_TYPE_P (parmtype))
7102 if (! INTEGRAL_TYPE_P (argtype))
7103 return END_BUILTINS;
7105 else
7106 return END_BUILTINS;
7109 /* Variable-length argument list. */
7110 return DECL_FUNCTION_CODE (fndecl);
7113 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7114 evaluate to a constant. */
7116 static tree
7117 fold_builtin_constant_p (tree arg)
7119 /* We return 1 for a numeric type that's known to be a constant
7120 value at compile-time or for an aggregate type that's a
7121 literal constant. */
7122 STRIP_NOPS (arg);
7124 /* If we know this is a constant, return the constant one. */
7125 if (CONSTANT_CLASS_P (arg)
7126 || (TREE_CODE (arg) == CONSTRUCTOR
7127 && TREE_CONSTANT (arg)))
7128 return integer_one_node;
7129 if (TREE_CODE (arg) == ADDR_EXPR)
7131 tree op = TREE_OPERAND (arg, 0);
7132 if (TREE_CODE (op) == STRING_CST
7133 || (TREE_CODE (op) == ARRAY_REF
7134 && integer_zerop (TREE_OPERAND (op, 1))
7135 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7136 return integer_one_node;
7139 /* If this expression has side effects, show we don't know it to be a
7140      constant.  Likewise if it's a pointer or aggregate type, since in
7141      those cases we only want literals, as those are only optimized
7142 when generating RTL, not later.
7143 And finally, if we are compiling an initializer, not code, we
7144 need to return a definite result now; there's not going to be any
7145 more optimization done. */
7146 if (TREE_SIDE_EFFECTS (arg)
7147 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7148 || POINTER_TYPE_P (TREE_TYPE (arg))
7149 || cfun == 0
7150 || folding_initializer
7151 || force_folding_builtin_constant_p)
7152 return integer_zero_node;
7154 return NULL_TREE;
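/* Illustrative sketch, not part of builtins.c: roughly what the folding of
   __builtin_constant_p above means at the source level (the function name
   below is hypothetical).  */
static int constant_p_examples (int x)
{
  int a = __builtin_constant_p (42);     /* literal constant: folds to 1 */
  int b = __builtin_constant_p ("abc");  /* address of a string literal: folds to 1 */
  int c = __builtin_constant_p (x++);    /* expression with side effects: folds to 0 */
  return a + b + c;
}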
7157 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7158 return it as a truthvalue. */
7160 static tree
7161 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7162 tree predictor)
7164 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7166 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7167 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7168 ret_type = TREE_TYPE (TREE_TYPE (fn));
7169 pred_type = TREE_VALUE (arg_types);
7170 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7172 pred = fold_convert_loc (loc, pred_type, pred);
7173 expected = fold_convert_loc (loc, expected_type, expected);
7174 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7175 predictor);
7177 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7178 build_int_cst (ret_type, 0));
7181 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7182 NULL_TREE if no simplification is possible. */
7184 tree
7185 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7187 tree inner, fndecl, inner_arg0;
7188 enum tree_code code;
7190 /* Distribute the expected value over short-circuiting operators.
7191 See through the cast from truthvalue_type_node to long. */
7192 inner_arg0 = arg0;
7193 while (CONVERT_EXPR_P (inner_arg0)
7194 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7195 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7196 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7198   /* If this is a builtin_expect within a builtin_expect, keep the
7199      inner one.  See through a comparison against a constant.  It
7200      might have been added to create a truthvalue.  */
7201 inner = inner_arg0;
7203 if (COMPARISON_CLASS_P (inner)
7204 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7205 inner = TREE_OPERAND (inner, 0);
7207 if (TREE_CODE (inner) == CALL_EXPR
7208 && (fndecl = get_callee_fndecl (inner))
7209 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7210 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7211 return arg0;
7213 inner = inner_arg0;
7214 code = TREE_CODE (inner);
7215 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7217 tree op0 = TREE_OPERAND (inner, 0);
7218 tree op1 = TREE_OPERAND (inner, 1);
7220 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7221 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7222 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7224 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7227 /* If the argument isn't invariant then there's nothing else we can do. */
7228 if (!TREE_CONSTANT (inner_arg0))
7229 return NULL_TREE;
7231 /* If we expect that a comparison against the argument will fold to
7232 a constant return the constant. In practice, this means a true
7233 constant or the address of a non-weak symbol. */
7234 inner = inner_arg0;
7235 STRIP_NOPS (inner);
7236 if (TREE_CODE (inner) == ADDR_EXPR)
7240 inner = TREE_OPERAND (inner, 0);
7242 while (TREE_CODE (inner) == COMPONENT_REF
7243 || TREE_CODE (inner) == ARRAY_REF);
7244 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7245 return NULL_TREE;
7248 /* Otherwise, ARG0 already has the proper type for the return value. */
7249 return arg0;
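/* Illustrative sketch, not part of builtins.c: the distribution over
   short-circuit operators above behaves roughly as if the first function
   below had been rewritten into the second (names are hypothetical).  */
static long expect_before (int a, int b)
{
  return __builtin_expect (a && b, 1);
}
static long expect_after (int a, int b)
{
  /* Each operand of the short-circuit gets its own prediction.  */
  return (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0);
}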
7252 /* Fold a call to __builtin_classify_type with argument ARG. */
7254 static tree
7255 fold_builtin_classify_type (tree arg)
7257 if (arg == 0)
7258 return build_int_cst (integer_type_node, no_type_class);
7260 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7263 /* Fold a call to __builtin_strlen with argument ARG. */
7265 static tree
7266 fold_builtin_strlen (location_t loc, tree type, tree arg)
7268 if (!validate_arg (arg, POINTER_TYPE))
7269 return NULL_TREE;
7270 else
7272 tree len = c_strlen (arg, 0);
7274 if (len)
7275 return fold_convert_loc (loc, type, len);
7277 return NULL_TREE;
7281 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7283 static tree
7284 fold_builtin_inf (location_t loc, tree type, int warn)
7286 REAL_VALUE_TYPE real;
7288 /* __builtin_inff is intended to be usable to define INFINITY on all
7289 targets. If an infinity is not available, INFINITY expands "to a
7290 positive constant of type float that overflows at translation
7291 time", footnote "In this case, using INFINITY will violate the
7292 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7293 Thus we pedwarn to ensure this constraint violation is
7294 diagnosed. */
7295 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7296 pedwarn (loc, 0, "target format does not support infinity");
7298 real_inf (&real);
7299 return build_real (type, real);
7302 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7303 NULL_TREE if no simplification can be made. */
7305 static tree
7306 fold_builtin_sincos (location_t loc,
7307 tree arg0, tree arg1, tree arg2)
7309 tree type;
7310 tree fndecl, call = NULL_TREE;
7312 if (!validate_arg (arg0, REAL_TYPE)
7313 || !validate_arg (arg1, POINTER_TYPE)
7314 || !validate_arg (arg2, POINTER_TYPE))
7315 return NULL_TREE;
7317 type = TREE_TYPE (arg0);
7319 /* Calculate the result when the argument is a constant. */
7320 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7321 if (fn == END_BUILTINS)
7322 return NULL_TREE;
7324 /* Canonicalize sincos to cexpi. */
7325 if (TREE_CODE (arg0) == REAL_CST)
7327 tree complex_type = build_complex_type (type);
7328 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7330 if (!call)
7332 if (!targetm.libc_has_function (function_c99_math_complex)
7333 || !builtin_decl_implicit_p (fn))
7334 return NULL_TREE;
7335 fndecl = builtin_decl_explicit (fn);
7336 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7337 call = builtin_save_expr (call);
7340 return build2 (COMPOUND_EXPR, void_type_node,
7341 build2 (MODIFY_EXPR, void_type_node,
7342 build_fold_indirect_ref_loc (loc, arg1),
7343 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7344 build2 (MODIFY_EXPR, void_type_node,
7345 build_fold_indirect_ref_loc (loc, arg2),
7346 fold_build1_loc (loc, REALPART_EXPR, type, call)));
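/* Illustrative sketch, not part of builtins.c: the canonicalization above
   rewrites sincos (x, &s, &c) into a single cexpi-style call whose
   imaginary part is sin (x) and whose real part is cos (x).  The
   stand-alone function below uses cexp (I * x) as a stand-in for the
   internal cexpi.  */
#include <complex.h>
static void folded_sincos (double x, double *s, double *c)
{
  double _Complex t = cexp (I * x);
  *s = cimag (t);   /* IMAGPART_EXPR stored through arg1 */
  *c = creal (t);   /* REALPART_EXPR stored through arg2 */
}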
7349 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7350 Return NULL_TREE if no simplification can be made. */
7352 static tree
7353 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7355 if (!validate_arg (arg1, POINTER_TYPE)
7356 || !validate_arg (arg2, POINTER_TYPE)
7357 || !validate_arg (len, INTEGER_TYPE))
7358 return NULL_TREE;
7360 /* If the LEN parameter is zero, return zero. */
7361 if (integer_zerop (len))
7362 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7363 arg1, arg2);
7365 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7366 if (operand_equal_p (arg1, arg2, 0))
7367 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7369   /* If the LEN parameter is one, return an expression corresponding to
7370      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7371 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7373 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7374 tree cst_uchar_ptr_node
7375 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7377 tree ind1
7378 = fold_convert_loc (loc, integer_type_node,
7379 build1 (INDIRECT_REF, cst_uchar_node,
7380 fold_convert_loc (loc,
7381 cst_uchar_ptr_node,
7382 arg1)));
7383 tree ind2
7384 = fold_convert_loc (loc, integer_type_node,
7385 build1 (INDIRECT_REF, cst_uchar_node,
7386 fold_convert_loc (loc,
7387 cst_uchar_ptr_node,
7388 arg2)));
7389 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7392 return NULL_TREE;
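/* Illustrative sketch, not part of builtins.c: the LEN == 1 case above
   reduces a memcmp call to a single byte difference, roughly as below
   (the function name is hypothetical).  */
static int folded_memcmp_1 (const void *p, const void *q)
{
  return *(const unsigned char *) p - *(const unsigned char *) q;
}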
7395 /* Fold a call to builtin isascii with argument ARG. */
7397 static tree
7398 fold_builtin_isascii (location_t loc, tree arg)
7400 if (!validate_arg (arg, INTEGER_TYPE))
7401 return NULL_TREE;
7402 else
7404 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7405 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7406 build_int_cst (integer_type_node,
7407 ~ (unsigned HOST_WIDE_INT) 0x7f));
7408 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7409 arg, integer_zero_node);
7413 /* Fold a call to builtin toascii with argument ARG. */
7415 static tree
7416 fold_builtin_toascii (location_t loc, tree arg)
7418 if (!validate_arg (arg, INTEGER_TYPE))
7419 return NULL_TREE;
7421 /* Transform toascii(c) -> (c & 0x7f). */
7422 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7423 build_int_cst (integer_type_node, 0x7f));
7426 /* Fold a call to builtin isdigit with argument ARG. */
7428 static tree
7429 fold_builtin_isdigit (location_t loc, tree arg)
7431 if (!validate_arg (arg, INTEGER_TYPE))
7432 return NULL_TREE;
7433 else
7435 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7436 /* According to the C standard, isdigit is unaffected by locale.
7437 However, it definitely is affected by the target character set. */
7438 unsigned HOST_WIDE_INT target_digit0
7439 = lang_hooks.to_target_charset ('0');
7441 if (target_digit0 == 0)
7442 return NULL_TREE;
7444 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7445 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7446 build_int_cst (unsigned_type_node, target_digit0));
7447 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7448 build_int_cst (unsigned_type_node, 9));
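/* Illustrative sketch, not part of builtins.c: the three ctype folds above
   written out directly in C, assuming an ASCII-style target character set
   (the function names are hypothetical).  */
static int folded_isascii (int c) { return (c & ~0x7f) == 0; }
static int folded_toascii (int c) { return c & 0x7f; }
static int folded_isdigit (int c) { return (unsigned) c - '0' <= 9; }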
7452 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7454 static tree
7455 fold_builtin_fabs (location_t loc, tree arg, tree type)
7457 if (!validate_arg (arg, REAL_TYPE))
7458 return NULL_TREE;
7460 arg = fold_convert_loc (loc, type, arg);
7461 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7464 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7466 static tree
7467 fold_builtin_abs (location_t loc, tree arg, tree type)
7469 if (!validate_arg (arg, INTEGER_TYPE))
7470 return NULL_TREE;
7472 arg = fold_convert_loc (loc, type, arg);
7473 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7476 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7478 static tree
7479 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7481 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7482 if (validate_arg (arg0, REAL_TYPE)
7483 && validate_arg (arg1, REAL_TYPE)
7484 && validate_arg (arg2, REAL_TYPE)
7485 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7486 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7488 return NULL_TREE;
7491 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7493 static tree
7494 fold_builtin_carg (location_t loc, tree arg, tree type)
7496 if (validate_arg (arg, COMPLEX_TYPE)
7497 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7499 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7501 if (atan2_fn)
7503 tree new_arg = builtin_save_expr (arg);
7504 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7505 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7506 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7510 return NULL_TREE;
7513 /* Fold a call to builtin frexp; we can assume the base is 2.  */
7515 static tree
7516 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7518 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7519 return NULL_TREE;
7521 STRIP_NOPS (arg0);
7523 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7524 return NULL_TREE;
7526 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7528 /* Proceed if a valid pointer type was passed in. */
7529 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7531 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7532 tree frac, exp;
7534 switch (value->cl)
7536 case rvc_zero:
7537 /* For +-0, return (*exp = 0, +-0). */
7538 exp = integer_zero_node;
7539 frac = arg0;
7540 break;
7541 case rvc_nan:
7542 case rvc_inf:
7543 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7544 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7545 case rvc_normal:
7547 /* Since the frexp function always expects base 2, and in
7548 GCC normalized significands are already in the range
7549 [0.5, 1.0), we have exactly what frexp wants. */
7550 REAL_VALUE_TYPE frac_rvt = *value;
7551 SET_REAL_EXP (&frac_rvt, 0);
7552 frac = build_real (rettype, frac_rvt);
7553 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7555 break;
7556 default:
7557 gcc_unreachable ();
7560       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
7561 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7562 TREE_SIDE_EFFECTS (arg1) = 1;
7563 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7566 return NULL_TREE;
7569 /* Fold a call to builtin modf. */
7571 static tree
7572 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7574 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7575 return NULL_TREE;
7577 STRIP_NOPS (arg0);
7579 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7580 return NULL_TREE;
7582 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7584 /* Proceed if a valid pointer type was passed in. */
7585 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7587 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7588 REAL_VALUE_TYPE trunc, frac;
7590 switch (value->cl)
7592 case rvc_nan:
7593 case rvc_zero:
7594 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7595 trunc = frac = *value;
7596 break;
7597 case rvc_inf:
7598 /* For +-Inf, return (*arg1 = arg0, +-0). */
7599 frac = dconst0;
7600 frac.sign = value->sign;
7601 trunc = *value;
7602 break;
7603 case rvc_normal:
7604 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7605 real_trunc (&trunc, VOIDmode, value);
7606 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7607 /* If the original number was negative and already
7608 integral, then the fractional part is -0.0. */
7609 if (value->sign && frac.cl == rvc_zero)
7610 frac.sign = value->sign;
7611 break;
7614 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7615 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7616 build_real (rettype, trunc));
7617 TREE_SIDE_EFFECTS (arg1) = 1;
7618 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7619 build_real (rettype, frac));
7622 return NULL_TREE;
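/* Illustrative sketch, not part of builtins.c: the constant folds above for
   frexp and modf mean that calls with literal arguments collapse at compile
   time; since GCC keeps normalized significands in [0.5, 1.0), frexp of a
   constant is read straight out of the REAL_VALUE_TYPE.  */
#include <math.h>
static void folded_frexp_modf (double *fr, int *e, double *ip, double *fp)
{
  *fr = frexp (6.0, e);   /* folds to (*e = 3, 0.75), since 6.0 == 0.75 * 2^3 */
  *fp = modf (2.5, ip);   /* folds to (*ip = 2.0, 0.5) */
}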
7625 /* Given a location LOC, an interclass builtin function decl FNDECL
7626    and its single argument ARG, return a folded expression computing
7627    the same, or NULL_TREE if we either couldn't or didn't want to fold
7628    (the latter happens if there's an RTL instruction available).  */
7630 static tree
7631 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7633 machine_mode mode;
7635 if (!validate_arg (arg, REAL_TYPE))
7636 return NULL_TREE;
7638 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7639 return NULL_TREE;
7641 mode = TYPE_MODE (TREE_TYPE (arg));
7643 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7645 /* If there is no optab, try generic code. */
7646 switch (DECL_FUNCTION_CODE (fndecl))
7648 tree result;
7650 CASE_FLT_FN (BUILT_IN_ISINF):
7652 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7653 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7654 tree type = TREE_TYPE (arg);
7655 REAL_VALUE_TYPE r;
7656 char buf[128];
7658 if (is_ibm_extended)
7660 /* NaN and Inf are encoded in the high-order double value
7661 only. The low-order value is not significant. */
7662 type = double_type_node;
7663 mode = DFmode;
7664 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7666 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7667 real_from_string (&r, buf);
7668 result = build_call_expr (isgr_fn, 2,
7669 fold_build1_loc (loc, ABS_EXPR, type, arg),
7670 build_real (type, r));
7671 return result;
7673 CASE_FLT_FN (BUILT_IN_FINITE):
7674 case BUILT_IN_ISFINITE:
7676 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7677 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7678 tree type = TREE_TYPE (arg);
7679 REAL_VALUE_TYPE r;
7680 char buf[128];
7682 if (is_ibm_extended)
7684 /* NaN and Inf are encoded in the high-order double value
7685 only. The low-order value is not significant. */
7686 type = double_type_node;
7687 mode = DFmode;
7688 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7690 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7691 real_from_string (&r, buf);
7692 result = build_call_expr (isle_fn, 2,
7693 fold_build1_loc (loc, ABS_EXPR, type, arg),
7694 build_real (type, r));
7695 /*result = fold_build2_loc (loc, UNGT_EXPR,
7696 TREE_TYPE (TREE_TYPE (fndecl)),
7697 fold_build1_loc (loc, ABS_EXPR, type, arg),
7698 build_real (type, r));
7699 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7700 TREE_TYPE (TREE_TYPE (fndecl)),
7701 result);*/
7702 return result;
7704 case BUILT_IN_ISNORMAL:
7706 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7707 islessequal(fabs(x),DBL_MAX). */
7708 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7709 tree type = TREE_TYPE (arg);
7710 tree orig_arg, max_exp, min_exp;
7711 machine_mode orig_mode = mode;
7712 REAL_VALUE_TYPE rmax, rmin;
7713 char buf[128];
7715 orig_arg = arg = builtin_save_expr (arg);
7716 if (is_ibm_extended)
7718 /* Use double to test the normal range of IBM extended
7719 precision. Emin for IBM extended precision is
7720 different to emin for IEEE double, being 53 higher
7721 since the low double exponent is at least 53 lower
7722 than the high double exponent. */
7723 type = double_type_node;
7724 mode = DFmode;
7725 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7727 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7729 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7730 real_from_string (&rmax, buf);
7731 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7732 real_from_string (&rmin, buf);
7733 max_exp = build_real (type, rmax);
7734 min_exp = build_real (type, rmin);
7736 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7737 if (is_ibm_extended)
7739 /* Testing the high end of the range is done just using
7740 the high double, using the same test as isfinite().
7741 For the subnormal end of the range we first test the
7742 high double, then if its magnitude is equal to the
7743 limit of 0x1p-969, we test whether the low double is
7744 non-zero and opposite sign to the high double. */
7745 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7746 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7747 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7748 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7749 arg, min_exp);
7750 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7751 complex_double_type_node, orig_arg);
7752 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7753 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7754 tree zero = build_real (type, dconst0);
7755 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7756 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7757 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7758 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7759 fold_build3 (COND_EXPR,
7760 integer_type_node,
7761 hilt, logt, lolt));
7762 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7763 eq_min, ok_lo);
7764 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7765 gt_min, eq_min);
7767 else
7769 tree const isge_fn
7770 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7771 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7773 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7774 max_exp, min_exp);
7775 return result;
7777 default:
7778 break;
7781 return NULL_TREE;
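/* Illustrative sketch, not part of builtins.c: when no direct optab exists,
   the generic expansions above amount to the source-level rewrites below.
   DBL_MAX / DBL_MIN stand for the largest finite and smallest normal values
   of the argument's type; the function names are hypothetical.  */
#include <float.h>
#include <math.h>
static int folded_isinf (double x)    { return isgreater (fabs (x), DBL_MAX); }
static int folded_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }
static int folded_isnormal (double x)
{
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}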
7784 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7785 ARG is the argument for the call. */
7787 static tree
7788 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7790 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7792 if (!validate_arg (arg, REAL_TYPE))
7793 return NULL_TREE;
7795 switch (builtin_index)
7797 case BUILT_IN_ISINF:
7798 if (!HONOR_INFINITIES (arg))
7799 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7801 return NULL_TREE;
7803 case BUILT_IN_ISINF_SIGN:
7805 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7806 /* In a boolean context, GCC will fold the inner COND_EXPR to
7807 1. So e.g. "if (isinf_sign(x))" would be folded to just
7808 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7809 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7810 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7811 tree tmp = NULL_TREE;
7813 arg = builtin_save_expr (arg);
7815 if (signbit_fn && isinf_fn)
7817 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7818 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7820 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7821 signbit_call, integer_zero_node);
7822 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7823 isinf_call, integer_zero_node);
7825 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7826 integer_minus_one_node, integer_one_node);
7827 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7828 isinf_call, tmp,
7829 integer_zero_node);
7832 return tmp;
7835 case BUILT_IN_ISFINITE:
7836 if (!HONOR_NANS (arg)
7837 && !HONOR_INFINITIES (arg))
7838 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7840 return NULL_TREE;
7842 case BUILT_IN_ISNAN:
7843 if (!HONOR_NANS (arg))
7844 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7847 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7848 if (is_ibm_extended)
7850 /* NaN and Inf are encoded in the high-order double value
7851 only. The low-order value is not significant. */
7852 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7855 arg = builtin_save_expr (arg);
7856 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7858 default:
7859 gcc_unreachable ();
7863 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7864 This builtin will generate code to return the appropriate floating
7865 point classification depending on the value of the floating point
7866 number passed in. The possible return values must be supplied as
7867 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7868    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
7869    one floating point argument, which is "type generic".  */
7871 static tree
7872 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7874 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7875 arg, type, res, tmp;
7876 machine_mode mode;
7877 REAL_VALUE_TYPE r;
7878 char buf[128];
7880 /* Verify the required arguments in the original call. */
7881 if (nargs != 6
7882 || !validate_arg (args[0], INTEGER_TYPE)
7883 || !validate_arg (args[1], INTEGER_TYPE)
7884 || !validate_arg (args[2], INTEGER_TYPE)
7885 || !validate_arg (args[3], INTEGER_TYPE)
7886 || !validate_arg (args[4], INTEGER_TYPE)
7887 || !validate_arg (args[5], REAL_TYPE))
7888 return NULL_TREE;
7890 fp_nan = args[0];
7891 fp_infinite = args[1];
7892 fp_normal = args[2];
7893 fp_subnormal = args[3];
7894 fp_zero = args[4];
7895 arg = args[5];
7896 type = TREE_TYPE (arg);
7897 mode = TYPE_MODE (type);
7898 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7900 /* fpclassify(x) ->
7901 isnan(x) ? FP_NAN :
7902 (fabs(x) == Inf ? FP_INFINITE :
7903 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7904 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7906 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7907 build_real (type, dconst0));
7908 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7909 tmp, fp_zero, fp_subnormal);
7911 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7912 real_from_string (&r, buf);
7913 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7914 arg, build_real (type, r));
7915 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7917 if (HONOR_INFINITIES (mode))
7919 real_inf (&r);
7920 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7921 build_real (type, r));
7922 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7923 fp_infinite, res);
7926 if (HONOR_NANS (mode))
7928 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7929 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7932 return res;
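/* Illustrative sketch, not part of builtins.c: the nested COND_EXPRs built
   above correspond to the chain below, evaluated on fabs (x); DBL_MIN
   stands for the smallest normal value of the argument's type (the
   function name is hypothetical).  */
#include <float.h>
#include <math.h>
static int folded_fpclassify (double x)
{
  double ax = fabs (x);
  return isnan (x)      ? FP_NAN
       : ax == INFINITY ? FP_INFINITE
       : ax >= DBL_MIN  ? FP_NORMAL
       : ax == 0.0      ? FP_ZERO
       :                  FP_SUBNORMAL;
}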
7935 /* Fold a call to an unordered comparison function such as
7936 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7937 being called and ARG0 and ARG1 are the arguments for the call.
7938 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7939 the opposite of the desired result. UNORDERED_CODE is used
7940 for modes that can hold NaNs and ORDERED_CODE is used for
7941 the rest. */
7943 static tree
7944 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7945 enum tree_code unordered_code,
7946 enum tree_code ordered_code)
7948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7949 enum tree_code code;
7950 tree type0, type1;
7951 enum tree_code code0, code1;
7952 tree cmp_type = NULL_TREE;
7954 type0 = TREE_TYPE (arg0);
7955 type1 = TREE_TYPE (arg1);
7957 code0 = TREE_CODE (type0);
7958 code1 = TREE_CODE (type1);
7960 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7961 /* Choose the wider of two real types. */
7962 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7963 ? type0 : type1;
7964 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7965 cmp_type = type0;
7966 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7967 cmp_type = type1;
7969 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7970 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7972 if (unordered_code == UNORDERED_EXPR)
7974 if (!HONOR_NANS (arg0))
7975 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7976 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7979 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7980 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7981 fold_build2_loc (loc, code, type, arg0, arg1));
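/* Illustrative sketch, not part of builtins.c: the fold above turns, e.g.,
   isgreater (x, y) into the negation of the "unordered or less-equal"
   comparison, which is roughly the short-circuit form below (the function
   name is hypothetical).  */
#include <math.h>
static int folded_isgreater (double x, double y)
{
  return !(isunordered (x, y) || x <= y);
}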
7984 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7985    arithmetic if it can never overflow, or into internal functions that
7986    return both the result of the arithmetic and an overflow boolean flag in
7987    a complex integer result, or into some other check for overflow.
7988 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7989 checking part of that. */
7991 static tree
7992 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7993 tree arg0, tree arg1, tree arg2)
7995 enum internal_fn ifn = IFN_LAST;
7996 /* The code of the expression corresponding to the type-generic
7997 built-in, or ERROR_MARK for the type-specific ones. */
7998 enum tree_code opcode = ERROR_MARK;
7999 bool ovf_only = false;
8001 switch (fcode)
8003 case BUILT_IN_ADD_OVERFLOW_P:
8004 ovf_only = true;
8005 /* FALLTHRU */
8006 case BUILT_IN_ADD_OVERFLOW:
8007 opcode = PLUS_EXPR;
8008 /* FALLTHRU */
8009 case BUILT_IN_SADD_OVERFLOW:
8010 case BUILT_IN_SADDL_OVERFLOW:
8011 case BUILT_IN_SADDLL_OVERFLOW:
8012 case BUILT_IN_UADD_OVERFLOW:
8013 case BUILT_IN_UADDL_OVERFLOW:
8014 case BUILT_IN_UADDLL_OVERFLOW:
8015 ifn = IFN_ADD_OVERFLOW;
8016 break;
8017 case BUILT_IN_SUB_OVERFLOW_P:
8018 ovf_only = true;
8019 /* FALLTHRU */
8020 case BUILT_IN_SUB_OVERFLOW:
8021 opcode = MINUS_EXPR;
8022 /* FALLTHRU */
8023 case BUILT_IN_SSUB_OVERFLOW:
8024 case BUILT_IN_SSUBL_OVERFLOW:
8025 case BUILT_IN_SSUBLL_OVERFLOW:
8026 case BUILT_IN_USUB_OVERFLOW:
8027 case BUILT_IN_USUBL_OVERFLOW:
8028 case BUILT_IN_USUBLL_OVERFLOW:
8029 ifn = IFN_SUB_OVERFLOW;
8030 break;
8031 case BUILT_IN_MUL_OVERFLOW_P:
8032 ovf_only = true;
8033 /* FALLTHRU */
8034 case BUILT_IN_MUL_OVERFLOW:
8035 opcode = MULT_EXPR;
8036 /* FALLTHRU */
8037 case BUILT_IN_SMUL_OVERFLOW:
8038 case BUILT_IN_SMULL_OVERFLOW:
8039 case BUILT_IN_SMULLL_OVERFLOW:
8040 case BUILT_IN_UMUL_OVERFLOW:
8041 case BUILT_IN_UMULL_OVERFLOW:
8042 case BUILT_IN_UMULLL_OVERFLOW:
8043 ifn = IFN_MUL_OVERFLOW;
8044 break;
8045 default:
8046 gcc_unreachable ();
8049 /* For the "generic" overloads, the first two arguments can have different
8050 types and the last argument determines the target type to use to check
8051 for overflow. The arguments of the other overloads all have the same
8052 type. */
8053 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8055 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8056 arguments are constant, attempt to fold the built-in call into a constant
8057 expression indicating whether or not it detected an overflow. */
8058 if (ovf_only
8059 && TREE_CODE (arg0) == INTEGER_CST
8060 && TREE_CODE (arg1) == INTEGER_CST)
8061 /* Perform the computation in the target type and check for overflow. */
8062 return omit_one_operand_loc (loc, boolean_type_node,
8063 arith_overflowed_p (opcode, type, arg0, arg1)
8064 ? boolean_true_node : boolean_false_node,
8065 arg2);
8067 tree ctype = build_complex_type (type);
8068 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8069 2, arg0, arg1);
8070 tree tgt = save_expr (call);
8071 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8072 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8073 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8075 if (ovf_only)
8076 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8078 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8079 tree store
8080 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8081 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
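/* Illustrative usage sketch, not part of builtins.c: with the folding
   above, an _overflow call becomes an internal IFN_*_OVERFLOW whose complex
   result carries the value and the overflow flag, while an _overflow_p call
   with constant operands folds to a compile-time boolean.  */
#include <stdbool.h>
#include <limits.h>
static bool add_would_overflow (int a, int b, int *sum)
{
  return __builtin_add_overflow (a, b, sum);              /* true on overflow */
}
static bool int_max_plus_one_overflows (void)
{
  return __builtin_add_overflow_p (INT_MAX, 1, (int) 0);  /* folds to true */
}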
8084 /* Fold a call to __builtin_FILE to a constant string. */
8086 static inline tree
8087 fold_builtin_FILE (location_t loc)
8089 if (const char *fname = LOCATION_FILE (loc))
8090 return build_string_literal (strlen (fname) + 1, fname);
8092 return build_string_literal (1, "");
8095 /* Fold a call to __builtin_FUNCTION to a constant string. */
8097 static inline tree
8098 fold_builtin_FUNCTION ()
8100 if (current_function_decl)
8102 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8103 return build_string_literal (strlen (name) + 1, name);
8106 return build_string_literal (1, "");
8109 /* Fold a call to __builtin_LINE to an integer constant. */
8111 static inline tree
8112 fold_builtin_LINE (location_t loc, tree type)
8114 return build_int_cst (type, LOCATION_LINE (loc));
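/* Illustrative usage sketch, not part of builtins.c: the three folds above
   replace these calls with constants describing the location of the call
   itself (the function name is hypothetical).  */
#include <stdio.h>
static void report_location (void)
{
  printf ("%s:%d in %s\n",
          __builtin_FILE (), __builtin_LINE (), __builtin_FUNCTION ());
}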
8117 /* Fold a call to built-in function FNDECL with 0 arguments.
8118 This function returns NULL_TREE if no simplification was possible. */
8120 static tree
8121 fold_builtin_0 (location_t loc, tree fndecl)
8123 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8124 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8125 switch (fcode)
8127 case BUILT_IN_FILE:
8128 return fold_builtin_FILE (loc);
8130 case BUILT_IN_FUNCTION:
8131 return fold_builtin_FUNCTION ();
8133 case BUILT_IN_LINE:
8134 return fold_builtin_LINE (loc, type);
8136 CASE_FLT_FN (BUILT_IN_INF):
8137 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8138 case BUILT_IN_INFD32:
8139 case BUILT_IN_INFD64:
8140 case BUILT_IN_INFD128:
8141 return fold_builtin_inf (loc, type, true);
8143 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8144 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8145 return fold_builtin_inf (loc, type, false);
8147 case BUILT_IN_CLASSIFY_TYPE:
8148 return fold_builtin_classify_type (NULL_TREE);
8150 default:
8151 break;
8153 return NULL_TREE;
8156 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8157 This function returns NULL_TREE if no simplification was possible. */
8159 static tree
8160 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8163 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8165 if (TREE_CODE (arg0) == ERROR_MARK)
8166 return NULL_TREE;
8168 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8169 return ret;
8171 switch (fcode)
8173 case BUILT_IN_CONSTANT_P:
8175 tree val = fold_builtin_constant_p (arg0);
8177 	/* Gimplification will pull the CALL_EXPR for the builtin out of
8178 	   an if condition.  When not optimizing, we'll not CSE it back.
8179 	   To avoid regressions such as link errors, return false now.  */
8180 if (!val && !optimize)
8181 val = integer_zero_node;
8183 return val;
8186 case BUILT_IN_CLASSIFY_TYPE:
8187 return fold_builtin_classify_type (arg0);
8189 case BUILT_IN_STRLEN:
8190 return fold_builtin_strlen (loc, type, arg0);
8192 CASE_FLT_FN (BUILT_IN_FABS):
8193 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8194 case BUILT_IN_FABSD32:
8195 case BUILT_IN_FABSD64:
8196 case BUILT_IN_FABSD128:
8197 return fold_builtin_fabs (loc, arg0, type);
8199 case BUILT_IN_ABS:
8200 case BUILT_IN_LABS:
8201 case BUILT_IN_LLABS:
8202 case BUILT_IN_IMAXABS:
8203 return fold_builtin_abs (loc, arg0, type);
8205 CASE_FLT_FN (BUILT_IN_CONJ):
8206 if (validate_arg (arg0, COMPLEX_TYPE)
8207 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8208 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8209 break;
8211 CASE_FLT_FN (BUILT_IN_CREAL):
8212 if (validate_arg (arg0, COMPLEX_TYPE)
8213 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8214 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8215 break;
8217 CASE_FLT_FN (BUILT_IN_CIMAG):
8218 if (validate_arg (arg0, COMPLEX_TYPE)
8219 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8220 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8221 break;
8223 CASE_FLT_FN (BUILT_IN_CARG):
8224 return fold_builtin_carg (loc, arg0, type);
8226 case BUILT_IN_ISASCII:
8227 return fold_builtin_isascii (loc, arg0);
8229 case BUILT_IN_TOASCII:
8230 return fold_builtin_toascii (loc, arg0);
8232 case BUILT_IN_ISDIGIT:
8233 return fold_builtin_isdigit (loc, arg0);
8235 CASE_FLT_FN (BUILT_IN_FINITE):
8236 case BUILT_IN_FINITED32:
8237 case BUILT_IN_FINITED64:
8238 case BUILT_IN_FINITED128:
8239 case BUILT_IN_ISFINITE:
8241 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8242 if (ret)
8243 return ret;
8244 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8247 CASE_FLT_FN (BUILT_IN_ISINF):
8248 case BUILT_IN_ISINFD32:
8249 case BUILT_IN_ISINFD64:
8250 case BUILT_IN_ISINFD128:
8252 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8253 if (ret)
8254 return ret;
8255 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8258 case BUILT_IN_ISNORMAL:
8259 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8261 case BUILT_IN_ISINF_SIGN:
8262 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8264 CASE_FLT_FN (BUILT_IN_ISNAN):
8265 case BUILT_IN_ISNAND32:
8266 case BUILT_IN_ISNAND64:
8267 case BUILT_IN_ISNAND128:
8268 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8270 case BUILT_IN_FREE:
8271 if (integer_zerop (arg0))
8272 return build_empty_stmt (loc);
8273 break;
8275 default:
8276 break;
8279 return NULL_TREE;
8283 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8284 This function returns NULL_TREE if no simplification was possible. */
8286 static tree
8287 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8289 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8290 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8292 if (TREE_CODE (arg0) == ERROR_MARK
8293 || TREE_CODE (arg1) == ERROR_MARK)
8294 return NULL_TREE;
8296 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8297 return ret;
8299 switch (fcode)
8301 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8302 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8303 if (validate_arg (arg0, REAL_TYPE)
8304 && validate_arg (arg1, POINTER_TYPE))
8305 return do_mpfr_lgamma_r (arg0, arg1, type);
8306 break;
8308 CASE_FLT_FN (BUILT_IN_FREXP):
8309 return fold_builtin_frexp (loc, arg0, arg1, type);
8311 CASE_FLT_FN (BUILT_IN_MODF):
8312 return fold_builtin_modf (loc, arg0, arg1, type);
8314 case BUILT_IN_STRSTR:
8315 return fold_builtin_strstr (loc, arg0, arg1, type);
8317 case BUILT_IN_STRSPN:
8318 return fold_builtin_strspn (loc, arg0, arg1);
8320 case BUILT_IN_STRCSPN:
8321 return fold_builtin_strcspn (loc, arg0, arg1);
8323 case BUILT_IN_STRPBRK:
8324 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8326 case BUILT_IN_EXPECT:
8327 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8329 case BUILT_IN_ISGREATER:
8330 return fold_builtin_unordered_cmp (loc, fndecl,
8331 arg0, arg1, UNLE_EXPR, LE_EXPR);
8332 case BUILT_IN_ISGREATEREQUAL:
8333 return fold_builtin_unordered_cmp (loc, fndecl,
8334 arg0, arg1, UNLT_EXPR, LT_EXPR);
8335 case BUILT_IN_ISLESS:
8336 return fold_builtin_unordered_cmp (loc, fndecl,
8337 arg0, arg1, UNGE_EXPR, GE_EXPR);
8338 case BUILT_IN_ISLESSEQUAL:
8339 return fold_builtin_unordered_cmp (loc, fndecl,
8340 arg0, arg1, UNGT_EXPR, GT_EXPR);
8341 case BUILT_IN_ISLESSGREATER:
8342 return fold_builtin_unordered_cmp (loc, fndecl,
8343 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8344 case BUILT_IN_ISUNORDERED:
8345 return fold_builtin_unordered_cmp (loc, fndecl,
8346 arg0, arg1, UNORDERED_EXPR,
8347 NOP_EXPR);
8349 /* We do the folding for va_start in the expander. */
8350 case BUILT_IN_VA_START:
8351 break;
8353 case BUILT_IN_OBJECT_SIZE:
8354 return fold_builtin_object_size (arg0, arg1);
8356 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8357 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8359 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8360 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8362 default:
8363 break;
8365 return NULL_TREE;
8368 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8369 and ARG2.
8370 This function returns NULL_TREE if no simplification was possible. */
8372 static tree
8373 fold_builtin_3 (location_t loc, tree fndecl,
8374 tree arg0, tree arg1, tree arg2)
8376 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8377 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8379 if (TREE_CODE (arg0) == ERROR_MARK
8380 || TREE_CODE (arg1) == ERROR_MARK
8381 || TREE_CODE (arg2) == ERROR_MARK)
8382 return NULL_TREE;
8384 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8385 arg0, arg1, arg2))
8386 return ret;
8388 switch (fcode)
8391 CASE_FLT_FN (BUILT_IN_SINCOS):
8392 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8394 CASE_FLT_FN (BUILT_IN_FMA):
8395 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8397 CASE_FLT_FN (BUILT_IN_REMQUO):
8398 if (validate_arg (arg0, REAL_TYPE)
8399 && validate_arg (arg1, REAL_TYPE)
8400 && validate_arg (arg2, POINTER_TYPE))
8401 return do_mpfr_remquo (arg0, arg1, arg2);
8402 break;
8404 case BUILT_IN_BCMP:
8405 case BUILT_IN_MEMCMP:
8406       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8408 case BUILT_IN_EXPECT:
8409 return fold_builtin_expect (loc, arg0, arg1, arg2);
8411 case BUILT_IN_ADD_OVERFLOW:
8412 case BUILT_IN_SUB_OVERFLOW:
8413 case BUILT_IN_MUL_OVERFLOW:
8414 case BUILT_IN_ADD_OVERFLOW_P:
8415 case BUILT_IN_SUB_OVERFLOW_P:
8416 case BUILT_IN_MUL_OVERFLOW_P:
8417 case BUILT_IN_SADD_OVERFLOW:
8418 case BUILT_IN_SADDL_OVERFLOW:
8419 case BUILT_IN_SADDLL_OVERFLOW:
8420 case BUILT_IN_SSUB_OVERFLOW:
8421 case BUILT_IN_SSUBL_OVERFLOW:
8422 case BUILT_IN_SSUBLL_OVERFLOW:
8423 case BUILT_IN_SMUL_OVERFLOW:
8424 case BUILT_IN_SMULL_OVERFLOW:
8425 case BUILT_IN_SMULLL_OVERFLOW:
8426 case BUILT_IN_UADD_OVERFLOW:
8427 case BUILT_IN_UADDL_OVERFLOW:
8428 case BUILT_IN_UADDLL_OVERFLOW:
8429 case BUILT_IN_USUB_OVERFLOW:
8430 case BUILT_IN_USUBL_OVERFLOW:
8431 case BUILT_IN_USUBLL_OVERFLOW:
8432 case BUILT_IN_UMUL_OVERFLOW:
8433 case BUILT_IN_UMULL_OVERFLOW:
8434 case BUILT_IN_UMULLL_OVERFLOW:
8435 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8437 default:
8438 break;
8440 return NULL_TREE;
8443 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8444 arguments. IGNORE is true if the result of the
8445 function call is ignored. This function returns NULL_TREE if no
8446 simplification was possible. */
8448 tree
8449 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8451 tree ret = NULL_TREE;
8453 switch (nargs)
8455 case 0:
8456 ret = fold_builtin_0 (loc, fndecl);
8457 break;
8458 case 1:
8459 ret = fold_builtin_1 (loc, fndecl, args[0]);
8460 break;
8461 case 2:
8462 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8463 break;
8464 case 3:
8465 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8466 break;
8467 default:
8468 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8469 break;
8471 if (ret)
8473 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8474 SET_EXPR_LOCATION (ret, loc);
8475 TREE_NO_WARNING (ret) = 1;
8476 return ret;
8478 return NULL_TREE;
8481 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8482 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8483 of arguments in ARGS to be omitted. OLDNARGS is the number of
8484 elements in ARGS. */
8486 static tree
8487 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8488 int skip, tree fndecl, int n, va_list newargs)
8490 int nargs = oldnargs - skip + n;
8491 tree *buffer;
8493 if (n > 0)
8495 int i, j;
8497 buffer = XALLOCAVEC (tree, nargs);
8498 for (i = 0; i < n; i++)
8499 buffer[i] = va_arg (newargs, tree);
8500 for (j = skip; j < oldnargs; j++, i++)
8501 buffer[i] = args[j];
8503 else
8504 buffer = args + skip;
8506 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8509 /* Return true if FNDECL shouldn't be folded right now.
8510 If a built-in function has an inline attribute always_inline
8511 wrapper, defer folding it after always_inline functions have
8512 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8513 might not be performed. */
8515 bool
8516 avoid_folding_inline_builtin (tree fndecl)
8518 return (DECL_DECLARED_INLINE_P (fndecl)
8519 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8520 && cfun
8521 && !cfun->always_inline_functions_inlined
8522 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8525 /* A wrapper function for builtin folding that prevents warnings for
8526 "statement without effect" and the like, caused by removing the
8527 call node earlier than the warning is generated. */
8529 tree
8530 fold_call_expr (location_t loc, tree exp, bool ignore)
8532 tree ret = NULL_TREE;
8533 tree fndecl = get_callee_fndecl (exp);
8534 if (fndecl
8535 && TREE_CODE (fndecl) == FUNCTION_DECL
8536 && DECL_BUILT_IN (fndecl)
8537 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8538 yet. Defer folding until we see all the arguments
8539 (after inlining). */
8540 && !CALL_EXPR_VA_ARG_PACK (exp))
8542 int nargs = call_expr_nargs (exp);
8544 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8545 instead last argument is __builtin_va_arg_pack (). Defer folding
8546 even in that case, until arguments are finalized. */
8547 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8549 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8550 if (fndecl2
8551 && TREE_CODE (fndecl2) == FUNCTION_DECL
8552 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8553 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8554 return NULL_TREE;
8557 if (avoid_folding_inline_builtin (fndecl))
8558 return NULL_TREE;
8560 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8561 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8562 CALL_EXPR_ARGP (exp), ignore);
8563 else
8565 tree *args = CALL_EXPR_ARGP (exp);
8566 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8567 if (ret)
8568 return ret;
8571 return NULL_TREE;
8574 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8575 N arguments are passed in the array ARGARRAY. Return a folded
8576 expression or NULL_TREE if no simplification was possible. */
8578 tree
8579 fold_builtin_call_array (location_t loc, tree,
8580 tree fn,
8581 int n,
8582 tree *argarray)
8584 if (TREE_CODE (fn) != ADDR_EXPR)
8585 return NULL_TREE;
8587 tree fndecl = TREE_OPERAND (fn, 0);
8588 if (TREE_CODE (fndecl) == FUNCTION_DECL
8589 && DECL_BUILT_IN (fndecl))
8591 /* If last argument is __builtin_va_arg_pack (), arguments to this
8592 function are not finalized yet. Defer folding until they are. */
8593 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8595 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8596 if (fndecl2
8597 && TREE_CODE (fndecl2) == FUNCTION_DECL
8598 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8599 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8600 return NULL_TREE;
8602 if (avoid_folding_inline_builtin (fndecl))
8603 return NULL_TREE;
8604 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8605 return targetm.fold_builtin (fndecl, n, argarray, false);
8606 else
8607 return fold_builtin_n (loc, fndecl, argarray, n, false);
8610 return NULL_TREE;
8613 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8614 along with N new arguments specified as the "..." parameters. SKIP
8615 is the number of arguments in EXP to be omitted. This function is used
8616 to do varargs-to-varargs transformations. */
8618 static tree
8619 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8621 va_list ap;
8622 tree t;
8624 va_start (ap, n);
8625 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8626 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8627 va_end (ap);
8629 return t;
8632 /* Validate a single argument ARG against a tree code CODE representing
8633 a type. */
8635 static bool
8636 validate_arg (const_tree arg, enum tree_code code)
8638 if (!arg)
8639 return false;
8640 else if (code == POINTER_TYPE)
8641 return POINTER_TYPE_P (TREE_TYPE (arg));
8642 else if (code == INTEGER_TYPE)
8643 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8644 return code == TREE_CODE (TREE_TYPE (arg));
8647 /* This function validates the types of a function call argument list
8648    against a specified list of tree_codes.  If the last specifier is a 0,
8649    that represents an ellipsis; otherwise the last specifier must be a
8650    VOID_TYPE.
8652 This is the GIMPLE version of validate_arglist. Eventually we want to
8653 completely convert builtins.c to work from GIMPLEs and the tree based
8654 validate_arglist will then be removed. */
8656 bool
8657 validate_gimple_arglist (const gcall *call, ...)
8659 enum tree_code code;
8660   bool res = false;
8661 va_list ap;
8662 const_tree arg;
8663 size_t i;
8665 va_start (ap, call);
8666 i = 0;
8670 code = (enum tree_code) va_arg (ap, int);
8671 switch (code)
8673 case 0:
8674 	  /* This signifies an ellipsis; any further arguments are all ok.  */
8675 res = true;
8676 goto end;
8677 case VOID_TYPE:
8678 	  /* This signifies an endlink; if no arguments remain, return
8679 	     true, otherwise return false.  */
8680 res = (i == gimple_call_num_args (call));
8681 goto end;
8682 default:
8683 /* If no parameters remain or the parameter's code does not
8684 match the specified code, return false. Otherwise continue
8685 checking any remaining arguments. */
8686 arg = gimple_call_arg (call, i++);
8687 if (!validate_arg (arg, code))
8688 goto end;
8689 break;
8692 while (1);
8694 /* We need gotos here since we can only have one VA_CLOSE in a
8695 function. */
8696 end: ;
8697 va_end (ap);
8699 return res;
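/* Illustrative sketch, not part of builtins.c: a caller lists the expected
   argument tree codes terminated by VOID_TYPE, or by 0 to accept any
   trailing arguments, e.g. for a hypothetical sincos-like signature:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   POINTER_TYPE, VOID_TYPE))
       return false;
*/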
8702 /* Default target-specific builtin expander that does nothing. */
8704 rtx
8705 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8706 rtx target ATTRIBUTE_UNUSED,
8707 rtx subtarget ATTRIBUTE_UNUSED,
8708 machine_mode mode ATTRIBUTE_UNUSED,
8709 int ignore ATTRIBUTE_UNUSED)
8711 return NULL_RTX;
8714 /* Returns true if EXP represents data that would potentially reside
8715 in a readonly section. */
8717 bool
8718 readonly_data_expr (tree exp)
8720 STRIP_NOPS (exp);
8722 if (TREE_CODE (exp) != ADDR_EXPR)
8723 return false;
8725 exp = get_base_address (TREE_OPERAND (exp, 0));
8726 if (!exp)
8727 return false;
8729 /* Make sure we call decl_readonly_section only for trees it
8730 can handle (since it returns true for everything it doesn't
8731 understand). */
8732 if (TREE_CODE (exp) == STRING_CST
8733 || TREE_CODE (exp) == CONSTRUCTOR
8734 || (VAR_P (exp) && TREE_STATIC (exp)))
8735 return decl_readonly_section (exp, 0);
8736 else
8737 return false;
8740 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8741 to the call, and TYPE is its return type.
8743 Return NULL_TREE if no simplification was possible, otherwise return the
8744 simplified form of the call as a tree.
8746 The simplified form may be a constant or other expression which
8747 computes the same value, but in a more efficient manner (including
8748 calls to other builtin functions).
8750 The call may contain arguments which need to be evaluated, but
8751 which are not useful to determine the result of the call. In
8752 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8753 COMPOUND_EXPR will be an argument which must be evaluated.
8754 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8755 COMPOUND_EXPR in the chain will contain the tree for the simplified
8756 form of the builtin function call. */
8758 static tree
8759 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8761 if (!validate_arg (s1, POINTER_TYPE)
8762 || !validate_arg (s2, POINTER_TYPE))
8763 return NULL_TREE;
8764 else
8766 tree fn;
8767 const char *p1, *p2;
8769 p2 = c_getstr (s2);
8770 if (p2 == NULL)
8771 return NULL_TREE;
8773 p1 = c_getstr (s1);
8774 if (p1 != NULL)
8776 const char *r = strstr (p1, p2);
8777 tree tem;
8779 if (r == NULL)
8780 return build_int_cst (TREE_TYPE (s1), 0);
8782 /* Return an offset into the constant string argument. */
8783 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8784 return fold_convert_loc (loc, type, tem);
8787 /* The argument is const char *, and the result is char *, so we need
8788 a type conversion here to avoid a warning. */
8789 if (p2[0] == '\0')
8790 return fold_convert_loc (loc, type, s1);
8792 if (p2[1] != '\0')
8793 return NULL_TREE;
8795 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8796 if (!fn)
8797 return NULL_TREE;
8799 /* New argument list transforming strstr(s1, s2) to
8800 strchr(s1, s2[0]). */
8801 return build_call_expr_loc (loc, fn, 2, s1,
8802 build_int_cst (integer_type_node, p2[0]));
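/* Illustrative sketch, not part of builtins.c: with a constant single-char
   needle, the transformation above amounts to the rewrite below (the
   function names are hypothetical).  */
#include <string.h>
static char *strstr_before (const char *s) { return strstr (s, "/"); }
static char *strstr_after (const char *s)  { return strchr (s, '/'); }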
8806 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8807 to the call, and TYPE is its return type.
8809 Return NULL_TREE if no simplification was possible, otherwise return the
8810 simplified form of the call as a tree.
8812 The simplified form may be a constant or other expression which
8813 computes the same value, but in a more efficient manner (including
8814 calls to other builtin functions).
8816 The call may contain arguments which need to be evaluated, but
8817 which are not useful to determine the result of the call. In
8818 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8819 COMPOUND_EXPR will be an argument which must be evaluated.
8820 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8821 COMPOUND_EXPR in the chain will contain the tree for the simplified
8822 form of the builtin function call. */
8824 static tree
8825 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8827 if (!validate_arg (s1, POINTER_TYPE)
8828 || !validate_arg (s2, POINTER_TYPE))
8829 return NULL_TREE;
8830 else
8832 tree fn;
8833 const char *p1, *p2;
8835 p2 = c_getstr (s2);
8836 if (p2 == NULL)
8837 return NULL_TREE;
8839 p1 = c_getstr (s1);
8840 if (p1 != NULL)
8842 const char *r = strpbrk (p1, p2);
8843 tree tem;
8845 if (r == NULL)
8846 return build_int_cst (TREE_TYPE (s1), 0);
8848 /* Return an offset into the constant string argument. */
8849 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8850 return fold_convert_loc (loc, type, tem);
8853 if (p2[0] == '\0')
8854 /* strpbrk(x, "") == NULL.
8855 Evaluate and ignore s1 in case it had side-effects. */
8856 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8858 if (p2[1] != '\0')
8859 return NULL_TREE; /* Really call strpbrk. */
8861 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8862 if (!fn)
8863 return NULL_TREE;
8865 /* New argument list transforming strpbrk(s1, s2) to
8866 strchr(s1, s2[0]). */
8867 return build_call_expr_loc (loc, fn, 2, s1,
8868 build_int_cst (integer_type_node, p2[0]));
8872 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8873 to the call.
8875 Return NULL_TREE if no simplification was possible, otherwise return the
8876 simplified form of the call as a tree.
8878 The simplified form may be a constant or other expression which
8879 computes the same value, but in a more efficient manner (including
8880 calls to other builtin functions).
8882 The call may contain arguments which need to be evaluated, but
8883 which are not useful to determine the result of the call. In
8884 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8885 COMPOUND_EXPR will be an argument which must be evaluated.
8886 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8887 COMPOUND_EXPR in the chain will contain the tree for the simplified
8888 form of the builtin function call. */
8890 static tree
8891 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8893 if (!validate_arg (s1, POINTER_TYPE)
8894 || !validate_arg (s2, POINTER_TYPE))
8895 return NULL_TREE;
8896 else
8898 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8900     /* If either argument is "", return zero.  */
8901 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8902 /* Evaluate and ignore both arguments in case either one has
8903 side-effects. */
8904 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8905 s1, s2);
8906 return NULL_TREE;
8910 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8911 to the call.
8913 Return NULL_TREE if no simplification was possible, otherwise return the
8914 simplified form of the call as a tree.
8916 The simplified form may be a constant or other expression which
8917 computes the same value, but in a more efficient manner (including
8918 calls to other builtin functions).
8920 The call may contain arguments which need to be evaluated, but
8921 which are not useful to determine the result of the call. In
8922 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8923 COMPOUND_EXPR will be an argument which must be evaluated.
8924 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8925 COMPOUND_EXPR in the chain will contain the tree for the simplified
8926 form of the builtin function call. */
8928 static tree
8929 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8931 if (!validate_arg (s1, POINTER_TYPE)
8932 || !validate_arg (s2, POINTER_TYPE))
8933 return NULL_TREE;
8934 else
8936     /* If the first argument is "", return zero.  */
8937 const char *p1 = c_getstr (s1);
8938 if (p1 && *p1 == '\0')
8940 /* Evaluate and ignore argument s2 in case it has
8941 side-effects. */
8942 return omit_one_operand_loc (loc, size_type_node,
8943 size_zero_node, s2);
8946 /* If the second argument is "", return __builtin_strlen(s1). */
8947 const char *p2 = c_getstr (s2);
8948 if (p2 && *p2 == '\0')
8950 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8952 /* If the replacement _DECL isn't initialized, don't do the
8953 transformation. */
8954 if (!fn)
8955 return NULL_TREE;
8957 return build_call_expr_loc (loc, fn, 1, s1);
8959 return NULL_TREE;
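/* Illustrative sketch, not part of builtins.c: the empty-string folds above
   for strspn and strcspn at the source level (the function name is
   hypothetical; arguments are kept only for their side effects).  */
#include <string.h>
static size_t spn_examples (const char *s)
{
  size_t a = strspn (s, "");   /* folds to 0 */
  size_t b = strcspn ("", s);  /* folds to 0 */
  size_t c = strcspn (s, "");  /* folds to strlen (s) */
  return a + b + c;
}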
8963 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
8964    produced, false otherwise.  This is done so that we don't output the error
8965    or warning two or three times.  */
8967 bool
8968 fold_builtin_next_arg (tree exp, bool va_start_p)
8970 tree fntype = TREE_TYPE (current_function_decl);
8971 int nargs = call_expr_nargs (exp);
8972 tree arg;
8973 /* There is a good chance the current input_location points inside the
8974 definition of the va_start macro (perhaps on the token for
8975 the builtin) in a system header, where warnings are suppressed.
8976 Use the location in real source code instead. */
8977 source_location current_location =
8978 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8979 NULL);
8981 if (!stdarg_p (fntype))
8983 error ("%<va_start%> used in function with fixed args");
8984 return true;
8987 if (va_start_p)
8989 if (va_start_p && (nargs != 2))
8991 error ("wrong number of arguments to function %<va_start%>");
8992 return true;
8994 arg = CALL_EXPR_ARG (exp, 1);
8996 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once we
8997 have checked the arguments and, if needed, issued a warning. */
8998 else
9000 if (nargs == 0)
9002 /* Evidently an out of date version of <stdarg.h>; can't validate
9003 va_start's second argument, but can still work as intended. */
9004 warning_at (current_location,
9005 OPT_Wvarargs,
9006 "%<__builtin_next_arg%> called without an argument");
9007 return true;
9009 else if (nargs > 1)
9011 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9012 return true;
9014 arg = CALL_EXPR_ARG (exp, 0);
9017 if (TREE_CODE (arg) == SSA_NAME)
9018 arg = SSA_NAME_VAR (arg);
9020 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9021 or __builtin_next_arg (0) the first time we see it, after checking
9022 the arguments and if needed issuing a warning. */
9023 if (!integer_zerop (arg))
9025 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9027 /* Strip off all nops for the sake of the comparison. This
9028 is not quite the same as STRIP_NOPS. It does more.
9029 We must also strip off INDIRECT_EXPR for C++ reference
9030 parameters. */
9031 while (CONVERT_EXPR_P (arg)
9032 || TREE_CODE (arg) == INDIRECT_REF)
9033 arg = TREE_OPERAND (arg, 0);
9034 if (arg != last_parm)
9036 /* FIXME: Sometimes the tree optimizers hand us something other
9037 than the last argument even though the user really did pass
9038 the last one. We just warn and carry on as if the last
9039 argument had been given, so that we do not generate wrong
9040 code because of it. */
9041 warning_at (current_location,
9042 OPT_Wvarargs,
9043 "second parameter of %<va_start%> not last named argument");
9046 /* Undefined by C99 7.15.1.4p4 (va_start):
9047 "If the parameter parmN is declared with the register storage
9048 class, with a function or array type, or with a type that is
9049 not compatible with the type that results after application of
9050 the default argument promotions, the behavior is undefined."
9052 else if (DECL_REGISTER (arg))
9054 warning_at (current_location,
9055 OPT_Wvarargs,
9056 "undefined behavior when second parameter of "
9057 "%<va_start%> is declared with %<register%> storage");
9060 /* We want to verify the second parameter just once before the tree
9061 optimizers are run and then avoid keeping it in the tree,
9062 as otherwise we could warn even for correct code like:
9063 void foo (int i, ...)
9064 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9065 if (va_start_p)
9066 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9067 else
9068 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9070 return false;
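/* Editor's illustration, not part of the original source: the -Wvarargs check
   above fires when va_start does not name the last fixed parameter, e.g.

     void g (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }

   warns "second parameter of 'va_start' not last named argument", whereas
   va_start (ap, b) is accepted silently.  */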
9074 /* Expand a call EXP to __builtin_object_size. */
9076 static rtx
9077 expand_builtin_object_size (tree exp)
9079 tree ost;
9080 int object_size_type;
9081 tree fndecl = get_callee_fndecl (exp);
9083 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9085 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9086 exp, fndecl);
9087 expand_builtin_trap ();
9088 return const0_rtx;
9091 ost = CALL_EXPR_ARG (exp, 1);
9092 STRIP_NOPS (ost);
9094 if (TREE_CODE (ost) != INTEGER_CST
9095 || tree_int_cst_sgn (ost) < 0
9096 || compare_tree_int (ost, 3) > 0)
9098 error ("%Klast argument of %D is not integer constant between 0 and 3",
9099 exp, fndecl);
9100 expand_builtin_trap ();
9101 return const0_rtx;
9104 object_size_type = tree_to_shwi (ost);
9106 return object_size_type < 2 ? constm1_rtx : const0_rtx;
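/* Editor's illustration, not part of the original source: when the object
   size could not be determined by earlier folding, the expansion above yields
   the documented "unknown" values, e.g. for an arbitrary pointer p

     size_t a = __builtin_object_size (p, 0);
     size_t b = __builtin_object_size (p, 2);

   a becomes (size_t) -1 and b becomes 0.  */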
9109 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9110 FCODE is the BUILT_IN_* to use.
9111 Return NULL_RTX if we failed; the caller should emit a normal call,
9112 otherwise try to get the result in TARGET, if convenient (and in
9113 mode MODE if that's convenient). */
9115 static rtx
9116 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9117 enum built_in_function fcode)
9119 tree dest, src, len, size;
9121 if (!validate_arglist (exp,
9122 POINTER_TYPE,
9123 fcode == BUILT_IN_MEMSET_CHK
9124 ? INTEGER_TYPE : POINTER_TYPE,
9125 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9126 return NULL_RTX;
9128 dest = CALL_EXPR_ARG (exp, 0);
9129 src = CALL_EXPR_ARG (exp, 1);
9130 len = CALL_EXPR_ARG (exp, 2);
9131 size = CALL_EXPR_ARG (exp, 3);
9133 if (! tree_fits_uhwi_p (size))
9134 return NULL_RTX;
9136 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9138 tree fn;
9140 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9142 warning_at (tree_nonartificial_location (exp),
9143 0, "%Kcall to %D will always overflow destination buffer",
9144 exp, get_callee_fndecl (exp));
9145 return NULL_RTX;
9148 fn = NULL_TREE;
9149 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9150 mem{cpy,pcpy,move,set} is available. */
9151 switch (fcode)
9153 case BUILT_IN_MEMCPY_CHK:
9154 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9155 break;
9156 case BUILT_IN_MEMPCPY_CHK:
9157 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9158 break;
9159 case BUILT_IN_MEMMOVE_CHK:
9160 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9161 break;
9162 case BUILT_IN_MEMSET_CHK:
9163 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9164 break;
9165 default:
9166 break;
9169 if (! fn)
9170 return NULL_RTX;
9172 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9173 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9174 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9175 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9177 else if (fcode == BUILT_IN_MEMSET_CHK)
9178 return NULL_RTX;
9179 else
9181 unsigned int dest_align = get_pointer_alignment (dest);
9183 /* If DEST is not a pointer type, call the normal function. */
9184 if (dest_align == 0)
9185 return NULL_RTX;
9187 /* If SRC and DEST are the same (and not volatile), do nothing. */
9188 if (operand_equal_p (src, dest, 0))
9190 tree expr;
9192 if (fcode != BUILT_IN_MEMPCPY_CHK)
9194 /* Evaluate and ignore LEN in case it has side-effects. */
9195 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9196 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9199 expr = fold_build_pointer_plus (dest, len);
9200 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9203 /* __memmove_chk special case. */
9204 if (fcode == BUILT_IN_MEMMOVE_CHK)
9206 unsigned int src_align = get_pointer_alignment (src);
9208 if (src_align == 0)
9209 return NULL_RTX;
9211 /* If src is categorized for a readonly section we can use
9212 normal __memcpy_chk. */
9213 if (readonly_data_expr (src))
9215 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9216 if (!fn)
9217 return NULL_RTX;
9218 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9219 dest, src, len, size);
9220 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9221 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9222 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9225 return NULL_RTX;
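/* Editor's illustration, not part of the original source: with a constant
   length known not to exceed the destination size, the checked call is
   rewritten to the plain libc function, e.g.

     void f (const char *src)
     {
       char buf[16];
       __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
     }

   expands like memcpy (buf, src, 8); had the constant length exceeded the
   known size, the "will always overflow" warning above would be emitted and
   the __memcpy_chk library call kept.  */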
9229 /* Emit warning if a buffer overflow is detected at compile time. */
9231 static void
9232 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9234 int is_strlen = 0;
9235 tree len, size;
9236 location_t loc = tree_nonartificial_location (exp);
9238 switch (fcode)
9240 case BUILT_IN_STRCPY_CHK:
9241 case BUILT_IN_STPCPY_CHK:
9242 /* For __strcat_chk the warning will be emitted only if overflowing
9243 by at least strlen (dest) + 1 bytes. */
9244 case BUILT_IN_STRCAT_CHK:
9245 len = CALL_EXPR_ARG (exp, 1);
9246 size = CALL_EXPR_ARG (exp, 2);
9247 is_strlen = 1;
9248 break;
9249 case BUILT_IN_STRNCAT_CHK:
9250 case BUILT_IN_STRNCPY_CHK:
9251 case BUILT_IN_STPNCPY_CHK:
9252 len = CALL_EXPR_ARG (exp, 2);
9253 size = CALL_EXPR_ARG (exp, 3);
9254 break;
9255 case BUILT_IN_SNPRINTF_CHK:
9256 case BUILT_IN_VSNPRINTF_CHK:
9257 len = CALL_EXPR_ARG (exp, 1);
9258 size = CALL_EXPR_ARG (exp, 3);
9259 break;
9260 default:
9261 gcc_unreachable ();
9264 if (!len || !size)
9265 return;
9267 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9268 return;
9270 if (is_strlen)
9272 len = c_strlen (len, 1);
9273 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9274 return;
9276 else if (fcode == BUILT_IN_STRNCAT_CHK)
9278 tree src = CALL_EXPR_ARG (exp, 1);
9279 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9280 return;
9281 src = c_strlen (src, 1);
9282 if (! src || ! tree_fits_uhwi_p (src))
9284 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9285 exp, get_callee_fndecl (exp));
9286 return;
9288 else if (tree_int_cst_lt (src, size))
9289 return;
9291 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9292 return;
9294 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9295 exp, get_callee_fndecl (exp));
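/* Editor's illustration, not part of the original source: a guaranteed
   overflow detected here looks like

     char buf[4];
     __builtin___strcpy_chk (buf, "overflow", __builtin_object_size (buf, 0));

   where strlen ("overflow") is 8, so copying 9 bytes into a 4-byte object
   triggers the "will always overflow destination buffer" warning at compile
   time.  */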
9298 /* Emit warning if a buffer overflow is detected at compile time
9299 in __sprintf_chk/__vsprintf_chk calls. */
9301 static void
9302 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9304 tree size, len, fmt;
9305 const char *fmt_str;
9306 int nargs = call_expr_nargs (exp);
9308 /* Verify the required arguments in the original call. */
9310 if (nargs < 4)
9311 return;
9312 size = CALL_EXPR_ARG (exp, 2);
9313 fmt = CALL_EXPR_ARG (exp, 3);
9315 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9316 return;
9318 /* Check whether the format is a literal string constant. */
9319 fmt_str = c_getstr (fmt);
9320 if (fmt_str == NULL)
9321 return;
9323 if (!init_target_chars ())
9324 return;
9326 /* If the format doesn't contain % args or %%, we know its size. */
9327 if (strchr (fmt_str, target_percent) == 0)
9328 len = build_int_cstu (size_type_node, strlen (fmt_str));
9329 /* If the format is "%s" and the first ... argument is a string literal,
9330 we know its size too. */
9331 else if (fcode == BUILT_IN_SPRINTF_CHK
9332 && strcmp (fmt_str, target_percent_s) == 0)
9334 tree arg;
9336 if (nargs < 5)
9337 return;
9338 arg = CALL_EXPR_ARG (exp, 4);
9339 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9340 return;
9342 len = c_strlen (arg, 1);
9343 if (!len || ! tree_fits_uhwi_p (len))
9344 return;
9346 else
9347 return;
9349 if (! tree_int_cst_lt (len, size))
9350 warning_at (tree_nonartificial_location (exp),
9351 0, "%Kcall to %D will always overflow destination buffer",
9352 exp, get_callee_fndecl (exp));
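/* Editor's illustration, not part of the original source: with a literal
   format containing no '%' directives the output length is known exactly, so

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "hello");

   is diagnosed, because the five characters of "hello" plus the terminating
   NUL do not fit in the 4-byte destination.  */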
9355 /* Emit a warning if free is called with the address of a variable. */
9357 static void
9358 maybe_emit_free_warning (tree exp)
9360 tree arg = CALL_EXPR_ARG (exp, 0);
9362 STRIP_NOPS (arg);
9363 if (TREE_CODE (arg) != ADDR_EXPR)
9364 return;
9366 arg = get_base_address (TREE_OPERAND (arg, 0));
9367 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9368 return;
9370 if (SSA_VAR_P (arg))
9371 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9372 "%Kattempt to free a non-heap object %qD", exp, arg);
9373 else
9374 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9375 "%Kattempt to free a non-heap object", exp);
9378 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9379 if possible. */
9381 static tree
9382 fold_builtin_object_size (tree ptr, tree ost)
9384 unsigned HOST_WIDE_INT bytes;
9385 int object_size_type;
9387 if (!validate_arg (ptr, POINTER_TYPE)
9388 || !validate_arg (ost, INTEGER_TYPE))
9389 return NULL_TREE;
9391 STRIP_NOPS (ost);
9393 if (TREE_CODE (ost) != INTEGER_CST
9394 || tree_int_cst_sgn (ost) < 0
9395 || compare_tree_int (ost, 3) > 0)
9396 return NULL_TREE;
9398 object_size_type = tree_to_shwi (ost);
9400 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9401 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9402 and (size_t) 0 for types 2 and 3. */
9403 if (TREE_SIDE_EFFECTS (ptr))
9404 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9406 if (TREE_CODE (ptr) == ADDR_EXPR)
9408 compute_builtin_object_size (ptr, object_size_type, &bytes);
9409 if (wi::fits_to_tree_p (bytes, size_type_node))
9410 return build_int_cstu (size_type_node, bytes);
9412 else if (TREE_CODE (ptr) == SSA_NAME)
9414 /* If object size is not known yet, delay folding until
9415 later. Maybe subsequent passes will help determining
9416 it. */
9417 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9418 && wi::fits_to_tree_p (bytes, size_type_node))
9419 return build_int_cstu (size_type_node, bytes);
9422 return NULL_TREE;
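/* Editor's illustration, not part of the original source: when the pointer is
   the address of a known object the call folds to a constant at the tree
   level, e.g.

     char a[10];
     size_t n = __builtin_object_size (a, 0);

   folds n to 10, whereas an SSA_NAME whose object size is not yet known is
   left for later passes, as noted above.  */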
9425 /* Builtins with folding operations that operate on "..." arguments
9426 need special handling; we need to store the arguments in a convenient
9427 data structure before attempting any folding. Fortunately there are
9428 only a few builtins that fall into this category. FNDECL is the
9429 function, ARGS is the array of its NARGS arguments. */
9431 static tree
9432 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9434 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9435 tree ret = NULL_TREE;
9437 switch (fcode)
9439 case BUILT_IN_FPCLASSIFY:
9440 ret = fold_builtin_fpclassify (loc, args, nargs);
9441 break;
9443 default:
9444 break;
9446 if (ret)
9448 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9449 SET_EXPR_LOCATION (ret, loc);
9450 TREE_NO_WARNING (ret) = 1;
9451 return ret;
9453 return NULL_TREE;
9456 /* Initialize format string characters in the target charset. */
9458 bool
9459 init_target_chars (void)
9461 static bool init;
9462 if (!init)
9464 target_newline = lang_hooks.to_target_charset ('\n');
9465 target_percent = lang_hooks.to_target_charset ('%');
9466 target_c = lang_hooks.to_target_charset ('c');
9467 target_s = lang_hooks.to_target_charset ('s');
9468 if (target_newline == 0 || target_percent == 0 || target_c == 0
9469 || target_s == 0)
9470 return false;
9472 target_percent_c[0] = target_percent;
9473 target_percent_c[1] = target_c;
9474 target_percent_c[2] = '\0';
9476 target_percent_s[0] = target_percent;
9477 target_percent_s[1] = target_s;
9478 target_percent_s[2] = '\0';
9480 target_percent_s_newline[0] = target_percent;
9481 target_percent_s_newline[1] = target_s;
9482 target_percent_s_newline[2] = target_newline;
9483 target_percent_s_newline[3] = '\0';
9485 init = true;
9487 return true;
9490 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9491 and no overflow/underflow occurred. INEXACT is true if M was not
9492 exactly calculated. TYPE is the tree type for the result. This
9493 function assumes that you cleared the MPFR flags and then
9494 calculated M to see if anything subsequently set a flag prior to
9495 entering this function. Return NULL_TREE if any checks fail. */
9497 static tree
9498 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9500 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9501 overflow/underflow occurred. If -frounding-math, proceed iff the
9502 result of calling FUNC was exact. */
9503 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9504 && (!flag_rounding_math || !inexact))
9506 REAL_VALUE_TYPE rr;
9508 real_from_mpfr (&rr, m, type, GMP_RNDN);
9509 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value and
9510 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9511 but the mpfr_t is not, then we underflowed in the
9512 conversion. */
9513 if (real_isfinite (&rr)
9514 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9516 REAL_VALUE_TYPE rmode;
9518 real_convert (&rmode, TYPE_MODE (type), &rr);
9519 /* Proceed iff the specified mode can hold the value. */
9520 if (real_identical (&rmode, &rr))
9521 return build_real (type, rmode);
9524 return NULL_TREE;
9527 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9528 number and no overflow/underflow occurred. INEXACT is true if M
9529 was not exactly calculated. TYPE is the tree type for the result.
9530 This function assumes that you cleared the MPFR flags and then
9531 calculated M to see if anything subsequently set a flag prior to
9532 entering this function. Return NULL_TREE if any checks fail; if
9533 FORCE_CONVERT is true, the checks are bypassed. */
9535 static tree
9536 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9538 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9539 overflow/underflow occurred. If -frounding-math, proceed iff the
9540 result of calling FUNC was exact. */
9541 if (force_convert
9542 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9543 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9544 && (!flag_rounding_math || !inexact)))
9546 REAL_VALUE_TYPE re, im;
9548 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9549 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9550 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values and
9551 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9552 but the mpfr_t is not, then we underflowed in the
9553 conversion. */
9554 if (force_convert
9555 || (real_isfinite (&re) && real_isfinite (&im)
9556 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9557 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9559 REAL_VALUE_TYPE re_mode, im_mode;
9561 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9562 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9563 /* Proceed iff the specified mode can hold the value. */
9564 if (force_convert
9565 || (real_identical (&re_mode, &re)
9566 && real_identical (&im_mode, &im)))
9567 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9568 build_real (TREE_TYPE (type), im_mode));
9571 return NULL_TREE;
9574 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to compute
9575 the remainder, store the quotient through ARG_QUO, and return the
9576 result. The type is taken from the type of ARG0 and is used for setting
9577 the precision of the calculation and results. */
9579 static tree
9580 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9582 tree const type = TREE_TYPE (arg0);
9583 tree result = NULL_TREE;
9585 STRIP_NOPS (arg0);
9586 STRIP_NOPS (arg1);
9588 /* To proceed, MPFR must exactly represent the target floating point
9589 format, which only happens when the target base equals two. */
9590 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9591 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9592 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9594 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9595 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9597 if (real_isfinite (ra0) && real_isfinite (ra1))
9599 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9600 const int prec = fmt->p;
9601 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9602 tree result_rem;
9603 long integer_quo;
9604 mpfr_t m0, m1;
9606 mpfr_inits2 (prec, m0, m1, NULL);
9607 mpfr_from_real (m0, ra0, GMP_RNDN);
9608 mpfr_from_real (m1, ra1, GMP_RNDN);
9609 mpfr_clear_flags ();
9610 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9611 /* Remquo is independent of the rounding mode, so pass
9612 inexact=0 to do_mpfr_ckconv(). */
9613 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9614 mpfr_clears (m0, m1, NULL);
9615 if (result_rem)
9617 /* MPFR calculates quo in the host's long so it may
9618 return more bits in quo than the target int can hold
9619 if sizeof(host long) > sizeof(target int). This can
9620 happen even for native compilers in LP64 mode. In
9621 these cases, reduce the quo value modulo the largest
9622 number that the target int can hold while leaving one
9623 bit for the sign. */
9624 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9625 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9627 /* Dereference the quo pointer argument. */
9628 arg_quo = build_fold_indirect_ref (arg_quo);
9629 /* Proceed iff a valid pointer type was passed in. */
9630 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9632 /* Set the value. */
9633 tree result_quo
9634 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9635 build_int_cst (TREE_TYPE (arg_quo),
9636 integer_quo));
9637 TREE_SIDE_EFFECTS (result_quo) = 1;
9638 /* Combine the quo assignment with the rem. */
9639 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9640 result_quo, result_rem));
9645 return result;
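/* Editor's illustration, not part of the original source: with constant
   arguments the folding above evaluates remquo at compile time, e.g.

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds to r == -1.0 with q set to 2, since the quotient 5.0/3.0 rounds to
   the nearest integer 2 and the remainder is 5.0 - 2*3.0.  */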
9648 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9649 resulting value as a tree with type TYPE. The mpfr precision is
9650 set to the precision of TYPE. We assume that this mpfr function
9651 returns zero if the result could be calculated exactly within the
9652 requested precision. In addition, the integer pointer represented
9653 by ARG_SG will be dereferenced and set to the appropriate signgam
9654 (-1,1) value. */
9656 static tree
9657 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9659 tree result = NULL_TREE;
9661 STRIP_NOPS (arg);
9663 /* To proceed, MPFR must exactly represent the target floating point
9664 format, which only happens when the target base equals two. Also
9665 verify ARG is a constant and that ARG_SG is an int pointer. */
9666 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9667 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9668 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9669 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9671 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9673 /* In addition to NaN and Inf, the argument cannot be zero or a
9674 negative integer. */
9675 if (real_isfinite (ra)
9676 && ra->cl != rvc_zero
9677 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9679 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9680 const int prec = fmt->p;
9681 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9682 int inexact, sg;
9683 mpfr_t m;
9684 tree result_lg;
9686 mpfr_init2 (m, prec);
9687 mpfr_from_real (m, ra, GMP_RNDN);
9688 mpfr_clear_flags ();
9689 inexact = mpfr_lgamma (m, &sg, m, rnd);
9690 result_lg = do_mpfr_ckconv (m, type, inexact);
9691 mpfr_clear (m);
9692 if (result_lg)
9694 tree result_sg;
9696 /* Dereference the arg_sg pointer argument. */
9697 arg_sg = build_fold_indirect_ref (arg_sg);
9698 /* Assign the signgam value into *arg_sg. */
9699 result_sg = fold_build2 (MODIFY_EXPR,
9700 TREE_TYPE (arg_sg), arg_sg,
9701 build_int_cst (TREE_TYPE (arg_sg), sg));
9702 TREE_SIDE_EFFECTS (result_sg) = 1;
9703 /* Combine the signgam assignment with the lgamma result. */
9704 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9705 result_sg, result_lg));
9710 return result;
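/* Editor's illustration, not part of the original source: a constant argument
   lets lgamma_r be folded the same way, e.g.

     int sg;
     double v = __builtin_lgamma_r (1.0, &sg);

   folds to v == 0.0 with sg set to 1, since gamma (1.0) is exactly 1, its
   logarithm is 0 and the stored sign is positive.  */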
9713 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9714 mpc function FUNC on it and return the resulting value as a tree
9715 with type TYPE. The mpfr precision is set to the precision of
9716 TYPE. We assume that function FUNC returns zero if the result
9717 could be calculated exactly within the requested precision. If
9718 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9719 in the arguments and/or results. */
9721 tree
9722 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9723 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9725 tree result = NULL_TREE;
9727 STRIP_NOPS (arg0);
9728 STRIP_NOPS (arg1);
9730 /* To proceed, MPFR must exactly represent the target floating point
9731 format, which only happens when the target base equals two. */
9732 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9733 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9734 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9736 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9738 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9739 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9740 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9741 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9743 if (do_nonfinite
9744 || (real_isfinite (re0) && real_isfinite (im0)
9745 && real_isfinite (re1) && real_isfinite (im1)))
9747 const struct real_format *const fmt =
9748 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9749 const int prec = fmt->p;
9750 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9751 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9752 int inexact;
9753 mpc_t m0, m1;
9755 mpc_init2 (m0, prec);
9756 mpc_init2 (m1, prec);
9757 mpfr_from_real (mpc_realref (m0), re0, rnd);
9758 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9759 mpfr_from_real (mpc_realref (m1), re1, rnd);
9760 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9761 mpfr_clear_flags ();
9762 inexact = func (m0, m0, m1, crnd);
9763 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9764 mpc_clear (m0);
9765 mpc_clear (m1);
9769 return result;
9772 /* A wrapper function for builtin folding that prevents warnings for
9773 "statement without effect" and the like, caused by removing the
9774 call node before the warning is generated. */
9776 tree
9777 fold_call_stmt (gcall *stmt, bool ignore)
9779 tree ret = NULL_TREE;
9780 tree fndecl = gimple_call_fndecl (stmt);
9781 location_t loc = gimple_location (stmt);
9782 if (fndecl
9783 && TREE_CODE (fndecl) == FUNCTION_DECL
9784 && DECL_BUILT_IN (fndecl)
9785 && !gimple_call_va_arg_pack_p (stmt))
9787 int nargs = gimple_call_num_args (stmt);
9788 tree *args = (nargs > 0
9789 ? gimple_call_arg_ptr (stmt, 0)
9790 : &error_mark_node);
9792 if (avoid_folding_inline_builtin (fndecl))
9793 return NULL_TREE;
9794 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9796 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9798 else
9800 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9801 if (ret)
9803 /* Propagate location information from original call to
9804 expansion of builtin. Otherwise things like
9805 maybe_emit_chk_warning, that operate on the expansion
9806 of a builtin, will use the wrong location information. */
9807 if (gimple_has_location (stmt))
9809 tree realret = ret;
9810 if (TREE_CODE (ret) == NOP_EXPR)
9811 realret = TREE_OPERAND (ret, 0);
9812 if (CAN_HAVE_LOCATION_P (realret)
9813 && !EXPR_HAS_LOCATION (realret))
9814 SET_EXPR_LOCATION (realret, loc);
9815 return realret;
9817 return ret;
9821 return NULL_TREE;
9824 /* Look up the function in builtin_decl that corresponds to DECL
9825 and set ASMSPEC as its user assembler name. DECL must be a
9826 function decl that declares a builtin. */
9828 void
9829 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9831 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9832 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9833 && asmspec != 0);
9835 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9836 set_user_assembler_name (builtin, asmspec);
9838 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9839 && INT_TYPE_SIZE < BITS_PER_WORD)
9841 set_user_assembler_libfunc ("ffs", asmspec);
9842 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9843 "ffs");
9847 /* Return true if DECL is a builtin that expands to a constant or similarly
9848 simple code. */
9849 bool
9850 is_simple_builtin (tree decl)
9852 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9853 switch (DECL_FUNCTION_CODE (decl))
9855 /* Builtins that expand to constants. */
9856 case BUILT_IN_CONSTANT_P:
9857 case BUILT_IN_EXPECT:
9858 case BUILT_IN_OBJECT_SIZE:
9859 case BUILT_IN_UNREACHABLE:
9860 /* Simple register moves or loads from stack. */
9861 case BUILT_IN_ASSUME_ALIGNED:
9862 case BUILT_IN_RETURN_ADDRESS:
9863 case BUILT_IN_EXTRACT_RETURN_ADDR:
9864 case BUILT_IN_FROB_RETURN_ADDR:
9865 case BUILT_IN_RETURN:
9866 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9867 case BUILT_IN_FRAME_ADDRESS:
9868 case BUILT_IN_VA_END:
9869 case BUILT_IN_STACK_SAVE:
9870 case BUILT_IN_STACK_RESTORE:
9871 /* Exception state returns or moves registers around. */
9872 case BUILT_IN_EH_FILTER:
9873 case BUILT_IN_EH_POINTER:
9874 case BUILT_IN_EH_COPY_VALUES:
9875 return true;
9877 default:
9878 return false;
9881 return false;
9884 /* Return true if DECL is a builtin that is not expensive, i.e. one that is
9885 most probably expanded inline into reasonably simple code. This is a
9886 superset of is_simple_builtin. */
9887 bool
9888 is_inexpensive_builtin (tree decl)
9890 if (!decl)
9891 return false;
9892 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9893 return true;
9894 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9895 switch (DECL_FUNCTION_CODE (decl))
9897 case BUILT_IN_ABS:
9898 case BUILT_IN_ALLOCA:
9899 case BUILT_IN_ALLOCA_WITH_ALIGN:
9900 case BUILT_IN_BSWAP16:
9901 case BUILT_IN_BSWAP32:
9902 case BUILT_IN_BSWAP64:
9903 case BUILT_IN_CLZ:
9904 case BUILT_IN_CLZIMAX:
9905 case BUILT_IN_CLZL:
9906 case BUILT_IN_CLZLL:
9907 case BUILT_IN_CTZ:
9908 case BUILT_IN_CTZIMAX:
9909 case BUILT_IN_CTZL:
9910 case BUILT_IN_CTZLL:
9911 case BUILT_IN_FFS:
9912 case BUILT_IN_FFSIMAX:
9913 case BUILT_IN_FFSL:
9914 case BUILT_IN_FFSLL:
9915 case BUILT_IN_IMAXABS:
9916 case BUILT_IN_FINITE:
9917 case BUILT_IN_FINITEF:
9918 case BUILT_IN_FINITEL:
9919 case BUILT_IN_FINITED32:
9920 case BUILT_IN_FINITED64:
9921 case BUILT_IN_FINITED128:
9922 case BUILT_IN_FPCLASSIFY:
9923 case BUILT_IN_ISFINITE:
9924 case BUILT_IN_ISINF_SIGN:
9925 case BUILT_IN_ISINF:
9926 case BUILT_IN_ISINFF:
9927 case BUILT_IN_ISINFL:
9928 case BUILT_IN_ISINFD32:
9929 case BUILT_IN_ISINFD64:
9930 case BUILT_IN_ISINFD128:
9931 case BUILT_IN_ISNAN:
9932 case BUILT_IN_ISNANF:
9933 case BUILT_IN_ISNANL:
9934 case BUILT_IN_ISNAND32:
9935 case BUILT_IN_ISNAND64:
9936 case BUILT_IN_ISNAND128:
9937 case BUILT_IN_ISNORMAL:
9938 case BUILT_IN_ISGREATER:
9939 case BUILT_IN_ISGREATEREQUAL:
9940 case BUILT_IN_ISLESS:
9941 case BUILT_IN_ISLESSEQUAL:
9942 case BUILT_IN_ISLESSGREATER:
9943 case BUILT_IN_ISUNORDERED:
9944 case BUILT_IN_VA_ARG_PACK:
9945 case BUILT_IN_VA_ARG_PACK_LEN:
9946 case BUILT_IN_VA_COPY:
9947 case BUILT_IN_TRAP:
9948 case BUILT_IN_SAVEREGS:
9949 case BUILT_IN_POPCOUNTL:
9950 case BUILT_IN_POPCOUNTLL:
9951 case BUILT_IN_POPCOUNTIMAX:
9952 case BUILT_IN_POPCOUNT:
9953 case BUILT_IN_PARITYL:
9954 case BUILT_IN_PARITYLL:
9955 case BUILT_IN_PARITYIMAX:
9956 case BUILT_IN_PARITY:
9957 case BUILT_IN_LABS:
9958 case BUILT_IN_LLABS:
9959 case BUILT_IN_PREFETCH:
9960 case BUILT_IN_ACC_ON_DEVICE:
9961 return true;
9963 default:
9964 return is_simple_builtin (decl);
9967 return false;
9970 /* Return true if T is a constant and the value cast to a target char
9971 can be represented by a host char.
9972 Store the cast char constant in *P if so. */
9974 bool
9975 target_char_cst_p (tree t, char *p)
9977 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
9978 return false;
9980 *p = (char)tree_to_uhwi (t);
9981 return true;