/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or (with -fcilkplus) names one of the Cilk runtime helpers.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

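/* For illustration, a sketch (not exercised anywhere in this file) of
   what is_builtin_name accepts:

     is_builtin_name ("__builtin_memcpy")	   => true
     is_builtin_name ("__sync_fetch_and_add_4")	   => true
     is_builtin_name ("__atomic_load_8")	   => true
     is_builtin_name ("memcpy")			   => false  */
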
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

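/* For illustration, the invariant the two functions above establish, on
   made-up numbers: if get_object_alignment_1 sets *ALIGNP to 64 and
   *BITPOSP to 16, the address of EXP is known to sit 2 bytes past a
   64-bit boundary, so get_object_alignment reports
   least_bit_hwi (16) == 16 bits of provable alignment.  */
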
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

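/* For illustration, a couple of made-up calls and what string_length
   returns for them:

     string_length ("hello\0world", 1, 11)   returns 5
     string_length (L"ab", 4, 3)	     returns 2
       (twelve bytes forming three 4-byte elements; the all-zero
	element at index 2 terminates the count).  */
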
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

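/* For illustration, how c_strlen behaves on a few made-up sources (a
   sketch of the cases handled above):

     strlen ("abc")	      -> ssize_int (3)
     strlen ("foo\0bar" + 4)  -> ssize_int (3)	  (known offset past the NUL)
     strlen ("foo\0bar" + i)  -> NULL_TREE	  (variable offset, embedded NUL)
     strlen ("abcdef" + i)    -> ssize_int (6) - i  (no embedded NUL)  */
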
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

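/* For illustration: c_readstr ("abcd", SImode) produces the SImode
   constant a target load from those four bytes would yield, e.g.
   0x61626364 on a big-endian target and 0x64636261 on a little-endian
   one.  */
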
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

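/* For illustration: for memset (p, 'x', n) the fill character arrives
   here as an INTEGER_CST to be narrowed to a host char.  The failure
   path matters for cross compilers: with, say, a 16-bit target char
   and an 8-bit host char, the value 0x100 survives the target
   truncation but not the host one, so the function returns 1.  */
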
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

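/* For illustration, the user-level builtins expanded above (a sketch):

     void *ra = __builtin_return_address (0);	...our own return address
     void *fp = __builtin_frame_address (1);	...the caller's frame

   Per the comments above, only the count-zero return-address case may
   use the soft frame pointer; every other combination pins the hard
   frame pointer and disables its elimination.  */
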
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

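/* For illustration, the jmp_buf word layout that
   expand_builtin_setjmp_setup and expand_builtin_longjmp above agree on
   (offsets in units of GET_MODE_SIZE (Pmode) from BUF_ADDR):

     word 0    frame pointer value (targetm.builtin_setjmp_frame_value)
     word 1    address of the receiver label
     word 2+   stack save area of mode STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   hence the "array of five words" the comments above ask callers to
   provide.  */
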
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

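/* For illustration, a typical use (the same shape as the call in
   expand_builtin_nonlocal_goto below):

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   i.e. exactly two pointer arguments, with the POINTER_TYPE case above
   additionally rejecting literal nulls for arguments covered by
   attribute nonnull.  */
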
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

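/* For illustration, the source form this expands (a sketch):

     for (i = 0; i < n; i++)
       {
	 __builtin_prefetch (&a[i + 8], 0, 3);	...read, high locality
	 sum += a[i];
       }

   On targets without a prefetch pattern the call merely evaluates the
   address argument for side effects and otherwise vanishes.  */
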
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1566 /* Perform an untyped call and save the state required to perform an
1567 untyped return of whatever value was returned by the given function. */
1569 static rtx
1570 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1572 int size, align, regno;
1573 machine_mode mode;
1574 rtx incoming_args, result, reg, dest, src;
1575 rtx_call_insn *call_insn;
1576 rtx old_stack_level = 0;
1577 rtx call_fusage = 0;
1578 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1580 arguments = convert_memory_address (Pmode, arguments);
1582 /* Create a block where the return registers can be saved. */
1583 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1585 /* Fetch the arg pointer from the ARGUMENTS block. */
1586 incoming_args = gen_reg_rtx (Pmode);
1587 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1588 if (!STACK_GROWS_DOWNWARD)
1589 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1590 incoming_args, 0, OPTAB_LIB_WIDEN);
1592 /* Push a new argument block and copy the arguments. Do not allow
1593 the (potential) memcpy call below to interfere with our stack
1594 manipulations. */
1595 do_pending_stack_adjust ();
1596 NO_DEFER_POP;
1598 /* Save the stack with nonlocal if available. */
1599 if (targetm.have_save_stack_nonlocal ())
1600 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1601 else
1602 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1604 /* Allocate a block of memory onto the stack and copy the memory
1605 arguments to the outgoing arguments address. We can pass TRUE
1606 as the 4th argument because we just saved the stack pointer
1607 and will restore it right after the call. */
1608 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1610 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1611 may have already set current_function_calls_alloca to true.
1612 current_function_calls_alloca won't be set if argsize is zero,
1613 so we have to guarantee need_drap is true here. */
1614 if (SUPPORTS_STACK_ALIGNMENT)
1615 crtl->need_drap = true;
1617 dest = virtual_outgoing_args_rtx;
1618 if (!STACK_GROWS_DOWNWARD)
1620 if (CONST_INT_P (argsize))
1621 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1622 else
1623 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1625 dest = gen_rtx_MEM (BLKmode, dest);
1626 set_mem_align (dest, PARM_BOUNDARY);
1627 src = gen_rtx_MEM (BLKmode, incoming_args);
1628 set_mem_align (src, PARM_BOUNDARY);
1629 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1631 /* Refer to the argument block. */
1632 apply_args_size ();
1633 arguments = gen_rtx_MEM (BLKmode, arguments);
1634 set_mem_align (arguments, PARM_BOUNDARY);
1636 /* Walk past the arg-pointer and structure value address. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 size += GET_MODE_SIZE (Pmode);
1641 /* Restore each of the registers previously saved. Make USE insns
1642 for each of these registers for use in making the call. */
1643 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1644 if ((mode = apply_args_mode[regno]) != VOIDmode)
1646 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1647 if (size % align != 0)
1648 size = CEIL (size, align) * align;
1649 reg = gen_rtx_REG (mode, regno);
1650 emit_move_insn (reg, adjust_address (arguments, mode, size));
1651 use_reg (&call_fusage, reg);
1652 size += GET_MODE_SIZE (mode);
1655 /* Restore the structure value address unless this is passed as an
1656 "invisible" first argument. */
1657 size = GET_MODE_SIZE (Pmode);
1658 if (struct_value)
1660 rtx value = gen_reg_rtx (Pmode);
1661 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1662 emit_move_insn (struct_value, value);
1663 if (REG_P (struct_value))
1664 use_reg (&call_fusage, struct_value);
1665 size += GET_MODE_SIZE (Pmode);
1668 /* All arguments and registers used for the call are set up by now! */
1669 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1671 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1672 and we don't want to load it into a register as an optimization,
1673 because prepare_call_address already did it if it should be done. */
1674 if (GET_CODE (function) != SYMBOL_REF)
1675 function = memory_address (FUNCTION_MODE, function);
1677 /* Generate the actual call instruction and save the return value. */
1678 if (targetm.have_untyped_call ())
1680 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1681 emit_call_insn (targetm.gen_untyped_call (mem, result,
1682 result_vector (1, result)));
1684 else if (targetm.have_call_value ())
1686 rtx valreg = 0;
1688 /* Locate the unique return register. It is not possible to
1689 express a call that sets more than one return register using
1690 call_value; use untyped_call for that. In fact, untyped_call
1691 only needs to save the return registers in the given block. */
1692 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1693 if ((mode = apply_result_mode[regno]) != VOIDmode)
1695 gcc_assert (!valreg); /* have_untyped_call required. */
1697 valreg = gen_rtx_REG (mode, regno);
1700 emit_insn (targetm.gen_call_value (valreg,
1701 gen_rtx_MEM (FUNCTION_MODE, function),
1702 const0_rtx, NULL_RTX, const0_rtx));
1704 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1706 else
1707 gcc_unreachable ();
1709 /* Find the CALL insn we just emitted, and attach the register usage
1710 information. */
1711 call_insn = last_call_insn ();
1712 add_function_usage_to (call_insn, call_fusage);
1714 /* Restore the stack. */
1715 if (targetm.have_save_stack_nonlocal ())
1716 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1717 else
1718 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1719 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1721 OK_DEFER_POP;
1723 /* Return the address of the result block. */
1724 result = copy_addr_to_reg (XEXP (result, 0));
1725 return convert_memory_address (ptr_mode, result);
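/* As an illustrative sketch (not from this file), __builtin_apply is
   typically used to write forwarding functions:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   Here TARGET_FN and the 64-byte argument-block size are made-up values;
   the expansion above copies that many bytes of stack arguments plus the
   saved argument registers before emitting the call.  */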
1728 /* Perform an untyped return. */
1730 static void
1731 expand_builtin_return (rtx result)
1733 int size, align, regno;
1734 machine_mode mode;
1735 rtx reg;
1736 rtx_insn *call_fusage = 0;
1738 result = convert_memory_address (Pmode, result);
1740 apply_result_size ();
1741 result = gen_rtx_MEM (BLKmode, result);
1743 if (targetm.have_untyped_return ())
1745 rtx vector = result_vector (0, result);
1746 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1747 emit_barrier ();
1748 return;
1751 /* Restore the return value and note that each value is used. */
1752 size = 0;
1753 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1754 if ((mode = apply_result_mode[regno]) != VOIDmode)
1756 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1757 if (size % align != 0)
1758 size = CEIL (size, align) * align;
1759 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1760 emit_move_insn (reg, adjust_address (result, mode, size));
1762 push_to_sequence (call_fusage);
1763 emit_use (reg);
1764 call_fusage = get_insns ();
1765 end_sequence ();
1766 size += GET_MODE_SIZE (mode);
1769 /* Put the USE insns before the return. */
1770 emit_insn (call_fusage);
1772 /* Return whatever value was restored by jumping directly to the end
1773 of the function. */
1774 expand_naked_return ();
1777 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1779 static enum type_class
1780 type_to_class (tree type)
1782 switch (TREE_CODE (type))
1784 case VOID_TYPE: return void_type_class;
1785 case INTEGER_TYPE: return integer_type_class;
1786 case ENUMERAL_TYPE: return enumeral_type_class;
1787 case BOOLEAN_TYPE: return boolean_type_class;
1788 case POINTER_TYPE: return pointer_type_class;
1789 case REFERENCE_TYPE: return reference_type_class;
1790 case OFFSET_TYPE: return offset_type_class;
1791 case REAL_TYPE: return real_type_class;
1792 case COMPLEX_TYPE: return complex_type_class;
1793 case FUNCTION_TYPE: return function_type_class;
1794 case METHOD_TYPE: return method_type_class;
1795 case RECORD_TYPE: return record_type_class;
1796 case UNION_TYPE:
1797 case QUAL_UNION_TYPE: return union_type_class;
1798 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1799 ? string_type_class : array_type_class);
1800 case LANG_TYPE: return lang_type_class;
1801 default: return no_type_class;
1805 /* Expand a call EXP to __builtin_classify_type. */
1807 static rtx
1808 expand_builtin_classify_type (tree exp)
1810 if (call_expr_nargs (exp))
1811 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1812 return GEN_INT (no_type_class);
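/* For example, __builtin_classify_type (3.14) expands to the integer
   constant real_type_class via type_to_class above, while a call with
   no argument expands to no_type_class.  */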
1815 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1816 determines which among a set of three builtin math functions is
1817 appropriate for a given type mode. The `F' and `L' cases are
1818 automatically generated from the `double' case. */
1819 #define CASE_MATHFN(MATHFN) \
1820 CASE_CFN_##MATHFN: \
1821 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1822 fcodel = BUILT_IN_##MATHFN##L ; break;
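/* As a sketch, CASE_MATHFN (SQRT) expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT itself covers every combined_fn code for sqrt,
   so one table entry handles all three type variants.  */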
1823 /* Similar to above, but appends _R after any F/L suffix. */
1824 #define CASE_MATHFN_REENT(MATHFN) \
1825 case CFN_BUILT_IN_##MATHFN##_R: \
1826 case CFN_BUILT_IN_##MATHFN##F_R: \
1827 case CFN_BUILT_IN_##MATHFN##L_R: \
1828 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1829 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1831 /* Return a function equivalent to FN but operating on floating-point
1832 values of type TYPE, or END_BUILTINS if no such function exists.
1833 This is purely an operation on function codes; it does not guarantee
1834 that the target actually has an implementation of the function. */
1836 static built_in_function
1837 mathfn_built_in_2 (tree type, combined_fn fn)
1839 built_in_function fcode, fcodef, fcodel;
1841 switch (fn)
1843 CASE_MATHFN (ACOS)
1844 CASE_MATHFN (ACOSH)
1845 CASE_MATHFN (ASIN)
1846 CASE_MATHFN (ASINH)
1847 CASE_MATHFN (ATAN)
1848 CASE_MATHFN (ATAN2)
1849 CASE_MATHFN (ATANH)
1850 CASE_MATHFN (CBRT)
1851 CASE_MATHFN (CEIL)
1852 CASE_MATHFN (CEXPI)
1853 CASE_MATHFN (COPYSIGN)
1854 CASE_MATHFN (COS)
1855 CASE_MATHFN (COSH)
1856 CASE_MATHFN (DREM)
1857 CASE_MATHFN (ERF)
1858 CASE_MATHFN (ERFC)
1859 CASE_MATHFN (EXP)
1860 CASE_MATHFN (EXP10)
1861 CASE_MATHFN (EXP2)
1862 CASE_MATHFN (EXPM1)
1863 CASE_MATHFN (FABS)
1864 CASE_MATHFN (FDIM)
1865 CASE_MATHFN (FLOOR)
1866 CASE_MATHFN (FMA)
1867 CASE_MATHFN (FMAX)
1868 CASE_MATHFN (FMIN)
1869 CASE_MATHFN (FMOD)
1870 CASE_MATHFN (FREXP)
1871 CASE_MATHFN (GAMMA)
1872 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1873 CASE_MATHFN (HUGE_VAL)
1874 CASE_MATHFN (HYPOT)
1875 CASE_MATHFN (ILOGB)
1876 CASE_MATHFN (ICEIL)
1877 CASE_MATHFN (IFLOOR)
1878 CASE_MATHFN (INF)
1879 CASE_MATHFN (IRINT)
1880 CASE_MATHFN (IROUND)
1881 CASE_MATHFN (ISINF)
1882 CASE_MATHFN (J0)
1883 CASE_MATHFN (J1)
1884 CASE_MATHFN (JN)
1885 CASE_MATHFN (LCEIL)
1886 CASE_MATHFN (LDEXP)
1887 CASE_MATHFN (LFLOOR)
1888 CASE_MATHFN (LGAMMA)
1889 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1890 CASE_MATHFN (LLCEIL)
1891 CASE_MATHFN (LLFLOOR)
1892 CASE_MATHFN (LLRINT)
1893 CASE_MATHFN (LLROUND)
1894 CASE_MATHFN (LOG)
1895 CASE_MATHFN (LOG10)
1896 CASE_MATHFN (LOG1P)
1897 CASE_MATHFN (LOG2)
1898 CASE_MATHFN (LOGB)
1899 CASE_MATHFN (LRINT)
1900 CASE_MATHFN (LROUND)
1901 CASE_MATHFN (MODF)
1902 CASE_MATHFN (NAN)
1903 CASE_MATHFN (NANS)
1904 CASE_MATHFN (NEARBYINT)
1905 CASE_MATHFN (NEXTAFTER)
1906 CASE_MATHFN (NEXTTOWARD)
1907 CASE_MATHFN (POW)
1908 CASE_MATHFN (POWI)
1909 CASE_MATHFN (POW10)
1910 CASE_MATHFN (REMAINDER)
1911 CASE_MATHFN (REMQUO)
1912 CASE_MATHFN (RINT)
1913 CASE_MATHFN (ROUND)
1914 CASE_MATHFN (SCALB)
1915 CASE_MATHFN (SCALBLN)
1916 CASE_MATHFN (SCALBN)
1917 CASE_MATHFN (SIGNBIT)
1918 CASE_MATHFN (SIGNIFICAND)
1919 CASE_MATHFN (SIN)
1920 CASE_MATHFN (SINCOS)
1921 CASE_MATHFN (SINH)
1922 CASE_MATHFN (SQRT)
1923 CASE_MATHFN (TAN)
1924 CASE_MATHFN (TANH)
1925 CASE_MATHFN (TGAMMA)
1926 CASE_MATHFN (TRUNC)
1927 CASE_MATHFN (Y0)
1928 CASE_MATHFN (Y1)
1929 CASE_MATHFN (YN)
1931 default:
1932 return END_BUILTINS;
1935 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1936 return fcode;
1937 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1938 return fcodef;
1939 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1940 return fcodel;
1941 else
1942 return END_BUILTINS;
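/* For example, mathfn_built_in_2 (float_type_node, CFN_SIN) returns
   BUILT_IN_SINF, while a type whose main variant is none of double,
   float or long double yields END_BUILTINS.  */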
1945 /* Return the math function equivalent to FN but operating directly on TYPE,
1946 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1947 otherwise use the explicit declaration. If we can't do the conversion,
1948 return null. */
1950 static tree
1951 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1953 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1954 if (fcode2 == END_BUILTINS)
1955 return NULL_TREE;
1957 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1958 return NULL_TREE;
1960 return builtin_decl_explicit (fcode2);
1963 /* Like mathfn_built_in_1, but always use the implicit array. */
1965 tree
1966 mathfn_built_in (tree type, combined_fn fn)
1968 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1971 /* Like mathfn_built_in_1, but take a built_in_function and
1972 always use the implicit array. */
1974 tree
1975 mathfn_built_in (tree type, enum built_in_function fn)
1977 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
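/* A typical use (a hypothetical caller, not a call site in this file)
   is to find the type-correct variant of a known function:

     tree fn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_FLOOR);
     if (fn)
       ... build a call to FN, e.g. floorf for a float argument ...

   A null result means no implicit declaration is available.  */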
1980 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1981 return its code, otherwise return IFN_LAST. Note that this function
1982 only tests whether the function is defined in internal-fn.def, not whether
1983 it is actually available on the target. */
1985 internal_fn
1986 associated_internal_fn (tree fndecl)
1988 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1989 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1990 switch (DECL_FUNCTION_CODE (fndecl))
1992 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1993 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1994 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1995 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1996 #include "internal-fn.def"
1998 CASE_FLT_FN (BUILT_IN_POW10):
1999 return IFN_EXP10;
2001 CASE_FLT_FN (BUILT_IN_DREM):
2002 return IFN_REMAINDER;
2004 CASE_FLT_FN (BUILT_IN_SCALBN):
2005 CASE_FLT_FN (BUILT_IN_SCALBLN):
2006 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2007 return IFN_LDEXP;
2008 return IFN_LAST;
2010 default:
2011 return IFN_LAST;
2015 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2016 on the current target by a call to an internal function, return the
2017 code of that internal function, otherwise return IFN_LAST. The caller
2018 is responsible for ensuring that any side-effects of the built-in
2019 call are dealt with correctly. E.g. if CALL sets errno, the caller
2020 must decide that the errno result isn't needed or make it available
2021 in some other way. */
2023 internal_fn
2024 replacement_internal_fn (gcall *call)
2026 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2028 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2029 if (ifn != IFN_LAST)
2031 tree_pair types = direct_internal_fn_types (ifn, call);
2032 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2033 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2034 return ifn;
2037 return IFN_LAST;
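/* For example, a GIMPLE call to __builtin_sqrtf may be reported as
   IFN_SQRT when the target provides a sqrt optab for SFmode; if the
   optab is missing, IFN_LAST is returned and the call is left as a
   library call.  */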
2040 /* Expand a call to the builtin trinary math functions (fma).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding the
2042 function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's
2045 operands. */
2047 static rtx
2048 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2050 optab builtin_optab;
2051 rtx op0, op1, op2, result;
2052 rtx_insn *insns;
2053 tree fndecl = get_callee_fndecl (exp);
2054 tree arg0, arg1, arg2;
2055 machine_mode mode;
2057 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2058 return NULL_RTX;
2060 arg0 = CALL_EXPR_ARG (exp, 0);
2061 arg1 = CALL_EXPR_ARG (exp, 1);
2062 arg2 = CALL_EXPR_ARG (exp, 2);
2064 switch (DECL_FUNCTION_CODE (fndecl))
2066 CASE_FLT_FN (BUILT_IN_FMA):
2067 builtin_optab = fma_optab; break;
2068 default:
2069 gcc_unreachable ();
2072 /* Make a suitable register to place result in. */
2073 mode = TYPE_MODE (TREE_TYPE (exp));
2075 /* Before working hard, check whether the instruction is available. */
2076 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2077 return NULL_RTX;
2079 result = gen_reg_rtx (mode);
2081 /* Always stabilize the argument list. */
2082 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2083 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2084 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2086 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2087 op1 = expand_normal (arg1);
2088 op2 = expand_normal (arg2);
2090 start_sequence ();
2092 /* Compute into RESULT.
2093 Set RESULT to wherever the result comes back. */
2094 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2095 result, 0);
2097 /* If we were unable to expand via the builtin, stop the sequence
2098 (without outputting the insns) and call to the library function
2099 with the stabilized argument list. */
2100 if (result == 0)
2102 end_sequence ();
2103 return expand_call (exp, target, target == const0_rtx);
2106 /* Output the entire sequence. */
2107 insns = get_insns ();
2108 end_sequence ();
2109 emit_insn (insns);
2111 return result;
2114 /* Expand a call to the builtin sin and cos math functions.
2115 Return NULL_RTX if a normal call should be emitted rather than expanding the
2116 function in-line. EXP is the expression that is a call to the builtin
2117 function; if convenient, the result should be placed in TARGET.
2118 SUBTARGET may be used as the target for computing one of EXP's
2119 operands. */
2121 static rtx
2122 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2124 optab builtin_optab;
2125 rtx op0;
2126 rtx_insn *insns;
2127 tree fndecl = get_callee_fndecl (exp);
2128 machine_mode mode;
2129 tree arg;
2131 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2132 return NULL_RTX;
2134 arg = CALL_EXPR_ARG (exp, 0);
2136 switch (DECL_FUNCTION_CODE (fndecl))
2138 CASE_FLT_FN (BUILT_IN_SIN):
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 builtin_optab = sincos_optab; break;
2141 default:
2142 gcc_unreachable ();
2145 /* Make a suitable register to place result in. */
2146 mode = TYPE_MODE (TREE_TYPE (exp));
2148 /* Check if the sincos insn is available; otherwise fall back
2149 to the sin or cos insn. */
2150 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2151 switch (DECL_FUNCTION_CODE (fndecl))
2153 CASE_FLT_FN (BUILT_IN_SIN):
2154 builtin_optab = sin_optab; break;
2155 CASE_FLT_FN (BUILT_IN_COS):
2156 builtin_optab = cos_optab; break;
2157 default:
2158 gcc_unreachable ();
2161 /* Before working hard, check whether the instruction is available. */
2162 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2164 rtx result = gen_reg_rtx (mode);
2166 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2167 need to expand the argument again. This way, we will not perform
2168 side effects more than once. */
2169 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2171 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2173 start_sequence ();
2175 /* Compute into RESULT.
2176 Set RESULT to wherever the result comes back. */
2177 if (builtin_optab == sincos_optab)
2179 int ok;
2181 switch (DECL_FUNCTION_CODE (fndecl))
2183 CASE_FLT_FN (BUILT_IN_SIN):
2184 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2185 break;
2186 CASE_FLT_FN (BUILT_IN_COS):
2187 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2188 break;
2189 default:
2190 gcc_unreachable ();
2192 gcc_assert (ok);
2194 else
2195 result = expand_unop (mode, builtin_optab, op0, result, 0);
2197 if (result != 0)
2199 /* Output the entire sequence. */
2200 insns = get_insns ();
2201 end_sequence ();
2202 emit_insn (insns);
2203 return result;
2206 /* If we were unable to expand via the builtin, stop the sequence
2207 (without outputting the insns) and call to the library function
2208 with the stabilized argument list. */
2209 end_sequence ();
2212 return expand_call (exp, target, target == const0_rtx);
2215 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2216 return an RTL instruction code that implements the functionality.
2217 If that isn't possible or available return CODE_FOR_nothing. */
2219 static enum insn_code
2220 interclass_mathfn_icode (tree arg, tree fndecl)
2222 bool errno_set = false;
2223 optab builtin_optab = unknown_optab;
2224 machine_mode mode;
2226 switch (DECL_FUNCTION_CODE (fndecl))
2228 CASE_FLT_FN (BUILT_IN_ILOGB):
2229 errno_set = true; builtin_optab = ilogb_optab; break;
2230 CASE_FLT_FN (BUILT_IN_ISINF):
2231 builtin_optab = isinf_optab; break;
2232 case BUILT_IN_ISNORMAL:
2233 case BUILT_IN_ISFINITE:
2234 CASE_FLT_FN (BUILT_IN_FINITE):
2235 case BUILT_IN_FINITED32:
2236 case BUILT_IN_FINITED64:
2237 case BUILT_IN_FINITED128:
2238 case BUILT_IN_ISINFD32:
2239 case BUILT_IN_ISINFD64:
2240 case BUILT_IN_ISINFD128:
2241 /* These builtins have no optabs (yet). */
2242 break;
2243 default:
2244 gcc_unreachable ();
2247 /* There's no easy way to detect the case we need to set EDOM. */
2248 if (flag_errno_math && errno_set)
2249 return CODE_FOR_nothing;
2251 /* Optab mode depends on the mode of the input argument. */
2252 mode = TYPE_MODE (TREE_TYPE (arg));
2254 if (builtin_optab)
2255 return optab_handler (builtin_optab, mode);
2256 return CODE_FOR_nothing;
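/* For example, ilogb may need to set errno to EDOM (say, for a zero
   argument), which an inline insn would not do, so with -fmath-errno
   in effect the function above returns CODE_FOR_nothing for ilogb.  */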
2259 /* Expand a call to one of the builtin math functions that operate on
2260 floating point argument and output an integer result (ilogb, isinf,
2261 isnan, etc).
2262 Return 0 if a normal call should be emitted rather than expanding the
2263 function in-line. EXP is the expression that is a call to the builtin
2264 function; if convenient, the result should be placed in TARGET. */
2266 static rtx
2267 expand_builtin_interclass_mathfn (tree exp, rtx target)
2269 enum insn_code icode = CODE_FOR_nothing;
2270 rtx op0;
2271 tree fndecl = get_callee_fndecl (exp);
2272 machine_mode mode;
2273 tree arg;
2275 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2276 return NULL_RTX;
2278 arg = CALL_EXPR_ARG (exp, 0);
2279 icode = interclass_mathfn_icode (arg, fndecl);
2280 mode = TYPE_MODE (TREE_TYPE (arg));
2282 if (icode != CODE_FOR_nothing)
2284 struct expand_operand ops[1];
2285 rtx_insn *last = get_last_insn ();
2286 tree orig_arg = arg;
2288 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2289 need to expand the argument again. This way, we will not perform
2290 side effects more than once. */
2291 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2293 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2295 if (mode != GET_MODE (op0))
2296 op0 = convert_to_mode (mode, op0, 0);
2298 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2299 if (maybe_legitimize_operands (icode, 0, 1, ops)
2300 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2301 return ops[0].value;
2303 delete_insns_since (last);
2304 CALL_EXPR_ARG (exp, 0) = orig_arg;
2307 return NULL_RTX;
2310 /* Expand a call to the builtin sincos math function.
2311 Return NULL_RTX if a normal call should be emitted rather than expanding the
2312 function in-line. EXP is the expression that is a call to the builtin
2313 function. */
2315 static rtx
2316 expand_builtin_sincos (tree exp)
2318 rtx op0, op1, op2, target1, target2;
2319 machine_mode mode;
2320 tree arg, sinp, cosp;
2321 int result;
2322 location_t loc = EXPR_LOCATION (exp);
2323 tree alias_type, alias_off;
2325 if (!validate_arglist (exp, REAL_TYPE,
2326 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2327 return NULL_RTX;
2329 arg = CALL_EXPR_ARG (exp, 0);
2330 sinp = CALL_EXPR_ARG (exp, 1);
2331 cosp = CALL_EXPR_ARG (exp, 2);
2333 /* Make a suitable register to place result in. */
2334 mode = TYPE_MODE (TREE_TYPE (arg));
2336 /* Check if sincos insn is available, otherwise emit the call. */
2337 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2338 return NULL_RTX;
2340 target1 = gen_reg_rtx (mode);
2341 target2 = gen_reg_rtx (mode);
2343 op0 = expand_normal (arg);
2344 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2345 alias_off = build_int_cst (alias_type, 0);
2346 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2347 sinp, alias_off));
2348 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2349 cosp, alias_off));
2351 /* Compute into target1 and target2.
2352 Set TARGET to wherever the result comes back. */
2353 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2354 gcc_assert (result);
2356 /* Move target1 and target2 to the memory locations indicated
2357 by op1 and op2. */
2358 emit_move_insn (op1, target1);
2359 emit_move_insn (op2, target2);
2361 return const0_rtx;
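/* For example, on a target providing the sincos optab, a call

     sincos (x, &s, &c);

   expands to a single instruction computing both results, after which
   the two result registers are stored through the SINP and COSP
   pointers as above.  */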
2364 /* Expand a call to the internal cexpi builtin to the sincos math function.
2365 EXP is the expression that is a call to the builtin function; if convenient,
2366 the result should be placed in TARGET. */
2368 static rtx
2369 expand_builtin_cexpi (tree exp, rtx target)
2371 tree fndecl = get_callee_fndecl (exp);
2372 tree arg, type;
2373 machine_mode mode;
2374 rtx op0, op1, op2;
2375 location_t loc = EXPR_LOCATION (exp);
2377 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2378 return NULL_RTX;
2380 arg = CALL_EXPR_ARG (exp, 0);
2381 type = TREE_TYPE (arg);
2382 mode = TYPE_MODE (TREE_TYPE (arg));
2384 /* Try expanding via a sincos optab, falling back to emitting a libcall
2385 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2386 is only generated from sincos or cexp, or when either is available. */
2387 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2389 op1 = gen_reg_rtx (mode);
2390 op2 = gen_reg_rtx (mode);
2392 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2394 /* Compute into op1 and op2. */
2395 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2397 else if (targetm.libc_has_function (function_sincos))
2399 tree call, fn = NULL_TREE;
2400 tree top1, top2;
2401 rtx op1a, op2a;
2403 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2404 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2405 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2406 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2408 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2409 else
2410 gcc_unreachable ();
2412 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2413 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2414 op1a = copy_addr_to_reg (XEXP (op1, 0));
2415 op2a = copy_addr_to_reg (XEXP (op2, 0));
2416 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2417 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2419 /* Make sure not to fold the sincos call again. */
2420 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2421 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2422 call, 3, arg, top1, top2));
2424 else
2426 tree call, fn = NULL_TREE, narg;
2427 tree ctype = build_complex_type (type);
2429 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2430 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2431 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2432 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2434 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2435 else
2436 gcc_unreachable ();
2438 /* If we don't have a decl for cexp create one. This is the
2439 friendliest fallback if the user calls __builtin_cexpi
2440 without full C99 function support on the target. */
2441 if (fn == NULL_TREE)
2443 tree fntype;
2444 const char *name = NULL;
2446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2447 name = "cexpf";
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2449 name = "cexp";
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2451 name = "cexpl";
2453 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2454 fn = build_fn_decl (name, fntype);
2457 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2458 build_real (type, dconst0), arg);
2460 /* Make sure not to fold the cexp call again. */
2461 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2462 return expand_expr (build_call_nary (ctype, call, 1, narg),
2463 target, VOIDmode, EXPAND_NORMAL);
2466 /* Now build the proper return type. */
2467 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2468 make_tree (TREE_TYPE (arg), op2),
2469 make_tree (TREE_TYPE (arg), op1)),
2470 target, VOIDmode, EXPAND_NORMAL);
2473 /* Conveniently construct a function call expression. FNDECL names the
2474 function to be called, N is the number of arguments, and the "..."
2475 parameters are the argument expressions. Unlike build_call_expr
2476 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2478 static tree
2479 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2481 va_list ap;
2482 tree fntype = TREE_TYPE (fndecl);
2483 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2485 va_start (ap, n);
2486 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2487 va_end (ap);
2488 SET_EXPR_LOCATION (fn, loc);
2489 return fn;
2492 /* Expand a call to one of the builtin rounding functions gcc defines
2493 as an extension (lfloor and lceil). As these are gcc extensions we
2494 do not need to worry about setting errno to EDOM.
2495 If expanding via optab fails, lower expression to (int)(floor(x)).
2496 EXP is the expression that is a call to the builtin function;
2497 if convenient, the result should be placed in TARGET. */
2499 static rtx
2500 expand_builtin_int_roundingfn (tree exp, rtx target)
2502 convert_optab builtin_optab;
2503 rtx op0, tmp;
2504 rtx_insn *insns;
2505 tree fndecl = get_callee_fndecl (exp);
2506 enum built_in_function fallback_fn;
2507 tree fallback_fndecl;
2508 machine_mode mode;
2509 tree arg;
2511 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2512 gcc_unreachable ();
2514 arg = CALL_EXPR_ARG (exp, 0);
2516 switch (DECL_FUNCTION_CODE (fndecl))
2518 CASE_FLT_FN (BUILT_IN_ICEIL):
2519 CASE_FLT_FN (BUILT_IN_LCEIL):
2520 CASE_FLT_FN (BUILT_IN_LLCEIL):
2521 builtin_optab = lceil_optab;
2522 fallback_fn = BUILT_IN_CEIL;
2523 break;
2525 CASE_FLT_FN (BUILT_IN_IFLOOR):
2526 CASE_FLT_FN (BUILT_IN_LFLOOR):
2527 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2528 builtin_optab = lfloor_optab;
2529 fallback_fn = BUILT_IN_FLOOR;
2530 break;
2532 default:
2533 gcc_unreachable ();
2536 /* Make a suitable register to place result in. */
2537 mode = TYPE_MODE (TREE_TYPE (exp));
2539 target = gen_reg_rtx (mode);
2541 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2542 need to expand the argument again. This way, we will not perform
2543 side effects more than once. */
2544 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2546 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2548 start_sequence ();
2550 /* Compute into TARGET. */
2551 if (expand_sfix_optab (target, op0, builtin_optab))
2553 /* Output the entire sequence. */
2554 insns = get_insns ();
2555 end_sequence ();
2556 emit_insn (insns);
2557 return target;
2560 /* If we were unable to expand via the builtin, stop the sequence
2561 (without outputting the insns). */
2562 end_sequence ();
2564 /* Fall back to floating point rounding optab. */
2565 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2567 /* For non-C99 targets we may end up without a fallback fndecl here
2568 if the user called __builtin_lfloor directly. In this case emit
2569 a call to the floor/ceil variants nevertheless. This should result
2570 in the best user experience for targets lacking full C99 support. */
2571 if (fallback_fndecl == NULL_TREE)
2573 tree fntype;
2574 const char *name = NULL;
2576 switch (DECL_FUNCTION_CODE (fndecl))
2578 case BUILT_IN_ICEIL:
2579 case BUILT_IN_LCEIL:
2580 case BUILT_IN_LLCEIL:
2581 name = "ceil";
2582 break;
2583 case BUILT_IN_ICEILF:
2584 case BUILT_IN_LCEILF:
2585 case BUILT_IN_LLCEILF:
2586 name = "ceilf";
2587 break;
2588 case BUILT_IN_ICEILL:
2589 case BUILT_IN_LCEILL:
2590 case BUILT_IN_LLCEILL:
2591 name = "ceill";
2592 break;
2593 case BUILT_IN_IFLOOR:
2594 case BUILT_IN_LFLOOR:
2595 case BUILT_IN_LLFLOOR:
2596 name = "floor";
2597 break;
2598 case BUILT_IN_IFLOORF:
2599 case BUILT_IN_LFLOORF:
2600 case BUILT_IN_LLFLOORF:
2601 name = "floorf";
2602 break;
2603 case BUILT_IN_IFLOORL:
2604 case BUILT_IN_LFLOORL:
2605 case BUILT_IN_LLFLOORL:
2606 name = "floorl";
2607 break;
2608 default:
2609 gcc_unreachable ();
2612 fntype = build_function_type_list (TREE_TYPE (arg),
2613 TREE_TYPE (arg), NULL_TREE);
2614 fallback_fndecl = build_fn_decl (name, fntype);
2617 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2619 tmp = expand_normal (exp);
2620 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2622 /* Truncate the result of floating point optab to integer
2623 via expand_fix (). */
2624 target = gen_reg_rtx (mode);
2625 expand_fix (target, tmp, 0);
2627 return target;
2630 /* Expand a call to one of the builtin math functions doing integer
2631 conversion (lrint).
2632 Return 0 if a normal call should be emitted rather than expanding the
2633 function in-line. EXP is the expression that is a call to the builtin
2634 function; if convenient, the result should be placed in TARGET. */
2636 static rtx
2637 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2639 convert_optab builtin_optab;
2640 rtx op0;
2641 rtx_insn *insns;
2642 tree fndecl = get_callee_fndecl (exp);
2643 tree arg;
2644 machine_mode mode;
2645 enum built_in_function fallback_fn = BUILT_IN_NONE;
2647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2648 gcc_unreachable ();
2650 arg = CALL_EXPR_ARG (exp, 0);
2652 switch (DECL_FUNCTION_CODE (fndecl))
2654 CASE_FLT_FN (BUILT_IN_IRINT):
2655 fallback_fn = BUILT_IN_LRINT;
2656 gcc_fallthrough ();
2657 CASE_FLT_FN (BUILT_IN_LRINT):
2658 CASE_FLT_FN (BUILT_IN_LLRINT):
2659 builtin_optab = lrint_optab;
2660 break;
2662 CASE_FLT_FN (BUILT_IN_IROUND):
2663 fallback_fn = BUILT_IN_LROUND;
2664 gcc_fallthrough ();
2665 CASE_FLT_FN (BUILT_IN_LROUND):
2666 CASE_FLT_FN (BUILT_IN_LLROUND):
2667 builtin_optab = lround_optab;
2668 break;
2670 default:
2671 gcc_unreachable ();
2674 /* There's no easy way to detect the case we need to set EDOM. */
2675 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2676 return NULL_RTX;
2678 /* Make a suitable register to place result in. */
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2681 /* There's no easy way to detect the case we need to set EDOM. */
2682 if (!flag_errno_math)
2684 rtx result = gen_reg_rtx (mode);
2686 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2687 need to expand the argument again. This way, we will not perform
2688 side effects more than once. */
2689 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2691 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2693 start_sequence ();
2695 if (expand_sfix_optab (result, op0, builtin_optab))
2697 /* Output the entire sequence. */
2698 insns = get_insns ();
2699 end_sequence ();
2700 emit_insn (insns);
2701 return result;
2704 /* If we were unable to expand via the builtin, stop the sequence
2705 (without outputting the insns) and call to the library function
2706 with the stabilized argument list. */
2707 end_sequence ();
2710 if (fallback_fn != BUILT_IN_NONE)
2712 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2713 targets, (int) round (x) should never be transformed into
2714 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2715 a call to lround in the hope that the target provides at least some
2716 C99 functions. This should result in the best user experience for
2717 targets lacking full C99 support. */
2718 tree fallback_fndecl = mathfn_built_in_1
2719 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2721 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2722 fallback_fndecl, 1, arg);
2724 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2725 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2726 return convert_to_mode (mode, target, 0);
2729 return expand_call (exp, target, target == const0_rtx);
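/* For example, if __builtin_iround (x) cannot be expanded inline
   (no lround optab, or -fmath-errno in effect), it falls back to a
   library call to lround, whose long result is then converted to int
   by convert_to_mode above.  */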
2732 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2733 a normal call should be emitted rather than expanding the function
2734 in-line. EXP is the expression that is a call to the builtin
2735 function; if convenient, the result should be placed in TARGET. */
2737 static rtx
2738 expand_builtin_powi (tree exp, rtx target)
2740 tree arg0, arg1;
2741 rtx op0, op1;
2742 machine_mode mode;
2743 machine_mode mode2;
2745 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2746 return NULL_RTX;
2748 arg0 = CALL_EXPR_ARG (exp, 0);
2749 arg1 = CALL_EXPR_ARG (exp, 1);
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2752 /* Emit a libcall to libgcc. */
2754 /* Mode of the 2nd argument must match that of an int. */
2755 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2757 if (target == NULL_RTX)
2758 target = gen_reg_rtx (mode);
2760 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2761 if (GET_MODE (op0) != mode)
2762 op0 = convert_to_mode (mode, op0, 0);
2763 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2764 if (GET_MODE (op1) != mode2)
2765 op1 = convert_to_mode (mode2, op1, 0);
2767 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2768 target, LCT_CONST, mode, 2,
2769 op0, mode, op1, mode2);
2771 return target;
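/* For example, __builtin_powi (x, 5) with X of type double becomes a
   libgcc call ala __powidf2 (x, 5), reached through the powi optab's
   libfunc.  */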
2774 /* Expand expression EXP which is a call to the strlen builtin. Return
2775 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2776 try to get the result in TARGET, if convenient. */
2778 static rtx
2779 expand_builtin_strlen (tree exp, rtx target,
2780 machine_mode target_mode)
2782 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2783 return NULL_RTX;
2784 else
2786 struct expand_operand ops[4];
2787 rtx pat;
2788 tree len;
2789 tree src = CALL_EXPR_ARG (exp, 0);
2790 rtx src_reg;
2791 rtx_insn *before_strlen;
2792 machine_mode insn_mode = target_mode;
2793 enum insn_code icode = CODE_FOR_nothing;
2794 unsigned int align;
2796 /* If the length can be computed at compile-time, return it. */
2797 len = c_strlen (src, 0);
2798 if (len)
2799 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2801 /* If the length can be computed at compile-time and is a constant
2802 integer, but there are side-effects in src, evaluate
2803 src for side-effects, then return len.
2804 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2805 can be optimized into: i++; x = 3; */
2806 len = c_strlen (src, 1);
2807 if (len && TREE_CODE (len) == INTEGER_CST)
2809 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2810 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2813 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2815 /* If SRC is not a pointer type, don't do this operation inline. */
2816 if (align == 0)
2817 return NULL_RTX;
2819 /* Bail out if we can't compute strlen in the right mode. */
2820 while (insn_mode != VOIDmode)
2822 icode = optab_handler (strlen_optab, insn_mode);
2823 if (icode != CODE_FOR_nothing)
2824 break;
2826 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2828 if (insn_mode == VOIDmode)
2829 return NULL_RTX;
2831 /* Make a place to hold the source address. We will not expand
2832 the actual source until we are sure that the expansion will
2833 not fail -- there are trees that cannot be expanded twice. */
2834 src_reg = gen_reg_rtx (Pmode);
2836 /* Mark the beginning of the strlen sequence so we can emit the
2837 source operand later. */
2838 before_strlen = get_last_insn ();
2840 create_output_operand (&ops[0], target, insn_mode);
2841 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2842 create_integer_operand (&ops[2], 0);
2843 create_integer_operand (&ops[3], align);
2844 if (!maybe_expand_insn (icode, 4, ops))
2845 return NULL_RTX;
2847 /* Now that we are assured of success, expand the source. */
2848 start_sequence ();
2849 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2850 if (pat != src_reg)
2852 #ifdef POINTERS_EXTEND_UNSIGNED
2853 if (GET_MODE (pat) != Pmode)
2854 pat = convert_to_mode (Pmode, pat,
2855 POINTERS_EXTEND_UNSIGNED);
2856 #endif
2857 emit_move_insn (src_reg, pat);
2859 pat = get_insns ();
2860 end_sequence ();
2862 if (before_strlen)
2863 emit_insn_after (pat, before_strlen);
2864 else
2865 emit_insn_before (pat, get_insns ());
2867 /* Return the value in the proper mode for this function. */
2868 if (GET_MODE (ops[0].value) == target_mode)
2869 target = ops[0].value;
2870 else if (target != 0)
2871 convert_move (target, ops[0].value, 0);
2872 else
2873 target = convert_to_mode (target_mode, ops[0].value, 0);
2875 return target;
2879 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2880 bytes from constant string DATA + OFFSET and return it as a target
2881 constant. */
2883 static rtx
2884 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2885 machine_mode mode)
2887 const char *str = (const char *) data;
2889 gcc_assert (offset >= 0
2890 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2891 <= strlen (str) + 1));
2893 return c_readstr (str + offset, mode);
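/* For example, with DATA pointing at the string "abcdef", OFFSET 2
   and a 4-byte integer MODE, the callback returns the bytes "cdef"
   as a target constant that store_by_pieces can emit directly.  */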
2896 /* LEN specifies the length of the block of the memcpy/memset operation.
2897 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
2898 In some cases we can make a very likely guess at the maximum size,
2899 which we then store in PROBABLE_MAX_SIZE. */
2901 static void
2902 determine_block_size (tree len, rtx len_rtx,
2903 unsigned HOST_WIDE_INT *min_size,
2904 unsigned HOST_WIDE_INT *max_size,
2905 unsigned HOST_WIDE_INT *probable_max_size)
2907 if (CONST_INT_P (len_rtx))
2909 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2910 return;
2912 else
2914 wide_int min, max;
2915 enum value_range_type range_type = VR_UNDEFINED;
2917 /* Determine bounds from the type. */
2918 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2919 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2920 else
2921 *min_size = 0;
2922 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2923 *probable_max_size = *max_size
2924 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2925 else
2926 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2928 if (TREE_CODE (len) == SSA_NAME)
2929 range_type = get_range_info (len, &min, &max);
2930 if (range_type == VR_RANGE)
2932 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2933 *min_size = min.to_uhwi ();
2934 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2935 *probable_max_size = *max_size = max.to_uhwi ();
2937 else if (range_type == VR_ANTI_RANGE)
2939 /* An anti-range 0...N lets us determine the minimal size as N+1. */
2940 if (min == 0)
2942 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2943 *min_size = max.to_uhwi () + 1;
2945 /* Code like
2947 int n;
2948 if (n < 100)
2949 memcpy (a, b, n)
2951 produces an anti-range allowing negative values of N. We can
2952 still use this information to guess that N is not negative. */
2954 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2955 *probable_max_size = min.to_uhwi () - 1;
2958 gcc_checking_assert (*max_size <=
2959 (unsigned HOST_WIDE_INT)
2960 GET_MODE_MASK (GET_MODE (len_rtx)));
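/* As a worked example (with hypothetical arrays A and B), in

     int n;
     if (n < 100)
       memcpy (a, b, n);

   the conversion of N to size_t yields an anti-range, so the code
   above leaves MAX_SIZE at the full mode mask but guesses
   PROBABLE_MAX_SIZE as 99.  */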
2963 /* Helper function to do the actual work for expand_builtin_memcpy. */
2965 static rtx
2966 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2968 const char *src_str;
2969 unsigned int src_align = get_pointer_alignment (src);
2970 unsigned int dest_align = get_pointer_alignment (dest);
2971 rtx dest_mem, src_mem, dest_addr, len_rtx;
2972 HOST_WIDE_INT expected_size = -1;
2973 unsigned int expected_align = 0;
2974 unsigned HOST_WIDE_INT min_size;
2975 unsigned HOST_WIDE_INT max_size;
2976 unsigned HOST_WIDE_INT probable_max_size;
2978 /* If DEST is not a pointer type, call the normal function. */
2979 if (dest_align == 0)
2980 return NULL_RTX;
2982 /* If SRC is not a pointer type, don't do this
2983 operation in-line. */
2984 if (src_align == 0)
2985 return NULL_RTX;
2987 if (currently_expanding_gimple_stmt)
2988 stringop_block_profile (currently_expanding_gimple_stmt,
2989 &expected_align, &expected_size);
2991 if (expected_align < dest_align)
2992 expected_align = dest_align;
2993 dest_mem = get_memory_rtx (dest, len);
2994 set_mem_align (dest_mem, dest_align);
2995 len_rtx = expand_normal (len);
2996 determine_block_size (len, len_rtx, &min_size, &max_size,
2997 &probable_max_size);
2998 src_str = c_getstr (src);
3000 /* If SRC is a string constant and block move would be done
3001 by pieces, we can avoid loading the string from memory
3002 and only store the computed constants. */
3003 if (src_str
3004 && CONST_INT_P (len_rtx)
3005 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3006 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3007 CONST_CAST (char *, src_str),
3008 dest_align, false))
3010 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3011 builtin_memcpy_read_str,
3012 CONST_CAST (char *, src_str),
3013 dest_align, false, 0);
3014 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3015 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3016 return dest_mem;
3019 src_mem = get_memory_rtx (src, len);
3020 set_mem_align (src_mem, src_align);
3022 /* Copy word part most expediently. */
3023 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3024 CALL_EXPR_TAILCALL (exp)
3025 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3026 expected_align, expected_size,
3027 min_size, max_size, probable_max_size);
3029 if (dest_addr == 0)
3031 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3032 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3035 return dest_addr;
3038 /* Try to verify that the sizes and lengths of the arguments to a string
3039 manipulation function given by EXP are within valid bounds and that
3040 the operation does not lead to buffer overflow. Arguments other than
3041 EXP may be null. When non-null, the arguments have the following
3042 meaning:
3043 SIZE is the user-supplied size argument to the function (such as in
3044 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3045 number of bytes to write.
3046 MAXLEN is the user-supplied bound on the length of the source sequence
3047 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3048 of bytes to write.
3049 STR is the source string (such as in strcpy(d, s)) when the expression
3050 EXP is a string function call (as opposed to a memory call like memcpy).
3051 As an exception, STR can also be an integer denoting the precomputed
3052 length of the source string.
3053 OBJSIZE is the size of the destination object specified by the last
3054 argument to the _chk builtins, typically resulting from the expansion
3055 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3056 OBJSIZE)).
3058 When SIZE is null, LEN is checked to verify that it doesn't exceed
3059 SIZE_MAX.
3061 If the call is successfully verified as safe from buffer overflow
3062 the function returns true, otherwise false. */
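/* For example, for a call such as

     char d[3];
     strncpy (d, s, 5);

   check_sizes would be invoked with SIZE equal to 5 and OBJSIZE equal
   to 3, diagnose writing 5 bytes into a region of size 3, and return
   false.  */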
3064 static bool
3065 check_sizes (int opt, tree exp, tree size, tree maxlen, tree str, tree objsize)
3067 /* The size of the largest object is half the address space, or
3068 SSIZE_MAX. (This is way too permissive.) */
3069 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3071 tree slen = NULL_TREE;
3073 /* Set to true when the exact number of bytes written by a string
3074 function like strcpy is not known and the only thing that is
3075 known is that it must be at least one (for the terminating nul). */
3076 bool at_least_one = false;
3077 if (str)
3079 /* STR is normally a pointer to string but as a special case
3080 it can be an integer denoting the length of a string. */
3081 if (POINTER_TYPE_P (TREE_TYPE (str)))
3083 /* Try to determine the range of lengths the source string
3084 refers to. If it can be determined add one to it for
3085 the terminating nul. Otherwise, set it to one for
3086 the same reason. */
3087 tree lenrange[2];
3088 get_range_strlen (str, lenrange);
3089 if (lenrange[0])
3090 slen = fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3091 size_one_node);
3092 else
3094 at_least_one = true;
3095 slen = size_one_node;
3098 else
3099 slen = str;
3102 if (!size && !maxlen)
3104 /* When the only available piece of data is the object size
3105 there is nothing to do. */
3106 if (!slen)
3107 return true;
3109 /* Otherwise, when the length of the source sequence is known
3110 (as with strlen), set SIZE to it. */
3111 size = slen;
3114 if (!objsize)
3115 objsize = maxobjsize;
3117 /* The SIZE is exact if it's non-null, constant, and in range of
3118 unsigned HOST_WIDE_INT. */
3119 bool exactsize = size && tree_fits_uhwi_p (size);
3121 tree range[2] = { NULL_TREE, NULL_TREE };
3122 if (size)
3123 get_size_range (size, range);
3125 /* First check the number of bytes to be written against the maximum
3126 object size. */
3127 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3129 location_t loc = tree_nonartificial_location (exp);
3130 loc = expansion_point_location_if_in_system_header (loc);
3132 if (range[0] == range[1])
3133 warning_at (loc, opt,
3134 "%K%qD: specified size %wu "
3135 "exceeds maximum object size %wu",
3136 exp, get_callee_fndecl (exp),
3137 tree_to_uhwi (range[0]),
3138 tree_to_uhwi (maxobjsize));
3139 else
3140 warning_at (loc, opt,
3141 "%K%qD: specified size between %wu and %wu "
3142 "exceeds maximum object size %wu",
3143 exp, get_callee_fndecl (exp),
3144 tree_to_uhwi (range[0]),
3145 tree_to_uhwi (range[1]),
3146 tree_to_uhwi (maxobjsize));
3147 return false;
3150 /* Next check the number of bytes to be written against the destination
3151 object size. */
3152 if (range[0] || !exactsize || integer_all_onesp (size))
3154 if (range[0]
3155 && ((tree_fits_uhwi_p (objsize)
3156 && tree_int_cst_lt (objsize, range[0]))
3157 || (tree_fits_uhwi_p (size)
3158 && tree_int_cst_lt (size, range[0]))))
3160 unsigned HOST_WIDE_INT uwir0 = tree_to_uhwi (range[0]);
3162 location_t loc = tree_nonartificial_location (exp);
3163 loc = expansion_point_location_if_in_system_header (loc);
3165 if (at_least_one)
3166 warning_at (loc, opt,
3167 "%K%qD writing at least %wu byte into a region "
3168 "of size %wu overflows the destination",
3169 exp, get_callee_fndecl (exp), uwir0,
3170 tree_to_uhwi (objsize));
3171 else if (range[0] == range[1])
3172 warning_at (loc, opt,
3173 (uwir0 == 1
3174 ? G_("%K%qD writing %wu byte into a region "
3175 "of size %wu overflows the destination")
3176 : G_("%K%qD writing %wu bytes into a region "
3177 "of size %wu overflows the destination")),
3178 exp, get_callee_fndecl (exp), uwir0,
3179 tree_to_uhwi (objsize));
3180 else
3181 warning_at (loc, opt,
3182 "%K%qD writing between %wu and %wu bytes "
3183 "into a region of size %wu overflows "
3184 "the destination",
3185 exp, get_callee_fndecl (exp), uwir0,
3186 tree_to_uhwi (range[1]), tree_to_uhwi (objsize));
3188 /* Return error when an overflow has been detected. */
3189 return false;
3193 /* Check the maximum length of the source sequence against the size
3194 of the destination object if known, or against the maximum size
3195 of an object. */
3196 if (maxlen)
3198 get_size_range (maxlen, range);
3200 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3202 location_t loc = tree_nonartificial_location (exp);
3203 loc = expansion_point_location_if_in_system_header (loc);
3205 if (tree_int_cst_lt (maxobjsize, range[0]))
3207 /* Warn about crazy big sizes first since that's more
3208 likely to be meaningful than saying that the bound
3209 is greater than the object size if both are big. */
3210 if (range[0] == range[1])
3211 warning_at (loc, opt,
3212 "%K%qD: specified bound %wu "
3213 "exceeds maximum object size %wu",
3214 exp, get_callee_fndecl (exp),
3215 tree_to_uhwi (range[0]),
3216 tree_to_uhwi (maxobjsize));
3217 else
3218 warning_at (loc, opt,
3219 "%K%qD: specified bound between %wu and %wu "
3220 " exceeds maximum object size %wu",
3221 exp, get_callee_fndecl (exp),
3222 tree_to_uhwi (range[0]),
3223 tree_to_uhwi (range[1]),
3224 tree_to_uhwi (maxobjsize));
3226 return false;
3229 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3231 if (range[0] == range[1])
3232 warning_at (loc, opt,
3233 "%K%qD: specified bound %wu "
3234 "exceeds the size %wu of the destination",
3235 exp, get_callee_fndecl (exp),
3236 tree_to_uhwi (range[0]),
3237 tree_to_uhwi (objsize));
3238 else
3239 warning_at (loc, opt,
3240 "%K%qD: specified bound between %wu and %wu "
3241 " exceeds the size %wu of the destination",
3242 exp, get_callee_fndecl (exp),
3243 tree_to_uhwi (range[0]),
3244 tree_to_uhwi (range[1]),
3245 tree_to_uhwi (objsize));
3246 return false;
3251 return true;
3254 /* Helper to compute the size of the object referenced by the DEST
3255 expression, which must be of pointer type, using Object Size type
3256 OSTYPE (only the least significant 2 bits are used). Return
3257 the size of the object if successful or NULL when the size cannot
3258 be determined. */
3260 static inline tree
3261 compute_dest_size (tree dest, int ostype)
3263 unsigned HOST_WIDE_INT size;
3264 if (compute_builtin_object_size (dest, ostype & 3, &size))
3265 return build_int_cst (sizetype, size);
3267 return NULL_TREE;
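/* For example, given

     char buf[8];
     memset (buf, 0, n);

   compute_dest_size on the address of BUF with OSTYPE 0 yields the
   constant 8; for a pointer whose target is unknown the result is
   NULL_TREE and callers fall back to the maximum object size.  */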
3270 /* Helper to determine and check the sizes of the source and the destination
3271 of calls to __builtin_{bzero,memcpy,memset}. Use Object Size type-0
3272 regardless of the OPT_Wstringop_overflow_ setting. Returns true on success
3273 (no overflow or invalid sizes), false otherwise. */
3275 static bool
3276 check_memop_sizes (tree exp, tree dest, tree size)
3278 if (!warn_stringop_overflow)
3279 return true;
3281 /* For functions like memset and memcpy that operate on raw memory
3282 try to determine the size of the largest destination object using
3283 type-0 Object Size regardless of the object size type specified
3284 by the option. */
3285 tree objsize = compute_dest_size (dest, 0);
3287 return check_sizes (OPT_Wstringop_overflow_, exp,
3288 size, /*maxlen=*/NULL_TREE, /*str=*/NULL_TREE, objsize);
3291 /* Expand a call EXP to the memcpy builtin.
3292 Return NULL_RTX if we failed; the caller should emit a normal call,
3293 otherwise try to get the result in TARGET, if convenient (and in
3294 mode MODE if that's convenient). */
3296 static rtx
3297 expand_builtin_memcpy (tree exp, rtx target)
3299 if (!validate_arglist (exp,
3300 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3301 return NULL_RTX;
3303 tree dest = CALL_EXPR_ARG (exp, 0);
3304 tree src = CALL_EXPR_ARG (exp, 1);
3305 tree len = CALL_EXPR_ARG (exp, 2);
3307 check_memop_sizes (exp, dest, len);
3309 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3312 /* Check a call EXP to the memmove built-in for validity.
3313 Return NULL_RTX on both success and failure. */
3315 static rtx
3316 expand_builtin_memmove (tree exp, rtx)
3318 if (!validate_arglist (exp,
3319 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 return NULL_RTX;
3322 tree dest = CALL_EXPR_ARG (exp, 0);
3323 tree len = CALL_EXPR_ARG (exp, 2);
3325 check_memop_sizes (exp, dest, len);
3327 return NULL_RTX;
3330 /* Expand an instrumented call EXP to the memcpy builtin.
3331 Return NULL_RTX if we failed; the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3335 static rtx
3336 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3338 if (!validate_arglist (exp,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 INTEGER_TYPE, VOID_TYPE))
3342 return NULL_RTX;
3343 else
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 2);
3347 tree len = CALL_EXPR_ARG (exp, 4);
3348 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3350 /* Return src bounds with the result. */
3351 if (res)
3353 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3354 expand_normal (CALL_EXPR_ARG (exp, 1)));
3355 res = chkp_join_splitted_slot (res, bnd);
3357 return res;
3361 /* Expand a call EXP to the mempcpy builtin.
3362 Return NULL_RTX if we failed; the caller should emit a normal call,
3363 otherwise try to get the result in TARGET, if convenient (and in
3364 mode MODE if that's convenient). If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 stpcpy. */
3369 static rtx
3370 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3372 if (!validate_arglist (exp,
3373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3374 return NULL_RTX;
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 tree len = CALL_EXPR_ARG (exp, 2);
3380 /* Avoid expanding mempcpy into memcpy when the call is determined
3381 to overflow the buffer. This also prevents the same overflow
3382 from being diagnosed again when expanding memcpy. */
3383 if (!check_memop_sizes (exp, dest, len))
3384 return NULL_RTX;
3386 return expand_builtin_mempcpy_args (dest, src, len,
3387 target, mode, /*endp=*/ 1,
3388 exp);
3391 /* Expand an instrumented call EXP to the mempcpy builtin.
3392 Return NULL_RTX if we failed; the caller should emit a normal call,
3393 otherwise try to get the result in TARGET, if convenient (and in
3394 mode MODE if that's convenient). */
3396 static rtx
3397 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3399 if (!validate_arglist (exp,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3402 INTEGER_TYPE, VOID_TYPE))
3403 return NULL_RTX;
3404 else
3406 tree dest = CALL_EXPR_ARG (exp, 0);
3407 tree src = CALL_EXPR_ARG (exp, 2);
3408 tree len = CALL_EXPR_ARG (exp, 4);
3409 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3410 mode, 1, exp);
3412 /* Return src bounds with the result. */
3413 if (res)
3415 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3416 expand_normal (CALL_EXPR_ARG (exp, 1)));
3417 res = chkp_join_splitted_slot (res, bnd);
3419 return res;
3423 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3424 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3425 so that this can also be called without constructing an actual CALL_EXPR.
3426 The other arguments and return value are the same as for
3427 expand_builtin_mempcpy. */
3429 static rtx
3430 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3431 rtx target, machine_mode mode, int endp,
3432 tree orig_exp)
3434 tree fndecl = get_callee_fndecl (orig_exp);
3436 /* If return value is ignored, transform mempcpy into memcpy. */
3437 if (target == const0_rtx
3438 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3439 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3441 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3442 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3443 dest, src, len);
3444 return expand_expr (result, target, mode, EXPAND_NORMAL);
3446 else if (target == const0_rtx
3447 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3449 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3450 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3451 dest, src, len);
3452 return expand_expr (result, target, mode, EXPAND_NORMAL);
3454 else
3456 const char *src_str;
3457 unsigned int src_align = get_pointer_alignment (src);
3458 unsigned int dest_align = get_pointer_alignment (dest);
3459 rtx dest_mem, src_mem, len_rtx;
3461 /* If either SRC or DEST is not a pointer type, don't do this
3462 operation in-line. */
3463 if (dest_align == 0 || src_align == 0)
3464 return NULL_RTX;
3466 /* If LEN is not constant, call the normal function. */
3467 if (! tree_fits_uhwi_p (len))
3468 return NULL_RTX;
3470 len_rtx = expand_normal (len);
3471 src_str = c_getstr (src);
3473 /* If SRC is a string constant and block move would be done
3474 by pieces, we can avoid loading the string from memory
3475 and only store the computed constants. */
3476 if (src_str
3477 && CONST_INT_P (len_rtx)
3478 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3479 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3480 CONST_CAST (char *, src_str),
3481 dest_align, false))
3483 dest_mem = get_memory_rtx (dest, len);
3484 set_mem_align (dest_mem, dest_align);
3485 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3486 builtin_memcpy_read_str,
3487 CONST_CAST (char *, src_str),
3488 dest_align, false, endp);
3489 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3490 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3491 return dest_mem;
3494 if (CONST_INT_P (len_rtx)
3495 && can_move_by_pieces (INTVAL (len_rtx),
3496 MIN (dest_align, src_align)))
3498 dest_mem = get_memory_rtx (dest, len);
3499 set_mem_align (dest_mem, dest_align);
3500 src_mem = get_memory_rtx (src, len);
3501 set_mem_align (src_mem, src_align);
3502 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3503 MIN (dest_align, src_align), endp);
3504 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3505 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3506 return dest_mem;
3509 return NULL_RTX;
3513 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3514 we failed, the caller should emit a normal call, otherwise try to
3515 get the result in TARGET, if convenient. If ENDP is 0 return the
3516 destination pointer, if ENDP is 1 return the end pointer ala
3517 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3518 stpcpy. */
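/* Illustrative sketch of the ENDP encoding (an addition for
   exposition, not part of the original source): copying the string
   "ab" -- three bytes including the NUL -- into

     char buf[8];

   yields &buf[0] for ENDP == 0 (strcpy), &buf[3] for ENDP == 1
   (mempcpy, one past the NUL), and &buf[2] for ENDP == 2 (stpcpy,
   the address of the NUL itself).  */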
3520 static rtx
3521 expand_movstr (tree dest, tree src, rtx target, int endp)
3523 struct expand_operand ops[3];
3524 rtx dest_mem;
3525 rtx src_mem;
3527 if (!targetm.have_movstr ())
3528 return NULL_RTX;
3530 dest_mem = get_memory_rtx (dest, NULL);
3531 src_mem = get_memory_rtx (src, NULL);
3532 if (!endp)
3534 target = force_reg (Pmode, XEXP (dest_mem, 0));
3535 dest_mem = replace_equiv_address (dest_mem, target);
3538 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3539 create_fixed_operand (&ops[1], dest_mem);
3540 create_fixed_operand (&ops[2], src_mem);
3541 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3542 return NULL_RTX;
3544 if (endp && target != const0_rtx)
3546 target = ops[0].value;
3547 /* movstr is supposed to set end to the address of the NUL
3548 terminator. If the caller requested a mempcpy-like return value,
3549 adjust it. */
3550 if (endp == 1)
3552 rtx tem = plus_constant (GET_MODE (target),
3553 gen_lowpart (GET_MODE (target), target), 1);
3554 emit_move_insn (target, force_operand (tem, NULL_RTX));
3557 return target;
3560 /* Do some very basic size validation of a call to the strcat builtin
3561 given by EXP. Return NULL_RTX to have the built-in expand to a call
3562 to the library function. */
3564 static rtx
3565 expand_builtin_strcat (tree exp, rtx)
3567 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3568 || !warn_stringop_overflow)
3569 return NULL_RTX;
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 1);
3574 /* There is no way here to determine the length of the string in
3575 the destination to which the SRC string is being appended so
3576 just diagnose cases when the source string is longer than
3577 the destination object. */
3579 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3581 check_sizes (OPT_Wstringop_overflow_,
3582 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3584 return NULL_RTX;
3587 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3588 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3589 try to get the result in TARGET, if convenient (and in mode MODE if that's
3590 convenient). */
3592 static rtx
3593 expand_builtin_strcpy (tree exp, rtx target)
3595 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3596 return NULL_RTX;
3598 tree dest = CALL_EXPR_ARG (exp, 0);
3599 tree src = CALL_EXPR_ARG (exp, 1);
3601 if (warn_stringop_overflow)
3603 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3604 check_sizes (OPT_Wstringop_overflow_,
3605 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3608 return expand_builtin_strcpy_args (dest, src, target);
3611 /* Helper function to do the actual work for expand_builtin_strcpy. The
3612 arguments to the builtin_strcpy call DEST and SRC are broken out
3613 so that this can also be called without constructing an actual CALL_EXPR.
3614 The other arguments and return value are the same as for
3615 expand_builtin_strcpy. */
3617 static rtx
3618 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3620 return expand_movstr (dest, src, target, /*endp=*/0);
3623 /* Expand a call EXP to the stpcpy builtin.
3624 Return NULL_RTX if we failed; the caller should emit a normal call,
3625 otherwise try to get the result in TARGET, if convenient (and in
3626 mode MODE if that's convenient). */
3628 static rtx
3629 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3631 tree dst, src;
3632 location_t loc = EXPR_LOCATION (exp);
3634 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3635 return NULL_RTX;
3637 dst = CALL_EXPR_ARG (exp, 0);
3638 src = CALL_EXPR_ARG (exp, 1);
3640 if (warn_stringop_overflow)
3642 tree destsize = compute_dest_size (dst, warn_stringop_overflow - 1);
3643 check_sizes (OPT_Wstringop_overflow_,
3644 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3647 /* If return value is ignored, transform stpcpy into strcpy. */
3648 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3650 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3651 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3652 return expand_expr (result, target, mode, EXPAND_NORMAL);
3654 else
3656 tree len, lenp1;
3657 rtx ret;
3659 /* Ensure we get an actual string whose length can be evaluated at
3660 compile-time, not an expression containing a string. This is
3661 because the latter will potentially produce pessimized code
3662 when used to produce the return value. */
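/* For example (illustrative only), stpcpy (d, "abc") has the
   compile-time length 3, so it is expanded as a four-byte copy whose
   result folds to D + 3; a non-literal SRC takes the expand_movstr
   path below instead.  */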
3663 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3664 return expand_movstr (dst, src, target, /*endp=*/2);
3666 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3667 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3668 target, mode, /*endp=*/2,
3669 exp);
3671 if (ret)
3672 return ret;
3674 if (TREE_CODE (len) == INTEGER_CST)
3676 rtx len_rtx = expand_normal (len);
3678 if (CONST_INT_P (len_rtx))
3680 ret = expand_builtin_strcpy_args (dst, src, target);
3682 if (ret)
3684 if (! target)
3686 if (mode != VOIDmode)
3687 target = gen_reg_rtx (mode);
3688 else
3689 target = gen_reg_rtx (GET_MODE (ret));
3691 if (GET_MODE (target) != GET_MODE (ret))
3692 ret = gen_lowpart (GET_MODE (target), ret);
3694 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3695 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3696 gcc_assert (ret);
3698 return target;
3703 return expand_movstr (dst, src, target, /*endp=*/2);
3707 /* Check a call EXP to the stpncpy built-in for validity.
3708 Return NULL_RTX on both success and failure. */
3710 static rtx
3711 expand_builtin_stpncpy (tree exp, rtx)
3713 if (!validate_arglist (exp,
3714 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3715 || !warn_stringop_overflow)
3716 return NULL_RTX;
3718 tree dest = CALL_EXPR_ARG (exp, 0);
3719 tree src = CALL_EXPR_ARG (exp, 1);
3721 /* The number of bytes to write (not the maximum). */
3722 tree len = CALL_EXPR_ARG (exp, 2);
3723 /* The length of the source sequence. */
3724 tree slen = c_strlen (src, 1);
3726 /* Try to determine the range of lengths that the source expression
3727 refers to. */
3728 tree lenrange[2];
3729 if (slen)
3730 lenrange[0] = lenrange[1] = slen;
3731 else
3733 get_range_strlen (src, lenrange);
3734 slen = lenrange[0];
3737 tree destsize = compute_dest_size (dest,
3738 warn_stringop_overflow - 1);
3740 /* The number of bytes to write is LEN but check_sizes will also
3741 check SLEN if LEN's value isn't known. */
3742 check_sizes (OPT_Wstringop_overflow_,
3743 exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3745 return NULL_RTX;
3748 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3749 bytes from constant string DATA + OFFSET and return it as target
3750 constant. */
3752 rtx
3753 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754 machine_mode mode)
3756 const char *str = (const char *) data;
3758 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3759 return const0_rtx;
3761 return c_readstr (str + offset, mode);
3764 /* Helper to check the sizes of sequences and the destination of calls
3765 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3766 success (no overflow or invalid sizes), false otherwise. */
3768 static bool
3769 check_strncat_sizes (tree exp, tree objsize)
3771 tree dest = CALL_EXPR_ARG (exp, 0);
3772 tree src = CALL_EXPR_ARG (exp, 1);
3773 tree maxlen = CALL_EXPR_ARG (exp, 2);
3775 /* Try to determine the range of lengths that the source expression
3776 refers to. */
3777 tree lenrange[2];
3778 get_range_strlen (src, lenrange);
3780 /* Try to verify that the destination is big enough for the shortest
3781 string. */
3783 if (!objsize && warn_stringop_overflow)
3785 /* If it hasn't been provided by __strncat_chk, try to determine
3786 the size of the destination object into which the source is
3787 being copied. */
3788 objsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3791 /* Add one for the terminating nul. */
3792 tree srclen = (lenrange[0]
3793 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3794 size_one_node)
3795 : NULL_TREE);
3797 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3798 nul so the specified upper bound should never be equal to (or greater
3799 than) the size of the destination. */
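/* For example (illustrative only), given

     char d[8];
     strncat (d, s, sizeof d);

   the bound equals the size of D and leaves no room for the
   terminating nul, so the check below diagnoses the call.  */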
3800 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3801 && tree_int_cst_equal (objsize, maxlen))
3803 location_t loc = tree_nonartificial_location (exp);
3804 loc = expansion_point_location_if_in_system_header (loc);
3806 warning_at (loc, OPT_Wstringop_overflow_,
3807 "%K%qD: specified bound %wu "
3808 "equals the size of the destination",
3809 exp, get_callee_fndecl (exp),
3810 tree_to_uhwi (maxlen));
3812 return false;
3815 if (!srclen
3816 || (maxlen && tree_fits_uhwi_p (maxlen)
3817 && tree_fits_uhwi_p (srclen)
3818 && tree_int_cst_lt (maxlen, srclen)))
3819 srclen = maxlen;
3821 /* The upper bound on the number of bytes to write is MAXLEN, but
3822 check_sizes will also check SRCLEN if MAXLEN's value isn't known. */
3823 return check_sizes (OPT_Wstringop_overflow_,
3824 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3827 /* Similar to expand_builtin_strcat, do some very basic size validation
3828 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3829 the built-in expand to a call to the library function. */
3831 static rtx
3832 expand_builtin_strncat (tree exp, rtx)
3834 if (!validate_arglist (exp,
3835 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3836 || !warn_stringop_overflow)
3837 return NULL_RTX;
3839 tree dest = CALL_EXPR_ARG (exp, 0);
3840 tree src = CALL_EXPR_ARG (exp, 1);
3841 /* The upper bound on the number of bytes to write. */
3842 tree maxlen = CALL_EXPR_ARG (exp, 2);
3843 /* The length of the source sequence. */
3844 tree slen = c_strlen (src, 1);
3846 /* Try to determine the range of lengths that the source expression
3847 refers to. */
3848 tree lenrange[2];
3849 if (slen)
3850 lenrange[0] = lenrange[1] = slen;
3851 else
3852 get_range_strlen (src, lenrange);
3854 /* Try to verify that the destination is big enough for the shortest
3855 string. First try to determine the size of the destination object
3856 into which the source is being copied. */
3857 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3859 /* Add one for the terminating nul. */
3860 tree srclen = (lenrange[0]
3861 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3862 size_one_node)
3863 : NULL_TREE);
3865 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3866 nul so the specified upper bound should never be equal to (or greater
3867 than) the size of the destination. */
3868 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3869 && tree_int_cst_equal (destsize, maxlen))
3871 location_t loc = tree_nonartificial_location (exp);
3872 loc = expansion_point_location_if_in_system_header (loc);
3874 warning_at (loc, OPT_Wstringop_overflow_,
3875 "%K%qD: specified bound %wu "
3876 "equals the size of the destination",
3877 exp, get_callee_fndecl (exp),
3878 tree_to_uhwi (maxlen));
3880 return NULL_RTX;
3883 if (!srclen
3884 || (maxlen && tree_fits_uhwi_p (maxlen)
3885 && tree_fits_uhwi_p (srclen)
3886 && tree_int_cst_lt (maxlen, srclen)))
3887 srclen = maxlen;
3889 /* The upper bound on the number of bytes to write is MAXLEN, but
3890 check_sizes will also check SRCLEN if MAXLEN's value isn't known. */
3891 check_sizes (OPT_Wstringop_overflow_,
3892 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3894 return NULL_RTX;
3897 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3898 NULL_RTX if we failed; the caller should emit a normal call. */
3900 static rtx
3901 expand_builtin_strncpy (tree exp, rtx target)
3903 location_t loc = EXPR_LOCATION (exp);
3905 if (validate_arglist (exp,
3906 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3908 tree dest = CALL_EXPR_ARG (exp, 0);
3909 tree src = CALL_EXPR_ARG (exp, 1);
3910 /* The number of bytes to write (not the maximum). */
3911 tree len = CALL_EXPR_ARG (exp, 2);
3912 /* The length of the source sequence. */
3913 tree slen = c_strlen (src, 1);
3915 if (warn_stringop_overflow)
3917 /* Try to determine the range of lengths that the source expression
3918 refers to. */
3919 tree lenrange[2];
3920 if (slen)
3921 lenrange[0] = lenrange[1] = slen;
3922 else
3924 get_range_strlen (src, lenrange);
3925 slen = lenrange[0];
3928 tree destsize = compute_dest_size (dest,
3929 warn_stringop_overflow - 1);
3931 /* The number of bytes to write is LEN but check_sizes will also
3932 check SLEN if LEN's value isn't known. */
3933 check_sizes (OPT_Wstringop_overflow_,
3934 exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3937 /* We must be passed a constant len and src parameter. */
3938 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3939 return NULL_RTX;
3941 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3943 /* We're required to pad with trailing zeros if the requested
3944 len is greater than strlen(s2)+1. In that case try to
3945 use store_by_pieces; if it fails, punt. */
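/* For example (illustrative only), strncpy (d, "ab", 5) must store
   'a', 'b' and three trailing nul bytes, so the copy is expanded
   inline only if store_by_pieces can emit all five bytes.  */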
3946 if (tree_int_cst_lt (slen, len))
3948 unsigned int dest_align = get_pointer_alignment (dest);
3949 const char *p = c_getstr (src);
3950 rtx dest_mem;
3952 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3953 || !can_store_by_pieces (tree_to_uhwi (len),
3954 builtin_strncpy_read_str,
3955 CONST_CAST (char *, p),
3956 dest_align, false))
3957 return NULL_RTX;
3959 dest_mem = get_memory_rtx (dest, len);
3960 store_by_pieces (dest_mem, tree_to_uhwi (len),
3961 builtin_strncpy_read_str,
3962 CONST_CAST (char *, p), dest_align, false, 0);
3963 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3964 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3965 return dest_mem;
3968 return NULL_RTX;
3971 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3972 bytes from constant string DATA + OFFSET and return it as target
3973 constant. */
3975 rtx
3976 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3977 machine_mode mode)
3979 const char *c = (const char *) data;
3980 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3982 memset (p, *c, GET_MODE_SIZE (mode));
3984 return c_readstr (p, mode);
3987 /* Callback routine for store_by_pieces. Return the RTL of a register
3988 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3989 char value given in the RTL register data. For example, if mode is
3990 4 bytes wide, return the RTL for 0x01010101*data. */
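/* Worked example (illustrative): for a four-byte MODE and DATA
   holding the byte value 0xab, COEFF below reads back as 0x01010101
   and the returned register holds 0x01010101 * 0xab == 0xabababab.  */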
3992 static rtx
3993 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3994 machine_mode mode)
3996 rtx target, coeff;
3997 size_t size;
3998 char *p;
4000 size = GET_MODE_SIZE (mode);
4001 if (size == 1)
4002 return (rtx) data;
4004 p = XALLOCAVEC (char, size);
4005 memset (p, 1, size);
4006 coeff = c_readstr (p, mode);
4008 target = convert_to_mode (mode, (rtx) data, 1);
4009 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4010 return force_reg (mode, target);
4013 /* Expand expression EXP, which is a call to the memset builtin. Return
4014 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4015 try to get the result in TARGET, if convenient (and in mode MODE if that's
4016 convenient). */
4018 static rtx
4019 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4021 if (!validate_arglist (exp,
4022 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4023 return NULL_RTX;
4025 tree dest = CALL_EXPR_ARG (exp, 0);
4026 tree val = CALL_EXPR_ARG (exp, 1);
4027 tree len = CALL_EXPR_ARG (exp, 2);
4029 check_memop_sizes (exp, dest, len);
4031 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4034 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4035 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4036 try to get the result in TARGET, if convenient (and in mode MODE if that's
4037 convenient). */
4039 static rtx
4040 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4042 if (!validate_arglist (exp,
4043 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4044 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4045 return NULL_RTX;
4046 else
4048 tree dest = CALL_EXPR_ARG (exp, 0);
4049 tree val = CALL_EXPR_ARG (exp, 2);
4050 tree len = CALL_EXPR_ARG (exp, 3);
4051 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4053 /* Return src bounds with the result. */
4054 if (res)
4056 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4057 expand_normal (CALL_EXPR_ARG (exp, 1)));
4058 res = chkp_join_splitted_slot (res, bnd);
4060 return res;
4064 /* Helper function to do the actual work for expand_builtin_memset. The
4065 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4066 so that this can also be called without constructing an actual CALL_EXPR.
4067 The other arguments and return value are the same as for
4068 expand_builtin_memset. */
4070 static rtx
4071 expand_builtin_memset_args (tree dest, tree val, tree len,
4072 rtx target, machine_mode mode, tree orig_exp)
4074 tree fndecl, fn;
4075 enum built_in_function fcode;
4076 machine_mode val_mode;
4077 char c;
4078 unsigned int dest_align;
4079 rtx dest_mem, dest_addr, len_rtx;
4080 HOST_WIDE_INT expected_size = -1;
4081 unsigned int expected_align = 0;
4082 unsigned HOST_WIDE_INT min_size;
4083 unsigned HOST_WIDE_INT max_size;
4084 unsigned HOST_WIDE_INT probable_max_size;
4086 dest_align = get_pointer_alignment (dest);
4088 /* If DEST is not a pointer type, don't do this operation in-line. */
4089 if (dest_align == 0)
4090 return NULL_RTX;
4092 if (currently_expanding_gimple_stmt)
4093 stringop_block_profile (currently_expanding_gimple_stmt,
4094 &expected_align, &expected_size);
4096 if (expected_align < dest_align)
4097 expected_align = dest_align;
4099 /* If the LEN parameter is zero, return DEST. */
4100 if (integer_zerop (len))
4102 /* Evaluate and ignore VAL in case it has side-effects. */
4103 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4104 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4107 /* Stabilize the arguments in case we fail. */
4108 dest = builtin_save_expr (dest);
4109 val = builtin_save_expr (val);
4110 len = builtin_save_expr (len);
4112 len_rtx = expand_normal (len);
4113 determine_block_size (len, len_rtx, &min_size, &max_size,
4114 &probable_max_size);
4115 dest_mem = get_memory_rtx (dest, len);
4116 val_mode = TYPE_MODE (unsigned_char_type_node);
4118 if (TREE_CODE (val) != INTEGER_CST)
4120 rtx val_rtx;
4122 val_rtx = expand_normal (val);
4123 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4125 /* Assume that we can memset by pieces if we can store
4126 the coefficients by pieces (in the required modes).
4127 We can't pass builtin_memset_gen_str as that emits RTL. */
4128 c = 1;
4129 if (tree_fits_uhwi_p (len)
4130 && can_store_by_pieces (tree_to_uhwi (len),
4131 builtin_memset_read_str, &c, dest_align,
4132 true))
4134 val_rtx = force_reg (val_mode, val_rtx);
4135 store_by_pieces (dest_mem, tree_to_uhwi (len),
4136 builtin_memset_gen_str, val_rtx, dest_align,
4137 true, 0);
4139 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4140 dest_align, expected_align,
4141 expected_size, min_size, max_size,
4142 probable_max_size))
4143 goto do_libcall;
4145 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4146 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4147 return dest_mem;
4150 if (target_char_cast (val, &c))
4151 goto do_libcall;
4153 if (c)
4155 if (tree_fits_uhwi_p (len)
4156 && can_store_by_pieces (tree_to_uhwi (len),
4157 builtin_memset_read_str, &c, dest_align,
4158 true))
4159 store_by_pieces (dest_mem, tree_to_uhwi (len),
4160 builtin_memset_read_str, &c, dest_align, true, 0);
4161 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4162 gen_int_mode (c, val_mode),
4163 dest_align, expected_align,
4164 expected_size, min_size, max_size,
4165 probable_max_size))
4166 goto do_libcall;
4168 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4169 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4170 return dest_mem;
4173 set_mem_align (dest_mem, dest_align);
4174 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4175 CALL_EXPR_TAILCALL (orig_exp)
4176 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4177 expected_align, expected_size,
4178 min_size, max_size,
4179 probable_max_size);
4181 if (dest_addr == 0)
4183 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4184 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4187 return dest_addr;
4189 do_libcall:
4190 fndecl = get_callee_fndecl (orig_exp);
4191 fcode = DECL_FUNCTION_CODE (fndecl);
4192 if (fcode == BUILT_IN_MEMSET
4193 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4194 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4195 dest, val, len);
4196 else if (fcode == BUILT_IN_BZERO)
4197 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4198 dest, len);
4199 else
4200 gcc_unreachable ();
4201 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4202 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4203 return expand_call (fn, target, target == const0_rtx);
4206 /* Expand expression EXP, which is a call to the bzero builtin. Return
4207 NULL_RTX if we failed; the caller should emit a normal call. */
4209 static rtx
4210 expand_builtin_bzero (tree exp)
4212 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4213 return NULL_RTX;
4215 tree dest = CALL_EXPR_ARG (exp, 0);
4216 tree size = CALL_EXPR_ARG (exp, 1);
4218 check_memop_sizes (exp, dest, size);
4220 /* New argument list transforming bzero(ptr x, int y) to
4221 memset(ptr x, int 0, size_t y). This is done this way
4222 so that if it isn't expanded inline, we fall back to
4223 calling bzero instead of memset. */
4225 location_t loc = EXPR_LOCATION (exp);
4227 return expand_builtin_memset_args (dest, integer_zero_node,
4228 fold_convert_loc (loc,
4229 size_type_node, size),
4230 const0_rtx, VOIDmode, exp);
4233 /* Try to expand cmpstr operation ICODE with the given operands.
4234 Return the result rtx on success, otherwise return null. */
4236 static rtx
4237 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4238 HOST_WIDE_INT align)
4240 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4242 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4243 target = NULL_RTX;
4245 struct expand_operand ops[4];
4246 create_output_operand (&ops[0], target, insn_mode);
4247 create_fixed_operand (&ops[1], arg1_rtx);
4248 create_fixed_operand (&ops[2], arg2_rtx);
4249 create_integer_operand (&ops[3], align);
4250 if (maybe_expand_insn (icode, 4, ops))
4251 return ops[0].value;
4252 return NULL_RTX;
4255 /* Expand expression EXP, which is a call to the memcmp built-in function.
4256 Return NULL_RTX if we failed and the caller should emit a normal call,
4257 otherwise try to get the result in TARGET, if convenient.
4258 RESULT_EQ is true if we can relax the returned value to be either zero
4259 or nonzero, without caring about the sign. */
4261 static rtx
4262 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4264 if (!validate_arglist (exp,
4265 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4266 return NULL_RTX;
4268 tree arg1 = CALL_EXPR_ARG (exp, 0);
4269 tree arg2 = CALL_EXPR_ARG (exp, 1);
4270 tree len = CALL_EXPR_ARG (exp, 2);
4271 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4272 location_t loc = EXPR_LOCATION (exp);
4274 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4275 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4277 /* If we don't have POINTER_TYPE, call the function. */
4278 if (arg1_align == 0 || arg2_align == 0)
4279 return NULL_RTX;
4281 rtx arg1_rtx = get_memory_rtx (arg1, len);
4282 rtx arg2_rtx = get_memory_rtx (arg2, len);
4283 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4285 /* Set MEM_SIZE as appropriate. */
4286 if (CONST_INT_P (len_rtx))
4288 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4289 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4292 by_pieces_constfn constfn = NULL;
4294 const char *src_str = c_getstr (arg2);
4295 if (result_eq && src_str == NULL)
4297 src_str = c_getstr (arg1);
4298 if (src_str != NULL)
4299 std::swap (arg1_rtx, arg2_rtx);
4302 /* If SRC is a string constant and the block comparison would be done
4303 by pieces, we can avoid loading the string from memory
4304 and compare against the computed constants instead. */
4305 if (src_str
4306 && CONST_INT_P (len_rtx)
4307 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4308 constfn = builtin_memcpy_read_str;
4310 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4311 TREE_TYPE (len), target,
4312 result_eq, constfn,
4313 CONST_CAST (char *, src_str));
4315 if (result)
4317 /* Return the value in the proper mode for this function. */
4318 if (GET_MODE (result) == mode)
4319 return result;
4321 if (target != 0)
4323 convert_move (target, result, 0);
4324 return target;
4327 return convert_to_mode (mode, result, 0);
4330 return NULL_RTX;
4333 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4334 if we failed; the caller should emit a normal call, otherwise try to get
4335 the result in TARGET, if convenient. */
4337 static rtx
4338 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4340 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4341 return NULL_RTX;
4343 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4344 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4345 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4347 rtx arg1_rtx, arg2_rtx;
4348 tree fndecl, fn;
4349 tree arg1 = CALL_EXPR_ARG (exp, 0);
4350 tree arg2 = CALL_EXPR_ARG (exp, 1);
4351 rtx result = NULL_RTX;
4353 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4354 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4356 /* If we don't have POINTER_TYPE, call the function. */
4357 if (arg1_align == 0 || arg2_align == 0)
4358 return NULL_RTX;
4360 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4361 arg1 = builtin_save_expr (arg1);
4362 arg2 = builtin_save_expr (arg2);
4364 arg1_rtx = get_memory_rtx (arg1, NULL);
4365 arg2_rtx = get_memory_rtx (arg2, NULL);
4367 /* Try to call cmpstrsi. */
4368 if (cmpstr_icode != CODE_FOR_nothing)
4369 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4370 MIN (arg1_align, arg2_align));
4372 /* Try to determine at least one length and call cmpstrnsi. */
4373 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4375 tree len;
4376 rtx arg3_rtx;
4378 tree len1 = c_strlen (arg1, 1);
4379 tree len2 = c_strlen (arg2, 1);
4381 if (len1)
4382 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4383 if (len2)
4384 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4386 /* If we don't have a constant length for the first, use the length
4387 of the second, if we know it. We don't require a constant for
4388 this case; some cost analysis could be done if both are available
4389 but neither is constant. For now, assume they're equally cheap,
4390 unless one has side effects. If both strings have constant lengths,
4391 use the smaller. */
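/* For example (illustrative only), in strcmp (s, "hi") only the
   second length is known, len2 == strlen ("hi") + 1 == 3, so the
   cmpstrn comparison runs over at most three bytes.  */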
4393 if (!len1)
4394 len = len2;
4395 else if (!len2)
4396 len = len1;
4397 else if (TREE_SIDE_EFFECTS (len1))
4398 len = len2;
4399 else if (TREE_SIDE_EFFECTS (len2))
4400 len = len1;
4401 else if (TREE_CODE (len1) != INTEGER_CST)
4402 len = len2;
4403 else if (TREE_CODE (len2) != INTEGER_CST)
4404 len = len1;
4405 else if (tree_int_cst_lt (len1, len2))
4406 len = len1;
4407 else
4408 len = len2;
4410 /* If both arguments have side effects, we cannot optimize. */
4411 if (len && !TREE_SIDE_EFFECTS (len))
4413 arg3_rtx = expand_normal (len);
4414 result = expand_cmpstrn_or_cmpmem
4415 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4416 arg3_rtx, MIN (arg1_align, arg2_align));
4420 if (result)
4422 /* Return the value in the proper mode for this function. */
4423 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4424 if (GET_MODE (result) == mode)
4425 return result;
4426 if (target == 0)
4427 return convert_to_mode (mode, result, 0);
4428 convert_move (target, result, 0);
4429 return target;
4432 /* Expand the library call ourselves using a stabilized argument
4433 list to avoid re-evaluating the function's arguments twice. */
4434 fndecl = get_callee_fndecl (exp);
4435 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4436 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4437 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4438 return expand_call (fn, target, target == const0_rtx);
4440 return NULL_RTX;
4443 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4444 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4445 the result in TARGET, if convenient. */
4447 static rtx
4448 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4449 ATTRIBUTE_UNUSED machine_mode mode)
4451 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4453 if (!validate_arglist (exp,
4454 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4455 return NULL_RTX;
4457 /* If c_strlen can determine an expression for one of the string
4458 lengths, and it doesn't have side effects, then emit cmpstrnsi
4459 using length MIN(strlen(string)+1, arg3). */
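/* For example (illustrative only), for strncmp (s, "abc", n) the
   constant argument gives len2 == strlen ("abc") + 1 == 4, so the
   comparison below uses MIN (4, n); bytes past the terminating nul
   cannot change the result.  */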
4460 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4461 if (cmpstrn_icode != CODE_FOR_nothing)
4463 tree len, len1, len2, len3;
4464 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4465 rtx result;
4466 tree fndecl, fn;
4467 tree arg1 = CALL_EXPR_ARG (exp, 0);
4468 tree arg2 = CALL_EXPR_ARG (exp, 1);
4469 tree arg3 = CALL_EXPR_ARG (exp, 2);
4471 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4472 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4474 len1 = c_strlen (arg1, 1);
4475 len2 = c_strlen (arg2, 1);
4477 if (len1)
4478 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4479 if (len2)
4480 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4482 len3 = fold_convert_loc (loc, sizetype, arg3);
4484 /* If we don't have a constant length for the first, use the length
4485 of the second, if we know it. If neither string is constant length,
4486 use the given length argument. We don't require a constant for
4487 this case; some cost analysis could be done if both are available
4488 but neither is constant. For now, assume they're equally cheap,
4489 unless one has side effects. If both strings have constant lengths,
4490 use the smaller. */
4492 if (!len1 && !len2)
4493 len = len3;
4494 else if (!len1)
4495 len = len2;
4496 else if (!len2)
4497 len = len1;
4498 else if (TREE_SIDE_EFFECTS (len1))
4499 len = len2;
4500 else if (TREE_SIDE_EFFECTS (len2))
4501 len = len1;
4502 else if (TREE_CODE (len1) != INTEGER_CST)
4503 len = len2;
4504 else if (TREE_CODE (len2) != INTEGER_CST)
4505 len = len1;
4506 else if (tree_int_cst_lt (len1, len2))
4507 len = len1;
4508 else
4509 len = len2;
4511 /* If we are not using the given length, we must incorporate it here.
4512 The actual new length parameter will be MIN(len,arg3) in this case. */
4513 if (len != len3)
4514 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4515 arg1_rtx = get_memory_rtx (arg1, len);
4516 arg2_rtx = get_memory_rtx (arg2, len);
4517 arg3_rtx = expand_normal (len);
4518 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4519 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4520 MIN (arg1_align, arg2_align));
4521 if (result)
4523 /* Return the value in the proper mode for this function. */
4524 mode = TYPE_MODE (TREE_TYPE (exp));
4525 if (GET_MODE (result) == mode)
4526 return result;
4527 if (target == 0)
4528 return convert_to_mode (mode, result, 0);
4529 convert_move (target, result, 0);
4530 return target;
4533 /* Expand the library call ourselves using a stabilized argument
4534 list to avoid re-evaluating the function's arguments twice. */
4535 fndecl = get_callee_fndecl (exp);
4536 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4537 arg1, arg2, len);
4538 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4539 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4540 return expand_call (fn, target, target == const0_rtx);
4542 return NULL_RTX;
4545 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4546 if that's convenient. */
4548 rtx
4549 expand_builtin_saveregs (void)
4551 rtx val;
4552 rtx_insn *seq;
4554 /* Don't do __builtin_saveregs more than once in a function.
4555 Save the result of the first call and reuse it. */
4556 if (saveregs_value != 0)
4557 return saveregs_value;
4559 /* When this function is called, it means that registers must be
4560 saved on entry to this function. So we migrate the call to the
4561 first insn of this function. */
4563 start_sequence ();
4565 /* Do whatever the machine needs done in this case. */
4566 val = targetm.calls.expand_builtin_saveregs ();
4568 seq = get_insns ();
4569 end_sequence ();
4571 saveregs_value = val;
4573 /* Put the insns after the NOTE that starts the function. If this
4574 is inside a start_sequence, make the outer-level insn chain current, so
4575 the code is placed at the start of the function. */
4576 push_topmost_sequence ();
4577 emit_insn_after (seq, entry_of_function ());
4578 pop_topmost_sequence ();
4580 return val;
4583 /* Expand a call to __builtin_next_arg. */
4585 static rtx
4586 expand_builtin_next_arg (void)
4588 /* Checking arguments is already done in fold_builtin_next_arg
4589 that must be called before this function. */
4590 return expand_binop (ptr_mode, add_optab,
4591 crtl->args.internal_arg_pointer,
4592 crtl->args.arg_offset_rtx,
4593 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4596 /* Make it easier for the backends by protecting the valist argument
4597 from multiple evaluations. */
4599 static tree
4600 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4602 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4604 /* The current way of determining the type of valist is completely
4605 bogus. We should have the information on the va builtin instead. */
4606 if (!vatype)
4607 vatype = targetm.fn_abi_va_list (cfun->decl);
4609 if (TREE_CODE (vatype) == ARRAY_TYPE)
4611 if (TREE_SIDE_EFFECTS (valist))
4612 valist = save_expr (valist);
4614 /* For this case, the backends will be expecting a pointer to
4615 vatype, but it's possible we've actually been given an array
4616 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4617 So fix it. */
4618 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4620 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4621 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4624 else
4626 tree pt = build_pointer_type (vatype);
4628 if (! needs_lvalue)
4630 if (! TREE_SIDE_EFFECTS (valist))
4631 return valist;
4633 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4634 TREE_SIDE_EFFECTS (valist) = 1;
4637 if (TREE_SIDE_EFFECTS (valist))
4638 valist = save_expr (valist);
4639 valist = fold_build2_loc (loc, MEM_REF,
4640 vatype, valist, build_int_cst (pt, 0));
4643 return valist;
4646 /* The "standard" definition of va_list is void*. */
4648 tree
4649 std_build_builtin_va_list (void)
4651 return ptr_type_node;
4654 /* The "standard" abi va_list is va_list_type_node. */
4656 tree
4657 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4659 return va_list_type_node;
4662 /* The "standard" type of va_list is va_list_type_node. */
4664 tree
4665 std_canonical_va_list_type (tree type)
4667 tree wtype, htype;
4669 wtype = va_list_type_node;
4670 htype = type;
4672 if (TREE_CODE (wtype) == ARRAY_TYPE)
4674 /* If va_list is an array type, the argument may have decayed
4675 to a pointer type, e.g. by being passed to another function.
4676 In that case, unwrap both types so that we can compare the
4677 underlying records. */
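/* For example (illustrative only), on x86_64 va_list is the array
   type __va_list_tag[1]; a va_list passed on to another function
   decays to __va_list_tag *, so both WTYPE and HTYPE are unwrapped
   to __va_list_tag before the comparison below.  */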
4678 if (TREE_CODE (htype) == ARRAY_TYPE
4679 || POINTER_TYPE_P (htype))
4681 wtype = TREE_TYPE (wtype);
4682 htype = TREE_TYPE (htype);
4685 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4686 return va_list_type_node;
4688 return NULL_TREE;
4691 /* The "standard" implementation of va_start: just assign `nextarg' to
4692 the variable. */
4694 void
4695 std_expand_builtin_va_start (tree valist, rtx nextarg)
4697 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4698 convert_move (va_r, nextarg, 0);
4700 /* We do not have any valid bounds for the pointer, so
4701 just store zero bounds for it. */
4702 if (chkp_function_instrumented_p (current_function_decl))
4703 chkp_expand_bounds_reset_for_mem (valist,
4704 make_tree (TREE_TYPE (valist),
4705 nextarg));
4708 /* Expand EXP, a call to __builtin_va_start. */
4710 static rtx
4711 expand_builtin_va_start (tree exp)
4713 rtx nextarg;
4714 tree valist;
4715 location_t loc = EXPR_LOCATION (exp);
4717 if (call_expr_nargs (exp) < 2)
4719 error_at (loc, "too few arguments to function %<va_start%>");
4720 return const0_rtx;
4723 if (fold_builtin_next_arg (exp, true))
4724 return const0_rtx;
4726 nextarg = expand_builtin_next_arg ();
4727 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4729 if (targetm.expand_builtin_va_start)
4730 targetm.expand_builtin_va_start (valist, nextarg);
4731 else
4732 std_expand_builtin_va_start (valist, nextarg);
4734 return const0_rtx;
4737 /* Expand EXP, a call to __builtin_va_end. */
4739 static rtx
4740 expand_builtin_va_end (tree exp)
4742 tree valist = CALL_EXPR_ARG (exp, 0);
4744 /* Evaluate for side effects, if needed. I hate macros that don't
4745 do that. */
4746 if (TREE_SIDE_EFFECTS (valist))
4747 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4749 return const0_rtx;
4752 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4753 builtin rather than just as an assignment in stdarg.h because of the
4754 nastiness of array-type va_list types. */
4756 static rtx
4757 expand_builtin_va_copy (tree exp)
4759 tree dst, src, t;
4760 location_t loc = EXPR_LOCATION (exp);
4762 dst = CALL_EXPR_ARG (exp, 0);
4763 src = CALL_EXPR_ARG (exp, 1);
4765 dst = stabilize_va_list_loc (loc, dst, 1);
4766 src = stabilize_va_list_loc (loc, src, 0);
4768 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4770 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4772 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4773 TREE_SIDE_EFFECTS (t) = 1;
4774 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4776 else
4778 rtx dstb, srcb, size;
4780 /* Evaluate to pointers. */
4781 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4782 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4783 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4784 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4786 dstb = convert_memory_address (Pmode, dstb);
4787 srcb = convert_memory_address (Pmode, srcb);
4789 /* "Dereference" to BLKmode memories. */
4790 dstb = gen_rtx_MEM (BLKmode, dstb);
4791 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4792 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4793 srcb = gen_rtx_MEM (BLKmode, srcb);
4794 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4795 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4797 /* Copy. */
4798 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4801 return const0_rtx;
4804 /* Expand a call to one of the builtin functions __builtin_frame_address or
4805 __builtin_return_address. */
4807 static rtx
4808 expand_builtin_frame_address (tree fndecl, tree exp)
4810 /* The argument must be a nonnegative integer constant.
4811 It counts the number of frames to scan up the stack.
4812 The value is either the frame pointer value or the return
4813 address saved in that frame. */
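/* For example (illustrative only), __builtin_frame_address (0)
   yields the current frame pointer and __builtin_return_address (0)
   the current function's return address; any nonzero COUNT draws the
   -Wframe-address warning below, since frames beyond the current one
   may not be safely reachable.  */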
4814 if (call_expr_nargs (exp) == 0)
4815 /* Warning about missing arg was already issued. */
4816 return const0_rtx;
4817 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4819 error ("invalid argument to %qD", fndecl);
4820 return const0_rtx;
4822 else
4824 /* Number of frames to scan up the stack. */
4825 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4827 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4829 /* Some ports cannot access arbitrary stack frames. */
4830 if (tem == NULL)
4832 warning (0, "unsupported argument to %qD", fndecl);
4833 return const0_rtx;
4836 if (count)
4838 /* Warn since no effort is made to ensure that any frame
4839 beyond the current one exists or can be safely reached. */
4840 warning (OPT_Wframe_address, "calling %qD with "
4841 "a nonzero argument is unsafe", fndecl);
4844 /* For __builtin_frame_address, return what we've got. */
4845 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4846 return tem;
4848 if (!REG_P (tem)
4849 && ! CONSTANT_P (tem))
4850 tem = copy_addr_to_reg (tem);
4851 return tem;
4855 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4856 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4857 is the same as for allocate_dynamic_stack_space. */
4859 static rtx
4860 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4862 rtx op0;
4863 rtx result;
4864 unsigned int align;
4865 tree fndecl = get_callee_fndecl (exp);
4866 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4867 == BUILT_IN_ALLOCA_WITH_ALIGN);
4869 bool valid_arglist
4870 = (alloca_with_align
4871 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4872 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4874 if (!valid_arglist)
4875 return NULL_RTX;
4877 if ((alloca_with_align && !warn_vla_limit)
4878 || (!alloca_with_align && !warn_alloca_limit))
4880 /* -Walloca-larger-than and -Wvla-larger-than settings override
4881 the more general -Walloc-size-larger-than so unless either of
4882 the former options is specified check the alloca arguments for
4883 overflow. */
4884 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4885 int idx[] = { 0, -1 };
4886 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4889 /* Compute the argument. */
4890 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4892 /* Compute the alignment. */
4893 align = (alloca_with_align
4894 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4895 : BIGGEST_ALIGNMENT);
4897 /* Allocate the desired space. */
4898 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4899 result = convert_memory_address (ptr_mode, result);
4901 return result;
4904 /* Expand a call to bswap builtin in EXP.
4905 Return NULL_RTX if a normal call should be emitted rather than expanding the
4906 function in-line. If convenient, the result should be placed in TARGET.
4907 SUBTARGET may be used as the target for computing one of EXP's operands. */
4909 static rtx
4910 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4911 rtx subtarget)
4913 tree arg;
4914 rtx op0;
4916 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4917 return NULL_RTX;
4919 arg = CALL_EXPR_ARG (exp, 0);
4920 op0 = expand_expr (arg,
4921 subtarget && GET_MODE (subtarget) == target_mode
4922 ? subtarget : NULL_RTX,
4923 target_mode, EXPAND_NORMAL);
4924 if (GET_MODE (op0) != target_mode)
4925 op0 = convert_to_mode (target_mode, op0, 1);
4927 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4929 gcc_assert (target);
4931 return convert_to_mode (target_mode, target, 1);
4934 /* Expand a call to a unary builtin in EXP.
4935 Return NULL_RTX if a normal call should be emitted rather than expanding the
4936 function in-line. If convenient, the result should be placed in TARGET.
4937 SUBTARGET may be used as the target for computing one of EXP's operands. */
4939 static rtx
4940 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4941 rtx subtarget, optab op_optab)
4943 rtx op0;
4945 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4946 return NULL_RTX;
4948 /* Compute the argument. */
4949 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4950 (subtarget
4951 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4952 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4953 VOIDmode, EXPAND_NORMAL);
4954 /* Compute op, into TARGET if possible.
4955 Set TARGET to wherever the result comes back. */
4956 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4957 op_optab, op0, target, op_optab != clrsb_optab);
4958 gcc_assert (target);
4960 return convert_to_mode (target_mode, target, 0);
4963 /* Expand a call to __builtin_expect. We just return our argument
4964 as the builtin_expect semantic should've been already executed by
4965 tree branch prediction pass. */
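/* Typical use (illustrative only; handle_rare_case is hypothetical):

     if (__builtin_expect (p == NULL, 0))
       handle_rare_case ();

   The probability hint was consumed by earlier passes; here the call
   simply evaluates to its first argument.  */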
4967 static rtx
4968 expand_builtin_expect (tree exp, rtx target)
4970 tree arg;
4972 if (call_expr_nargs (exp) < 2)
4973 return const0_rtx;
4974 arg = CALL_EXPR_ARG (exp, 0);
4976 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4977 /* When guessing was done, the hints should be already stripped away. */
4978 gcc_assert (!flag_guess_branch_prob
4979 || optimize == 0 || seen_error ());
4980 return target;
4983 /* Expand a call to __builtin_assume_aligned. We just return our first
4984 argument as the builtin_assume_aligned semantic should've been already
4985 executed by CCP. */
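/* Typical use (illustrative only):

     q = __builtin_assume_aligned (p, 16);

   CCP has already recorded the 16-byte alignment of Q; here the call
   merely evaluates to P, and the asserts below check that the
   remaining arguments are free of side effects.  */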
4987 static rtx
4988 expand_builtin_assume_aligned (tree exp, rtx target)
4990 if (call_expr_nargs (exp) < 2)
4991 return const0_rtx;
4992 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4993 EXPAND_NORMAL);
4994 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4995 && (call_expr_nargs (exp) < 3
4996 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4997 return target;
5000 void
5001 expand_builtin_trap (void)
5003 if (targetm.have_trap ())
5005 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5006 /* For trap insns when not accumulating outgoing args force
5007 REG_ARGS_SIZE note to prevent crossjumping of calls with
5008 different args sizes. */
5009 if (!ACCUMULATE_OUTGOING_ARGS)
5010 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5012 else
5014 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5015 tree call_expr = build_call_expr (fn, 0);
5016 expand_call (call_expr, NULL_RTX, false);
5019 emit_barrier ();
5022 /* Expand a call to __builtin_unreachable. We do nothing except emit
5023 a barrier saying that control flow will not pass here.
5025 It is the responsibility of the program being compiled to ensure
5026 that control flow never reaches __builtin_unreachable. */
5027 static void
5028 expand_builtin_unreachable (void)
5030 emit_barrier ();
5033 /* Expand EXP, a call to fabs, fabsf or fabsl.
5034 Return NULL_RTX if a normal call should be emitted rather than expanding
5035 the function inline. If convenient, the result should be placed
5036 in TARGET. SUBTARGET may be used as the target for computing
5037 the operand. */
5039 static rtx
5040 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5042 machine_mode mode;
5043 tree arg;
5044 rtx op0;
5046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5047 return NULL_RTX;
5049 arg = CALL_EXPR_ARG (exp, 0);
5050 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5051 mode = TYPE_MODE (TREE_TYPE (arg));
5052 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5053 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5056 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5057 Return NULL_RTX if a normal call should be emitted rather than expanding the
5058 function inline. If convenient, the result should be placed in TARGET.
5059 SUBTARGET may be used as the target for computing the operand. */
5061 static rtx
5062 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5064 rtx op0, op1;
5065 tree arg;
5067 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5068 return NULL_RTX;
5070 arg = CALL_EXPR_ARG (exp, 0);
5071 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5073 arg = CALL_EXPR_ARG (exp, 1);
5074 op1 = expand_normal (arg);
5076 return expand_copysign (op0, op1, target);
5079 /* Expand a call to __builtin___clear_cache. */
5081 static rtx
5082 expand_builtin___clear_cache (tree exp)
5084 if (!targetm.code_for_clear_cache)
5086 #ifdef CLEAR_INSN_CACHE
5087 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5088 does something. Just do the default expansion to a call to
5089 __clear_cache(). */
5090 return NULL_RTX;
5091 #else
5092 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5093 does nothing. There is no need to call it. Do nothing. */
5094 return const0_rtx;
5095 #endif /* CLEAR_INSN_CACHE */
5098 /* We have a "clear_cache" insn, and it will handle everything. */
5099 tree begin, end;
5100 rtx begin_rtx, end_rtx;
5102 /* We must not expand to a library call. If we did, any
5103 fallback library function in libgcc that might contain a call to
5104 __builtin___clear_cache() would recurse infinitely. */
5105 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5107 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5108 return const0_rtx;
5111 if (targetm.have_clear_cache ())
5113 struct expand_operand ops[2];
5115 begin = CALL_EXPR_ARG (exp, 0);
5116 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5118 end = CALL_EXPR_ARG (exp, 1);
5119 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5121 create_address_operand (&ops[0], begin_rtx);
5122 create_address_operand (&ops[1], end_rtx);
5123 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5124 return const0_rtx;
5126 return const0_rtx;
5129 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
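/* Worked example (illustrative): with TRAMPOLINE_ALIGNMENT == 64 and
   BITS_PER_UNIT == 8, ADDEND == 7 and MASK == -8, so a TRAMP of
   0x1003 becomes (0x1003 + 7) & -8 == 0x1008.  */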
5131 static rtx
5132 round_trampoline_addr (rtx tramp)
5134 rtx temp, addend, mask;
5136 /* If we don't need too much alignment, we'll have been guaranteed
5137 proper alignment by get_trampoline_type. */
5138 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5139 return tramp;
5141 /* Round address up to desired boundary. */
5142 temp = gen_reg_rtx (Pmode);
5143 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5144 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5146 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5147 temp, 0, OPTAB_LIB_WIDEN);
5148 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5149 temp, 0, OPTAB_LIB_WIDEN);
5151 return tramp;
5154 static rtx
5155 expand_builtin_init_trampoline (tree exp, bool onstack)
5157 tree t_tramp, t_func, t_chain;
5158 rtx m_tramp, r_tramp, r_chain, tmp;
5160 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5161 POINTER_TYPE, VOID_TYPE))
5162 return NULL_RTX;
5164 t_tramp = CALL_EXPR_ARG (exp, 0);
5165 t_func = CALL_EXPR_ARG (exp, 1);
5166 t_chain = CALL_EXPR_ARG (exp, 2);
5168 r_tramp = expand_normal (t_tramp);
5169 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5170 MEM_NOTRAP_P (m_tramp) = 1;
5172 /* If ONSTACK, the TRAMP argument should be the address of a field
5173 within the local function's FRAME decl. Either way, let's see if
5174 we can fill in the MEM_ATTRs for this memory. */
5175 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5176 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5178 /* Creator of a heap trampoline is responsible for making sure the
5179 address is aligned to at least STACK_BOUNDARY. Normally malloc
5180 will ensure this anyhow. */
5181 tmp = round_trampoline_addr (r_tramp);
5182 if (tmp != r_tramp)
5184 m_tramp = change_address (m_tramp, BLKmode, tmp);
5185 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5186 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5189 /* The FUNC argument should be the address of the nested function.
5190 Extract the actual function decl to pass to the hook. */
5191 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5192 t_func = TREE_OPERAND (t_func, 0);
5193 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5195 r_chain = expand_normal (t_chain);
5197 /* Generate insns to initialize the trampoline. */
5198 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5200 if (onstack)
5202 trampolines_created = 1;
5204 if (targetm.calls.custom_function_descriptors != 0)
5205 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5206 "trampoline generated for nested function %qD", t_func);
5209 return const0_rtx;
5212 static rtx
5213 expand_builtin_adjust_trampoline (tree exp)
5215 rtx tramp;
5217 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5218 return NULL_RTX;
5220 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5221 tramp = round_trampoline_addr (tramp);
5222 if (targetm.calls.trampoline_adjust_address)
5223 tramp = targetm.calls.trampoline_adjust_address (tramp);
5225 return tramp;
5228 /* Expand a call to the builtin descriptor initialization routine.
5229 A descriptor is made up of a couple of pointers to the static
5230 chain and the code entry in this order. */
5232 static rtx
5233 expand_builtin_init_descriptor (tree exp)
5235 tree t_descr, t_func, t_chain;
5236 rtx m_descr, r_descr, r_func, r_chain;
5238 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5239 VOID_TYPE))
5240 return NULL_RTX;
5242 t_descr = CALL_EXPR_ARG (exp, 0);
5243 t_func = CALL_EXPR_ARG (exp, 1);
5244 t_chain = CALL_EXPR_ARG (exp, 2);
5246 r_descr = expand_normal (t_descr);
5247 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5248 MEM_NOTRAP_P (m_descr) = 1;
5250 r_func = expand_normal (t_func);
5251 r_chain = expand_normal (t_chain);
5253 /* Generate insns to initialize the descriptor. */
5254 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5255 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5256 POINTER_SIZE / BITS_PER_UNIT), r_func);
5258 return const0_rtx;
5261 /* Expand a call to the builtin descriptor adjustment routine. */
5263 static rtx
5264 expand_builtin_adjust_descriptor (tree exp)
5266 rtx tramp;
5268 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5269 return NULL_RTX;
5271 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5273 /* Unalign the descriptor to allow runtime identification. */
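/* For example (illustrative only), with
   targetm.calls.custom_function_descriptors == 1 the value returned
   below is DESCR + 1; code entry points are at least 2-byte aligned,
   so an odd "function address" lets the call sequence recognize a
   descriptor at run time.  */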
5274 tramp = plus_constant (ptr_mode, tramp,
5275 targetm.calls.custom_function_descriptors);
5277 return force_operand (tramp, NULL_RTX);
5280 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5281 function. The function first checks whether the back end provides
5282 an insn to implement signbit for the respective mode. If not, it
5283 checks whether the floating point format of the value is such that
5284 the sign bit can be extracted. If that is not the case, error out.
5285 EXP is the expression that is a call to the builtin function; if
5286 convenient, the result should be placed in TARGET. */
5287 static rtx
5288 expand_builtin_signbit (tree exp, rtx target)
5290 const struct real_format *fmt;
5291 machine_mode fmode, imode, rmode;
5292 tree arg;
5293 int word, bitpos;
5294 enum insn_code icode;
5295 rtx temp;
5296 location_t loc = EXPR_LOCATION (exp);
5298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5299 return NULL_RTX;
5301 arg = CALL_EXPR_ARG (exp, 0);
5302 fmode = TYPE_MODE (TREE_TYPE (arg));
5303 rmode = TYPE_MODE (TREE_TYPE (exp));
5304 fmt = REAL_MODE_FORMAT (fmode);
5306 arg = builtin_save_expr (arg);
 5308 /* Expand the argument yielding an RTX expression. */
5309 temp = expand_normal (arg);
5311 /* Check if the back end provides an insn that handles signbit for the
5312 argument's mode. */
5313 icode = optab_handler (signbit_optab, fmode);
5314 if (icode != CODE_FOR_nothing)
5316 rtx_insn *last = get_last_insn ();
5317 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5318 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5319 return target;
5320 delete_insns_since (last);
5323 /* For floating point formats without a sign bit, implement signbit
5324 as "ARG < 0.0". */
5325 bitpos = fmt->signbit_ro;
5326 if (bitpos < 0)
5328 /* But we can't do this if the format supports signed zero. */
5329 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5331 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5332 build_real (TREE_TYPE (arg), dconst0));
5333 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5336 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5338 imode = int_mode_for_mode (fmode);
5339 gcc_assert (imode != BLKmode);
5340 temp = gen_lowpart (imode, temp);
5342 else
5344 imode = word_mode;
5345 /* Handle targets with different FP word orders. */
5346 if (FLOAT_WORDS_BIG_ENDIAN)
5347 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5348 else
5349 word = bitpos / BITS_PER_WORD;
5350 temp = operand_subword_force (temp, word, fmode);
5351 bitpos = bitpos % BITS_PER_WORD;
5354 /* Force the intermediate word_mode (or narrower) result into a
5355 register. This avoids attempting to create paradoxical SUBREGs
5356 of floating point modes below. */
5357 temp = force_reg (imode, temp);
5359 /* If the bitpos is within the "result mode" lowpart, the operation
 5360 can be implemented with a single bitwise AND. Otherwise, we need
5361 a right shift and an AND. */
5363 if (bitpos < GET_MODE_BITSIZE (rmode))
5365 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5367 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5368 temp = gen_lowpart (rmode, temp);
5369 temp = expand_binop (rmode, and_optab, temp,
5370 immed_wide_int_const (mask, rmode),
5371 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5373 else
5375 /* Perform a logical right shift to place the signbit in the least
5376 significant bit, then truncate the result to the desired mode
5377 and mask just this bit. */
5378 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5379 temp = gen_lowpart (rmode, temp);
5380 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5381 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5384 return temp;
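 /* As a concrete sketch, assuming IEEE binary64 held in one 64-bit word
    with the sign in bit 63 and a 32-bit result mode, the fallback path
    above amounts to:

      bits = (view of x as a 64-bit integer)   -- the gen_lowpart step
      result = (bits >> 63) & 1                -- shift, truncate, AND

    When the sign bit already lies within the result mode's lowpart,
    the shift is skipped and a single mask suffices.  */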
5387 /* Expand fork or exec calls. TARGET is the desired target of the
5388 call. EXP is the call. FN is the
 5389 identifier of the actual function. IGNORE is nonzero if the
5390 value is to be ignored. */
5392 static rtx
5393 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5395 tree id, decl;
5396 tree call;
5398 /* If we are not profiling, just call the function. */
5399 if (!profile_arc_flag)
5400 return NULL_RTX;
5402 /* Otherwise call the wrapper. This should be equivalent for the rest of
 5403 the compiler, so the code does not diverge, and the wrapper may run the
5404 code necessary for keeping the profiling sane. */
5406 switch (DECL_FUNCTION_CODE (fn))
5408 case BUILT_IN_FORK:
5409 id = get_identifier ("__gcov_fork");
5410 break;
5412 case BUILT_IN_EXECL:
5413 id = get_identifier ("__gcov_execl");
5414 break;
5416 case BUILT_IN_EXECV:
5417 id = get_identifier ("__gcov_execv");
5418 break;
5420 case BUILT_IN_EXECLP:
5421 id = get_identifier ("__gcov_execlp");
5422 break;
5424 case BUILT_IN_EXECLE:
5425 id = get_identifier ("__gcov_execle");
5426 break;
5428 case BUILT_IN_EXECVP:
5429 id = get_identifier ("__gcov_execvp");
5430 break;
5432 case BUILT_IN_EXECVE:
5433 id = get_identifier ("__gcov_execve");
5434 break;
5436 default:
5437 gcc_unreachable ();
5440 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5441 FUNCTION_DECL, id, TREE_TYPE (fn));
5442 DECL_EXTERNAL (decl) = 1;
5443 TREE_PUBLIC (decl) = 1;
5444 DECL_ARTIFICIAL (decl) = 1;
5445 TREE_NOTHROW (decl) = 1;
5446 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5447 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5448 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5449 return expand_call (call, target, ignore);
5454 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5455 the pointer in these functions is void*, the tree optimizers may remove
5456 casts. The mode computed in expand_builtin isn't reliable either, due
5457 to __sync_bool_compare_and_swap.
5459 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5460 group of builtins. This gives us log2 of the mode size. */
5462 static inline machine_mode
5463 get_builtin_sync_mode (int fcode_diff)
5465 /* The size is not negotiable, so ask not to get BLKmode in return
5466 if the target indicates that a smaller size would be better. */
5467 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
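 /* For example, assuming BITS_PER_UNIT == 8, the mapping is:

      fcode_diff 0 -> 8-bit mode   (QImode)  -- the _1 variants
      fcode_diff 2 -> 32-bit mode  (SImode)  -- the _4 variants
      fcode_diff 4 -> 128-bit mode (TImode)  -- the _16 variants

    since BITS_PER_UNIT << fcode_diff yields 8, 32 and 128 bits.  */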
5470 /* Expand the memory expression LOC and return the appropriate memory operand
5471 for the builtin_sync operations. */
5473 static rtx
5474 get_builtin_sync_mem (tree loc, machine_mode mode)
5476 rtx addr, mem;
5478 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5479 addr = convert_memory_address (Pmode, addr);
5481 /* Note that we explicitly do not want any alias information for this
5482 memory, so that we kill all other live memories. Otherwise we don't
5483 satisfy the full barrier semantics of the intrinsic. */
5484 mem = validize_mem (gen_rtx_MEM (mode, addr));
 5486 /* The alignment of the memory must be at least that required by MODE. */
5487 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5488 get_pointer_alignment (loc)));
5489 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5490 MEM_VOLATILE_P (mem) = 1;
5492 return mem;
5495 /* Make sure an argument is in the right mode.
5496 EXP is the tree argument.
5497 MODE is the mode it should be in. */
5499 static rtx
5500 expand_expr_force_mode (tree exp, machine_mode mode)
5502 rtx val;
5503 machine_mode old_mode;
5505 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5506 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5507 of CONST_INTs, where we know the old_mode only from the call argument. */
5509 old_mode = GET_MODE (val);
5510 if (old_mode == VOIDmode)
5511 old_mode = TYPE_MODE (TREE_TYPE (exp));
5512 val = convert_modes (mode, old_mode, val, 1);
5513 return val;
5517 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5518 EXP is the CALL_EXPR. CODE is the rtx code
5519 that corresponds to the arithmetic or logical operation from the name;
5520 an exception here is that NOT actually means NAND. TARGET is an optional
5521 place for us to store the results; AFTER is true if this is the
5522 fetch_and_xxx form. */
5524 static rtx
5525 expand_builtin_sync_operation (machine_mode mode, tree exp,
5526 enum rtx_code code, bool after,
5527 rtx target)
5529 rtx val, mem;
5530 location_t loc = EXPR_LOCATION (exp);
5532 if (code == NOT && warn_sync_nand)
5534 tree fndecl = get_callee_fndecl (exp);
5535 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5537 static bool warned_f_a_n, warned_n_a_f;
5539 switch (fcode)
5541 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5542 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5543 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5544 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5545 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5546 if (warned_f_a_n)
5547 break;
5549 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5550 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5551 warned_f_a_n = true;
5552 break;
5554 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5555 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5556 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5557 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5558 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5559 if (warned_n_a_f)
5560 break;
5562 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5563 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5564 warned_n_a_f = true;
5565 break;
5567 default:
5568 gcc_unreachable ();
5572 /* Expand the operands. */
5573 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5574 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5576 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5577 after);
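 /* Source-level sketch of the two forms handled here, assuming int:

      int old_val = __sync_fetch_and_add (&x, 5);  -- AFTER false: old value
      int new_val = __sync_add_and_fetch (&x, 5);  -- AFTER true: new value

    and of the post-GCC-4.4 NAND semantics warned about above:

      __sync_nand_and_fetch (&x, v)  atomically performs  x = ~(x & v).  */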
5580 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5581 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5582 true if this is the boolean form. TARGET is a place for us to store the
5583 results; this is NOT optional if IS_BOOL is true. */
5585 static rtx
5586 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5587 bool is_bool, rtx target)
5589 rtx old_val, new_val, mem;
5590 rtx *pbool, *poval;
5592 /* Expand the operands. */
5593 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5594 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5595 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5597 pbool = poval = NULL;
5598 if (target != const0_rtx)
5600 if (is_bool)
5601 pbool = &target;
5602 else
5603 poval = &target;
5605 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5606 false, MEMMODEL_SYNC_SEQ_CST,
5607 MEMMODEL_SYNC_SEQ_CST))
5608 return NULL_RTX;
5610 return target;
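 /* Source-level sketch of the two intrinsics expanded here, assuming int:

      _Bool ok = __sync_bool_compare_and_swap (&x, oldv, newv);
      int  was = __sync_val_compare_and_swap (&x, oldv, newv);

    The boolean form reports whether the swap happened; the value form
    returns the prior contents, so ok is equivalent to was == oldv.  */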
5613 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5614 general form is actually an atomic exchange, and some targets only
5615 support a reduced form with the second argument being a constant 1.
5616 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5617 the results. */
5619 static rtx
5620 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5621 rtx target)
5623 rtx val, mem;
5625 /* Expand the operands. */
5626 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5627 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5629 return expand_sync_lock_test_and_set (target, mem, val);
5632 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5634 static void
5635 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5637 rtx mem;
5639 /* Expand the operands. */
5640 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5642 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
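 /* A minimal spinlock sketch pairing the two intrinsics above (some
    targets only support the reduced form that stores the constant 1):

      static int lock;
      while (__sync_lock_test_and_set (&lock, 1))  -- acquire barrier
        ;                                          -- spin until 0 seen
      ... critical section ...
      __sync_lock_release (&lock);                 -- release barrier  */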
5645 /* Given an integer representing an ``enum memmodel'', verify its
5646 correctness and return the memory model enum. */
5648 static enum memmodel
5649 get_memmodel (tree exp)
5651 rtx op;
5652 unsigned HOST_WIDE_INT val;
5653 source_location loc
5654 = expansion_point_location_if_in_system_header (input_location);
5656 /* If the parameter is not a constant, it's a run time value so we'll just
5657 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5658 if (TREE_CODE (exp) != INTEGER_CST)
5659 return MEMMODEL_SEQ_CST;
5661 op = expand_normal (exp);
5663 val = INTVAL (op);
5664 if (targetm.memmodel_check)
5665 val = targetm.memmodel_check (val);
5666 else if (val & ~MEMMODEL_MASK)
5668 warning_at (loc, OPT_Winvalid_memory_model,
5669 "unknown architecture specifier in memory model to builtin");
5670 return MEMMODEL_SEQ_CST;
 5673 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5674 if (memmodel_base (val) >= MEMMODEL_LAST)
5676 warning_at (loc, OPT_Winvalid_memory_model,
5677 "invalid memory model argument to builtin");
5678 return MEMMODEL_SEQ_CST;
5681 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5682 be conservative and promote consume to acquire. */
5683 if (val == MEMMODEL_CONSUME)
5684 val = MEMMODEL_ACQUIRE;
5686 return (enum memmodel) val;
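 /* For reference, the constants validated here correspond to the
    C11-compatible __ATOMIC_* macros:

      __ATOMIC_RELAXED 0   __ATOMIC_CONSUME 1   __ATOMIC_ACQUIRE 2
      __ATOMIC_RELEASE 3   __ATOMIC_ACQ_REL 4   __ATOMIC_SEQ_CST 5

    so, per the PR 59448 workaround above, a source-level
    __atomic_load_n (&x, __ATOMIC_CONSUME) is expanded as if it had
    requested __ATOMIC_ACQUIRE.  */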
5689 /* Expand the __atomic_exchange intrinsic:
5690 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5691 EXP is the CALL_EXPR.
5692 TARGET is an optional place for us to store the results. */
5694 static rtx
5695 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5697 rtx val, mem;
5698 enum memmodel model;
5700 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5702 if (!flag_inline_atomics)
5703 return NULL_RTX;
5705 /* Expand the operands. */
5706 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5707 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5709 return expand_atomic_exchange (target, mem, val, model);
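 /* Source-level sketch, assuming int:

      int prev = __atomic_exchange_n (&x, 1, __ATOMIC_SEQ_CST);

    atomically stores 1 into x and returns its previous contents.  */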
5712 /* Expand the __atomic_compare_exchange intrinsic:
5713 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5714 TYPE desired, BOOL weak,
5715 enum memmodel success,
5716 enum memmodel failure)
5717 EXP is the CALL_EXPR.
5718 TARGET is an optional place for us to store the results. */
5720 static rtx
5721 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5722 rtx target)
5724 rtx expect, desired, mem, oldval;
5725 rtx_code_label *label;
5726 enum memmodel success, failure;
5727 tree weak;
5728 bool is_weak;
5729 source_location loc
5730 = expansion_point_location_if_in_system_header (input_location);
5732 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5733 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5735 if (failure > success)
5737 warning_at (loc, OPT_Winvalid_memory_model,
5738 "failure memory model cannot be stronger than success "
5739 "memory model for %<__atomic_compare_exchange%>");
5740 success = MEMMODEL_SEQ_CST;
5743 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5745 warning_at (loc, OPT_Winvalid_memory_model,
5746 "invalid failure memory model for "
5747 "%<__atomic_compare_exchange%>");
5748 failure = MEMMODEL_SEQ_CST;
5749 success = MEMMODEL_SEQ_CST;
5753 if (!flag_inline_atomics)
5754 return NULL_RTX;
5756 /* Expand the operands. */
5757 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5759 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5760 expect = convert_memory_address (Pmode, expect);
5761 expect = gen_rtx_MEM (mode, expect);
5762 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5764 weak = CALL_EXPR_ARG (exp, 3);
5765 is_weak = false;
5766 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5767 is_weak = true;
5769 if (target == const0_rtx)
5770 target = NULL;
 5772 /* Lest the rtl backend create a race condition with an improper store
5773 to memory, always create a new pseudo for OLDVAL. */
5774 oldval = NULL;
5776 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5777 is_weak, success, failure))
5778 return NULL_RTX;
5780 /* Conditionally store back to EXPECT, lest we create a race condition
5781 with an improper store to memory. */
5782 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5783 the normal case where EXPECT is totally private, i.e. a register. At
5784 which point the store can be unconditional. */
5785 label = gen_label_rtx ();
5786 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5787 GET_MODE (target), 1, label);
5788 emit_move_insn (expect, oldval);
5789 emit_label (label);
5791 return target;
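 /* Typical source-level use of the builtin expanded above, as a sketch
    assuming int; note that `expected' is refreshed on failure by the
    conditional store-back just emitted:

      int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
      while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                           0, __ATOMIC_SEQ_CST,
                                           __ATOMIC_RELAXED))
        ;  -- retry with the value now in `expected'  */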
5794 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5795 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5796 call. The weak parameter must be dropped to match the expected parameter
5797 list and the expected argument changed from value to pointer to memory
5798 slot. */
5800 static void
5801 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5803 unsigned int z;
5804 vec<tree, va_gc> *vec;
5806 vec_alloc (vec, 5);
5807 vec->quick_push (gimple_call_arg (call, 0));
5808 tree expected = gimple_call_arg (call, 1);
5809 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5810 TREE_TYPE (expected));
5811 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5812 if (expd != x)
5813 emit_move_insn (x, expd);
5814 tree v = make_tree (TREE_TYPE (expected), x);
5815 vec->quick_push (build1 (ADDR_EXPR,
5816 build_pointer_type (TREE_TYPE (expected)), v));
5817 vec->quick_push (gimple_call_arg (call, 2));
5818 /* Skip the boolean weak parameter. */
5819 for (z = 4; z < 6; z++)
5820 vec->quick_push (gimple_call_arg (call, z));
5821 built_in_function fncode
5822 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5823 + exact_log2 (GET_MODE_SIZE (mode)));
5824 tree fndecl = builtin_decl_explicit (fncode);
5825 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5826 fndecl);
5827 tree exp = build_call_vec (boolean_type_node, fn, vec);
5828 tree lhs = gimple_call_lhs (call);
5829 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5830 if (lhs)
5832 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5833 if (GET_MODE (boolret) != mode)
5834 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5835 x = force_reg (mode, x);
5836 write_complex_part (target, boolret, true);
5837 write_complex_part (target, x, false);
5841 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5843 void
5844 expand_ifn_atomic_compare_exchange (gcall *call)
5846 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5847 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5848 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5849 rtx expect, desired, mem, oldval, boolret;
5850 enum memmodel success, failure;
5851 tree lhs;
5852 bool is_weak;
5853 source_location loc
5854 = expansion_point_location_if_in_system_header (gimple_location (call));
5856 success = get_memmodel (gimple_call_arg (call, 4));
5857 failure = get_memmodel (gimple_call_arg (call, 5));
5859 if (failure > success)
5861 warning_at (loc, OPT_Winvalid_memory_model,
5862 "failure memory model cannot be stronger than success "
5863 "memory model for %<__atomic_compare_exchange%>");
5864 success = MEMMODEL_SEQ_CST;
5867 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5869 warning_at (loc, OPT_Winvalid_memory_model,
5870 "invalid failure memory model for "
5871 "%<__atomic_compare_exchange%>");
5872 failure = MEMMODEL_SEQ_CST;
5873 success = MEMMODEL_SEQ_CST;
5876 if (!flag_inline_atomics)
5878 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5879 return;
5882 /* Expand the operands. */
5883 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5885 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5886 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5888 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5890 boolret = NULL;
5891 oldval = NULL;
5893 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5894 is_weak, success, failure))
5896 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5897 return;
5900 lhs = gimple_call_lhs (call);
5901 if (lhs)
5903 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5904 if (GET_MODE (boolret) != mode)
5905 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5906 write_complex_part (target, boolret, true);
5907 write_complex_part (target, oldval, false);
5911 /* Expand the __atomic_load intrinsic:
5912 TYPE __atomic_load (TYPE *object, enum memmodel)
5913 EXP is the CALL_EXPR.
5914 TARGET is an optional place for us to store the results. */
5916 static rtx
5917 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5919 rtx mem;
5920 enum memmodel model;
5922 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5923 if (is_mm_release (model) || is_mm_acq_rel (model))
5925 source_location loc
5926 = expansion_point_location_if_in_system_header (input_location);
5927 warning_at (loc, OPT_Winvalid_memory_model,
5928 "invalid memory model for %<__atomic_load%>");
5929 model = MEMMODEL_SEQ_CST;
5932 if (!flag_inline_atomics)
5933 return NULL_RTX;
5935 /* Expand the operand. */
5936 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5938 return expand_atomic_load (target, mem, model);
5942 /* Expand the __atomic_store intrinsic:
5943 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5944 EXP is the CALL_EXPR.
5945 TARGET is an optional place for us to store the results. */
5947 static rtx
5948 expand_builtin_atomic_store (machine_mode mode, tree exp)
5950 rtx mem, val;
5951 enum memmodel model;
5953 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5954 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5955 || is_mm_release (model)))
5957 source_location loc
5958 = expansion_point_location_if_in_system_header (input_location);
5959 warning_at (loc, OPT_Winvalid_memory_model,
5960 "invalid memory model for %<__atomic_store%>");
5961 model = MEMMODEL_SEQ_CST;
5964 if (!flag_inline_atomics)
5965 return NULL_RTX;
5967 /* Expand the operands. */
5968 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5969 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5971 return expand_atomic_store (mem, val, model, false);
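 /* Sketch of the model restrictions enforced by the two expanders above:

      __atomic_load_n (p, m)      m in { RELAXED, CONSUME, ACQUIRE, SEQ_CST }
      __atomic_store_n (p, v, m)  m in { RELAXED, RELEASE, SEQ_CST }

    Anything else, e.g. __ATOMIC_RELEASE on a load, is diagnosed with
    -Winvalid-memory-model and demoted to SEQ_CST as coded above.  */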
5974 /* Expand the __atomic_fetch_XXX intrinsic:
5975 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5976 EXP is the CALL_EXPR.
5977 TARGET is an optional place for us to store the results.
 5978 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
 5979 FETCH_AFTER is true if the builtin returns the result of the operation,
 5980 false if it returns the value before the operation.
5981 IGNORE is true if the result is not used.
5982 EXT_CALL is the correct builtin for an external call if this cannot be
5983 resolved to an instruction sequence. */
5985 static rtx
5986 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5987 enum rtx_code code, bool fetch_after,
5988 bool ignore, enum built_in_function ext_call)
5990 rtx val, mem, ret;
5991 enum memmodel model;
5992 tree fndecl;
5993 tree addr;
5995 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5997 /* Expand the operands. */
5998 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5999 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6001 /* Only try generating instructions if inlining is turned on. */
6002 if (flag_inline_atomics)
6004 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6005 if (ret)
6006 return ret;
6009 /* Return if a different routine isn't needed for the library call. */
6010 if (ext_call == BUILT_IN_NONE)
6011 return NULL_RTX;
6013 /* Change the call to the specified function. */
6014 fndecl = get_callee_fndecl (exp);
6015 addr = CALL_EXPR_FN (exp);
6016 STRIP_NOPS (addr);
6018 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6019 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6021 /* Expand the call here so we can emit trailing code. */
6022 ret = expand_call (exp, target, ignore);
6024 /* Replace the original function just in case it matters. */
6025 TREE_OPERAND (addr, 0) = fndecl;
6027 /* Then issue the arithmetic correction to return the right result. */
6028 if (!ignore)
6030 if (code == NOT)
6032 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6033 OPTAB_LIB_WIDEN);
6034 ret = expand_simple_unop (mode, NOT, ret, target, true);
6036 else
6037 ret = expand_simple_binop (mode, code, ret, val, target, true,
6038 OPTAB_LIB_WIDEN);
6040 return ret;
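 /* The trailing correction recovers the post-operation value from the
    fetch-before result RET of the library call.  Sketch for two cases:

      __atomic_add_fetch falling back to __atomic_fetch_add:
        result = ret + val
      __atomic_nand_fetch falling back to __atomic_fetch_nand:
        result = ~(ret & val)     -- the AND-then-NOT emitted above  */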
6043 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6045 void
6046 expand_ifn_atomic_bit_test_and (gcall *call)
6048 tree ptr = gimple_call_arg (call, 0);
6049 tree bit = gimple_call_arg (call, 1);
6050 tree flag = gimple_call_arg (call, 2);
6051 tree lhs = gimple_call_lhs (call);
6052 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6053 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6054 enum rtx_code code;
6055 optab optab;
6056 struct expand_operand ops[5];
6058 gcc_assert (flag_inline_atomics);
6060 if (gimple_call_num_args (call) == 4)
6061 model = get_memmodel (gimple_call_arg (call, 3));
6063 rtx mem = get_builtin_sync_mem (ptr, mode);
6064 rtx val = expand_expr_force_mode (bit, mode);
6066 switch (gimple_call_internal_fn (call))
6068 case IFN_ATOMIC_BIT_TEST_AND_SET:
6069 code = IOR;
6070 optab = atomic_bit_test_and_set_optab;
6071 break;
6072 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6073 code = XOR;
6074 optab = atomic_bit_test_and_complement_optab;
6075 break;
6076 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6077 code = AND;
6078 optab = atomic_bit_test_and_reset_optab;
6079 break;
6080 default:
6081 gcc_unreachable ();
6084 if (lhs == NULL_TREE)
6086 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6087 val, NULL_RTX, true, OPTAB_DIRECT);
6088 if (code == AND)
6089 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6090 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6091 return;
6094 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6095 enum insn_code icode = direct_optab_handler (optab, mode);
6096 gcc_assert (icode != CODE_FOR_nothing);
6097 create_output_operand (&ops[0], target, mode);
6098 create_fixed_operand (&ops[1], mem);
6099 create_convert_operand_to (&ops[2], val, mode, true);
6100 create_integer_operand (&ops[3], model);
6101 create_integer_operand (&ops[4], integer_onep (flag));
6102 if (maybe_expand_insn (icode, 5, ops))
6103 return;
6105 rtx bitval = val;
6106 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6107 val, NULL_RTX, true, OPTAB_DIRECT);
6108 rtx maskval = val;
6109 if (code == AND)
6110 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6111 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6112 code, model, false);
6113 if (integer_onep (flag))
6115 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6116 NULL_RTX, true, OPTAB_DIRECT);
6117 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6118 true, OPTAB_DIRECT);
6120 else
6121 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6122 OPTAB_DIRECT);
6123 if (result != target)
6124 emit_move_insn (target, result);
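 /* This internal function is matched from source idioms such as the
    following sketch, assuming unsigned int:

      _Bool was_set = __atomic_fetch_or (p, 1u << bit, m) & (1u << bit);

    which a target like x86 can implement as a single lock bts; the
    fallback path above rebuilds the shift/mask sequence by hand.  */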
6127 /* Expand an atomic clear operation.
 6128 void __atomic_clear (BOOL *obj, enum memmodel)
6129 EXP is the call expression. */
6131 static rtx
6132 expand_builtin_atomic_clear (tree exp)
6134 machine_mode mode;
6135 rtx mem, ret;
6136 enum memmodel model;
6138 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6139 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6142 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6144 source_location loc
6145 = expansion_point_location_if_in_system_header (input_location);
6146 warning_at (loc, OPT_Winvalid_memory_model,
6147 "invalid memory model for %<__atomic_store%>");
6148 model = MEMMODEL_SEQ_CST;
6151 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6152 Failing that, a store is issued by __atomic_store. The only way this can
6153 fail is if the bool type is larger than a word size. Unlikely, but
6154 handle it anyway for completeness. Assume a single threaded model since
6155 there is no atomic support in this case, and no barriers are required. */
6156 ret = expand_atomic_store (mem, const0_rtx, model, true);
6157 if (!ret)
6158 emit_move_insn (mem, const0_rtx);
6159 return const0_rtx;
6162 /* Expand an atomic test_and_set operation.
 6163 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6164 EXP is the call expression. */
6166 static rtx
6167 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6169 rtx mem;
6170 enum memmodel model;
6171 machine_mode mode;
6173 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6174 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6175 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6177 return expand_atomic_test_and_set (target, mem, model);
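 /* Sketch pairing the two expanders above as a C11-style atomic flag:

      static _Bool flag;
      while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
        ;                              -- spin until previously clear
      ... critical section ...
      __atomic_clear (&flag, __ATOMIC_RELEASE);  */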
 6181 /* Return true if an object of size ARG0 whose alignment is given by optional
 6182 pointer ARG1 is always lock free here. A null ARG1 means typical alignment. */
6184 static tree
6185 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6187 int size;
6188 machine_mode mode;
6189 unsigned int mode_align, type_align;
6191 if (TREE_CODE (arg0) != INTEGER_CST)
6192 return NULL_TREE;
6194 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6195 mode = mode_for_size (size, MODE_INT, 0);
6196 mode_align = GET_MODE_ALIGNMENT (mode);
6198 if (TREE_CODE (arg1) == INTEGER_CST)
6200 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6202 /* Either this argument is null, or it's a fake pointer encoding
6203 the alignment of the object. */
6204 val = least_bit_hwi (val);
6205 val *= BITS_PER_UNIT;
6207 if (val == 0 || mode_align < val)
6208 type_align = mode_align;
6209 else
6210 type_align = val;
6212 else
6214 tree ttype = TREE_TYPE (arg1);
6216 /* This function is usually invoked and folded immediately by the front
6217 end before anything else has a chance to look at it. The pointer
6218 parameter at this point is usually cast to a void *, so check for that
6219 and look past the cast. */
6220 if (CONVERT_EXPR_P (arg1)
6221 && POINTER_TYPE_P (ttype)
6222 && VOID_TYPE_P (TREE_TYPE (ttype))
6223 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6224 arg1 = TREE_OPERAND (arg1, 0);
6226 ttype = TREE_TYPE (arg1);
6227 gcc_assert (POINTER_TYPE_P (ttype));
6229 /* Get the underlying type of the object. */
6230 ttype = TREE_TYPE (ttype);
6231 type_align = TYPE_ALIGN (ttype);
6234 /* If the object has smaller alignment, the lock free routines cannot
6235 be used. */
6236 if (type_align < mode_align)
6237 return boolean_false_node;
6239 /* Check if a compare_and_swap pattern exists for the mode which represents
6240 the required size. The pattern is not allowed to fail, so the existence
6241 of the pattern indicates support is present. Also require that an
6242 atomic load exists for the required size. */
6243 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6244 return boolean_true_node;
6245 else
6246 return boolean_false_node;
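 /* Source-level sketch of the query folded above (the second argument
    may be a null pointer, meaning typical alignment for the size):

      _Bool a = __atomic_always_lock_free (sizeof (long), 0);
      _Bool b = __atomic_always_lock_free (sizeof (long), &obj);

    Form a uses the mode's natural alignment; form b also considers the
    alignment of obj, per the two branches above.  */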
6249 /* Return true if the parameters to call EXP represent an object which will
6250 always generate lock free instructions. The first argument represents the
6251 size of the object, and the second parameter is a pointer to the object
6252 itself. If NULL is passed for the object, then the result is based on
6253 typical alignment for an object of the specified size. Otherwise return
6254 false. */
6256 static rtx
6257 expand_builtin_atomic_always_lock_free (tree exp)
6259 tree size;
6260 tree arg0 = CALL_EXPR_ARG (exp, 0);
6261 tree arg1 = CALL_EXPR_ARG (exp, 1);
6263 if (TREE_CODE (arg0) != INTEGER_CST)
6265 error ("non-constant argument 1 to __atomic_always_lock_free");
6266 return const0_rtx;
6269 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6270 if (size == boolean_true_node)
6271 return const1_rtx;
6272 return const0_rtx;
 6275 /* Return one or zero if it can be determined that object ARG1 of size ARG0
 6276 is lock free on this architecture. */
6278 static tree
6279 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6281 if (!flag_inline_atomics)
6282 return NULL_TREE;
6284 /* If it isn't always lock free, don't generate a result. */
6285 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6286 return boolean_true_node;
6288 return NULL_TREE;
6291 /* Return true if the parameters to call EXP represent an object which will
6292 always generate lock free instructions. The first argument represents the
6293 size of the object, and the second parameter is a pointer to the object
6294 itself. If NULL is passed for the object, then the result is based on
6295 typical alignment for an object of the specified size. Otherwise return
 6296 NULL. */
6298 static rtx
6299 expand_builtin_atomic_is_lock_free (tree exp)
6301 tree size;
6302 tree arg0 = CALL_EXPR_ARG (exp, 0);
6303 tree arg1 = CALL_EXPR_ARG (exp, 1);
6305 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6307 error ("non-integer argument 1 to __atomic_is_lock_free");
6308 return NULL_RTX;
6311 if (!flag_inline_atomics)
6312 return NULL_RTX;
6314 /* If the value is known at compile time, return the RTX for it. */
6315 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6316 if (size == boolean_true_node)
6317 return const1_rtx;
6319 return NULL_RTX;
6322 /* Expand the __atomic_thread_fence intrinsic:
6323 void __atomic_thread_fence (enum memmodel)
6324 EXP is the CALL_EXPR. */
6326 static void
6327 expand_builtin_atomic_thread_fence (tree exp)
6329 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6330 expand_mem_thread_fence (model);
6333 /* Expand the __atomic_signal_fence intrinsic:
6334 void __atomic_signal_fence (enum memmodel)
6335 EXP is the CALL_EXPR. */
6337 static void
6338 expand_builtin_atomic_signal_fence (tree exp)
6340 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6341 expand_mem_signal_fence (model);
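 /* Sketch of the distinction between the two fences expanded above:

      __atomic_thread_fence (__ATOMIC_SEQ_CST);  -- orders against other
                                                    threads; may emit a
                                                    machine barrier
      __atomic_signal_fence (__ATOMIC_SEQ_CST);  -- orders only against a
                                                    signal handler on the
                                                    same thread; compiler
                                                    barrier, no insns  */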
6344 /* Expand the __sync_synchronize intrinsic. */
6346 static void
6347 expand_builtin_sync_synchronize (void)
6349 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6352 static rtx
6353 expand_builtin_thread_pointer (tree exp, rtx target)
6355 enum insn_code icode;
6356 if (!validate_arglist (exp, VOID_TYPE))
6357 return const0_rtx;
6358 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6359 if (icode != CODE_FOR_nothing)
6361 struct expand_operand op;
 6362 /* If the target is not suitable then create a new target. */
6363 if (target == NULL_RTX
6364 || !REG_P (target)
6365 || GET_MODE (target) != Pmode)
6366 target = gen_reg_rtx (Pmode);
6367 create_output_operand (&op, target, Pmode);
6368 expand_insn (icode, 1, &op);
6369 return target;
6371 error ("__builtin_thread_pointer is not supported on this target");
6372 return const0_rtx;
6375 static void
6376 expand_builtin_set_thread_pointer (tree exp)
6378 enum insn_code icode;
6379 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6380 return;
6381 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6382 if (icode != CODE_FOR_nothing)
6384 struct expand_operand op;
6385 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6386 Pmode, EXPAND_NORMAL);
6387 create_input_operand (&op, val, Pmode);
6388 expand_insn (icode, 1, &op);
6389 return;
6391 error ("__builtin_set_thread_pointer is not supported on this target");
 6395 /* Emit code to restore the stack pointer from a previously saved value. */
6397 static void
6398 expand_stack_restore (tree var)
6400 rtx_insn *prev;
6401 rtx sa = expand_normal (var);
6403 sa = convert_memory_address (Pmode, sa);
6405 prev = get_last_insn ();
6406 emit_stack_restore (SAVE_BLOCK, sa);
6408 record_new_stack_level ();
6410 fixup_args_size_notes (prev, get_last_insn (), 0);
 6413 /* Emit code to save the current value of the stack pointer. */
6415 static rtx
6416 expand_stack_save (void)
6418 rtx ret = NULL_RTX;
6420 emit_stack_save (SAVE_BLOCK, &ret);
6421 return ret;
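 /* These two builtins are internal: gimplification of a variable-sized
    object inserts them automatically.  Conceptual sketch of what

      { char buf[n]; use (buf); }

    becomes:

      sp = stack_save (); buf = alloca (n); use (buf); stack_restore (sp);

    so the space for buf is reclaimed on scope exit.  */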
6425 /* Expand an expression EXP that calls a built-in function,
6426 with result going to TARGET if that's convenient
6427 (and in mode MODE if that's convenient).
6428 SUBTARGET may be used as the target for computing one of EXP's operands.
6429 IGNORE is nonzero if the value is to be ignored. */
 6431 rtx
 6432 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6433 int ignore)
6435 tree fndecl = get_callee_fndecl (exp);
6436 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6437 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6438 int flags;
6440 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6441 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6443 /* When ASan is enabled, we don't want to expand some memory/string
6444 builtins and rely on libsanitizer's hooks. This allows us to avoid
 6445 redundant checks and be sure that possible overflow will be detected
6446 by ASan. */
6448 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6449 return expand_call (exp, target, ignore);
6451 /* When not optimizing, generate calls to library functions for a certain
6452 set of builtins. */
6453 if (!optimize
6454 && !called_as_built_in (fndecl)
6455 && fcode != BUILT_IN_FORK
6456 && fcode != BUILT_IN_EXECL
6457 && fcode != BUILT_IN_EXECV
6458 && fcode != BUILT_IN_EXECLP
6459 && fcode != BUILT_IN_EXECLE
6460 && fcode != BUILT_IN_EXECVP
6461 && fcode != BUILT_IN_EXECVE
6462 && fcode != BUILT_IN_ALLOCA
6463 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6464 && fcode != BUILT_IN_FREE
6465 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6466 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6467 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6468 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6469 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6470 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6471 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6472 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6473 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6474 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6475 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6476 && fcode != BUILT_IN_CHKP_BNDRET)
6477 return expand_call (exp, target, ignore);
6479 /* The built-in function expanders test for target == const0_rtx
6480 to determine whether the function's result will be ignored. */
6481 if (ignore)
6482 target = const0_rtx;
6484 /* If the result of a pure or const built-in function is ignored, and
6485 none of its arguments are volatile, we can avoid expanding the
6486 built-in call and just evaluate the arguments for side-effects. */
6487 if (target == const0_rtx
6488 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6489 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6491 bool volatilep = false;
6492 tree arg;
6493 call_expr_arg_iterator iter;
6495 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6496 if (TREE_THIS_VOLATILE (arg))
6498 volatilep = true;
6499 break;
6502 if (! volatilep)
6504 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6505 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6506 return const0_rtx;
6510 /* expand_builtin_with_bounds is supposed to be used for
6511 instrumented builtin calls. */
6512 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6514 switch (fcode)
6516 CASE_FLT_FN (BUILT_IN_FABS):
6517 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6518 case BUILT_IN_FABSD32:
6519 case BUILT_IN_FABSD64:
6520 case BUILT_IN_FABSD128:
6521 target = expand_builtin_fabs (exp, target, subtarget);
6522 if (target)
6523 return target;
6524 break;
6526 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6527 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6528 target = expand_builtin_copysign (exp, target, subtarget);
6529 if (target)
6530 return target;
6531 break;
6533 /* Just do a normal library call if we were unable to fold
6534 the values. */
6535 CASE_FLT_FN (BUILT_IN_CABS):
6536 break;
6538 CASE_FLT_FN (BUILT_IN_FMA):
6539 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6540 if (target)
6541 return target;
6542 break;
6544 CASE_FLT_FN (BUILT_IN_ILOGB):
6545 if (! flag_unsafe_math_optimizations)
6546 break;
6547 gcc_fallthrough ();
6548 CASE_FLT_FN (BUILT_IN_ISINF):
6549 CASE_FLT_FN (BUILT_IN_FINITE):
6550 case BUILT_IN_ISFINITE:
6551 case BUILT_IN_ISNORMAL:
6552 target = expand_builtin_interclass_mathfn (exp, target);
6553 if (target)
6554 return target;
6555 break;
6557 CASE_FLT_FN (BUILT_IN_ICEIL):
6558 CASE_FLT_FN (BUILT_IN_LCEIL):
6559 CASE_FLT_FN (BUILT_IN_LLCEIL):
6560 CASE_FLT_FN (BUILT_IN_LFLOOR):
6561 CASE_FLT_FN (BUILT_IN_IFLOOR):
6562 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6563 target = expand_builtin_int_roundingfn (exp, target);
6564 if (target)
6565 return target;
6566 break;
6568 CASE_FLT_FN (BUILT_IN_IRINT):
6569 CASE_FLT_FN (BUILT_IN_LRINT):
6570 CASE_FLT_FN (BUILT_IN_LLRINT):
6571 CASE_FLT_FN (BUILT_IN_IROUND):
6572 CASE_FLT_FN (BUILT_IN_LROUND):
6573 CASE_FLT_FN (BUILT_IN_LLROUND):
6574 target = expand_builtin_int_roundingfn_2 (exp, target);
6575 if (target)
6576 return target;
6577 break;
6579 CASE_FLT_FN (BUILT_IN_POWI):
6580 target = expand_builtin_powi (exp, target);
6581 if (target)
6582 return target;
6583 break;
6585 CASE_FLT_FN (BUILT_IN_CEXPI):
6586 target = expand_builtin_cexpi (exp, target);
6587 gcc_assert (target);
6588 return target;
6590 CASE_FLT_FN (BUILT_IN_SIN):
6591 CASE_FLT_FN (BUILT_IN_COS):
6592 if (! flag_unsafe_math_optimizations)
6593 break;
6594 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6595 if (target)
6596 return target;
6597 break;
6599 CASE_FLT_FN (BUILT_IN_SINCOS):
6600 if (! flag_unsafe_math_optimizations)
6601 break;
6602 target = expand_builtin_sincos (exp);
6603 if (target)
6604 return target;
6605 break;
6607 case BUILT_IN_APPLY_ARGS:
6608 return expand_builtin_apply_args ();
6610 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6611 FUNCTION with a copy of the parameters described by
6612 ARGUMENTS, and ARGSIZE. It returns a block of memory
6613 allocated on the stack into which is stored all the registers
6614 that might possibly be used for returning the result of a
6615 function. ARGUMENTS is the value returned by
6616 __builtin_apply_args. ARGSIZE is the number of bytes of
6617 arguments that must be copied. ??? How should this value be
6618 computed? We'll also need a safe worst case value for varargs
6619 functions. */
6620 case BUILT_IN_APPLY:
6621 if (!validate_arglist (exp, POINTER_TYPE,
6622 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6623 && !validate_arglist (exp, REFERENCE_TYPE,
6624 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6625 return const0_rtx;
6626 else
6628 rtx ops[3];
6630 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6631 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6632 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6634 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6637 /* __builtin_return (RESULT) causes the function to return the
6638 value described by RESULT. RESULT is address of the block of
6639 memory returned by __builtin_apply. */
6640 case BUILT_IN_RETURN:
6641 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6642 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6643 return const0_rtx;
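 /* Sketch of the call-forwarding idiom the three builtins above enable
    (128 is an illustrative worst-case argument size; as noted above, no
    fully safe value exists for varargs callees):

      void wrapper (void)
      {
        void *args = __builtin_apply_args ();
        void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
        __builtin_return (ret);
      }  */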
6645 case BUILT_IN_SAVEREGS:
6646 return expand_builtin_saveregs ();
6648 case BUILT_IN_VA_ARG_PACK:
6649 /* All valid uses of __builtin_va_arg_pack () are removed during
6650 inlining. */
6651 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6652 return const0_rtx;
6654 case BUILT_IN_VA_ARG_PACK_LEN:
6655 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6656 inlining. */
6657 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6658 return const0_rtx;
6660 /* Return the address of the first anonymous stack arg. */
6661 case BUILT_IN_NEXT_ARG:
6662 if (fold_builtin_next_arg (exp, false))
6663 return const0_rtx;
6664 return expand_builtin_next_arg ();
6666 case BUILT_IN_CLEAR_CACHE:
6667 target = expand_builtin___clear_cache (exp);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_CLASSIFY_TYPE:
6673 return expand_builtin_classify_type (exp);
6675 case BUILT_IN_CONSTANT_P:
6676 return const0_rtx;
6678 case BUILT_IN_FRAME_ADDRESS:
6679 case BUILT_IN_RETURN_ADDRESS:
6680 return expand_builtin_frame_address (fndecl, exp);
6682 /* Returns the address of the area where the structure is returned.
6683 0 otherwise. */
6684 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6685 if (call_expr_nargs (exp) != 0
6686 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6687 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6688 return const0_rtx;
6689 else
6690 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6692 case BUILT_IN_ALLOCA:
6693 case BUILT_IN_ALLOCA_WITH_ALIGN:
6694 /* If the allocation stems from the declaration of a variable-sized
6695 object, it cannot accumulate. */
6696 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6697 if (target)
6698 return target;
6699 break;
6701 case BUILT_IN_STACK_SAVE:
6702 return expand_stack_save ();
6704 case BUILT_IN_STACK_RESTORE:
6705 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6706 return const0_rtx;
6708 case BUILT_IN_BSWAP16:
6709 case BUILT_IN_BSWAP32:
6710 case BUILT_IN_BSWAP64:
6711 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6712 if (target)
6713 return target;
6714 break;
6716 CASE_INT_FN (BUILT_IN_FFS):
6717 target = expand_builtin_unop (target_mode, exp, target,
6718 subtarget, ffs_optab);
6719 if (target)
6720 return target;
6721 break;
6723 CASE_INT_FN (BUILT_IN_CLZ):
6724 target = expand_builtin_unop (target_mode, exp, target,
6725 subtarget, clz_optab);
6726 if (target)
6727 return target;
6728 break;
6730 CASE_INT_FN (BUILT_IN_CTZ):
6731 target = expand_builtin_unop (target_mode, exp, target,
6732 subtarget, ctz_optab);
6733 if (target)
6734 return target;
6735 break;
6737 CASE_INT_FN (BUILT_IN_CLRSB):
6738 target = expand_builtin_unop (target_mode, exp, target,
6739 subtarget, clrsb_optab);
6740 if (target)
6741 return target;
6742 break;
6744 CASE_INT_FN (BUILT_IN_POPCOUNT):
6745 target = expand_builtin_unop (target_mode, exp, target,
6746 subtarget, popcount_optab);
6747 if (target)
6748 return target;
6749 break;
6751 CASE_INT_FN (BUILT_IN_PARITY):
6752 target = expand_builtin_unop (target_mode, exp, target,
6753 subtarget, parity_optab);
6754 if (target)
6755 return target;
6756 break;
6758 case BUILT_IN_STRLEN:
6759 target = expand_builtin_strlen (exp, target, target_mode);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_STRCAT:
6765 target = expand_builtin_strcat (exp, target);
6766 if (target)
6767 return target;
6768 break;
6770 case BUILT_IN_STRCPY:
6771 target = expand_builtin_strcpy (exp, target);
6772 if (target)
6773 return target;
6774 break;
6776 case BUILT_IN_STRNCAT:
6777 target = expand_builtin_strncat (exp, target);
6778 if (target)
6779 return target;
6780 break;
6782 case BUILT_IN_STRNCPY:
6783 target = expand_builtin_strncpy (exp, target);
6784 if (target)
6785 return target;
6786 break;
6788 case BUILT_IN_STPCPY:
6789 target = expand_builtin_stpcpy (exp, target, mode);
6790 if (target)
6791 return target;
6792 break;
6794 case BUILT_IN_STPNCPY:
6795 target = expand_builtin_stpncpy (exp, target);
6796 if (target)
6797 return target;
6798 break;
6800 case BUILT_IN_MEMCPY:
6801 target = expand_builtin_memcpy (exp, target);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_MEMMOVE:
6807 target = expand_builtin_memmove (exp, target);
6808 if (target)
6809 return target;
6810 break;
6812 case BUILT_IN_MEMPCPY:
6813 target = expand_builtin_mempcpy (exp, target, mode);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_MEMSET:
6819 target = expand_builtin_memset (exp, target, mode);
6820 if (target)
6821 return target;
6822 break;
6824 case BUILT_IN_BZERO:
6825 target = expand_builtin_bzero (exp);
6826 if (target)
6827 return target;
6828 break;
6830 case BUILT_IN_STRCMP:
6831 target = expand_builtin_strcmp (exp, target);
6832 if (target)
6833 return target;
6834 break;
6836 case BUILT_IN_STRNCMP:
6837 target = expand_builtin_strncmp (exp, target, mode);
6838 if (target)
6839 return target;
6840 break;
6842 case BUILT_IN_BCMP:
6843 case BUILT_IN_MEMCMP:
6844 case BUILT_IN_MEMCMP_EQ:
6845 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6846 if (target)
6847 return target;
6848 if (fcode == BUILT_IN_MEMCMP_EQ)
6850 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6851 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6853 break;
6855 case BUILT_IN_SETJMP:
6856 /* This should have been lowered to the builtins below. */
6857 gcc_unreachable ();
6859 case BUILT_IN_SETJMP_SETUP:
6860 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6861 and the receiver label. */
6862 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6864 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6865 VOIDmode, EXPAND_NORMAL);
6866 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6867 rtx_insn *label_r = label_rtx (label);
6869 /* This is copied from the handling of non-local gotos. */
6870 expand_builtin_setjmp_setup (buf_addr, label_r);
6871 nonlocal_goto_handler_labels
6872 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6873 nonlocal_goto_handler_labels);
6874 /* ??? Do not let expand_label treat us as such since we would
6875 not want to be both on the list of non-local labels and on
6876 the list of forced labels. */
6877 FORCED_LABEL (label) = 0;
6878 return const0_rtx;
6880 break;
6882 case BUILT_IN_SETJMP_RECEIVER:
6883 /* __builtin_setjmp_receiver is passed the receiver label. */
6884 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6886 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6887 rtx_insn *label_r = label_rtx (label);
6889 expand_builtin_setjmp_receiver (label_r);
6890 return const0_rtx;
6892 break;
6894 /* __builtin_longjmp is passed a pointer to an array of five words.
6895 It's similar to the C library longjmp function but works with
6896 __builtin_setjmp above. */
6897 case BUILT_IN_LONGJMP:
6898 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6900 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6901 VOIDmode, EXPAND_NORMAL);
6902 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6904 if (value != const1_rtx)
6906 error ("%<__builtin_longjmp%> second argument must be 1");
6907 return const0_rtx;
6910 expand_builtin_longjmp (buf_addr, value);
6911 return const0_rtx;
6913 break;
6915 case BUILT_IN_NONLOCAL_GOTO:
6916 target = expand_builtin_nonlocal_goto (exp);
6917 if (target)
6918 return target;
6919 break;
6921 /* This updates the setjmp buffer that is its argument with the value
6922 of the current stack pointer. */
6923 case BUILT_IN_UPDATE_SETJMP_BUF:
6924 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6926 rtx buf_addr
6927 = expand_normal (CALL_EXPR_ARG (exp, 0));
6929 expand_builtin_update_setjmp_buf (buf_addr);
6930 return const0_rtx;
6932 break;
6934 case BUILT_IN_TRAP:
6935 expand_builtin_trap ();
6936 return const0_rtx;
6938 case BUILT_IN_UNREACHABLE:
6939 expand_builtin_unreachable ();
6940 return const0_rtx;
6942 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6943 case BUILT_IN_SIGNBITD32:
6944 case BUILT_IN_SIGNBITD64:
6945 case BUILT_IN_SIGNBITD128:
6946 target = expand_builtin_signbit (exp, target);
6947 if (target)
6948 return target;
6949 break;
6951 /* Various hooks for the DWARF 2 __throw routine. */
6952 case BUILT_IN_UNWIND_INIT:
6953 expand_builtin_unwind_init ();
6954 return const0_rtx;
6955 case BUILT_IN_DWARF_CFA:
6956 return virtual_cfa_rtx;
6957 #ifdef DWARF2_UNWIND_INFO
6958 case BUILT_IN_DWARF_SP_COLUMN:
6959 return expand_builtin_dwarf_sp_column ();
6960 case BUILT_IN_INIT_DWARF_REG_SIZES:
6961 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6962 return const0_rtx;
6963 #endif
6964 case BUILT_IN_FROB_RETURN_ADDR:
6965 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6966 case BUILT_IN_EXTRACT_RETURN_ADDR:
6967 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6968 case BUILT_IN_EH_RETURN:
6969 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6970 CALL_EXPR_ARG (exp, 1));
6971 return const0_rtx;
6972 case BUILT_IN_EH_RETURN_DATA_REGNO:
6973 return expand_builtin_eh_return_data_regno (exp);
6974 case BUILT_IN_EXTEND_POINTER:
6975 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6976 case BUILT_IN_EH_POINTER:
6977 return expand_builtin_eh_pointer (exp);
6978 case BUILT_IN_EH_FILTER:
6979 return expand_builtin_eh_filter (exp);
6980 case BUILT_IN_EH_COPY_VALUES:
6981 return expand_builtin_eh_copy_values (exp);
6983 case BUILT_IN_VA_START:
6984 return expand_builtin_va_start (exp);
6985 case BUILT_IN_VA_END:
6986 return expand_builtin_va_end (exp);
6987 case BUILT_IN_VA_COPY:
6988 return expand_builtin_va_copy (exp);
6989 case BUILT_IN_EXPECT:
6990 return expand_builtin_expect (exp, target);
6991 case BUILT_IN_ASSUME_ALIGNED:
6992 return expand_builtin_assume_aligned (exp, target);
6993 case BUILT_IN_PREFETCH:
6994 expand_builtin_prefetch (exp);
6995 return const0_rtx;
6997 case BUILT_IN_INIT_TRAMPOLINE:
6998 return expand_builtin_init_trampoline (exp, true);
6999 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7000 return expand_builtin_init_trampoline (exp, false);
7001 case BUILT_IN_ADJUST_TRAMPOLINE:
7002 return expand_builtin_adjust_trampoline (exp);
7004 case BUILT_IN_INIT_DESCRIPTOR:
7005 return expand_builtin_init_descriptor (exp);
7006 case BUILT_IN_ADJUST_DESCRIPTOR:
7007 return expand_builtin_adjust_descriptor (exp);
7009 case BUILT_IN_FORK:
7010 case BUILT_IN_EXECL:
7011 case BUILT_IN_EXECV:
7012 case BUILT_IN_EXECLP:
7013 case BUILT_IN_EXECLE:
7014 case BUILT_IN_EXECVP:
7015 case BUILT_IN_EXECVE:
7016 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7017 if (target)
7018 return target;
7019 break;
7021 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7022 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7023 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7024 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7025 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7026 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7027 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7028 if (target)
7029 return target;
7030 break;
7032 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7033 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7034 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7035 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7036 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7037 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7038 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7039 if (target)
7040 return target;
7041 break;
7043 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7044 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7045 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7046 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7047 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7048 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7049 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7050 if (target)
7051 return target;
7052 break;
7054 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7055 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7056 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7057 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7058 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7059 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7060 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7061 if (target)
7062 return target;
7063 break;
7065 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7066 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7067 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7068 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7069 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7070 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7071 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7072 if (target)
7073 return target;
7074 break;
7076 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7077 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7078 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7079 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7080 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7081 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7082 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7083 if (target)
7084 return target;
7085 break;
7087 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7088 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7089 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7090 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7091 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7092 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7093 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7094 if (target)
7095 return target;
7096 break;
7098 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7099 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7100 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7101 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7102 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7103 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7104 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7105 if (target)
7106 return target;
7107 break;
7109 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7110 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7111 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7112 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7113 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7114 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7115 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7116 if (target)
7117 return target;
7118 break;
7120 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7121 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7122 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7123 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7124 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7125 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7126 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7127 if (target)
7128 return target;
7129 break;
7131 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7132 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7133 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7134 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7135 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7136 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7137 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7138 if (target)
7139 return target;
7140 break;
7142 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7143 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7144 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7145 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7146 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7147 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7148 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7149 if (target)
7150 return target;
7151 break;
7153 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7154 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7155 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7156 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7157 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7158 if (mode == VOIDmode)
7159 mode = TYPE_MODE (boolean_type_node);
7160 if (!target || !register_operand (target, mode))
7161 target = gen_reg_rtx (mode);
7163 mode = get_builtin_sync_mode
7164 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7165 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7166 if (target)
7167 return target;
7168 break;
7170 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7171 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7172 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7173 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7174 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7175 mode = get_builtin_sync_mode
7176 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7177 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7178 if (target)
7179 return target;
7180 break;
7182 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7183 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7184 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7185 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7186 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7187 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7188 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7189 if (target)
7190 return target;
7191 break;
7193 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7194 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7195 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7196 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7197 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7198 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7199 expand_builtin_sync_lock_release (mode, exp);
7200 return const0_rtx;
7202 case BUILT_IN_SYNC_SYNCHRONIZE:
7203 expand_builtin_sync_synchronize ();
7204 return const0_rtx;
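/* Editorial sketch (not part of builtins.c; names invented): at the source
   level the two __sync families expanded above differ only in whether the
   value before or after the operation is returned -- the bool passed to
   expand_builtin_sync_operation:

     int before = __sync_fetch_and_add (&counter, 1);  // old value (false)
     int now    = __sync_add_and_fetch (&counter, 1);  // new value (true)
     __sync_synchronize ();                            // full memory barrier
*/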
7206 case BUILT_IN_ATOMIC_EXCHANGE_1:
7207 case BUILT_IN_ATOMIC_EXCHANGE_2:
7208 case BUILT_IN_ATOMIC_EXCHANGE_4:
7209 case BUILT_IN_ATOMIC_EXCHANGE_8:
7210 case BUILT_IN_ATOMIC_EXCHANGE_16:
7211 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7212 target = expand_builtin_atomic_exchange (mode, exp, target);
7213 if (target)
7214 return target;
7215 break;
7217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7219 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7220 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7221 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7223 unsigned int nargs, z;
7224 vec<tree, va_gc> *vec;
7226 mode =
7227 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7228 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7229 if (target)
7230 return target;
7232 /* If this is turned into an external library call, the weak parameter
7233 must be dropped to match the expected parameter list. */
7234 nargs = call_expr_nargs (exp);
7235 vec_alloc (vec, nargs - 1);
7236 for (z = 0; z < 3; z++)
7237 vec->quick_push (CALL_EXPR_ARG (exp, z));
7238 /* Skip the boolean weak parameter. */
7239 for (z = 4; z < 6; z++)
7240 vec->quick_push (CALL_EXPR_ARG (exp, z));
7241 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7242 break;
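/* Editorial sketch (not part of builtins.c; T stands for the operand
   type): the builtin form carries a `weak' flag that the out-of-line
   library routine lacks, hence the rebuild above with arguments 0-2 and
   4-5 only:

     bool __atomic_compare_exchange_n (T *ptr, T *expected, T desired,
                                       bool weak, int success_order,
                                       int failure_order);   // builtin
     bool __atomic_compare_exchange_N (T *ptr, T *expected, T desired,
                                       int success_order,
                                       int failure_order);   // library
*/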
7245 case BUILT_IN_ATOMIC_LOAD_1:
7246 case BUILT_IN_ATOMIC_LOAD_2:
7247 case BUILT_IN_ATOMIC_LOAD_4:
7248 case BUILT_IN_ATOMIC_LOAD_8:
7249 case BUILT_IN_ATOMIC_LOAD_16:
7250 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7251 target = expand_builtin_atomic_load (mode, exp, target);
7252 if (target)
7253 return target;
7254 break;
7256 case BUILT_IN_ATOMIC_STORE_1:
7257 case BUILT_IN_ATOMIC_STORE_2:
7258 case BUILT_IN_ATOMIC_STORE_4:
7259 case BUILT_IN_ATOMIC_STORE_8:
7260 case BUILT_IN_ATOMIC_STORE_16:
7261 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7262 target = expand_builtin_atomic_store (mode, exp);
7263 if (target)
7264 return const0_rtx;
7265 break;
7267 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7268 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7269 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7270 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7271 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7273 enum built_in_function lib;
7274 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7275 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7276 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7277 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7278 ignore, lib);
7279 if (target)
7280 return target;
7281 break;
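/* Editorial note: when the op-and-fetch form must fall back to a library
   call, the fetch-then-op entry point computed in `lib' above suffices
   because the two are related by a simple identity, e.g.

     __atomic_add_fetch (p, v, order) == __atomic_fetch_add (p, v, order) + v
*/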
7283 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7284 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7285 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7286 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7287 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7289 enum built_in_function lib;
7290 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7291 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7292 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7293 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7294 ignore, lib);
7295 if (target)
7296 return target;
7297 break;
7299 case BUILT_IN_ATOMIC_AND_FETCH_1:
7300 case BUILT_IN_ATOMIC_AND_FETCH_2:
7301 case BUILT_IN_ATOMIC_AND_FETCH_4:
7302 case BUILT_IN_ATOMIC_AND_FETCH_8:
7303 case BUILT_IN_ATOMIC_AND_FETCH_16:
7305 enum built_in_function lib;
7306 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7307 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7308 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7309 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7310 ignore, lib);
7311 if (target)
7312 return target;
7313 break;
7315 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7316 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7317 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7318 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7319 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7321 enum built_in_function lib;
7322 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7323 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7324 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7325 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7326 ignore, lib);
7327 if (target)
7328 return target;
7329 break;
7331 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7332 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7333 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7334 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7335 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7337 enum built_in_function lib;
7338 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7339 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7340 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7341 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7342 ignore, lib);
7343 if (target)
7344 return target;
7345 break;
7347 case BUILT_IN_ATOMIC_OR_FETCH_1:
7348 case BUILT_IN_ATOMIC_OR_FETCH_2:
7349 case BUILT_IN_ATOMIC_OR_FETCH_4:
7350 case BUILT_IN_ATOMIC_OR_FETCH_8:
7351 case BUILT_IN_ATOMIC_OR_FETCH_16:
7353 enum built_in_function lib;
7354 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7355 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7356 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7357 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7358 ignore, lib);
7359 if (target)
7360 return target;
7361 break;
7363 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7364 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7365 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7366 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7367 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7368 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7369 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7370 ignore, BUILT_IN_NONE);
7371 if (target)
7372 return target;
7373 break;
7375 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7376 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7377 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7378 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7379 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7380 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7381 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7382 ignore, BUILT_IN_NONE);
7383 if (target)
7384 return target;
7385 break;
7387 case BUILT_IN_ATOMIC_FETCH_AND_1:
7388 case BUILT_IN_ATOMIC_FETCH_AND_2:
7389 case BUILT_IN_ATOMIC_FETCH_AND_4:
7390 case BUILT_IN_ATOMIC_FETCH_AND_8:
7391 case BUILT_IN_ATOMIC_FETCH_AND_16:
7392 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7393 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7394 ignore, BUILT_IN_NONE);
7395 if (target)
7396 return target;
7397 break;
7399 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7400 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7401 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7402 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7403 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7404 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7405 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7406 ignore, BUILT_IN_NONE);
7407 if (target)
7408 return target;
7409 break;
7411 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7412 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7413 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7414 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7415 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7416 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7417 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7418 ignore, BUILT_IN_NONE);
7419 if (target)
7420 return target;
7421 break;
7423 case BUILT_IN_ATOMIC_FETCH_OR_1:
7424 case BUILT_IN_ATOMIC_FETCH_OR_2:
7425 case BUILT_IN_ATOMIC_FETCH_OR_4:
7426 case BUILT_IN_ATOMIC_FETCH_OR_8:
7427 case BUILT_IN_ATOMIC_FETCH_OR_16:
7428 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7429 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7430 ignore, BUILT_IN_NONE);
7431 if (target)
7432 return target;
7433 break;
7435 case BUILT_IN_ATOMIC_TEST_AND_SET:
7436 return expand_builtin_atomic_test_and_set (exp, target);
7438 case BUILT_IN_ATOMIC_CLEAR:
7439 return expand_builtin_atomic_clear (exp);
7441 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7442 return expand_builtin_atomic_always_lock_free (exp);
7444 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7445 target = expand_builtin_atomic_is_lock_free (exp);
7446 if (target)
7447 return target;
7448 break;
7450 case BUILT_IN_ATOMIC_THREAD_FENCE:
7451 expand_builtin_atomic_thread_fence (exp);
7452 return const0_rtx;
7454 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7455 expand_builtin_atomic_signal_fence (exp);
7456 return const0_rtx;
7458 case BUILT_IN_OBJECT_SIZE:
7459 return expand_builtin_object_size (exp);
7461 case BUILT_IN_MEMCPY_CHK:
7462 case BUILT_IN_MEMPCPY_CHK:
7463 case BUILT_IN_MEMMOVE_CHK:
7464 case BUILT_IN_MEMSET_CHK:
7465 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7466 if (target)
7467 return target;
7468 break;
7470 case BUILT_IN_STRCPY_CHK:
7471 case BUILT_IN_STPCPY_CHK:
7472 case BUILT_IN_STRNCPY_CHK:
7473 case BUILT_IN_STPNCPY_CHK:
7474 case BUILT_IN_STRCAT_CHK:
7475 case BUILT_IN_STRNCAT_CHK:
7476 case BUILT_IN_SNPRINTF_CHK:
7477 case BUILT_IN_VSNPRINTF_CHK:
7478 maybe_emit_chk_warning (exp, fcode);
7479 break;
7481 case BUILT_IN_SPRINTF_CHK:
7482 case BUILT_IN_VSPRINTF_CHK:
7483 maybe_emit_sprintf_chk_warning (exp, fcode);
7484 break;
7486 case BUILT_IN_FREE:
7487 if (warn_free_nonheap_object)
7488 maybe_emit_free_warning (exp);
7489 break;
7491 case BUILT_IN_THREAD_POINTER:
7492 return expand_builtin_thread_pointer (exp, target);
7494 case BUILT_IN_SET_THREAD_POINTER:
7495 expand_builtin_set_thread_pointer (exp);
7496 return const0_rtx;
7498 case BUILT_IN_CILK_DETACH:
7499 expand_builtin_cilk_detach (exp);
7500 return const0_rtx;
7502 case BUILT_IN_CILK_POP_FRAME:
7503 expand_builtin_cilk_pop_frame (exp);
7504 return const0_rtx;
7506 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7507 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7508 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7509 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7510 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7511 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7512 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7513 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7514 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7515 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7516 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7517 /* We allow calls to the user CHKP builtins even if the Pointer
7518 Bounds Checker is off. */
7519 if (!chkp_function_instrumented_p (current_function_decl))
7521 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7522 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7523 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7524 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7525 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7526 return expand_normal (CALL_EXPR_ARG (exp, 0));
7527 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7528 return expand_normal (size_zero_node);
7529 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7530 return expand_normal (size_int (-1));
7531 else
7532 return const0_rtx;
7534 /* FALLTHROUGH */
7536 case BUILT_IN_CHKP_BNDMK:
7537 case BUILT_IN_CHKP_BNDSTX:
7538 case BUILT_IN_CHKP_BNDCL:
7539 case BUILT_IN_CHKP_BNDCU:
7540 case BUILT_IN_CHKP_BNDLDX:
7541 case BUILT_IN_CHKP_BNDRET:
7542 case BUILT_IN_CHKP_INTERSECT:
7543 case BUILT_IN_CHKP_NARROW:
7544 case BUILT_IN_CHKP_EXTRACT_LOWER:
7545 case BUILT_IN_CHKP_EXTRACT_UPPER:
7546 /* A software implementation of the Pointer Bounds Checker is not
7547 yet implemented; target support is required. */
7548 error ("Your target platform does not support -fcheck-pointer-bounds");
7549 break;
7551 case BUILT_IN_ACC_ON_DEVICE:
7552 /* Do a library call if we failed to expand the builtin when
7553 folding. */
7554 break;
7556 default: /* Just do a library call for an unknown builtin. */
7557 break;
7560 /* The switch statement above can drop through to cause the function
7561 to be called normally. */
7562 return expand_call (exp, target, ignore);
7565 /* Similar to expand_builtin but is used for instrumented calls. */
7568 expand_builtin_with_bounds (tree exp, rtx target,
7569 rtx subtarget ATTRIBUTE_UNUSED,
7570 machine_mode mode, int ignore)
7572 tree fndecl = get_callee_fndecl (exp);
7573 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7575 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7577 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7578 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7580 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7581 && fcode < END_CHKP_BUILTINS);
7583 switch (fcode)
7585 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7586 target = expand_builtin_memcpy_with_bounds (exp, target);
7587 if (target)
7588 return target;
7589 break;
7591 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7592 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7593 if (target)
7594 return target;
7595 break;
7597 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7598 target = expand_builtin_memset_with_bounds (exp, target, mode);
7599 if (target)
7600 return target;
7601 break;
7603 default:
7604 break;
7607 /* The switch statement above can drop through to cause the function
7608 to be called normally. */
7609 return expand_call (exp, target, ignore);
7612 /* Determine whether a tree node represents a call to a built-in
7613 function. If the tree T is a call to a built-in function with
7614 the right number of arguments of the appropriate types, return
7615 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7616 Otherwise the return value is END_BUILTINS. */
7618 enum built_in_function
7619 builtin_mathfn_code (const_tree t)
7621 const_tree fndecl, arg, parmlist;
7622 const_tree argtype, parmtype;
7623 const_call_expr_arg_iterator iter;
7625 if (TREE_CODE (t) != CALL_EXPR
7626 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7627 return END_BUILTINS;
7629 fndecl = get_callee_fndecl (t);
7630 if (fndecl == NULL_TREE
7631 || TREE_CODE (fndecl) != FUNCTION_DECL
7632 || ! DECL_BUILT_IN (fndecl)
7633 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7634 return END_BUILTINS;
7636 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7637 init_const_call_expr_arg_iterator (t, &iter);
7638 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7640 /* If a function doesn't take a variable number of arguments,
7641 the last element in the list will have type `void'. */
7642 parmtype = TREE_VALUE (parmlist);
7643 if (VOID_TYPE_P (parmtype))
7645 if (more_const_call_expr_args_p (&iter))
7646 return END_BUILTINS;
7647 return DECL_FUNCTION_CODE (fndecl);
7650 if (! more_const_call_expr_args_p (&iter))
7651 return END_BUILTINS;
7653 arg = next_const_call_expr_arg (&iter);
7654 argtype = TREE_TYPE (arg);
7656 if (SCALAR_FLOAT_TYPE_P (parmtype))
7658 if (! SCALAR_FLOAT_TYPE_P (argtype))
7659 return END_BUILTINS;
7661 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7663 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7664 return END_BUILTINS;
7666 else if (POINTER_TYPE_P (parmtype))
7668 if (! POINTER_TYPE_P (argtype))
7669 return END_BUILTINS;
7671 else if (INTEGRAL_TYPE_P (parmtype))
7673 if (! INTEGRAL_TYPE_P (argtype))
7674 return END_BUILTINS;
7676 else
7677 return END_BUILTINS;
7680 /* Variable-length argument list. */
7681 return DECL_FUNCTION_CODE (fndecl);
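/* Editorial example (invented calls): given the prototype
   `double sqrt (double)', builtin_mathfn_code checks each argument's
   type class against the corresponding parameter:

     sqrt (2.0)  -> BUILT_IN_SQRT  (REAL argument for REAL parameter)
     sqrt (&x)   -> END_BUILTINS   (POINTER argument for REAL parameter)
*/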
7684 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7685 evaluate to a constant. */
7687 static tree
7688 fold_builtin_constant_p (tree arg)
7690 /* We return 1 for a numeric type that's known to be a constant
7691 value at compile-time or for an aggregate type that's a
7692 literal constant. */
7693 STRIP_NOPS (arg);
7695 /* If we know this is a constant, fold to the constant 1. */
7696 if (CONSTANT_CLASS_P (arg)
7697 || (TREE_CODE (arg) == CONSTRUCTOR
7698 && TREE_CONSTANT (arg)))
7699 return integer_one_node;
7700 if (TREE_CODE (arg) == ADDR_EXPR)
7702 tree op = TREE_OPERAND (arg, 0);
7703 if (TREE_CODE (op) == STRING_CST
7704 || (TREE_CODE (op) == ARRAY_REF
7705 && integer_zerop (TREE_OPERAND (op, 1))
7706 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7707 return integer_one_node;
7710 /* If this expression has side effects, show we don't know it to be a
7711 constant. Likewise if it's a pointer or aggregate type, since in
7712 those cases we only want literals, as those are only optimized
7713 when generating RTL, not later.
7714 And finally, if we are compiling an initializer, not code, we
7715 need to return a definite result now; there's not going to be any
7716 more optimization done. */
7717 if (TREE_SIDE_EFFECTS (arg)
7718 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7719 || POINTER_TYPE_P (TREE_TYPE (arg))
7720 || cfun == 0
7721 || folding_initializer
7722 || force_folding_builtin_constant_p)
7723 return integer_zero_node;
7725 return NULL_TREE;
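/* Editorial examples (not part of builtins.c) of the folding above:  */
static int constant_p_example (int x)
{
  int a = __builtin_constant_p (42);    /* 1: constant class */
  int b = __builtin_constant_p ("ab");  /* 1: address of a STRING_CST */
  int c = __builtin_constant_p (x);     /* NULL_TREE here: decided later
                                           (0 right away if not optimizing) */
  return a + b + c;
}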
7728 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7729 return it as a truthvalue. */
7731 static tree
7732 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7733 tree predictor)
7735 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7737 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7738 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7739 ret_type = TREE_TYPE (TREE_TYPE (fn));
7740 pred_type = TREE_VALUE (arg_types);
7741 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7743 pred = fold_convert_loc (loc, pred_type, pred);
7744 expected = fold_convert_loc (loc, expected_type, expected);
7745 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7746 predictor);
7748 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7749 build_int_cst (ret_type, 0));
7752 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7753 NULL_TREE if no simplification is possible. */
7755 tree
7756 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7758 tree inner, fndecl, inner_arg0;
7759 enum tree_code code;
7761 /* Distribute the expected value over short-circuiting operators.
7762 See through the cast from truthvalue_type_node to long. */
7763 inner_arg0 = arg0;
7764 while (CONVERT_EXPR_P (inner_arg0)
7765 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7766 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7767 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7769 /* If this is a builtin_expect within a builtin_expect keep the
7770 inner one. See through a comparison against a constant. It
7771 might have been added to create a truthvalue. */
7772 inner = inner_arg0;
7774 if (COMPARISON_CLASS_P (inner)
7775 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7776 inner = TREE_OPERAND (inner, 0);
7778 if (TREE_CODE (inner) == CALL_EXPR
7779 && (fndecl = get_callee_fndecl (inner))
7780 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7781 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7782 return arg0;
7784 inner = inner_arg0;
7785 code = TREE_CODE (inner);
7786 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7788 tree op0 = TREE_OPERAND (inner, 0);
7789 tree op1 = TREE_OPERAND (inner, 1);
7791 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7792 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7793 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7795 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7798 /* If the argument isn't invariant then there's nothing else we can do. */
7799 if (!TREE_CONSTANT (inner_arg0))
7800 return NULL_TREE;
7802 /* If we expect that a comparison against the argument will fold to
7803 a constant, return the constant. In practice, this means a true
7804 constant or the address of a non-weak symbol. */
7805 inner = inner_arg0;
7806 STRIP_NOPS (inner);
7807 if (TREE_CODE (inner) == ADDR_EXPR)
7811 inner = TREE_OPERAND (inner, 0);
7813 while (TREE_CODE (inner) == COMPONENT_REF
7814 || TREE_CODE (inner) == ARRAY_REF);
7815 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7816 return NULL_TREE;
7819 /* Otherwise, ARG0 already has the proper type for the return value. */
7820 return arg0;
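/* Editorial sketch (not part of builtins.c) of the short-circuit
   distribution above, in source terms:

     __builtin_expect (a && b, 1)
   becomes roughly
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   with each half built by build_builtin_expect_predicate.  */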
7823 /* Fold a call to __builtin_classify_type with argument ARG. */
7825 static tree
7826 fold_builtin_classify_type (tree arg)
7828 if (arg == 0)
7829 return build_int_cst (integer_type_node, no_type_class);
7831 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7834 /* Fold a call to __builtin_strlen with argument ARG. */
7836 static tree
7837 fold_builtin_strlen (location_t loc, tree type, tree arg)
7839 if (!validate_arg (arg, POINTER_TYPE))
7840 return NULL_TREE;
7841 else
7843 tree len = c_strlen (arg, 0);
7845 if (len)
7846 return fold_convert_loc (loc, type, len);
7848 return NULL_TREE;
7852 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7854 static tree
7855 fold_builtin_inf (location_t loc, tree type, int warn)
7857 REAL_VALUE_TYPE real;
7859 /* __builtin_inff is intended to be usable to define INFINITY on all
7860 targets. If an infinity is not available, INFINITY expands "to a
7861 positive constant of type float that overflows at translation
7862 time", footnote "In this case, using INFINITY will violate the
7863 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7864 Thus we pedwarn to ensure this constraint violation is
7865 diagnosed. */
7866 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7867 pedwarn (loc, 0, "target format does not support infinity");
7869 real_inf (&real);
7870 return build_real (type, real);
7873 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7874 NULL_TREE if no simplification can be made. */
7876 static tree
7877 fold_builtin_sincos (location_t loc,
7878 tree arg0, tree arg1, tree arg2)
7880 tree type;
7881 tree fndecl, call = NULL_TREE;
7883 if (!validate_arg (arg0, REAL_TYPE)
7884 || !validate_arg (arg1, POINTER_TYPE)
7885 || !validate_arg (arg2, POINTER_TYPE))
7886 return NULL_TREE;
7888 type = TREE_TYPE (arg0);
7890 /* Calculate the result when the argument is a constant. */
7891 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7892 if (fn == END_BUILTINS)
7893 return NULL_TREE;
7895 /* Canonicalize sincos to cexpi. */
7896 if (TREE_CODE (arg0) == REAL_CST)
7898 tree complex_type = build_complex_type (type);
7899 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7901 if (!call)
7903 if (!targetm.libc_has_function (function_c99_math_complex)
7904 || !builtin_decl_implicit_p (fn))
7905 return NULL_TREE;
7906 fndecl = builtin_decl_explicit (fn);
7907 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7908 call = builtin_save_expr (call);
7911 return build2 (COMPOUND_EXPR, void_type_node,
7912 build2 (MODIFY_EXPR, void_type_node,
7913 build_fold_indirect_ref_loc (loc, arg1),
7914 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7915 build2 (MODIFY_EXPR, void_type_node,
7916 build_fold_indirect_ref_loc (loc, arg2),
7917 fold_build1_loc (loc, REALPART_EXPR, type, call)));
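/* Editorial sketch (not part of builtins.c), assuming a glibc-style
   sincos; the canonicalization above rewrites it via cexpi:  */
extern void sincos (double, double *, double *);
static void sincos_example (double x, double *s, double *c)
{
  /* Becomes: tmp = __builtin_cexpi (x); *s = __imag__ tmp;
     *c = __real__ tmp; since cexpi (x) == cos (x) + i*sin (x).  */
  sincos (x, s, c);
}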
7920 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7921 Return NULL_TREE if no simplification can be made. */
7923 static tree
7924 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7926 if (!validate_arg (arg1, POINTER_TYPE)
7927 || !validate_arg (arg2, POINTER_TYPE)
7928 || !validate_arg (len, INTEGER_TYPE))
7929 return NULL_TREE;
7931 /* If the LEN parameter is zero, return zero. */
7932 if (integer_zerop (len))
7933 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7934 arg1, arg2);
7936 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7937 if (operand_equal_p (arg1, arg2, 0))
7938 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7940 /* If the LEN parameter is one, return an expression corresponding to
7941 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7942 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7944 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7945 tree cst_uchar_ptr_node
7946 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7948 tree ind1
7949 = fold_convert_loc (loc, integer_type_node,
7950 build1 (INDIRECT_REF, cst_uchar_node,
7951 fold_convert_loc (loc,
7952 cst_uchar_ptr_node,
7953 arg1)));
7954 tree ind2
7955 = fold_convert_loc (loc, integer_type_node,
7956 build1 (INDIRECT_REF, cst_uchar_node,
7957 fold_convert_loc (loc,
7958 cst_uchar_ptr_node,
7959 arg2)));
7960 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7963 return NULL_TREE;
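/* Editorial examples (not part of builtins.c) of the foldings above,
   using the builtin spelling to avoid header dependencies:  */
static int memcmp_example (const void *p, const void *q)
{
  int a = __builtin_memcmp (p, q, 0);  /* -> 0: zero length */
  int b = __builtin_memcmp (p, p, 8);  /* -> 0: identical operands */
  int c = __builtin_memcmp (p, q, 1);  /* -> single-byte difference */
  return a + b + c;
}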
7966 /* Fold a call to builtin isascii with argument ARG. */
7968 static tree
7969 fold_builtin_isascii (location_t loc, tree arg)
7971 if (!validate_arg (arg, INTEGER_TYPE))
7972 return NULL_TREE;
7973 else
7975 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7976 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7977 build_int_cst (integer_type_node,
7978 ~ (unsigned HOST_WIDE_INT) 0x7f));
7979 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7980 arg, integer_zero_node);
7984 /* Fold a call to builtin toascii with argument ARG. */
7986 static tree
7987 fold_builtin_toascii (location_t loc, tree arg)
7989 if (!validate_arg (arg, INTEGER_TYPE))
7990 return NULL_TREE;
7992 /* Transform toascii(c) -> (c & 0x7f). */
7993 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7994 build_int_cst (integer_type_node, 0x7f));
7997 /* Fold a call to builtin isdigit with argument ARG. */
7999 static tree
8000 fold_builtin_isdigit (location_t loc, tree arg)
8002 if (!validate_arg (arg, INTEGER_TYPE))
8003 return NULL_TREE;
8004 else
8006 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8007 /* According to the C standard, isdigit is unaffected by locale.
8008 However, it definitely is affected by the target character set. */
8009 unsigned HOST_WIDE_INT target_digit0
8010 = lang_hooks.to_target_charset ('0');
8012 if (target_digit0 == 0)
8013 return NULL_TREE;
8015 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8016 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8017 build_int_cst (unsigned_type_node, target_digit0));
8018 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8019 build_int_cst (unsigned_type_node, 9));
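/* Editorial summary (not part of builtins.c) of the ctype transforms
   above, using the builtin spellings to avoid header dependencies:  */
static int ctype_example (int c)
{
  int a = __builtin_isascii (c);  /* -> (c & ~0x7f) == 0 */
  int t = __builtin_toascii (c);  /* -> c & 0x7f */
  int d = __builtin_isdigit (c);  /* -> (unsigned) c - '0' <= 9 */
  return a + t + d;
}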
8023 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8025 static tree
8026 fold_builtin_fabs (location_t loc, tree arg, tree type)
8028 if (!validate_arg (arg, REAL_TYPE))
8029 return NULL_TREE;
8031 arg = fold_convert_loc (loc, type, arg);
8032 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8035 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8037 static tree
8038 fold_builtin_abs (location_t loc, tree arg, tree type)
8040 if (!validate_arg (arg, INTEGER_TYPE))
8041 return NULL_TREE;
8043 arg = fold_convert_loc (loc, type, arg);
8044 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8047 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8049 static tree
8050 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8052 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8053 if (validate_arg (arg0, REAL_TYPE)
8054 && validate_arg (arg1, REAL_TYPE)
8055 && validate_arg (arg2, REAL_TYPE)
8056 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8057 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8059 return NULL_TREE;
8062 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8064 static tree
8065 fold_builtin_carg (location_t loc, tree arg, tree type)
8067 if (validate_arg (arg, COMPLEX_TYPE)
8068 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8070 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8072 if (atan2_fn)
8074 tree new_arg = builtin_save_expr (arg);
8075 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8076 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8077 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8081 return NULL_TREE;
8084 /* Fold a call to builtin frexp, we can assume the base is 2. */
8086 static tree
8087 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8089 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8090 return NULL_TREE;
8092 STRIP_NOPS (arg0);
8094 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8095 return NULL_TREE;
8097 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8099 /* Proceed if a valid pointer type was passed in. */
8100 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8102 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8103 tree frac, exp;
8105 switch (value->cl)
8107 case rvc_zero:
8108 /* For +-0, return (*exp = 0, +-0). */
8109 exp = integer_zero_node;
8110 frac = arg0;
8111 break;
8112 case rvc_nan:
8113 case rvc_inf:
8114 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8115 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8116 case rvc_normal:
8118 /* Since the frexp function always expects base 2, and in
8119 GCC normalized significands are already in the range
8120 [0.5, 1.0), we have exactly what frexp wants. */
8121 REAL_VALUE_TYPE frac_rvt = *value;
8122 SET_REAL_EXP (&frac_rvt, 0);
8123 frac = build_real (rettype, frac_rvt);
8124 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8126 break;
8127 default:
8128 gcc_unreachable ();
8131 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8132 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8133 TREE_SIDE_EFFECTS (arg1) = 1;
8134 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8137 return NULL_TREE;
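/* Editorial example (not part of builtins.c): with significands
   normalized to [0.5, 1.0), 8.0 == 0.5 * 2^4, so this folds to 0.5
   with *e set to 4:  */
static double frexp_example (int *e)
{
  return __builtin_frexp (8.0, e);
}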
8140 /* Fold a call to builtin modf. */
8142 static tree
8143 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8145 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8146 return NULL_TREE;
8148 STRIP_NOPS (arg0);
8150 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8151 return NULL_TREE;
8153 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8155 /* Proceed if a valid pointer type was passed in. */
8156 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8158 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8159 REAL_VALUE_TYPE trunc, frac;
8161 switch (value->cl)
8163 case rvc_nan:
8164 case rvc_zero:
8165 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8166 trunc = frac = *value;
8167 break;
8168 case rvc_inf:
8169 /* For +-Inf, return (*arg1 = arg0, +-0). */
8170 frac = dconst0;
8171 frac.sign = value->sign;
8172 trunc = *value;
8173 break;
8174 case rvc_normal:
8175 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8176 real_trunc (&trunc, VOIDmode, value);
8177 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8178 /* If the original number was negative and already
8179 integral, then the fractional part is -0.0. */
8180 if (value->sign && frac.cl == rvc_zero)
8181 frac.sign = value->sign;
8182 break;
8185 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8186 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8187 build_real (rettype, trunc));
8188 TREE_SIDE_EFFECTS (arg1) = 1;
8189 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8190 build_real (rettype, frac));
8193 return NULL_TREE;
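/* Editorial example (not part of builtins.c): folds to the fractional
   part -0.5 with *i set to -3.0; for an already-integral negative
   argument the fraction would be -0.0:  */
static double modf_example (double *i)
{
  return __builtin_modf (-3.5, i);
}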
8196 /* Given a location LOC, an interclass builtin function decl FNDECL
8197 and its single argument ARG, return a folded expression computing
8198 the same, or NULL_TREE if we either couldn't or didn't want to fold
8199 (the latter happens if there's an RTL instruction available). */
8201 static tree
8202 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8204 machine_mode mode;
8206 if (!validate_arg (arg, REAL_TYPE))
8207 return NULL_TREE;
8209 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8210 return NULL_TREE;
8212 mode = TYPE_MODE (TREE_TYPE (arg));
8214 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8216 /* If there is no optab, try generic code. */
8217 switch (DECL_FUNCTION_CODE (fndecl))
8219 tree result;
8221 CASE_FLT_FN (BUILT_IN_ISINF):
8223 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8224 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8225 tree type = TREE_TYPE (arg);
8226 REAL_VALUE_TYPE r;
8227 char buf[128];
8229 if (is_ibm_extended)
8231 /* NaN and Inf are encoded in the high-order double value
8232 only. The low-order value is not significant. */
8233 type = double_type_node;
8234 mode = DFmode;
8235 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8237 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8238 real_from_string (&r, buf);
8239 result = build_call_expr (isgr_fn, 2,
8240 fold_build1_loc (loc, ABS_EXPR, type, arg),
8241 build_real (type, r));
8242 return result;
8244 CASE_FLT_FN (BUILT_IN_FINITE):
8245 case BUILT_IN_ISFINITE:
8247 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8248 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8249 tree type = TREE_TYPE (arg);
8250 REAL_VALUE_TYPE r;
8251 char buf[128];
8253 if (is_ibm_extended)
8255 /* NaN and Inf are encoded in the high-order double value
8256 only. The low-order value is not significant. */
8257 type = double_type_node;
8258 mode = DFmode;
8259 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8261 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8262 real_from_string (&r, buf);
8263 result = build_call_expr (isle_fn, 2,
8264 fold_build1_loc (loc, ABS_EXPR, type, arg),
8265 build_real (type, r));
8266 /*result = fold_build2_loc (loc, UNGT_EXPR,
8267 TREE_TYPE (TREE_TYPE (fndecl)),
8268 fold_build1_loc (loc, ABS_EXPR, type, arg),
8269 build_real (type, r));
8270 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8271 TREE_TYPE (TREE_TYPE (fndecl)),
8272 result);*/
8273 return result;
8275 case BUILT_IN_ISNORMAL:
8277 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8278 islessequal(fabs(x),DBL_MAX). */
8279 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8280 tree type = TREE_TYPE (arg);
8281 tree orig_arg, max_exp, min_exp;
8282 machine_mode orig_mode = mode;
8283 REAL_VALUE_TYPE rmax, rmin;
8284 char buf[128];
8286 orig_arg = arg = builtin_save_expr (arg);
8287 if (is_ibm_extended)
8289 /* Use double to test the normal range of IBM extended
8290 precision. Emin for IBM extended precision is
8291 different to emin for IEEE double, being 53 higher
8292 since the low double exponent is at least 53 lower
8293 than the high double exponent. */
8294 type = double_type_node;
8295 mode = DFmode;
8296 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8298 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8300 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8301 real_from_string (&rmax, buf);
8302 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8303 real_from_string (&rmin, buf);
8304 max_exp = build_real (type, rmax);
8305 min_exp = build_real (type, rmin);
8307 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8308 if (is_ibm_extended)
8310 /* Testing the high end of the range is done just using
8311 the high double, using the same test as isfinite().
8312 For the subnormal end of the range we first test the
8313 high double, then if its magnitude is equal to the
8314 limit of 0x1p-969, we test whether the low double is
8315 non-zero and opposite sign to the high double. */
8316 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8317 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8318 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8319 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8320 arg, min_exp);
8321 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8322 complex_double_type_node, orig_arg);
8323 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8324 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8325 tree zero = build_real (type, dconst0);
8326 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8327 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8328 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8329 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8330 fold_build3 (COND_EXPR,
8331 integer_type_node,
8332 hilt, logt, lolt));
8333 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8334 eq_min, ok_lo);
8335 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8336 gt_min, eq_min);
8338 else
8340 tree const isge_fn
8341 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8342 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8344 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8345 max_exp, min_exp);
8346 return result;
8348 default:
8349 break;
8352 return NULL_TREE;
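/* Editorial sketch (not part of builtins.c) of the generic rewrites
   above, with MAX/MIN standing for the mode's largest finite and
   smallest normal values from get_max_float and the emin string:  */
static int interclass_example (double x)
{
  int i = __builtin_isinf (x);     /* isgreater (fabs (x), MAX) */
  int f = __builtin_isfinite (x);  /* islessequal (fabs (x), MAX) */
  int n = __builtin_isnormal (x);  /* islessequal (fabs (x), MAX)
                                      & isgreaterequal (fabs (x), MIN) */
  return i + f + n;
}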
8355 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8356 ARG is the argument for the call. */
8358 static tree
8359 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8361 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8363 if (!validate_arg (arg, REAL_TYPE))
8364 return NULL_TREE;
8366 switch (builtin_index)
8368 case BUILT_IN_ISINF:
8369 if (!HONOR_INFINITIES (arg))
8370 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8372 return NULL_TREE;
8374 case BUILT_IN_ISINF_SIGN:
8376 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8377 /* In a boolean context, GCC will fold the inner COND_EXPR to
8378 1. So e.g. "if (isinf_sign(x))" would be folded to just
8379 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8380 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8381 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8382 tree tmp = NULL_TREE;
8384 arg = builtin_save_expr (arg);
8386 if (signbit_fn && isinf_fn)
8388 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8389 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8391 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8392 signbit_call, integer_zero_node);
8393 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8394 isinf_call, integer_zero_node);
8396 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8397 integer_minus_one_node, integer_one_node);
8398 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8399 isinf_call, tmp,
8400 integer_zero_node);
8403 return tmp;
8406 case BUILT_IN_ISFINITE:
8407 if (!HONOR_NANS (arg)
8408 && !HONOR_INFINITIES (arg))
8409 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8411 return NULL_TREE;
8413 case BUILT_IN_ISNAN:
8414 if (!HONOR_NANS (arg))
8415 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8418 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8419 if (is_ibm_extended)
8421 /* NaN and Inf are encoded in the high-order double value
8422 only. The low-order value is not significant. */
8423 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8426 arg = builtin_save_expr (arg);
8427 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8429 default:
8430 gcc_unreachable ();
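/* Editorial sketch (not part of builtins.c): with NaNs honored the
   fallback above is an unordered self-comparison:  */
static int isnan_example (double x)
{
  return __builtin_isnan (x);   /* ~ __builtin_isunordered (x, x) */
}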
8434 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8435 This builtin will generate code to return the appropriate floating
8436 point classification depending on the value of the floating point
8437 number passed in. The possible return values must be supplied as
8438 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8439 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8440 one floating point argument which is "type generic". */
8442 static tree
8443 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8445 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8446 arg, type, res, tmp;
8447 machine_mode mode;
8448 REAL_VALUE_TYPE r;
8449 char buf[128];
8451 /* Verify the required arguments in the original call. */
8452 if (nargs != 6
8453 || !validate_arg (args[0], INTEGER_TYPE)
8454 || !validate_arg (args[1], INTEGER_TYPE)
8455 || !validate_arg (args[2], INTEGER_TYPE)
8456 || !validate_arg (args[3], INTEGER_TYPE)
8457 || !validate_arg (args[4], INTEGER_TYPE)
8458 || !validate_arg (args[5], REAL_TYPE))
8459 return NULL_TREE;
8461 fp_nan = args[0];
8462 fp_infinite = args[1];
8463 fp_normal = args[2];
8464 fp_subnormal = args[3];
8465 fp_zero = args[4];
8466 arg = args[5];
8467 type = TREE_TYPE (arg);
8468 mode = TYPE_MODE (type);
8469 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8471 /* fpclassify(x) ->
8472 isnan(x) ? FP_NAN :
8473 (fabs(x) == Inf ? FP_INFINITE :
8474 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8475 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8477 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8478 build_real (type, dconst0));
8479 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8480 tmp, fp_zero, fp_subnormal);
8482 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8483 real_from_string (&r, buf);
8484 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8485 arg, build_real (type, r));
8486 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8488 if (HONOR_INFINITIES (mode))
8490 real_inf (&r);
8491 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8492 build_real (type, r));
8493 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8494 fp_infinite, res);
8497 if (HONOR_NANS (mode))
8499 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8500 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8503 return res;
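/* Editorial note: for IEEE double the expression built above amounts to

     !__builtin_isunordered (x, x)
       ? (__builtin_fabs (x) == __builtin_inf () ? FP_INFINITE
          : __builtin_fabs (x) >= 0x1p-1022      ? FP_NORMAL
          : x == 0.0                             ? FP_ZERO
          :                                        FP_SUBNORMAL)
       : FP_NAN

   where the FP_* values are the caller-supplied integer arguments.  */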
8506 /* Fold a call to an unordered comparison function such as
8507 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8508 being called and ARG0 and ARG1 are the arguments for the call.
8509 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8510 the opposite of the desired result. UNORDERED_CODE is used
8511 for modes that can hold NaNs and ORDERED_CODE is used for
8512 the rest. */
8514 static tree
8515 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8516 enum tree_code unordered_code,
8517 enum tree_code ordered_code)
8519 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8520 enum tree_code code;
8521 tree type0, type1;
8522 enum tree_code code0, code1;
8523 tree cmp_type = NULL_TREE;
8525 type0 = TREE_TYPE (arg0);
8526 type1 = TREE_TYPE (arg1);
8528 code0 = TREE_CODE (type0);
8529 code1 = TREE_CODE (type1);
8531 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8532 /* Choose the wider of two real types. */
8533 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8534 ? type0 : type1;
8535 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8536 cmp_type = type0;
8537 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8538 cmp_type = type1;
8540 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8541 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8543 if (unordered_code == UNORDERED_EXPR)
8545 if (!HONOR_NANS (arg0))
8546 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8547 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8550 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8551 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8552 fold_build2_loc (loc, code, type, arg0, arg1));
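/* Editorial examples (not part of builtins.c): with NaNs honored the
   calls fold to negated unordered comparisons; with them ignored, to
   plain ordered ones:  */
static int unordered_cmp_example (double x, double y)
{
  int g = __builtin_isgreater (x, y);    /* -> !(x UNLE y) */
  int l = __builtin_isless (x, y);       /* -> !(x UNGE y) */
  int u = __builtin_isunordered (x, y);  /* -> x UNORDERED y, or 0 if
                                            NaNs cannot occur */
  return g + l + u;
}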
8555 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8556 arithmetic if it can never overflow, or into internal functions that
8557 return both the arithmetic result and an overflow flag in
8558 a complex integer result, or some other check for overflow.
8559 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8560 checking part of that. */
8562 static tree
8563 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8564 tree arg0, tree arg1, tree arg2)
8566 enum internal_fn ifn = IFN_LAST;
8567 /* The code of the expression corresponding to the type-generic
8568 built-in, or ERROR_MARK for the type-specific ones. */
8569 enum tree_code opcode = ERROR_MARK;
8570 bool ovf_only = false;
8572 switch (fcode)
8574 case BUILT_IN_ADD_OVERFLOW_P:
8575 ovf_only = true;
8576 /* FALLTHRU */
8577 case BUILT_IN_ADD_OVERFLOW:
8578 opcode = PLUS_EXPR;
8579 /* FALLTHRU */
8580 case BUILT_IN_SADD_OVERFLOW:
8581 case BUILT_IN_SADDL_OVERFLOW:
8582 case BUILT_IN_SADDLL_OVERFLOW:
8583 case BUILT_IN_UADD_OVERFLOW:
8584 case BUILT_IN_UADDL_OVERFLOW:
8585 case BUILT_IN_UADDLL_OVERFLOW:
8586 ifn = IFN_ADD_OVERFLOW;
8587 break;
8588 case BUILT_IN_SUB_OVERFLOW_P:
8589 ovf_only = true;
8590 /* FALLTHRU */
8591 case BUILT_IN_SUB_OVERFLOW:
8592 opcode = MINUS_EXPR;
8593 /* FALLTHRU */
8594 case BUILT_IN_SSUB_OVERFLOW:
8595 case BUILT_IN_SSUBL_OVERFLOW:
8596 case BUILT_IN_SSUBLL_OVERFLOW:
8597 case BUILT_IN_USUB_OVERFLOW:
8598 case BUILT_IN_USUBL_OVERFLOW:
8599 case BUILT_IN_USUBLL_OVERFLOW:
8600 ifn = IFN_SUB_OVERFLOW;
8601 break;
8602 case BUILT_IN_MUL_OVERFLOW_P:
8603 ovf_only = true;
8604 /* FALLTHRU */
8605 case BUILT_IN_MUL_OVERFLOW:
8606 opcode = MULT_EXPR;
8607 /* FALLTHRU */
8608 case BUILT_IN_SMUL_OVERFLOW:
8609 case BUILT_IN_SMULL_OVERFLOW:
8610 case BUILT_IN_SMULLL_OVERFLOW:
8611 case BUILT_IN_UMUL_OVERFLOW:
8612 case BUILT_IN_UMULL_OVERFLOW:
8613 case BUILT_IN_UMULLL_OVERFLOW:
8614 ifn = IFN_MUL_OVERFLOW;
8615 break;
8616 default:
8617 gcc_unreachable ();
8620 /* For the "generic" overloads, the first two arguments can have different
8621 types and the last argument determines the target type to use to check
8622 for overflow. The arguments of the other overloads all have the same
8623 type. */
8624 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8626 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8627 arguments are constant, attempt to fold the built-in call into a constant
8628 expression indicating whether or not it detected an overflow. */
8629 if (ovf_only
8630 && TREE_CODE (arg0) == INTEGER_CST
8631 && TREE_CODE (arg1) == INTEGER_CST)
8632 /* Perform the computation in the target type and check for overflow. */
8633 return omit_one_operand_loc (loc, boolean_type_node,
8634 arith_overflowed_p (opcode, type, arg0, arg1)
8635 ? boolean_true_node : boolean_false_node,
8636 arg2);
8638 tree ctype = build_complex_type (type);
8639 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8640 2, arg0, arg1);
8641 tree tgt = save_expr (call);
8642 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8643 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8644 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8646 if (ovf_only)
8647 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8649 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8650 tree store
8651 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8652 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
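/* Editorial sketch (not part of builtins.c): the lowering above yields,
   conceptually, a complex temporary whose real part is the result and
   whose imaginary part is the overflow flag:  */
static _Bool overflow_example (int a, int b, int *res)
{
  /* ~ t = IFN_ADD_OVERFLOW (a, b); *res = REALPART (t);
     return (bool) IMAGPART (t);  */
  return __builtin_add_overflow (a, b, res);
}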
8655 /* Fold a call to __builtin_FILE to a constant string. */
8657 static inline tree
8658 fold_builtin_FILE (location_t loc)
8660 if (const char *fname = LOCATION_FILE (loc))
8661 return build_string_literal (strlen (fname) + 1, fname);
8663 return build_string_literal (1, "");
8666 /* Fold a call to __builtin_FUNCTION to a constant string. */
8668 static inline tree
8669 fold_builtin_FUNCTION ()
8671 if (current_function_decl)
8673 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8674 return build_string_literal (strlen (name) + 1, name);
8677 return build_string_literal (1, "");
8680 /* Fold a call to __builtin_LINE to an integer constant. */
8682 static inline tree
8683 fold_builtin_LINE (location_t loc, tree type)
8685 return build_int_cst (type, LOCATION_LINE (loc));
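/* Editorial examples (not part of builtins.c): all three fold at the
   location LOC of the call itself:  */
static const char *location_example (void)
{
  int line = __builtin_LINE ();          /* this line's number */
  const char *file = __builtin_FILE ();  /* this file's name */
  (void) line; (void) file;
  return __builtin_FUNCTION ();          /* "location_example" */
}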
8688 /* Fold a call to built-in function FNDECL with 0 arguments.
8689 This function returns NULL_TREE if no simplification was possible. */
8691 static tree
8692 fold_builtin_0 (location_t loc, tree fndecl)
8694 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8695 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8696 switch (fcode)
8698 case BUILT_IN_FILE:
8699 return fold_builtin_FILE (loc);
8701 case BUILT_IN_FUNCTION:
8702 return fold_builtin_FUNCTION ();
8704 case BUILT_IN_LINE:
8705 return fold_builtin_LINE (loc, type);
8707 CASE_FLT_FN (BUILT_IN_INF):
8708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8709 case BUILT_IN_INFD32:
8710 case BUILT_IN_INFD64:
8711 case BUILT_IN_INFD128:
8712 return fold_builtin_inf (loc, type, true);
8714 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8715 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8716 return fold_builtin_inf (loc, type, false);
8718 case BUILT_IN_CLASSIFY_TYPE:
8719 return fold_builtin_classify_type (NULL_TREE);
8721 default:
8722 break;
8724 return NULL_TREE;
8727 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8728 This function returns NULL_TREE if no simplification was possible. */
8730 static tree
8731 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8734 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8736 if (TREE_CODE (arg0) == ERROR_MARK)
8737 return NULL_TREE;
8739 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8740 return ret;
8742 switch (fcode)
8744 case BUILT_IN_CONSTANT_P:
8746 tree val = fold_builtin_constant_p (arg0);
8748 /* Gimplification will pull the CALL_EXPR for the builtin out of
8749 an if condition. When not optimizing, we'll not CSE it back.
8750 To avoid regressions such as link errors, return false now. */
8751 if (!val && !optimize)
8752 val = integer_zero_node;
8754 return val;
8757 case BUILT_IN_CLASSIFY_TYPE:
8758 return fold_builtin_classify_type (arg0);
8760 case BUILT_IN_STRLEN:
8761 return fold_builtin_strlen (loc, type, arg0);
8763 CASE_FLT_FN (BUILT_IN_FABS):
8764 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8765 case BUILT_IN_FABSD32:
8766 case BUILT_IN_FABSD64:
8767 case BUILT_IN_FABSD128:
8768 return fold_builtin_fabs (loc, arg0, type);
8770 case BUILT_IN_ABS:
8771 case BUILT_IN_LABS:
8772 case BUILT_IN_LLABS:
8773 case BUILT_IN_IMAXABS:
8774 return fold_builtin_abs (loc, arg0, type);
8776 CASE_FLT_FN (BUILT_IN_CONJ):
8777 if (validate_arg (arg0, COMPLEX_TYPE)
8778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8779 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8780 break;
8782 CASE_FLT_FN (BUILT_IN_CREAL):
8783 if (validate_arg (arg0, COMPLEX_TYPE)
8784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8785 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8786 break;
8788 CASE_FLT_FN (BUILT_IN_CIMAG):
8789 if (validate_arg (arg0, COMPLEX_TYPE)
8790 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8791 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8792 break;
8794 CASE_FLT_FN (BUILT_IN_CARG):
8795 return fold_builtin_carg (loc, arg0, type);
8797 case BUILT_IN_ISASCII:
8798 return fold_builtin_isascii (loc, arg0);
8800 case BUILT_IN_TOASCII:
8801 return fold_builtin_toascii (loc, arg0);
8803 case BUILT_IN_ISDIGIT:
8804 return fold_builtin_isdigit (loc, arg0);
8806 CASE_FLT_FN (BUILT_IN_FINITE):
8807 case BUILT_IN_FINITED32:
8808 case BUILT_IN_FINITED64:
8809 case BUILT_IN_FINITED128:
8810 case BUILT_IN_ISFINITE:
8812 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8813 if (ret)
8814 return ret;
8815 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8818 CASE_FLT_FN (BUILT_IN_ISINF):
8819 case BUILT_IN_ISINFD32:
8820 case BUILT_IN_ISINFD64:
8821 case BUILT_IN_ISINFD128:
8823 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8824 if (ret)
8825 return ret;
8826 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8829 case BUILT_IN_ISNORMAL:
8830 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8832 case BUILT_IN_ISINF_SIGN:
8833 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8835 CASE_FLT_FN (BUILT_IN_ISNAN):
8836 case BUILT_IN_ISNAND32:
8837 case BUILT_IN_ISNAND64:
8838 case BUILT_IN_ISNAND128:
8839 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8841 case BUILT_IN_FREE:
8842 if (integer_zerop (arg0))
8843 return build_empty_stmt (loc);
8844 break;
8846 default:
8847 break;
8850 return NULL_TREE;
8854 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8855 This function returns NULL_TREE if no simplification was possible. */
8857 static tree
8858 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8860 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8861 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8863 if (TREE_CODE (arg0) == ERROR_MARK
8864 || TREE_CODE (arg1) == ERROR_MARK)
8865 return NULL_TREE;
8867 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8868 return ret;
8870 switch (fcode)
8872 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8873 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8874 if (validate_arg (arg0, REAL_TYPE)
8875 && validate_arg (arg1, POINTER_TYPE))
8876 return do_mpfr_lgamma_r (arg0, arg1, type);
8877 break;
8879 CASE_FLT_FN (BUILT_IN_FREXP):
8880 return fold_builtin_frexp (loc, arg0, arg1, type);
8882 CASE_FLT_FN (BUILT_IN_MODF):
8883 return fold_builtin_modf (loc, arg0, arg1, type);
8885 case BUILT_IN_STRSPN:
8886 return fold_builtin_strspn (loc, arg0, arg1);
8888 case BUILT_IN_STRCSPN:
8889 return fold_builtin_strcspn (loc, arg0, arg1);
8891 case BUILT_IN_STRPBRK:
8892 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8894 case BUILT_IN_EXPECT:
8895 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8897 case BUILT_IN_ISGREATER:
8898 return fold_builtin_unordered_cmp (loc, fndecl,
8899 arg0, arg1, UNLE_EXPR, LE_EXPR);
8900 case BUILT_IN_ISGREATEREQUAL:
8901 return fold_builtin_unordered_cmp (loc, fndecl,
8902 arg0, arg1, UNLT_EXPR, LT_EXPR);
8903 case BUILT_IN_ISLESS:
8904 return fold_builtin_unordered_cmp (loc, fndecl,
8905 arg0, arg1, UNGE_EXPR, GE_EXPR);
8906 case BUILT_IN_ISLESSEQUAL:
8907 return fold_builtin_unordered_cmp (loc, fndecl,
8908 arg0, arg1, UNGT_EXPR, GT_EXPR);
8909 case BUILT_IN_ISLESSGREATER:
8910 return fold_builtin_unordered_cmp (loc, fndecl,
8911 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8912 case BUILT_IN_ISUNORDERED:
8913 return fold_builtin_unordered_cmp (loc, fndecl,
8914 arg0, arg1, UNORDERED_EXPR,
8915 NOP_EXPR);
8917 /* We do the folding for va_start in the expander. */
8918 case BUILT_IN_VA_START:
8919 break;
8921 case BUILT_IN_OBJECT_SIZE:
8922 return fold_builtin_object_size (arg0, arg1);
8924 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8925 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8927 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8928 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8930 default:
8931 break;
8933 return NULL_TREE;
8936 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8937 and ARG2.
8938 This function returns NULL_TREE if no simplification was possible. */
8940 static tree
8941 fold_builtin_3 (location_t loc, tree fndecl,
8942 tree arg0, tree arg1, tree arg2)
8944 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8945 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8947 if (TREE_CODE (arg0) == ERROR_MARK
8948 || TREE_CODE (arg1) == ERROR_MARK
8949 || TREE_CODE (arg2) == ERROR_MARK)
8950 return NULL_TREE;
8952 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8953 arg0, arg1, arg2))
8954 return ret;
8956 switch (fcode)
8959 CASE_FLT_FN (BUILT_IN_SINCOS):
8960 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8962 CASE_FLT_FN (BUILT_IN_FMA):
8963 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8965 CASE_FLT_FN (BUILT_IN_REMQUO):
8966 if (validate_arg (arg0, REAL_TYPE)
8967 && validate_arg (arg1, REAL_TYPE)
8968 && validate_arg (arg2, POINTER_TYPE))
8969 return do_mpfr_remquo (arg0, arg1, arg2);
8970 break;
8972 case BUILT_IN_BCMP:
8973 case BUILT_IN_MEMCMP:
8974 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8976 case BUILT_IN_EXPECT:
8977 return fold_builtin_expect (loc, arg0, arg1, arg2);
8979 case BUILT_IN_ADD_OVERFLOW:
8980 case BUILT_IN_SUB_OVERFLOW:
8981 case BUILT_IN_MUL_OVERFLOW:
8982 case BUILT_IN_ADD_OVERFLOW_P:
8983 case BUILT_IN_SUB_OVERFLOW_P:
8984 case BUILT_IN_MUL_OVERFLOW_P:
8985 case BUILT_IN_SADD_OVERFLOW:
8986 case BUILT_IN_SADDL_OVERFLOW:
8987 case BUILT_IN_SADDLL_OVERFLOW:
8988 case BUILT_IN_SSUB_OVERFLOW:
8989 case BUILT_IN_SSUBL_OVERFLOW:
8990 case BUILT_IN_SSUBLL_OVERFLOW:
8991 case BUILT_IN_SMUL_OVERFLOW:
8992 case BUILT_IN_SMULL_OVERFLOW:
8993 case BUILT_IN_SMULLL_OVERFLOW:
8994 case BUILT_IN_UADD_OVERFLOW:
8995 case BUILT_IN_UADDL_OVERFLOW:
8996 case BUILT_IN_UADDLL_OVERFLOW:
8997 case BUILT_IN_USUB_OVERFLOW:
8998 case BUILT_IN_USUBL_OVERFLOW:
8999 case BUILT_IN_USUBLL_OVERFLOW:
9000 case BUILT_IN_UMUL_OVERFLOW:
9001 case BUILT_IN_UMULL_OVERFLOW:
9002 case BUILT_IN_UMULLL_OVERFLOW:
9003 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9005 default:
9006 break;
9008 return NULL_TREE;
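/* As an illustration of the overflow cases above: with constant
operands, a call such as
__builtin_add_overflow_p (__INT_MAX__, 1, (int) 0)
can be folded by fold_builtin_arith_overflow to the constant 1,
since the int addition is known to overflow at compile time. */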
9011 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9012 arguments. IGNORE is true if the result of the
9013 function call is ignored. This function returns NULL_TREE if no
9014 simplification was possible. */
9016 tree
9017 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9019 tree ret = NULL_TREE;
9021 switch (nargs)
9023 case 0:
9024 ret = fold_builtin_0 (loc, fndecl);
9025 break;
9026 case 1:
9027 ret = fold_builtin_1 (loc, fndecl, args[0]);
9028 break;
9029 case 2:
9030 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9031 break;
9032 case 3:
9033 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9034 break;
9035 default:
9036 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9037 break;
9039 if (ret)
9041 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9042 SET_EXPR_LOCATION (ret, loc);
9043 TREE_NO_WARNING (ret) = 1;
9044 return ret;
9046 return NULL_TREE;
9049 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9050 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9051 of arguments in ARGS to be omitted. OLDNARGS is the number of
9052 elements in ARGS. */
9054 static tree
9055 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9056 int skip, tree fndecl, int n, va_list newargs)
9058 int nargs = oldnargs - skip + n;
9059 tree *buffer;
9061 if (n > 0)
9063 int i, j;
9065 buffer = XALLOCAVEC (tree, nargs);
9066 for (i = 0; i < n; i++)
9067 buffer[i] = va_arg (newargs, tree);
9068 for (j = skip; j < oldnargs; j++, i++)
9069 buffer[i] = args[j];
9071 else
9072 buffer = args + skip;
9074 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9077 /* Return true if FNDECL shouldn't be folded right now.
9078 If a built-in function has an always_inline inline wrapper,
9079 defer folding it until after always_inline functions have
9080 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9081 might not be performed. */
9083 bool
9084 avoid_folding_inline_builtin (tree fndecl)
9086 return (DECL_DECLARED_INLINE_P (fndecl)
9087 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9088 && cfun
9089 && !cfun->always_inline_functions_inlined
9090 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
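/* The wrappers this guards against look roughly like the glibc
-D_FORTIFY_SOURCE ones, e.g. (a sketch):

extern __inline __attribute__ ((__always_inline__)) char *
strcpy (char *__dest, const char *__src)
{
return __builtin___strcpy_chk (__dest, __src,
__builtin_object_size (__dest, 1));
}

Folding the strcpy call before the wrapper is inlined would bypass
the object-size check. */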
9093 /* A wrapper function for builtin folding that prevents warnings for
9094 "statement without effect" and the like, caused by removing the
9095 call node earlier than the warning is generated. */
9097 tree
9098 fold_call_expr (location_t loc, tree exp, bool ignore)
9100 tree ret = NULL_TREE;
9101 tree fndecl = get_callee_fndecl (exp);
9102 if (fndecl
9103 && TREE_CODE (fndecl) == FUNCTION_DECL
9104 && DECL_BUILT_IN (fndecl)
9105 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9106 yet. Defer folding until we see all the arguments
9107 (after inlining). */
9108 && !CALL_EXPR_VA_ARG_PACK (exp))
9110 int nargs = call_expr_nargs (exp);
9112 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9113 instead last argument is __builtin_va_arg_pack (). Defer folding
9114 even in that case, until arguments are finalized. */
9115 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9117 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9118 if (fndecl2
9119 && TREE_CODE (fndecl2) == FUNCTION_DECL
9120 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9121 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9122 return NULL_TREE;
9125 if (avoid_folding_inline_builtin (fndecl))
9126 return NULL_TREE;
9128 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9129 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9130 CALL_EXPR_ARGP (exp), ignore);
9131 else
9133 tree *args = CALL_EXPR_ARGP (exp);
9134 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9135 if (ret)
9136 return ret;
9139 return NULL_TREE;
9142 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9143 N arguments are passed in the array ARGARRAY. Return a folded
9144 expression or NULL_TREE if no simplification was possible. */
9146 tree
9147 fold_builtin_call_array (location_t loc, tree,
9148 tree fn,
9149 int n,
9150 tree *argarray)
9152 if (TREE_CODE (fn) != ADDR_EXPR)
9153 return NULL_TREE;
9155 tree fndecl = TREE_OPERAND (fn, 0);
9156 if (TREE_CODE (fndecl) == FUNCTION_DECL
9157 && DECL_BUILT_IN (fndecl))
9159 /* If last argument is __builtin_va_arg_pack (), arguments to this
9160 function are not finalized yet. Defer folding until they are. */
9161 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9163 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9164 if (fndecl2
9165 && TREE_CODE (fndecl2) == FUNCTION_DECL
9166 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9167 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9168 return NULL_TREE;
9170 if (avoid_folding_inline_builtin (fndecl))
9171 return NULL_TREE;
9172 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9173 return targetm.fold_builtin (fndecl, n, argarray, false);
9174 else
9175 return fold_builtin_n (loc, fndecl, argarray, n, false);
9178 return NULL_TREE;
9181 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9182 along with N new arguments specified as the "..." parameters. SKIP
9183 is the number of arguments in EXP to be omitted. This function is used
9184 to do varargs-to-varargs transformations. */
9186 static tree
9187 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9189 va_list ap;
9190 tree t;
9192 va_start (ap, n);
9193 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9194 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9195 va_end (ap);
9197 return t;
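/* For instance, a fold of sprintf (d, "%s", s) into strcpy (d, s)
could be written with this helper as (a sketch; FN standing for
the strcpy FUNCTION_DECL and D for the first argument of EXP):

rewrite_call_expr (loc, exp, 2, fn, 1, d);

i.e. skip the first two arguments of EXP (the destination and the
format) and prepend one new argument, yielding the list (d, s). */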
9200 /* Validate a single argument ARG against a tree code CODE representing
9201 a type. Return true when argument is valid. */
9203 static bool
9204 validate_arg (const_tree arg, enum tree_code code)
9206 if (!arg)
9207 return false;
9208 else if (code == POINTER_TYPE)
9209 return POINTER_TYPE_P (TREE_TYPE (arg));
9210 else if (code == INTEGER_TYPE)
9211 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9212 return code == TREE_CODE (TREE_TYPE (arg));
9215 /* This function validates the types of a function call argument list
9216 against a specified list of tree_codes. If the last specifier is a 0,
9217 that represents an ellipsis, otherwise the last specifier must be a
9218 VOID_TYPE.
9220 This is the GIMPLE version of validate_arglist. Eventually we want to
9221 completely convert builtins.c to work from GIMPLEs and the tree based
9222 validate_arglist will then be removed. */
9224 bool
9225 validate_gimple_arglist (const gcall *call, ...)
9227 enum tree_code code;
9228 bool res = false;
9229 va_list ap;
9230 const_tree arg;
9231 size_t i;
9233 va_start (ap, call);
9234 i = 0;
9236 do
9238 code = (enum tree_code) va_arg (ap, int);
9239 switch (code)
9241 case 0:
9242 /* This signifies an ellipsis; any further arguments are all ok. */
9243 res = true;
9244 goto end;
9245 case VOID_TYPE:
9246 /* This signifies an endlink; if no arguments remain, return
9247 true, otherwise return false. */
9248 res = (i == gimple_call_num_args (call));
9249 goto end;
9250 default:
9251 /* If no parameters remain or the parameter's code does not
9252 match the specified code, return false. Otherwise continue
9253 checking any remaining arguments. */
9254 arg = gimple_call_arg (call, i++);
9255 if (!validate_arg (arg, code))
9256 goto end;
9257 break;
9260 while (1);
9262 /* We need gotos here since we can only have one VA_CLOSE in a
9263 function. */
9264 end: ;
9265 va_end (ap);
9267 return res;
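/* Typical usage for, e.g., a memcpy-like builtin taking two pointers
and a size:

validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
INTEGER_TYPE, VOID_TYPE)

The trailing VOID_TYPE terminates the fixed argument list; passing
0 in its place instead would accept any further arguments. */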
9270 /* Default target-specific builtin expander that does nothing. */
9272 rtx
9273 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9274 rtx target ATTRIBUTE_UNUSED,
9275 rtx subtarget ATTRIBUTE_UNUSED,
9276 machine_mode mode ATTRIBUTE_UNUSED,
9277 int ignore ATTRIBUTE_UNUSED)
9279 return NULL_RTX;
9282 /* Returns true if EXP represents data that would potentially reside
9283 in a readonly section. */
9285 bool
9286 readonly_data_expr (tree exp)
9288 STRIP_NOPS (exp);
9290 if (TREE_CODE (exp) != ADDR_EXPR)
9291 return false;
9293 exp = get_base_address (TREE_OPERAND (exp, 0));
9294 if (!exp)
9295 return false;
9297 /* Make sure we call decl_readonly_section only for trees it
9298 can handle (since it returns true for everything it doesn't
9299 understand). */
9300 if (TREE_CODE (exp) == STRING_CST
9301 || TREE_CODE (exp) == CONSTRUCTOR
9302 || (VAR_P (exp) && TREE_STATIC (exp)))
9303 return decl_readonly_section (exp, 0);
9304 else
9305 return false;
9308 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9309 to the call, and TYPE is its return type.
9311 Return NULL_TREE if no simplification was possible, otherwise return the
9312 simplified form of the call as a tree.
9314 The simplified form may be a constant or other expression which
9315 computes the same value, but in a more efficient manner (including
9316 calls to other builtin functions).
9318 The call may contain arguments which need to be evaluated, but
9319 which are not useful to determine the result of the call. In
9320 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9321 COMPOUND_EXPR will be an argument which must be evaluated.
9322 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9323 COMPOUND_EXPR in the chain will contain the tree for the simplified
9324 form of the builtin function call. */
9326 static tree
9327 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9329 if (!validate_arg (s1, POINTER_TYPE)
9330 || !validate_arg (s2, POINTER_TYPE))
9331 return NULL_TREE;
9332 else
9334 tree fn;
9335 const char *p1, *p2;
9337 p2 = c_getstr (s2);
9338 if (p2 == NULL)
9339 return NULL_TREE;
9341 p1 = c_getstr (s1);
9342 if (p1 != NULL)
9344 const char *r = strpbrk (p1, p2);
9345 tree tem;
9347 if (r == NULL)
9348 return build_int_cst (TREE_TYPE (s1), 0);
9350 /* Return an offset into the constant string argument. */
9351 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9352 return fold_convert_loc (loc, type, tem);
9355 if (p2[0] == '\0')
9356 /* strpbrk(x, "") == NULL.
9357 Evaluate and ignore s1 in case it had side-effects. */
9358 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9360 if (p2[1] != '\0')
9361 return NULL_TREE; /* Really call strpbrk. */
9363 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9364 if (!fn)
9365 return NULL_TREE;
9367 /* New argument list transforming strpbrk(s1, s2) to
9368 strchr(s1, s2[0]). */
9369 return build_call_expr_loc (loc, fn, 2, s1,
9370 build_int_cst (integer_type_node, p2[0]));
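/* So, for example:
strpbrk (s, "")     -> (char *) 0, with S still evaluated
strpbrk ("ab", "b") -> "ab" + 1
strpbrk (s, "b")    -> strchr (s, 'b')
while a call whose second argument is not a known string constant
is left for the library. */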
9374 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9375 to the call.
9377 Return NULL_TREE if no simplification was possible, otherwise return the
9378 simplified form of the call as a tree.
9380 The simplified form may be a constant or other expression which
9381 computes the same value, but in a more efficient manner (including
9382 calls to other builtin functions).
9384 The call may contain arguments which need to be evaluated, but
9385 which are not useful to determine the result of the call. In
9386 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9387 COMPOUND_EXPR will be an argument which must be evaluated.
9388 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9389 COMPOUND_EXPR in the chain will contain the tree for the simplified
9390 form of the builtin function call. */
9392 static tree
9393 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9395 if (!validate_arg (s1, POINTER_TYPE)
9396 || !validate_arg (s2, POINTER_TYPE))
9397 return NULL_TREE;
9398 else
9400 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9402 /* If either argument is "", the result is 0. */
9403 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9404 /* Evaluate and ignore both arguments in case either one has
9405 side-effects. */
9406 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9407 s1, s2);
9408 return NULL_TREE;
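/* E.g. both strspn ("", s2) and strspn (s1, "") fold to (size_t) 0,
keeping the other argument only for its side-effects; no other
forms are simplified here. */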
9412 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9413 to the call.
9415 Return NULL_TREE if no simplification was possible, otherwise return the
9416 simplified form of the call as a tree.
9418 The simplified form may be a constant or other expression which
9419 computes the same value, but in a more efficient manner (including
9420 calls to other builtin functions).
9422 The call may contain arguments which need to be evaluated, but
9423 which are not useful to determine the result of the call. In
9424 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9425 COMPOUND_EXPR will be an argument which must be evaluated.
9426 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9427 COMPOUND_EXPR in the chain will contain the tree for the simplified
9428 form of the builtin function call. */
9430 static tree
9431 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9433 if (!validate_arg (s1, POINTER_TYPE)
9434 || !validate_arg (s2, POINTER_TYPE))
9435 return NULL_TREE;
9436 else
9438 /* If the first argument is "", the result is 0. */
9439 const char *p1 = c_getstr (s1);
9440 if (p1 && *p1 == '\0')
9442 /* Evaluate and ignore argument s2 in case it has
9443 side-effects. */
9444 return omit_one_operand_loc (loc, size_type_node,
9445 size_zero_node, s2);
9448 /* If the second argument is "", return __builtin_strlen(s1). */
9449 const char *p2 = c_getstr (s2);
9450 if (p2 && *p2 == '\0')
9452 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9454 /* If the replacement _DECL isn't initialized, don't do the
9455 transformation. */
9456 if (!fn)
9457 return NULL_TREE;
9459 return build_call_expr_loc (loc, fn, 1, s1);
9461 return NULL_TREE;
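/* E.g. strcspn ("", s2) folds to (size_t) 0 and strcspn (s1, "")
folds to strlen (s1), matching the C library semantics of
counting leading characters of S1 not in S2. */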
9465 /* Fold the next_arg or va_start call EXP. Returns true if an error
9466 was produced, false otherwise. This is done so that we don't output
9467 the error or warning more than once. */
9469 bool
9470 fold_builtin_next_arg (tree exp, bool va_start_p)
9472 tree fntype = TREE_TYPE (current_function_decl);
9473 int nargs = call_expr_nargs (exp);
9474 tree arg;
9475 /* There is a good chance the current input_location points inside the
9476 definition of the va_start macro (perhaps on the token for
9477 builtin) in a system header, so warnings will not be emitted.
9478 Use the location in real source code. */
9479 source_location current_location =
9480 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9481 NULL);
9483 if (!stdarg_p (fntype))
9485 error ("%<va_start%> used in function with fixed args");
9486 return true;
9489 if (va_start_p)
9491 if (nargs != 2)
9493 error ("wrong number of arguments to function %<va_start%>");
9494 return true;
9496 arg = CALL_EXPR_ARG (exp, 1);
9498 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9499 once we have checked the arguments and, if needed, issued a warning. */
9500 else
9502 if (nargs == 0)
9504 /* Evidently an out of date version of <stdarg.h>; can't validate
9505 va_start's second argument, but can still work as intended. */
9506 warning_at (current_location,
9507 OPT_Wvarargs,
9508 "%<__builtin_next_arg%> called without an argument");
9509 return true;
9511 else if (nargs > 1)
9513 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9514 return true;
9516 arg = CALL_EXPR_ARG (exp, 0);
9519 if (TREE_CODE (arg) == SSA_NAME)
9520 arg = SSA_NAME_VAR (arg);
9522 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9523 or __builtin_next_arg (0) the first time we see it, after checking
9524 the arguments and if needed issuing a warning. */
9525 if (!integer_zerop (arg))
9527 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9529 /* Strip off all nops for the sake of the comparison. This
9530 is not quite the same as STRIP_NOPS. It does more.
9531 We must also strip off INDIRECT_EXPR for C++ reference
9532 parameters. */
9533 while (CONVERT_EXPR_P (arg)
9534 || TREE_CODE (arg) == INDIRECT_REF)
9535 arg = TREE_OPERAND (arg, 0);
9536 if (arg != last_parm)
9538 /* FIXME: Sometimes with the tree optimizers we can end up with
9539 something other than the last argument even though the user
9540 used the last argument. We just warn and set the arg to be
9541 the last argument so that we do not generate wrong code
9542 because of it. */
9543 warning_at (current_location,
9544 OPT_Wvarargs,
9545 "second parameter of %<va_start%> not last named argument");
9548 /* Undefined by C99 7.15.1.4p4 (va_start):
9549 "If the parameter parmN is declared with the register storage
9550 class, with a function or array type, or with a type that is
9551 not compatible with the type that results after application of
9552 the default argument promotions, the behavior is undefined."
9554 else if (DECL_REGISTER (arg))
9556 warning_at (current_location,
9557 OPT_Wvarargs,
9558 "undefined behavior when second parameter of "
9559 "%<va_start%> is declared with %<register%> storage");
9562 /* We want to verify the second parameter just once before the tree
9563 optimizers are run and then avoid keeping it in the tree,
9564 as otherwise we could warn even for correct code like:
9565 void foo (int i, ...)
9566 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9567 if (va_start_p)
9568 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9569 else
9570 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9572 return false;
9576 /* Expand a call EXP to __builtin_object_size. */
9578 static rtx
9579 expand_builtin_object_size (tree exp)
9581 tree ost;
9582 int object_size_type;
9583 tree fndecl = get_callee_fndecl (exp);
9585 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9587 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9588 exp, fndecl);
9589 expand_builtin_trap ();
9590 return const0_rtx;
9593 ost = CALL_EXPR_ARG (exp, 1);
9594 STRIP_NOPS (ost);
9596 if (TREE_CODE (ost) != INTEGER_CST
9597 || tree_int_cst_sgn (ost) < 0
9598 || compare_tree_int (ost, 3) > 0)
9600 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9601 exp, fndecl);
9602 expand_builtin_trap ();
9603 return const0_rtx;
9606 object_size_type = tree_to_shwi (ost);
9608 return object_size_type < 2 ? constm1_rtx : const0_rtx;
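/* This yields the documented fallback values when the size is still
unknown at expansion time, e.g.:
__builtin_object_size (p, 0) -> (size_t) -1  (types 0 and 1 are
upper bounds)
__builtin_object_size (p, 2) -> (size_t) 0   (types 2 and 3 are
lower bounds) */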
9611 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9612 FCODE is the BUILT_IN_* to use.
9613 Return NULL_RTX if we failed; the caller should emit a normal call,
9614 otherwise try to get the result in TARGET, if convenient (and in
9615 mode MODE if that's convenient). */
9617 static rtx
9618 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9619 enum built_in_function fcode)
9621 tree dest, src, len, size;
9623 if (!validate_arglist (exp,
9624 POINTER_TYPE,
9625 fcode == BUILT_IN_MEMSET_CHK
9626 ? INTEGER_TYPE : POINTER_TYPE,
9627 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9628 return NULL_RTX;
9630 dest = CALL_EXPR_ARG (exp, 0);
9631 src = CALL_EXPR_ARG (exp, 1);
9632 len = CALL_EXPR_ARG (exp, 2);
9633 size = CALL_EXPR_ARG (exp, 3);
9635 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9636 exp, len, /*maxlen=*/NULL_TREE,
9637 /*str=*/NULL_TREE, size);
9639 if (!tree_fits_uhwi_p (size))
9640 return NULL_RTX;
9642 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9644 /* Avoid transforming the checking call to an ordinary one when
9645 an overflow has been detected or when the call couldn't be
9646 validated because the size is not constant. */
9647 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9648 return NULL_RTX;
9650 tree fn = NULL_TREE;
9651 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9652 mem{cpy,pcpy,move,set} is available. */
9653 switch (fcode)
9655 case BUILT_IN_MEMCPY_CHK:
9656 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9657 break;
9658 case BUILT_IN_MEMPCPY_CHK:
9659 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9660 break;
9661 case BUILT_IN_MEMMOVE_CHK:
9662 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9663 break;
9664 case BUILT_IN_MEMSET_CHK:
9665 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9666 break;
9667 default:
9668 break;
9671 if (! fn)
9672 return NULL_RTX;
9674 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9675 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9676 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9677 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9679 else if (fcode == BUILT_IN_MEMSET_CHK)
9680 return NULL_RTX;
9681 else
9683 unsigned int dest_align = get_pointer_alignment (dest);
9685 /* If DEST is not a pointer type, call the normal function. */
9686 if (dest_align == 0)
9687 return NULL_RTX;
9689 /* If SRC and DEST are the same (and not volatile), do nothing. */
9690 if (operand_equal_p (src, dest, 0))
9692 tree expr;
9694 if (fcode != BUILT_IN_MEMPCPY_CHK)
9696 /* Evaluate and ignore LEN in case it has side-effects. */
9697 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9698 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9701 expr = fold_build_pointer_plus (dest, len);
9702 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9705 /* __memmove_chk special case. */
9706 if (fcode == BUILT_IN_MEMMOVE_CHK)
9708 unsigned int src_align = get_pointer_alignment (src);
9710 if (src_align == 0)
9711 return NULL_RTX;
9713 /* If src is categorized for a readonly section we can use
9714 normal __memcpy_chk. */
9715 if (readonly_data_expr (src))
9717 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9718 if (!fn)
9719 return NULL_RTX;
9720 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9721 dest, src, len, size);
9722 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9723 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9724 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9727 return NULL_RTX;
9731 /* Emit warning if a buffer overflow is detected at compile time. */
9733 static void
9734 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9736 /* The source string. */
9737 tree srcstr = NULL_TREE;
9738 /* The size of the destination object. */
9739 tree objsize = NULL_TREE;
9740 /* The string to which the source is being appended (as in
9741 __strcat_chk), or null if the operation isn't a concatenation. */
9742 tree catstr = NULL_TREE;
9743 /* The maximum length of the source sequence in a bounded operation
9744 (such as __strncat_chk) or null if the operation isn't bounded
9745 (such as __strcat_chk). */
9746 tree maxlen = NULL_TREE;
9748 switch (fcode)
9750 case BUILT_IN_STRCPY_CHK:
9751 case BUILT_IN_STPCPY_CHK:
9752 srcstr = CALL_EXPR_ARG (exp, 1);
9753 objsize = CALL_EXPR_ARG (exp, 2);
9754 break;
9756 case BUILT_IN_STRCAT_CHK:
9757 /* For __strcat_chk the warning will be emitted only if overflowing
9758 by at least strlen (dest) + 1 bytes. */
9759 catstr = CALL_EXPR_ARG (exp, 0);
9760 srcstr = CALL_EXPR_ARG (exp, 1);
9761 objsize = CALL_EXPR_ARG (exp, 2);
9762 break;
9764 case BUILT_IN_STRNCAT_CHK:
9765 catstr = CALL_EXPR_ARG (exp, 0);
9766 srcstr = CALL_EXPR_ARG (exp, 1);
9767 maxlen = CALL_EXPR_ARG (exp, 2);
9768 objsize = CALL_EXPR_ARG (exp, 3);
9769 break;
9771 case BUILT_IN_STRNCPY_CHK:
9772 case BUILT_IN_STPNCPY_CHK:
9773 srcstr = CALL_EXPR_ARG (exp, 1);
9774 maxlen = CALL_EXPR_ARG (exp, 2);
9775 objsize = CALL_EXPR_ARG (exp, 3);
9776 break;
9778 case BUILT_IN_SNPRINTF_CHK:
9779 case BUILT_IN_VSNPRINTF_CHK:
9780 maxlen = CALL_EXPR_ARG (exp, 1);
9781 objsize = CALL_EXPR_ARG (exp, 3);
9782 break;
9783 default:
9784 gcc_unreachable ();
9787 if (catstr && maxlen)
9789 /* Check __strncat_chk. There is no way to determine the length
9790 of the string to which the source string is being appended, so
9791 just warn when the length of the source string is not known. */
9792 if (!check_strncat_sizes (exp, objsize))
9793 return;
9796 check_sizes (OPT_Wstringop_overflow_, exp,
9797 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9800 /* Emit warning if a buffer overflow is detected at compile time
9801 in __sprintf_chk/__vsprintf_chk calls. */
9803 static void
9804 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9806 tree size, len, fmt;
9807 const char *fmt_str;
9808 int nargs = call_expr_nargs (exp);
9810 /* Verify the required arguments in the original call. */
9812 if (nargs < 4)
9813 return;
9814 size = CALL_EXPR_ARG (exp, 2);
9815 fmt = CALL_EXPR_ARG (exp, 3);
9817 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9818 return;
9820 /* Check whether the format is a literal string constant. */
9821 fmt_str = c_getstr (fmt);
9822 if (fmt_str == NULL)
9823 return;
9825 if (!init_target_chars ())
9826 return;
9828 /* If the format doesn't contain % args or %%, we know its size. */
9829 if (strchr (fmt_str, target_percent) == 0)
9830 len = build_int_cstu (size_type_node, strlen (fmt_str));
9831 /* If the format is "%s" and first ... argument is a string literal,
9832 we know it too. */
9833 else if (fcode == BUILT_IN_SPRINTF_CHK
9834 && strcmp (fmt_str, target_percent_s) == 0)
9836 tree arg;
9838 if (nargs < 5)
9839 return;
9840 arg = CALL_EXPR_ARG (exp, 4);
9841 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9842 return;
9844 len = c_strlen (arg, 1);
9845 if (!len || ! tree_fits_uhwi_p (len))
9846 return;
9848 else
9849 return;
9851 /* Add one for the terminating nul. */
9852 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9853 check_sizes (OPT_Wstringop_overflow_,
9854 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
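/* For example, with -D_FORTIFY_SOURCE glibc routes sprintf through
__builtin___sprintf_chk, so code like

char buf[4];
sprintf (buf, "%s", "abcde");

arrives here with SIZE 4 and a computed LEN of 6 (five characters
plus the terminating nul), and check_sizes then emits a
-Wstringop-overflow warning. */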
9857 /* Emit warning if a free is called with address of a variable. */
9859 static void
9860 maybe_emit_free_warning (tree exp)
9862 tree arg = CALL_EXPR_ARG (exp, 0);
9864 STRIP_NOPS (arg);
9865 if (TREE_CODE (arg) != ADDR_EXPR)
9866 return;
9868 arg = get_base_address (TREE_OPERAND (arg, 0));
9869 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9870 return;
9872 if (SSA_VAR_P (arg))
9873 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9874 "%Kattempt to free a non-heap object %qD", exp, arg);
9875 else
9876 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9877 "%Kattempt to free a non-heap object", exp);
9880 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9881 if possible. */
9883 static tree
9884 fold_builtin_object_size (tree ptr, tree ost)
9886 unsigned HOST_WIDE_INT bytes;
9887 int object_size_type;
9889 if (!validate_arg (ptr, POINTER_TYPE)
9890 || !validate_arg (ost, INTEGER_TYPE))
9891 return NULL_TREE;
9893 STRIP_NOPS (ost);
9895 if (TREE_CODE (ost) != INTEGER_CST
9896 || tree_int_cst_sgn (ost) < 0
9897 || compare_tree_int (ost, 3) > 0)
9898 return NULL_TREE;
9900 object_size_type = tree_to_shwi (ost);
9902 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9903 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9904 and (size_t) 0 for types 2 and 3. */
9905 if (TREE_SIDE_EFFECTS (ptr))
9906 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9908 if (TREE_CODE (ptr) == ADDR_EXPR)
9910 compute_builtin_object_size (ptr, object_size_type, &bytes);
9911 if (wi::fits_to_tree_p (bytes, size_type_node))
9912 return build_int_cstu (size_type_node, bytes);
9914 else if (TREE_CODE (ptr) == SSA_NAME)
9916 /* If object size is not known yet, delay folding until
9917 later. Maybe subsequent passes will help determining
9918 it. */
9919 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9920 && wi::fits_to_tree_p (bytes, size_type_node))
9921 return build_int_cstu (size_type_node, bytes);
9924 return NULL_TREE;
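/* A minimal example of a fold that succeeds here:
char buf[64];
... __builtin_object_size (&buf[16], 0) ...
PTR is an ADDR_EXPR, compute_builtin_object_size determines that
48 bytes remain, and the call folds to that constant. */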
9927 /* Builtins with folding operations that operate on "..." arguments
9928 need special handling; we need to store the arguments in a convenient
9929 data structure before attempting any folding. Fortunately there are
9930 only a few builtins that fall into this category. FNDECL is the
9931 function, EXP is the CALL_EXPR for the call. */
9933 static tree
9934 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9936 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9937 tree ret = NULL_TREE;
9939 switch (fcode)
9941 case BUILT_IN_FPCLASSIFY:
9942 ret = fold_builtin_fpclassify (loc, args, nargs);
9943 break;
9945 default:
9946 break;
9948 if (ret)
9950 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9951 SET_EXPR_LOCATION (ret, loc);
9952 TREE_NO_WARNING (ret) = 1;
9953 return ret;
9955 return NULL_TREE;
9958 /* Initialize format string characters in the target charset. */
9960 bool
9961 init_target_chars (void)
9963 static bool init;
9964 if (!init)
9966 target_newline = lang_hooks.to_target_charset ('\n');
9967 target_percent = lang_hooks.to_target_charset ('%');
9968 target_c = lang_hooks.to_target_charset ('c');
9969 target_s = lang_hooks.to_target_charset ('s');
9970 if (target_newline == 0 || target_percent == 0 || target_c == 0
9971 || target_s == 0)
9972 return false;
9974 target_percent_c[0] = target_percent;
9975 target_percent_c[1] = target_c;
9976 target_percent_c[2] = '\0';
9978 target_percent_s[0] = target_percent;
9979 target_percent_s[1] = target_s;
9980 target_percent_s[2] = '\0';
9982 target_percent_s_newline[0] = target_percent;
9983 target_percent_s_newline[1] = target_s;
9984 target_percent_s_newline[2] = target_newline;
9985 target_percent_s_newline[3] = '\0';
9987 init = true;
9989 return true;
9992 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9993 and no overflow/underflow occurred. INEXACT is true if M was not
9994 exactly calculated. TYPE is the tree type for the result. This
9995 function assumes that the caller cleared the MPFR flags before
9996 calculating M, so any flag set since then can be detected on
9997 entry to this function. Return NULL_TREE if any checks fail. */
9999 static tree
10000 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10002 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10003 overflow/underflow occurred. If -frounding-math, proceed iff the
10004 result of calling FUNC was exact. */
10005 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10006 && (!flag_rounding_math || !inexact))
10008 REAL_VALUE_TYPE rr;
10010 real_from_mpfr (&rr, m, type, GMP_RNDN);
10011 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10012 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10013 but the mpfr_t is not, then we underflowed in the
10014 conversion. */
10015 if (real_isfinite (&rr)
10016 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10018 REAL_VALUE_TYPE rmode;
10020 real_convert (&rmode, TYPE_MODE (type), &rr);
10021 /* Proceed iff the specified mode can hold the value. */
10022 if (real_identical (&rmode, &rr))
10023 return build_real (type, rmode);
10026 return NULL_TREE;
10029 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10030 number and no overflow/underflow occurred. INEXACT is true if M
10031 was not exactly calculated. TYPE is the tree type for the result.
10032 This function assumes that the caller cleared the MPFR flags before
10033 calculating M, so any flag set since then can be detected on
10034 entry to this function. Return NULL_TREE if any checks fail; if
10035 FORCE_CONVERT is true, bypass the checks. */
10037 static tree
10038 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10040 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10041 overflow/underflow occurred. If -frounding-math, proceed iff the
10042 result of calling FUNC was exact. */
10043 if (force_convert
10044 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10045 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10046 && (!flag_rounding_math || !inexact)))
10048 REAL_VALUE_TYPE re, im;
10050 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10051 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10052 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10053 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10054 but the mpfr_t is not, then we underflowed in the
10055 conversion. */
10056 if (force_convert
10057 || (real_isfinite (&re) && real_isfinite (&im)
10058 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10059 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10061 REAL_VALUE_TYPE re_mode, im_mode;
10063 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10064 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10065 /* Proceed iff the specified mode can hold the value. */
10066 if (force_convert
10067 || (real_identical (&re_mode, &re)
10068 && real_identical (&im_mode, &im)))
10069 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10070 build_real (TREE_TYPE (type), im_mode));
10073 return NULL_TREE;
10076 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10077 the pointer *(ARG_QUO) and return the result. The type is taken
10078 from the type of ARG0 and is used for setting the precision of the
10079 calculation and results. */
10081 static tree
10082 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10084 tree const type = TREE_TYPE (arg0);
10085 tree result = NULL_TREE;
10087 STRIP_NOPS (arg0);
10088 STRIP_NOPS (arg1);
10090 /* To proceed, MPFR must exactly represent the target floating point
10091 format, which only happens when the target base equals two. */
10092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10093 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10094 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10096 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10097 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10099 if (real_isfinite (ra0) && real_isfinite (ra1))
10101 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10102 const int prec = fmt->p;
10103 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10104 tree result_rem;
10105 long integer_quo;
10106 mpfr_t m0, m1;
10108 mpfr_inits2 (prec, m0, m1, NULL);
10109 mpfr_from_real (m0, ra0, GMP_RNDN);
10110 mpfr_from_real (m1, ra1, GMP_RNDN);
10111 mpfr_clear_flags ();
10112 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10113 /* Remquo is independent of the rounding mode, so pass
10114 inexact=0 to do_mpfr_ckconv(). */
10115 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10116 mpfr_clears (m0, m1, NULL);
10117 if (result_rem)
10119 /* MPFR calculates quo in the host's long so it may
10120 return more bits in quo than the target int can hold
10121 if sizeof(host long) > sizeof(target int). This can
10122 happen even for native compilers in LP64 mode. In
10123 these cases, reduce the quo value modulo the largest
10124 number that the target int can hold, leaving one
10125 bit for the sign.
10126 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10127 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10129 /* Dereference the quo pointer argument. */
10130 arg_quo = build_fold_indirect_ref (arg_quo);
10131 /* Proceed iff a valid pointer type was passed in. */
10132 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10134 /* Set the value. */
10135 tree result_quo
10136 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10137 build_int_cst (TREE_TYPE (arg_quo),
10138 integer_quo));
10139 TREE_SIDE_EFFECTS (result_quo) = 1;
10140 /* Combine the quo assignment with the rem. */
10141 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10142 result_quo, result_rem));
10147 return result;
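/* With constant arguments the whole remquo call thus folds away;
e.g. remquo (5.0, 2.0, &q) becomes the compound expression
(q = 2, 1.0), since 5.0/2.0 = 2.5 rounds to the nearest even
integer 2 and the remainder is 5.0 - 2*2.0 = 1.0. */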
10150 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10151 resulting value as a tree with type TYPE. The mpfr precision is
10152 set to the precision of TYPE. We assume that this mpfr function
10153 returns zero if the result could be calculated exactly within the
10154 requested precision. In addition, the integer pointer represented
10155 by ARG_SG will be dereferenced and set to the appropriate signgam
10156 (-1,1) value. */
10158 static tree
10159 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10161 tree result = NULL_TREE;
10163 STRIP_NOPS (arg);
10165 /* To proceed, MPFR must exactly represent the target floating point
10166 format, which only happens when the target base equals two. Also
10167 verify ARG is a constant and that ARG_SG is an int pointer. */
10168 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10169 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10170 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10171 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10173 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10175 /* In addition to NaN and Inf, the argument cannot be zero or a
10176 negative integer. */
10177 if (real_isfinite (ra)
10178 && ra->cl != rvc_zero
10179 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10181 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10182 const int prec = fmt->p;
10183 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10184 int inexact, sg;
10185 mpfr_t m;
10186 tree result_lg;
10188 mpfr_init2 (m, prec);
10189 mpfr_from_real (m, ra, GMP_RNDN);
10190 mpfr_clear_flags ();
10191 inexact = mpfr_lgamma (m, &sg, m, rnd);
10192 result_lg = do_mpfr_ckconv (m, type, inexact);
10193 mpfr_clear (m);
10194 if (result_lg)
10196 tree result_sg;
10198 /* Dereference the arg_sg pointer argument. */
10199 arg_sg = build_fold_indirect_ref (arg_sg);
10200 /* Assign the signgam value into *arg_sg. */
10201 result_sg = fold_build2 (MODIFY_EXPR,
10202 TREE_TYPE (arg_sg), arg_sg,
10203 build_int_cst (TREE_TYPE (arg_sg), sg));
10204 TREE_SIDE_EFFECTS (result_sg) = 1;
10205 /* Combine the signgam assignment with the lgamma result. */
10206 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10207 result_sg, result_lg));
10212 return result;
10215 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10216 mpc function FUNC on it and return the resulting value as a tree
10217 with type TYPE. The mpfr precision is set to the precision of
10218 TYPE. We assume that function FUNC returns zero if the result
10219 could be calculated exactly within the requested precision. If
10220 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10221 in the arguments and/or results. */
10223 tree
10224 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10225 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10227 tree result = NULL_TREE;
10229 STRIP_NOPS (arg0);
10230 STRIP_NOPS (arg1);
10232 /* To proceed, MPFR must exactly represent the target floating point
10233 format, which only happens when the target base equals two. */
10234 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10235 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10236 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10237 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10238 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10240 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10241 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10242 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10243 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10245 if (do_nonfinite
10246 || (real_isfinite (re0) && real_isfinite (im0)
10247 && real_isfinite (re1) && real_isfinite (im1)))
10249 const struct real_format *const fmt =
10250 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10251 const int prec = fmt->p;
10252 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10253 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10254 int inexact;
10255 mpc_t m0, m1;
10257 mpc_init2 (m0, prec);
10258 mpc_init2 (m1, prec);
10259 mpfr_from_real (mpc_realref (m0), re0, rnd);
10260 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10261 mpfr_from_real (mpc_realref (m1), re1, rnd);
10262 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10263 mpfr_clear_flags ();
10264 inexact = func (m0, m0, m1, crnd);
10265 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10266 mpc_clear (m0);
10267 mpc_clear (m1);
10271 return result;
10274 /* A wrapper function for builtin folding that prevents warnings for
10275 "statement without effect" and the like, caused by removing the
10276 call node earlier than the warning is generated. */
10278 tree
10279 fold_call_stmt (gcall *stmt, bool ignore)
10281 tree ret = NULL_TREE;
10282 tree fndecl = gimple_call_fndecl (stmt);
10283 location_t loc = gimple_location (stmt);
10284 if (fndecl
10285 && TREE_CODE (fndecl) == FUNCTION_DECL
10286 && DECL_BUILT_IN (fndecl)
10287 && !gimple_call_va_arg_pack_p (stmt))
10289 int nargs = gimple_call_num_args (stmt);
10290 tree *args = (nargs > 0
10291 ? gimple_call_arg_ptr (stmt, 0)
10292 : &error_mark_node);
10294 if (avoid_folding_inline_builtin (fndecl))
10295 return NULL_TREE;
10296 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10298 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10300 else
10302 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10303 if (ret)
10305 /* Propagate location information from original call to
10306 expansion of builtin. Otherwise things like
10307 maybe_emit_chk_warning, that operate on the expansion
10308 of a builtin, will use the wrong location information. */
10309 if (gimple_has_location (stmt))
10311 tree realret = ret;
10312 if (TREE_CODE (ret) == NOP_EXPR)
10313 realret = TREE_OPERAND (ret, 0);
10314 if (CAN_HAVE_LOCATION_P (realret)
10315 && !EXPR_HAS_LOCATION (realret))
10316 SET_EXPR_LOCATION (realret, loc);
10317 return realret;
10319 return ret;
10323 return NULL_TREE;
10326 /* Look up the function in builtin_decl that corresponds to DECL
10327 and set ASMSPEC as its user assembler name. DECL must be a
10328 function decl that declares a builtin. */
10330 void
10331 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10333 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10334 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10335 && asmspec != 0);
10337 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10338 set_user_assembler_name (builtin, asmspec);
10340 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10341 && INT_TYPE_SIZE < BITS_PER_WORD)
10343 set_user_assembler_libfunc ("ffs", asmspec);
10344 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10345 "ffs");
10349 /* Return true if DECL is a builtin that expands to a constant or similarly
10350 simple code. */
10351 bool
10352 is_simple_builtin (tree decl)
10354 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10355 switch (DECL_FUNCTION_CODE (decl))
10357 /* Builtins that expand to constants. */
10358 case BUILT_IN_CONSTANT_P:
10359 case BUILT_IN_EXPECT:
10360 case BUILT_IN_OBJECT_SIZE:
10361 case BUILT_IN_UNREACHABLE:
10362 /* Simple register moves or loads from stack. */
10363 case BUILT_IN_ASSUME_ALIGNED:
10364 case BUILT_IN_RETURN_ADDRESS:
10365 case BUILT_IN_EXTRACT_RETURN_ADDR:
10366 case BUILT_IN_FROB_RETURN_ADDR:
10367 case BUILT_IN_RETURN:
10368 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10369 case BUILT_IN_FRAME_ADDRESS:
10370 case BUILT_IN_VA_END:
10371 case BUILT_IN_STACK_SAVE:
10372 case BUILT_IN_STACK_RESTORE:
10373 /* Exception state returns or moves registers around. */
10374 case BUILT_IN_EH_FILTER:
10375 case BUILT_IN_EH_POINTER:
10376 case BUILT_IN_EH_COPY_VALUES:
10377 return true;
10379 default:
10380 return false;
10383 return false;
10386 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10387 is most probably expanded inline into reasonably simple code. This is a
10388 superset of is_simple_builtin. */
10389 bool
10390 is_inexpensive_builtin (tree decl)
10392 if (!decl)
10393 return false;
10394 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10395 return true;
10396 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10397 switch (DECL_FUNCTION_CODE (decl))
10399 case BUILT_IN_ABS:
10400 case BUILT_IN_ALLOCA:
10401 case BUILT_IN_ALLOCA_WITH_ALIGN:
10402 case BUILT_IN_BSWAP16:
10403 case BUILT_IN_BSWAP32:
10404 case BUILT_IN_BSWAP64:
10405 case BUILT_IN_CLZ:
10406 case BUILT_IN_CLZIMAX:
10407 case BUILT_IN_CLZL:
10408 case BUILT_IN_CLZLL:
10409 case BUILT_IN_CTZ:
10410 case BUILT_IN_CTZIMAX:
10411 case BUILT_IN_CTZL:
10412 case BUILT_IN_CTZLL:
10413 case BUILT_IN_FFS:
10414 case BUILT_IN_FFSIMAX:
10415 case BUILT_IN_FFSL:
10416 case BUILT_IN_FFSLL:
10417 case BUILT_IN_IMAXABS:
10418 case BUILT_IN_FINITE:
10419 case BUILT_IN_FINITEF:
10420 case BUILT_IN_FINITEL:
10421 case BUILT_IN_FINITED32:
10422 case BUILT_IN_FINITED64:
10423 case BUILT_IN_FINITED128:
10424 case BUILT_IN_FPCLASSIFY:
10425 case BUILT_IN_ISFINITE:
10426 case BUILT_IN_ISINF_SIGN:
10427 case BUILT_IN_ISINF:
10428 case BUILT_IN_ISINFF:
10429 case BUILT_IN_ISINFL:
10430 case BUILT_IN_ISINFD32:
10431 case BUILT_IN_ISINFD64:
10432 case BUILT_IN_ISINFD128:
10433 case BUILT_IN_ISNAN:
10434 case BUILT_IN_ISNANF:
10435 case BUILT_IN_ISNANL:
10436 case BUILT_IN_ISNAND32:
10437 case BUILT_IN_ISNAND64:
10438 case BUILT_IN_ISNAND128:
10439 case BUILT_IN_ISNORMAL:
10440 case BUILT_IN_ISGREATER:
10441 case BUILT_IN_ISGREATEREQUAL:
10442 case BUILT_IN_ISLESS:
10443 case BUILT_IN_ISLESSEQUAL:
10444 case BUILT_IN_ISLESSGREATER:
10445 case BUILT_IN_ISUNORDERED:
10446 case BUILT_IN_VA_ARG_PACK:
10447 case BUILT_IN_VA_ARG_PACK_LEN:
10448 case BUILT_IN_VA_COPY:
10449 case BUILT_IN_TRAP:
10450 case BUILT_IN_SAVEREGS:
10451 case BUILT_IN_POPCOUNTL:
10452 case BUILT_IN_POPCOUNTLL:
10453 case BUILT_IN_POPCOUNTIMAX:
10454 case BUILT_IN_POPCOUNT:
10455 case BUILT_IN_PARITYL:
10456 case BUILT_IN_PARITYLL:
10457 case BUILT_IN_PARITYIMAX:
10458 case BUILT_IN_PARITY:
10459 case BUILT_IN_LABS:
10460 case BUILT_IN_LLABS:
10461 case BUILT_IN_PREFETCH:
10462 case BUILT_IN_ACC_ON_DEVICE:
10463 return true;
10465 default:
10466 return is_simple_builtin (decl);
10469 return false;
10472 /* Return true if T is a constant and the value cast to a target char
10473 can be represented by a host char.
10474 Store the cast char constant in *P if so. */
10476 bool
10477 target_char_cst_p (tree t, char *p)
10479 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10480 return false;
10482 *p = (char)tree_to_uhwi (t);
10483 return true;
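/* A typical caller pattern (a sketch) when folding, e.g., strchr
with a constant character argument:

char c;
if (target_char_cst_p (arg1, &c))
... fold strchr (s1, c) using the host char C ...

The CHAR_TYPE_SIZE check above makes this safe for cross compilers
whose target char differs from the host char. */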