/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or (with -fcilkplus) names one of the Cilk runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
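/* For example, the prefix checks above give:

     is_builtin_name ("__builtin_memcpy")  => true
     is_builtin_name ("__atomic_load_n")   => true
     is_builtin_name ("memcpy")            => false  */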
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    {
	      align = BITS_PER_UNIT;
	      known_alignment = false;
	    }
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
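/* Worked example (illustrative): if get_object_alignment_1 determined
   align == 32 and bitpos == 8 (both in bits), the address is known to
   be 8 mod 32, so the reduction above returns least_bit_hwi (8) == 8,
   i.e. the object is only known to be byte aligned.  */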
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
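/* Worked examples (illustrative):

     string_length ("hello\0xy", 1, 8) == 5   (stops at the embedded nul)
     string_length ("abc",       1, 3) == 3   (no nul within MAXELTS)

   For ELTSIZE 2 or 4 an element counts as zero only when all of its
   bytes are zero, which is what the memcmp against "\0\0\0\0" tests.  */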
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
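/* Worked example (illustrative): for SRC == "foo\0bar" with a constant
   BYTEOFF of 0, the STRING_CST has TREE_STRING_LENGTH 8, so ELTSIZE is 1
   and MAXELTS is 7; string_length stops at the nul at offset 3 and
   ssize_int (3) is returned.  With a variable BYTEOFF, the same embedded
   nul makes the early NULL_TREE return above fire instead.  */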
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
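/* Illustrative example, assuming a little-endian target (neither
   WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN): c_readstr ("ab", SImode)
   places 'a' (0x61) in bits 0-7 and 'b' (0x62) in bits 8-15; once the
   terminating nul is copied CH stays zero, so the result is the SImode
   constant 0x00006261.  */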
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
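/* For example, an SSA_NAME, a PARM_DECL, or a non-addressable automatic
   VAR_DECL is returned unchanged above, while anything else (say, an
   expression with side effects) is wrapped in a SAVE_EXPR so it is
   evaluated only once.  */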
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
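/* Illustrative source-level uses that are expanded through this routine:

     void *pc = __builtin_return_address (0);   // COUNT == 0
     void *fp = __builtin_frame_address (1);    // walks one frame back

   Both builtins funnel into here, distinguished by FNDECL_CODE.  */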
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the iterator ITER has more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
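/* Typical uses elsewhere in this file (see e.g. the nonlocal_goto and
   prefetch expanders below):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
	-- exactly two pointer arguments.
     validate_arglist (exp, POINTER_TYPE, 0)
	-- one pointer argument followed by anything (the 0 ellipsis).  */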
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
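/* Source-level examples of what is being expanded here:

     __builtin_prefetch (p, 1, 3);   // prefetch *p for write, high locality
     __builtin_prefetch (p);	     // rw defaults to 0, locality to 3  */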
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a SAVE_EXPR that has not yet been resolved, MEM_ATTRS can
     still be derived from its operand; for expr->a.b only <variable>.a.b
     is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
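/* The rounding above is plain alignment arithmetic; e.g. with
   size == 12 and a register mode requiring align == 8,
   CEIL (12, 8) * 8 == 16, so that register's slot starts at offset 16.  */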
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1732 /* Perform an untyped return. */
1734 static void
1735 expand_builtin_return (rtx result)
1737 int size, align, regno;
1738 machine_mode mode;
1739 rtx reg;
1740 rtx_insn *call_fusage = 0;
1742 result = convert_memory_address (Pmode, result);
1744 apply_result_size ();
1745 result = gen_rtx_MEM (BLKmode, result);
1747 if (targetm.have_untyped_return ())
1749 rtx vector = result_vector (0, result);
1750 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1751 emit_barrier ();
1752 return;
1755 /* Restore the return value and note that each value is used. */
1756 size = 0;
1757 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1758 if ((mode = apply_result_mode[regno]) != VOIDmode)
1760 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1761 if (size % align != 0)
1762 size = CEIL (size, align) * align;
1763 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1764 emit_move_insn (reg, adjust_address (result, mode, size));
1766 push_to_sequence (call_fusage);
1767 emit_use (reg);
1768 call_fusage = get_insns ();
1769 end_sequence ();
1770 size += GET_MODE_SIZE (mode);
1773 /* Put the USE insns before the return. */
1774 emit_insn (call_fusage);
1776 /* Return whatever values were restored by jumping directly to the end
1777 of the function. */
1778 expand_naked_return ();
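/* Illustrative use of the untyped call/return builtins expanded above
   (user-level sketch, not part of this file).  The size 128 is an
   assumed upper bound on the bytes of pushed arguments; a real caller
   must choose a safe value:

     void target (int, double);

     void wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target, args, 128);
       __builtin_return (result);
     }
*/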
1781 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1783 static enum type_class
1784 type_to_class (tree type)
1786 switch (TREE_CODE (type))
1788 case VOID_TYPE: return void_type_class;
1789 case INTEGER_TYPE: return integer_type_class;
1790 case ENUMERAL_TYPE: return enumeral_type_class;
1791 case BOOLEAN_TYPE: return boolean_type_class;
1792 case POINTER_TYPE: return pointer_type_class;
1793 case REFERENCE_TYPE: return reference_type_class;
1794 case OFFSET_TYPE: return offset_type_class;
1795 case REAL_TYPE: return real_type_class;
1796 case COMPLEX_TYPE: return complex_type_class;
1797 case FUNCTION_TYPE: return function_type_class;
1798 case METHOD_TYPE: return method_type_class;
1799 case RECORD_TYPE: return record_type_class;
1800 case UNION_TYPE:
1801 case QUAL_UNION_TYPE: return union_type_class;
1802 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1803 ? string_type_class : array_type_class);
1804 case LANG_TYPE: return lang_type_class;
1805 default: return no_type_class;
1809 /* Expand a call EXP to __builtin_classify_type. */
1811 static rtx
1812 expand_builtin_classify_type (tree exp)
1814 if (call_expr_nargs (exp))
1815 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1816 return GEN_INT (no_type_class);
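/* For illustration (user-level sketch, not part of this file),
   __builtin_classify_type maps the static type of its argument to one
   of the type_class values above:

     int ci = __builtin_classify_type (42);         ->  integer_type_class
     int cr = __builtin_classify_type (3.14);       ->  real_type_class
     int cp = __builtin_classify_type ((void *) 0); ->  pointer_type_class

   The numeric values of these classes come from typeclass.h.  */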
1819 /* This helper macro, meant to be used in mathfn_built_in below,
1820 determines which among a set of three builtin math functions is
1821 appropriate for a given type mode. The `F' and `L' cases are
1822 automatically generated from the `double' case. */
1823 #define CASE_MATHFN(MATHFN) \
1824 CASE_CFN_##MATHFN: \
1825 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1826 fcodel = BUILT_IN_##MATHFN##L ; break;
1827 /* Similar to above, but appends _R after any F/L suffix. */
1828 #define CASE_MATHFN_REENT(MATHFN) \
1829 case CFN_BUILT_IN_##MATHFN##_R: \
1830 case CFN_BUILT_IN_##MATHFN##F_R: \
1831 case CFN_BUILT_IN_##MATHFN##L_R: \
1832 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1833 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1835 /* Return a function equivalent to FN but operating on floating-point
1836 values of type TYPE, or END_BUILTINS if no such function exists.
1837 This is purely an operation on function codes; it does not guarantee
1838 that the target actually has an implementation of the function. */
1840 static built_in_function
1841 mathfn_built_in_2 (tree type, combined_fn fn)
1843 built_in_function fcode, fcodef, fcodel;
1845 switch (fn)
1847 CASE_MATHFN (ACOS)
1848 CASE_MATHFN (ACOSH)
1849 CASE_MATHFN (ASIN)
1850 CASE_MATHFN (ASINH)
1851 CASE_MATHFN (ATAN)
1852 CASE_MATHFN (ATAN2)
1853 CASE_MATHFN (ATANH)
1854 CASE_MATHFN (CBRT)
1855 CASE_MATHFN (CEIL)
1856 CASE_MATHFN (CEXPI)
1857 CASE_MATHFN (COPYSIGN)
1858 CASE_MATHFN (COS)
1859 CASE_MATHFN (COSH)
1860 CASE_MATHFN (DREM)
1861 CASE_MATHFN (ERF)
1862 CASE_MATHFN (ERFC)
1863 CASE_MATHFN (EXP)
1864 CASE_MATHFN (EXP10)
1865 CASE_MATHFN (EXP2)
1866 CASE_MATHFN (EXPM1)
1867 CASE_MATHFN (FABS)
1868 CASE_MATHFN (FDIM)
1869 CASE_MATHFN (FLOOR)
1870 CASE_MATHFN (FMA)
1871 CASE_MATHFN (FMAX)
1872 CASE_MATHFN (FMIN)
1873 CASE_MATHFN (FMOD)
1874 CASE_MATHFN (FREXP)
1875 CASE_MATHFN (GAMMA)
1876 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1877 CASE_MATHFN (HUGE_VAL)
1878 CASE_MATHFN (HYPOT)
1879 CASE_MATHFN (ILOGB)
1880 CASE_MATHFN (ICEIL)
1881 CASE_MATHFN (IFLOOR)
1882 CASE_MATHFN (INF)
1883 CASE_MATHFN (IRINT)
1884 CASE_MATHFN (IROUND)
1885 CASE_MATHFN (ISINF)
1886 CASE_MATHFN (J0)
1887 CASE_MATHFN (J1)
1888 CASE_MATHFN (JN)
1889 CASE_MATHFN (LCEIL)
1890 CASE_MATHFN (LDEXP)
1891 CASE_MATHFN (LFLOOR)
1892 CASE_MATHFN (LGAMMA)
1893 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1894 CASE_MATHFN (LLCEIL)
1895 CASE_MATHFN (LLFLOOR)
1896 CASE_MATHFN (LLRINT)
1897 CASE_MATHFN (LLROUND)
1898 CASE_MATHFN (LOG)
1899 CASE_MATHFN (LOG10)
1900 CASE_MATHFN (LOG1P)
1901 CASE_MATHFN (LOG2)
1902 CASE_MATHFN (LOGB)
1903 CASE_MATHFN (LRINT)
1904 CASE_MATHFN (LROUND)
1905 CASE_MATHFN (MODF)
1906 CASE_MATHFN (NAN)
1907 CASE_MATHFN (NANS)
1908 CASE_MATHFN (NEARBYINT)
1909 CASE_MATHFN (NEXTAFTER)
1910 CASE_MATHFN (NEXTTOWARD)
1911 CASE_MATHFN (POW)
1912 CASE_MATHFN (POWI)
1913 CASE_MATHFN (POW10)
1914 CASE_MATHFN (REMAINDER)
1915 CASE_MATHFN (REMQUO)
1916 CASE_MATHFN (RINT)
1917 CASE_MATHFN (ROUND)
1918 CASE_MATHFN (SCALB)
1919 CASE_MATHFN (SCALBLN)
1920 CASE_MATHFN (SCALBN)
1921 CASE_MATHFN (SIGNBIT)
1922 CASE_MATHFN (SIGNIFICAND)
1923 CASE_MATHFN (SIN)
1924 CASE_MATHFN (SINCOS)
1925 CASE_MATHFN (SINH)
1926 CASE_MATHFN (SQRT)
1927 CASE_MATHFN (TAN)
1928 CASE_MATHFN (TANH)
1929 CASE_MATHFN (TGAMMA)
1930 CASE_MATHFN (TRUNC)
1931 CASE_MATHFN (Y0)
1932 CASE_MATHFN (Y1)
1933 CASE_MATHFN (YN)
1935 default:
1936 return END_BUILTINS;
1939 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1940 return fcode;
1941 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1942 return fcodef;
1943 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1944 return fcodel;
1945 else
1946 return END_BUILTINS;
1949 /* Return the mathematical function equivalent to FN but operating directly
1950 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1951 otherwise use the explicit declaration. If we can't do the conversion,
1952 return null. */
1954 static tree
1955 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1957 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1958 if (fcode2 == END_BUILTINS)
1959 return NULL_TREE;
1961 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1962 return NULL_TREE;
1964 return builtin_decl_explicit (fcode2);
1967 /* Like mathfn_built_in_1, but always use the implicit array. */
1969 tree
1970 mathfn_built_in (tree type, combined_fn fn)
1972 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1975 /* Like mathfn_built_in_1, but take a built_in_function and
1976 always use the implicit array. */
1978 tree
1979 mathfn_built_in (tree type, enum built_in_function fn)
1981 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
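/* For example, assuming the usual C99 math builtins are implicitly
   available, mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields
   the declaration of sqrtf and mathfn_built_in (long_double_type_node,
   BUILT_IN_SQRT) yields sqrtl; for an unsupported type the result is
   NULL_TREE.  (Illustrative sketch of the mapping above.)  */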
1984 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1985 return its code, otherwise return IFN_LAST. Note that this function
1986 only tests whether the function is defined in internals.def, not whether
1987 it is actually available on the target. */
1989 internal_fn
1990 associated_internal_fn (tree fndecl)
1992 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1993 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1994 switch (DECL_FUNCTION_CODE (fndecl))
1996 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1997 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1998 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1999 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2000 #include "internal-fn.def"
2002 CASE_FLT_FN (BUILT_IN_POW10):
2003 return IFN_EXP10;
2005 CASE_FLT_FN (BUILT_IN_DREM):
2006 return IFN_REMAINDER;
2008 CASE_FLT_FN (BUILT_IN_SCALBN):
2009 CASE_FLT_FN (BUILT_IN_SCALBLN):
2010 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2011 return IFN_LDEXP;
2012 return IFN_LAST;
2014 default:
2015 return IFN_LAST;
2019 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2020 on the current target by a call to an internal function, return the
2021 code of that internal function, otherwise return IFN_LAST. The caller
2022 is responsible for ensuring that any side-effects of the built-in
2023 call are dealt with correctly. E.g. if CALL sets errno, the caller
2024 must decide that the errno result isn't needed or make it available
2025 in some other way. */
2027 internal_fn
2028 replacement_internal_fn (gcall *call)
2030 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2032 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2033 if (ifn != IFN_LAST)
2035 tree_pair types = direct_internal_fn_types (ifn, call);
2036 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2037 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2038 return ifn;
2041 return IFN_LAST;
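/* Illustrative sketch: on a target whose sqrt optab is implemented,
   a GIMPLE call to __builtin_sqrt can be replaced by the internal
   function IFN_SQRT.  Per the comment above, the caller must first
   ensure the errno side effect of the library call is not needed
   (e.g. under -fno-math-errno).  */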
2044 /* Expand a call to the builtin trinary math functions (fma).
2045 Return NULL_RTX if a normal call should be emitted rather than expanding the
2046 function in-line. EXP is the expression that is a call to the builtin
2047 function; if convenient, the result should be placed in TARGET.
2048 SUBTARGET may be used as the target for computing one of EXP's
2049 operands. */
2051 static rtx
2052 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2054 optab builtin_optab;
2055 rtx op0, op1, op2, result;
2056 rtx_insn *insns;
2057 tree fndecl = get_callee_fndecl (exp);
2058 tree arg0, arg1, arg2;
2059 machine_mode mode;
2061 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2062 return NULL_RTX;
2064 arg0 = CALL_EXPR_ARG (exp, 0);
2065 arg1 = CALL_EXPR_ARG (exp, 1);
2066 arg2 = CALL_EXPR_ARG (exp, 2);
2068 switch (DECL_FUNCTION_CODE (fndecl))
2070 CASE_FLT_FN (BUILT_IN_FMA):
2071 builtin_optab = fma_optab; break;
2072 default:
2073 gcc_unreachable ();
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2081 return NULL_RTX;
2083 result = gen_reg_rtx (mode);
2085 /* Always stabilize the argument list. */
2086 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2087 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2088 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2090 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2091 op1 = expand_normal (arg1);
2092 op2 = expand_normal (arg2);
2094 start_sequence ();
2096 /* Compute into RESULT.
2097 Set RESULT to wherever the result comes back. */
2098 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2099 result, 0);
2101 /* If we were unable to expand via the builtin, stop the sequence
2102 (without outputting the insns) and call to the library function
2103 with the stabilized argument list. */
2104 if (result == 0)
2106 end_sequence ();
2107 return expand_call (exp, target, target == const0_rtx);
2110 /* Output the entire sequence. */
2111 insns = get_insns ();
2112 end_sequence ();
2113 emit_insn (insns);
2115 return result;
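/* Illustrative source-level use (not part of this file): on a target
   with a native fused multiply-add pattern,

     double r = __builtin_fma (a, b, c);

   is expanded through fma_optab above to a single fma insn; otherwise
   NULL_RTX is returned and a call to the fma library function is
   emitted instead.  */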
2118 /* Expand a call to the builtin sin and cos math functions.
2119 Return NULL_RTX if a normal call should be emitted rather than expanding the
2120 function in-line. EXP is the expression that is a call to the builtin
2121 function; if convenient, the result should be placed in TARGET.
2122 SUBTARGET may be used as the target for computing one of EXP's
2123 operands. */
2125 static rtx
2126 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2128 optab builtin_optab;
2129 rtx op0;
2130 rtx_insn *insns;
2131 tree fndecl = get_callee_fndecl (exp);
2132 machine_mode mode;
2133 tree arg;
2135 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2136 return NULL_RTX;
2138 arg = CALL_EXPR_ARG (exp, 0);
2140 switch (DECL_FUNCTION_CODE (fndecl))
2142 CASE_FLT_FN (BUILT_IN_SIN):
2143 CASE_FLT_FN (BUILT_IN_COS):
2144 builtin_optab = sincos_optab; break;
2145 default:
2146 gcc_unreachable ();
2149 /* Make a suitable register to place result in. */
2150 mode = TYPE_MODE (TREE_TYPE (exp));
2152 /* Check if sincos insn is available, otherwise fallback
2153 to sin or cos insn. */
2154 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2155 switch (DECL_FUNCTION_CODE (fndecl))
2157 CASE_FLT_FN (BUILT_IN_SIN):
2158 builtin_optab = sin_optab; break;
2159 CASE_FLT_FN (BUILT_IN_COS):
2160 builtin_optab = cos_optab; break;
2161 default:
2162 gcc_unreachable ();
2165 /* Before working hard, check whether the instruction is available. */
2166 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2168 rtx result = gen_reg_rtx (mode);
2170 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2171 need to expand the argument again. This way, we will not perform
2172 side-effects more than once. */
2173 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2175 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2177 start_sequence ();
2179 /* Compute into RESULT.
2180 Set RESULT to wherever the result comes back. */
2181 if (builtin_optab == sincos_optab)
2183 int ok;
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SIN):
2188 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2189 break;
2190 CASE_FLT_FN (BUILT_IN_COS):
2191 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2192 break;
2193 default:
2194 gcc_unreachable ();
2196 gcc_assert (ok);
2198 else
2199 result = expand_unop (mode, builtin_optab, op0, result, 0);
2201 if (result != 0)
2203 /* Output the entire sequence. */
2204 insns = get_insns ();
2205 end_sequence ();
2206 emit_insn (insns);
2207 return result;
2210 /* If we were unable to expand via the builtin, stop the sequence
2211 (without outputting the insns) and call to the library function
2212 with the stabilized argument list. */
2213 end_sequence ();
2216 return expand_call (exp, target, target == const0_rtx);
2219 /* Given an interclass math builtin decl FNDECL and its argument ARG
2220 return an RTL instruction code that implements the functionality.
2221 If that isn't possible or available return CODE_FOR_nothing. */
2223 static enum insn_code
2224 interclass_mathfn_icode (tree arg, tree fndecl)
2226 bool errno_set = false;
2227 optab builtin_optab = unknown_optab;
2228 machine_mode mode;
2230 switch (DECL_FUNCTION_CODE (fndecl))
2232 CASE_FLT_FN (BUILT_IN_ILOGB):
2233 errno_set = true; builtin_optab = ilogb_optab; break;
2234 CASE_FLT_FN (BUILT_IN_ISINF):
2235 builtin_optab = isinf_optab; break;
2236 case BUILT_IN_ISNORMAL:
2237 case BUILT_IN_ISFINITE:
2238 CASE_FLT_FN (BUILT_IN_FINITE):
2239 case BUILT_IN_FINITED32:
2240 case BUILT_IN_FINITED64:
2241 case BUILT_IN_FINITED128:
2242 case BUILT_IN_ISINFD32:
2243 case BUILT_IN_ISINFD64:
2244 case BUILT_IN_ISINFD128:
2245 /* These builtins have no optabs (yet). */
2246 break;
2247 default:
2248 gcc_unreachable ();
2251 /* There's no easy way to detect the case we need to set EDOM. */
2252 if (flag_errno_math && errno_set)
2253 return CODE_FOR_nothing;
2255 /* Optab mode depends on the mode of the input argument. */
2256 mode = TYPE_MODE (TREE_TYPE (arg));
2258 if (builtin_optab)
2259 return optab_handler (builtin_optab, mode);
2260 return CODE_FOR_nothing;
2263 /* Expand a call to one of the builtin math functions that operate on
2264 a floating-point argument and output an integer result (ilogb, isinf,
2265 isnan, etc).
2266 Return 0 if a normal call should be emitted rather than expanding the
2267 function in-line. EXP is the expression that is a call to the builtin
2268 function; if convenient, the result should be placed in TARGET. */
2270 static rtx
2271 expand_builtin_interclass_mathfn (tree exp, rtx target)
2273 enum insn_code icode = CODE_FOR_nothing;
2274 rtx op0;
2275 tree fndecl = get_callee_fndecl (exp);
2276 machine_mode mode;
2277 tree arg;
2279 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2282 arg = CALL_EXPR_ARG (exp, 0);
2283 icode = interclass_mathfn_icode (arg, fndecl);
2284 mode = TYPE_MODE (TREE_TYPE (arg));
2286 if (icode != CODE_FOR_nothing)
2288 struct expand_operand ops[1];
2289 rtx_insn *last = get_last_insn ();
2290 tree orig_arg = arg;
2292 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2293 need to expand the argument again. This way, we will not perform
2294 side-effects more than once. */
2295 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2297 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2299 if (mode != GET_MODE (op0))
2300 op0 = convert_to_mode (mode, op0, 0);
2302 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2303 if (maybe_legitimize_operands (icode, 0, 1, ops)
2304 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2305 return ops[0].value;
2307 delete_insns_since (last);
2308 CALL_EXPR_ARG (exp, 0) = orig_arg;
2311 return NULL_RTX;
2314 /* Expand a call to the builtin sincos math function.
2315 Return NULL_RTX if a normal call should be emitted rather than expanding the
2316 function in-line. EXP is the expression that is a call to the builtin
2317 function. */
2319 static rtx
2320 expand_builtin_sincos (tree exp)
2322 rtx op0, op1, op2, target1, target2;
2323 machine_mode mode;
2324 tree arg, sinp, cosp;
2325 int result;
2326 location_t loc = EXPR_LOCATION (exp);
2327 tree alias_type, alias_off;
2329 if (!validate_arglist (exp, REAL_TYPE,
2330 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2331 return NULL_RTX;
2333 arg = CALL_EXPR_ARG (exp, 0);
2334 sinp = CALL_EXPR_ARG (exp, 1);
2335 cosp = CALL_EXPR_ARG (exp, 2);
2337 /* Make a suitable register to place result in. */
2338 mode = TYPE_MODE (TREE_TYPE (arg));
2340 /* Check if sincos insn is available, otherwise emit the call. */
2341 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2342 return NULL_RTX;
2344 target1 = gen_reg_rtx (mode);
2345 target2 = gen_reg_rtx (mode);
2347 op0 = expand_normal (arg);
2348 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2349 alias_off = build_int_cst (alias_type, 0);
2350 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2351 sinp, alias_off));
2352 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2353 cosp, alias_off));
2355 /* Compute into target1 and target2.
2356 Set TARGET to wherever the result comes back. */
2357 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2358 gcc_assert (result);
2360 /* Move target1 and target2 to the memory locations indicated
2361 by op1 and op2. */
2362 emit_move_insn (op1, target1);
2363 emit_move_insn (op2, target2);
2365 return const0_rtx;
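/* Illustrative source-level use (not part of this file):

     double s, c;
     sincos (x, &s, &c);

   On targets with a sincos insn both results are computed by the
   single expansion above; otherwise NULL_RTX is returned and a
   normal library call is emitted.  */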
2368 /* Expand a call to the internal cexpi builtin to the sincos math function.
2369 EXP is the expression that is a call to the builtin function; if convenient,
2370 the result should be placed in TARGET. */
2372 static rtx
2373 expand_builtin_cexpi (tree exp, rtx target)
2375 tree fndecl = get_callee_fndecl (exp);
2376 tree arg, type;
2377 machine_mode mode;
2378 rtx op0, op1, op2;
2379 location_t loc = EXPR_LOCATION (exp);
2381 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2382 return NULL_RTX;
2384 arg = CALL_EXPR_ARG (exp, 0);
2385 type = TREE_TYPE (arg);
2386 mode = TYPE_MODE (TREE_TYPE (arg));
2388 /* Try expanding via a sincos optab, fall back to emitting a libcall
2389 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2390 is only generated from sincos or cexp, or when either of them is available. */
2391 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2393 op1 = gen_reg_rtx (mode);
2394 op2 = gen_reg_rtx (mode);
2396 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2398 /* Compute into op1 and op2. */
2399 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2401 else if (targetm.libc_has_function (function_sincos))
2403 tree call, fn = NULL_TREE;
2404 tree top1, top2;
2405 rtx op1a, op2a;
2407 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2408 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2410 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2411 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2412 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2413 else
2414 gcc_unreachable ();
2416 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2417 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2418 op1a = copy_addr_to_reg (XEXP (op1, 0));
2419 op2a = copy_addr_to_reg (XEXP (op2, 0));
2420 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2421 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2423 /* Make sure not to fold the sincos call again. */
2424 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2425 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2426 call, 3, arg, top1, top2));
2428 else
2430 tree call, fn = NULL_TREE, narg;
2431 tree ctype = build_complex_type (type);
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2439 else
2440 gcc_unreachable ();
2442 /* If we don't have a decl for cexp create one. This is the
2443 friendliest fallback if the user calls __builtin_cexpi
2444 without full target C99 function support. */
2445 if (fn == NULL_TREE)
2447 tree fntype;
2448 const char *name = NULL;
2450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2451 name = "cexpf";
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2453 name = "cexp";
2454 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2455 name = "cexpl";
2457 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2458 fn = build_fn_decl (name, fntype);
2461 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2462 build_real (type, dconst0), arg);
2464 /* Make sure not to fold the cexp call again. */
2465 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2466 return expand_expr (build_call_nary (ctype, call, 1, narg),
2467 target, VOIDmode, EXPAND_NORMAL);
2470 /* Now build the proper return type. */
2471 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2472 make_tree (TREE_TYPE (arg), op2),
2473 make_tree (TREE_TYPE (arg), op1)),
2474 target, VOIDmode, EXPAND_NORMAL);
2477 /* Conveniently construct a function call expression. FNDECL names the
2478 function to be called, N is the number of arguments, and the "..."
2479 parameters are the argument expressions. Unlike build_call_expr
2480 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2482 static tree
2483 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2485 va_list ap;
2486 tree fntype = TREE_TYPE (fndecl);
2487 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2489 va_start (ap, n);
2490 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2491 va_end (ap);
2492 SET_EXPR_LOCATION (fn, loc);
2493 return fn;
2496 /* Expand a call to one of the builtin rounding functions gcc defines
2497 as an extension (lfloor and lceil). As these are gcc extensions we
2498 do not need to worry about setting errno to EDOM.
2499 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2500 EXP is the expression that is a call to the builtin function;
2501 if convenient, the result should be placed in TARGET. */
2503 static rtx
2504 expand_builtin_int_roundingfn (tree exp, rtx target)
2506 convert_optab builtin_optab;
2507 rtx op0, tmp;
2508 rtx_insn *insns;
2509 tree fndecl = get_callee_fndecl (exp);
2510 enum built_in_function fallback_fn;
2511 tree fallback_fndecl;
2512 machine_mode mode;
2513 tree arg;
2515 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2516 gcc_unreachable ();
2518 arg = CALL_EXPR_ARG (exp, 0);
2520 switch (DECL_FUNCTION_CODE (fndecl))
2522 CASE_FLT_FN (BUILT_IN_ICEIL):
2523 CASE_FLT_FN (BUILT_IN_LCEIL):
2524 CASE_FLT_FN (BUILT_IN_LLCEIL):
2525 builtin_optab = lceil_optab;
2526 fallback_fn = BUILT_IN_CEIL;
2527 break;
2529 CASE_FLT_FN (BUILT_IN_IFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LFLOOR):
2531 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2532 builtin_optab = lfloor_optab;
2533 fallback_fn = BUILT_IN_FLOOR;
2534 break;
2536 default:
2537 gcc_unreachable ();
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (exp));
2543 target = gen_reg_rtx (mode);
2545 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2546 need to expand the argument again. This way, we will not perform
2547 side-effects more than once. */
2548 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2550 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2552 start_sequence ();
2554 /* Compute into TARGET. */
2555 if (expand_sfix_optab (target, op0, builtin_optab))
2557 /* Output the entire sequence. */
2558 insns = get_insns ();
2559 end_sequence ();
2560 emit_insn (insns);
2561 return target;
2564 /* If we were unable to expand via the builtin, stop the sequence
2565 (without outputting the insns). */
2566 end_sequence ();
2568 /* Fall back to floating point rounding optab. */
2569 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2571 /* For non-C99 targets we may end up without a fallback fndecl here
2572 if the user called __builtin_lfloor directly. In this case emit
2573 a call to the floor/ceil variants nevertheless. This should result
2574 in the best user experience for targets without full C99 support. */
2575 if (fallback_fndecl == NULL_TREE)
2577 tree fntype;
2578 const char *name = NULL;
2580 switch (DECL_FUNCTION_CODE (fndecl))
2582 case BUILT_IN_ICEIL:
2583 case BUILT_IN_LCEIL:
2584 case BUILT_IN_LLCEIL:
2585 name = "ceil";
2586 break;
2587 case BUILT_IN_ICEILF:
2588 case BUILT_IN_LCEILF:
2589 case BUILT_IN_LLCEILF:
2590 name = "ceilf";
2591 break;
2592 case BUILT_IN_ICEILL:
2593 case BUILT_IN_LCEILL:
2594 case BUILT_IN_LLCEILL:
2595 name = "ceill";
2596 break;
2597 case BUILT_IN_IFLOOR:
2598 case BUILT_IN_LFLOOR:
2599 case BUILT_IN_LLFLOOR:
2600 name = "floor";
2601 break;
2602 case BUILT_IN_IFLOORF:
2603 case BUILT_IN_LFLOORF:
2604 case BUILT_IN_LLFLOORF:
2605 name = "floorf";
2606 break;
2607 case BUILT_IN_IFLOORL:
2608 case BUILT_IN_LFLOORL:
2609 case BUILT_IN_LLFLOORL:
2610 name = "floorl";
2611 break;
2612 default:
2613 gcc_unreachable ();
2616 fntype = build_function_type_list (TREE_TYPE (arg),
2617 TREE_TYPE (arg), NULL_TREE);
2618 fallback_fndecl = build_fn_decl (name, fntype);
2621 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2623 tmp = expand_normal (exp);
2624 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2626 /* Truncate the result of floating point optab to integer
2627 via expand_fix (). */
2628 target = gen_reg_rtx (mode);
2629 expand_fix (target, tmp, 0);
2631 return target;
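/* Illustrative fallback (sketch): if the target has no lfloor
   pattern, a call such as

     long l = __builtin_lfloor (x);

   is lowered as if written (long) floor (x): the floor call is
   expanded via the fallback fndecl built above and the result is
   truncated to integer with expand_fix.  */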
2634 /* Expand a call to one of the builtin math functions doing integer
2635 conversion (lrint).
2636 Return 0 if a normal call should be emitted rather than expanding the
2637 function in-line. EXP is the expression that is a call to the builtin
2638 function; if convenient, the result should be placed in TARGET. */
2640 static rtx
2641 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2643 convert_optab builtin_optab;
2644 rtx op0;
2645 rtx_insn *insns;
2646 tree fndecl = get_callee_fndecl (exp);
2647 tree arg;
2648 machine_mode mode;
2649 enum built_in_function fallback_fn = BUILT_IN_NONE;
2651 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2652 gcc_unreachable ();
2654 arg = CALL_EXPR_ARG (exp, 0);
2656 switch (DECL_FUNCTION_CODE (fndecl))
2658 CASE_FLT_FN (BUILT_IN_IRINT):
2659 fallback_fn = BUILT_IN_LRINT;
2660 gcc_fallthrough ();
2661 CASE_FLT_FN (BUILT_IN_LRINT):
2662 CASE_FLT_FN (BUILT_IN_LLRINT):
2663 builtin_optab = lrint_optab;
2664 break;
2666 CASE_FLT_FN (BUILT_IN_IROUND):
2667 fallback_fn = BUILT_IN_LROUND;
2668 gcc_fallthrough ();
2669 CASE_FLT_FN (BUILT_IN_LROUND):
2670 CASE_FLT_FN (BUILT_IN_LLROUND):
2671 builtin_optab = lround_optab;
2672 break;
2674 default:
2675 gcc_unreachable ();
2678 /* There's no easy way to detect the case we need to set EDOM. */
2679 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2680 return NULL_RTX;
2682 /* Make a suitable register to place result in. */
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2685 /* There's no easy way to detect the case we need to set EDOM. */
2686 if (!flag_errno_math)
2688 rtx result = gen_reg_rtx (mode);
2690 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2691 need to expand the argument again. This way, we will not perform
2692 side-effects more than once. */
2693 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2695 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2697 start_sequence ();
2699 if (expand_sfix_optab (result, op0, builtin_optab))
2701 /* Output the entire sequence. */
2702 insns = get_insns ();
2703 end_sequence ();
2704 emit_insn (insns);
2705 return result;
2708 /* If we were unable to expand via the builtin, stop the sequence
2709 (without outputting the insns) and call to the library function
2710 with the stabilized argument list. */
2711 end_sequence ();
2714 if (fallback_fn != BUILT_IN_NONE)
2716 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2717 targets, (int) round (x) should never be transformed into
2718 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2719 a call to lround in the hope that the target provides at least some
2720 C99 functions. This should result in the best user experience for
2721 targets without full C99 support. */
2722 tree fallback_fndecl = mathfn_built_in_1
2723 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2725 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2726 fallback_fndecl, 1, arg);
2728 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2729 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2730 return convert_to_mode (mode, target, 0);
2733 return expand_call (exp, target, target == const0_rtx);
2736 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2737 a normal call should be emitted rather than expanding the function
2738 in-line. EXP is the expression that is a call to the builtin
2739 function; if convenient, the result should be placed in TARGET. */
2741 static rtx
2742 expand_builtin_powi (tree exp, rtx target)
2744 tree arg0, arg1;
2745 rtx op0, op1;
2746 machine_mode mode;
2747 machine_mode mode2;
2749 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2750 return NULL_RTX;
2752 arg0 = CALL_EXPR_ARG (exp, 0);
2753 arg1 = CALL_EXPR_ARG (exp, 1);
2754 mode = TYPE_MODE (TREE_TYPE (exp));
2756 /* Emit a libcall to libgcc. */
2758 /* Mode of the 2nd argument must match that of an int. */
2759 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2761 if (target == NULL_RTX)
2762 target = gen_reg_rtx (mode);
2764 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2765 if (GET_MODE (op0) != mode)
2766 op0 = convert_to_mode (mode, op0, 0);
2767 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2768 if (GET_MODE (op1) != mode2)
2769 op1 = convert_to_mode (mode2, op1, 0);
2771 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2772 target, LCT_CONST, mode,
2773 op0, mode, op1, mode2);
2775 return target;
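/* Illustrative use (sketch): __builtin_powi (x, n) with double X is
   expanded to a libcall through powi_optab for DFmode; assuming the
   usual libgcc libfunc names, that is a call to __powidf2.  */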
2778 /* Expand expression EXP which is a call to the strlen builtin. Return
2779 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2780 try to get the result in TARGET, if convenient. */
2782 static rtx
2783 expand_builtin_strlen (tree exp, rtx target,
2784 machine_mode target_mode)
2786 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2787 return NULL_RTX;
2788 else
2790 struct expand_operand ops[4];
2791 rtx pat;
2792 tree len;
2793 tree src = CALL_EXPR_ARG (exp, 0);
2794 rtx src_reg;
2795 rtx_insn *before_strlen;
2796 machine_mode insn_mode;
2797 enum insn_code icode = CODE_FOR_nothing;
2798 unsigned int align;
2800 /* If the length can be computed at compile-time, return it. */
2801 len = c_strlen (src, 0);
2802 if (len)
2803 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2805 /* If the length can be computed at compile-time and is constant
2806 integer, but there are side-effects in src, evaluate
2807 src for side-effects, then return len.
2808 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2809 can be optimized into: i++; x = 3; */
2810 len = c_strlen (src, 1);
2811 if (len && TREE_CODE (len) == INTEGER_CST)
2813 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2814 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2817 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2819 /* If SRC is not a pointer type, don't do this operation inline. */
2820 if (align == 0)
2821 return NULL_RTX;
2823 /* Bail out if we can't compute strlen in the right mode. */
2824 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2826 icode = optab_handler (strlen_optab, insn_mode);
2827 if (icode != CODE_FOR_nothing)
2828 break;
2830 if (insn_mode == VOIDmode)
2831 return NULL_RTX;
2833 /* Make a place to hold the source address. We will not expand
2834 the actual source until we are sure that the expansion will
2835 not fail -- there are trees that cannot be expanded twice. */
2836 src_reg = gen_reg_rtx (Pmode);
2838 /* Mark the beginning of the strlen sequence so we can emit the
2839 source operand later. */
2840 before_strlen = get_last_insn ();
2842 create_output_operand (&ops[0], target, insn_mode);
2843 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2844 create_integer_operand (&ops[2], 0);
2845 create_integer_operand (&ops[3], align);
2846 if (!maybe_expand_insn (icode, 4, ops))
2847 return NULL_RTX;
2849 /* Now that we are assured of success, expand the source. */
2850 start_sequence ();
2851 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2852 if (pat != src_reg)
2854 #ifdef POINTERS_EXTEND_UNSIGNED
2855 if (GET_MODE (pat) != Pmode)
2856 pat = convert_to_mode (Pmode, pat,
2857 POINTERS_EXTEND_UNSIGNED);
2858 #endif
2859 emit_move_insn (src_reg, pat);
2861 pat = get_insns ();
2862 end_sequence ();
2864 if (before_strlen)
2865 emit_insn_after (pat, before_strlen);
2866 else
2867 emit_insn_before (pat, get_insns ());
2869 /* Return the value in the proper mode for this function. */
2870 if (GET_MODE (ops[0].value) == target_mode)
2871 target = ops[0].value;
2872 else if (target != 0)
2873 convert_move (target, ops[0].value, 0);
2874 else
2875 target = convert_to_mode (target_mode, ops[0].value, 0);
2877 return target;
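/* Illustrative effect of the compile-time paths above (sketch):

     size_t n = strlen ("hello");            ->  expands to the constant 5
     size_t m = strlen (i++ ? "ab" : "cd");  ->  i++ is evaluated, m = 2

   Only when no constant length is known does the strlen insn or a
   library call get used.  */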
2881 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2882 bytes from constant string DATA + OFFSET and return it as target
2883 constant. */
2885 static rtx
2886 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2887 scalar_int_mode mode)
2889 const char *str = (const char *) data;
2891 gcc_assert (offset >= 0
2892 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2893 <= strlen (str) + 1));
2895 return c_readstr (str + offset, mode);
2898 /* LEN specifies the length of the block for the memcpy/memset operation.
2899 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2900 In some cases we can make a very likely guess at the maximum size and
2901 record it in PROBABLE_MAX_SIZE. */
2903 static void
2904 determine_block_size (tree len, rtx len_rtx,
2905 unsigned HOST_WIDE_INT *min_size,
2906 unsigned HOST_WIDE_INT *max_size,
2907 unsigned HOST_WIDE_INT *probable_max_size)
2909 if (CONST_INT_P (len_rtx))
2911 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2912 return;
2914 else
2916 wide_int min, max;
2917 enum value_range_type range_type = VR_UNDEFINED;
2919 /* Determine bounds from the type. */
2920 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2921 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2922 else
2923 *min_size = 0;
2924 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2925 *probable_max_size = *max_size
2926 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2927 else
2928 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2930 if (TREE_CODE (len) == SSA_NAME)
2931 range_type = get_range_info (len, &min, &max);
2932 if (range_type == VR_RANGE)
2934 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2935 *min_size = min.to_uhwi ();
2936 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2937 *probable_max_size = *max_size = max.to_uhwi ();
2939 else if (range_type == VR_ANTI_RANGE)
2941 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2942 if (min == 0)
2944 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2945 *min_size = max.to_uhwi () + 1;
2947 /* Code like
2949 int n;
2950 if (n < 100)
2951 memcpy (a, b, n)
2953 produces an anti-range allowing negative values of N. We can still
2954 use that information and guess that N is not negative. */
2956 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2957 *probable_max_size = min.to_uhwi () - 1;
2960 gcc_checking_assert (*max_size <=
2961 (unsigned HOST_WIDE_INT)
2962 GET_MODE_MASK (GET_MODE (len_rtx)));
2965 /* Try to verify that the sizes and lengths of the arguments to a string
2966 manipulation function given by EXP are within valid bounds and that
2967 the operation does not lead to buffer overflow. Arguments other than
2968 EXP may be null. When non-null, the arguments have the following
2969 meaning:
2970 SIZE is the user-supplied size argument to the function (such as in
2971 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
2972 number of bytes to write.
2973 MAXLEN is the user-supplied bound on the length of the source sequence
2974 (such as in strncat(d, s, N)). It specifies the upper limit on the number
2975 of bytes to write.
2976 SRC is the source string (such as in strcpy(d, s)) when the expression
2977 EXP is a string function call (as opposed to a memory call like memcpy).
2978 As an exception, SRC can also be an integer denoting the precomputed
2979 size of the source string or object (for functions like memcpy).
2980 OBJSIZE is the size of the destination object specified by the last
2981 argument to the _chk builtins, typically resulting from the expansion
2982 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
2983 OBJSIZE).
2985 When SIZE is null, MAXLEN is checked to verify that it doesn't exceed
2986 SIZE_MAX.
2988 If the call is successfully verified as safe from buffer overflow
2989 the function returns true, otherwise false. */
2991 static bool
2992 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
2994 /* The size of the largest object is half the address space, or
2995 SSIZE_MAX. (This is way too permissive.) */
2996 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
2998 tree slen = NULL_TREE;
3000 tree range[2] = { NULL_TREE, NULL_TREE };
3002 /* Set to true when the exact number of bytes written by a string
3003 function like strcpy is not known and the only thing that is
3004 known is that it must be at least one (for the terminating nul). */
3005 bool at_least_one = false;
3006 if (src)
3008 /* SRC is normally a pointer to string but as a special case
3009 it can be an integer denoting the length of a string. */
3010 if (POINTER_TYPE_P (TREE_TYPE (src)))
3012 /* Try to determine the range of lengths the source string
3013 refers to. If it can be determined and is less than
3014 the upper bound given by MAXLEN add one to it for
3015 the terminating nul. Otherwise, set it to one for
3016 the same reason, or to MAXLEN as appropriate. */
3017 get_range_strlen (src, range);
3018 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3020 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3021 range[0] = range[1] = maxlen;
3022 else
3023 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3024 range[0], size_one_node);
3026 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3027 range[1] = maxlen;
3028 else if (!integer_all_onesp (range[1]))
3029 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3030 range[1], size_one_node);
3032 slen = range[0];
3034 else
3036 at_least_one = true;
3037 slen = size_one_node;
3040 else
3041 slen = src;
3044 if (!size && !maxlen)
3046 /* When the only available piece of data is the object size
3047 there is nothing to do. */
3048 if (!slen)
3049 return true;
3051 /* Otherwise, when the length of the source sequence is known
3052 (as with strlen), set SIZE to it. */
3053 if (!range[0])
3054 size = slen;
3057 if (!objsize)
3058 objsize = maxobjsize;
3060 /* The SIZE is exact if it's non-null, constant, and in range of
3061 unsigned HOST_WIDE_INT. */
3062 bool exactsize = size && tree_fits_uhwi_p (size);
3064 if (size)
3065 get_size_range (size, range);
3067 /* First check the number of bytes to be written against the maximum
3068 object size. */
3069 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3071 location_t loc = tree_nonartificial_location (exp);
3072 loc = expansion_point_location_if_in_system_header (loc);
3074 if (range[0] == range[1])
3075 warning_at (loc, opt,
3076 "%K%qD specified size %E "
3077 "exceeds maximum object size %E",
3078 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3079 else
3080 warning_at (loc, opt,
3081 "%K%qD specified size between %E and %E "
3082 "exceeds maximum object size %E",
3083 exp, get_callee_fndecl (exp),
3084 range[0], range[1], maxobjsize);
3085 return false;
3088 /* Next check the number of bytes to be written against the destination
3089 object size. */
3090 if (range[0] || !exactsize || integer_all_onesp (size))
3092 if (range[0]
3093 && ((tree_fits_uhwi_p (objsize)
3094 && tree_int_cst_lt (objsize, range[0]))
3095 || (tree_fits_uhwi_p (size)
3096 && tree_int_cst_lt (size, range[0]))))
3098 location_t loc = tree_nonartificial_location (exp);
3099 loc = expansion_point_location_if_in_system_header (loc);
3101 if (size == slen && at_least_one)
3103 /* This is a call to strcpy with a destination of 0 size
3104 and a source of unknown length. The call will write
3105 at least one byte past the end of the destination. */
3106 warning_at (loc, opt,
3107 "%K%qD writing %E or more bytes into a region "
3108 "of size %E overflows the destination",
3109 exp, get_callee_fndecl (exp), range[0], objsize);
3111 else if (tree_int_cst_equal (range[0], range[1]))
3112 warning_at (loc, opt,
3113 (integer_onep (range[0])
3114 ? G_("%K%qD writing %E byte into a region "
3115 "of size %E overflows the destination")
3116 : G_("%K%qD writing %E bytes into a region "
3117 "of size %E overflows the destination")),
3118 exp, get_callee_fndecl (exp), range[0], objsize);
3119 else if (tree_int_cst_sign_bit (range[1]))
3121 /* Avoid printing the upper bound if it's invalid. */
3122 warning_at (loc, opt,
3123 "%K%qD writing %E or more bytes into a region "
3124 "of size %E overflows the destination",
3125 exp, get_callee_fndecl (exp), range[0], objsize);
3127 else
3128 warning_at (loc, opt,
3129 "%K%qD writing between %E and %E bytes into "
3130 "a region of size %E overflows the destination",
3131 exp, get_callee_fndecl (exp), range[0], range[1],
3132 objsize);
3134 /* Return error when an overflow has been detected. */
3135 return false;
3139 /* Check the maximum length of the source sequence against the size
3140 of the destination object if known, or against the maximum size
3141 of an object. */
3142 if (maxlen)
3144 get_size_range (maxlen, range);
3146 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3148 location_t loc = tree_nonartificial_location (exp);
3149 loc = expansion_point_location_if_in_system_header (loc);
3151 if (tree_int_cst_lt (maxobjsize, range[0]))
3153 /* Warn about crazy big sizes first since that's more
3154 likely to be meaningful than saying that the bound
3155 is greater than the object size if both are big. */
3156 if (range[0] == range[1])
3157 warning_at (loc, opt,
3158 "%K%qD specified bound %E "
3159 "exceeds maximum object size %E",
3160 exp, get_callee_fndecl (exp),
3161 range[0], maxobjsize);
3162 else
3163 warning_at (loc, opt,
3164 "%K%qD specified bound between %E and %E "
3165 "exceeds maximum object size %E",
3166 exp, get_callee_fndecl (exp),
3167 range[0], range[1], maxobjsize);
3169 return false;
3172 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3174 if (tree_int_cst_equal (range[0], range[1]))
3175 warning_at (loc, opt,
3176 "%K%qD specified bound %E "
3177 "exceeds destination size %E",
3178 exp, get_callee_fndecl (exp),
3179 range[0], objsize);
3180 else
3181 warning_at (loc, opt,
3182 "%K%qD specified bound between %E and %E "
3183 "exceeds destination size %E",
3184 exp, get_callee_fndecl (exp),
3185 range[0], range[1], objsize);
3186 return false;
3191 if (slen
3192 && slen == src
3193 && size && range[0]
3194 && tree_int_cst_lt (slen, range[0]))
3196 location_t loc = tree_nonartificial_location (exp);
3198 if (tree_int_cst_equal (range[0], range[1]))
3199 warning_at (loc, opt,
3200 (tree_int_cst_equal (range[0], integer_one_node)
3201 ? G_("%K%qD reading %E byte from a region of size %E")
3202 : G_("%K%qD reading %E bytes from a region of size %E")),
3203 exp, get_callee_fndecl (exp), range[0], slen);
3204 else if (tree_int_cst_sign_bit (range[1]))
3206 /* Avoid printing the upper bound if it's invalid. */
3207 warning_at (loc, opt,
3208 "%K%qD reading %E or more bytes from a region "
3209 "of size %E",
3210 exp, get_callee_fndecl (exp), range[0], slen);
3212 else
3213 warning_at (loc, opt,
3214 "%K%qD reading between %E and %E bytes from a region "
3215 "of size %E",
3216 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3217 return false;
3220 return true;
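/* Example of a call diagnosed by the checks above (sketch), with
   -Wstringop-overflow enabled:

     char d[3];
     memcpy (d, s, 5);   ->  warning: 'memcpy' writing 5 bytes into a
                             region of size 3 overflows the destination
*/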
3223 /* Helper to compute the size of the object referenced by the DEST
3224 expression, which must be of pointer type, using Object Size type
3225 OSTYPE (only the least significant 2 bits are used). Return
3226 the size of the object if successful or NULL when the size cannot
3227 be determined. */
3229 static inline tree
3230 compute_objsize (tree dest, int ostype)
3232 unsigned HOST_WIDE_INT size;
3233 if (compute_builtin_object_size (dest, ostype & 3, &size))
3234 return build_int_cst (sizetype, size);
3236 return NULL_TREE;
3239 /* Helper to determine and check the sizes of the source and the destination
3240 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3241 call expression, DEST is the destination argument, SRC is the source
3242 argument or null, and LEN is the number of bytes. Use Object Size type-0
3243 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3244 (no overflow or invalid sizes), false otherwise. */
3246 static bool
3247 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3249 if (!warn_stringop_overflow)
3250 return true;
3252 /* For functions like memset and memcpy that operate on raw memory
3253 try to determine the size of the largest source and destination
3254 object using type-0 Object Size regardless of the object size
3255 type specified by the option. */
3256 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3257 tree dstsize = compute_objsize (dest, 0);
3259 return check_sizes (OPT_Wstringop_overflow_, exp,
3260 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3263 /* Validate memchr arguments without performing any expansion.
3264 Return NULL_RTX. */
3266 static rtx
3267 expand_builtin_memchr (tree exp, rtx)
3269 if (!validate_arglist (exp,
3270 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3271 return NULL_RTX;
3273 tree arg1 = CALL_EXPR_ARG (exp, 0);
3274 tree len = CALL_EXPR_ARG (exp, 2);
3276 /* Diagnose calls where the specified length exceeds the size
3277 of the object. */
3278 if (warn_stringop_overflow)
3280 tree size = compute_objsize (arg1, 0);
3281 check_sizes (OPT_Wstringop_overflow_,
3282 exp, len, /*maxlen=*/NULL_TREE,
3283 size, /*objsize=*/NULL_TREE);
3286 return NULL_RTX;
3289 /* Expand a call EXP to the memcpy builtin.
3290 Return NULL_RTX if we failed; the caller should emit a normal call,
3291 otherwise try to get the result in TARGET, if convenient (and in
3292 mode MODE if that's convenient). */
3294 static rtx
3295 expand_builtin_memcpy (tree exp, rtx target)
3297 if (!validate_arglist (exp,
3298 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3299 return NULL_RTX;
3301 tree dest = CALL_EXPR_ARG (exp, 0);
3302 tree src = CALL_EXPR_ARG (exp, 1);
3303 tree len = CALL_EXPR_ARG (exp, 2);
3305 check_memop_sizes (exp, dest, src, len);
3307 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3308 /*endp=*/ 0);
3311 /* Check a call EXP to the memmove built-in for validity.
3312 Return NULL_RTX on both success and failure. */
3314 static rtx
3315 expand_builtin_memmove (tree exp, rtx)
3317 if (!validate_arglist (exp,
3318 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3321 tree dest = CALL_EXPR_ARG (exp, 0);
3322 tree src = CALL_EXPR_ARG (exp, 1);
3323 tree len = CALL_EXPR_ARG (exp, 2);
3325 check_memop_sizes (exp, dest, src, len);
3327 return NULL_RTX;
3330 /* Expand an instrumented call EXP to the memcpy builtin.
3331 Return NULL_RTX if we failed; the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3335 static rtx
3336 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3338 if (!validate_arglist (exp,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 INTEGER_TYPE, VOID_TYPE))
3342 return NULL_RTX;
3343 else
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 2);
3347 tree len = CALL_EXPR_ARG (exp, 4);
3348 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3349 /*end_p=*/ 0);
3351 /* Return src bounds with the result. */
3352 if (res)
3354 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3355 expand_normal (CALL_EXPR_ARG (exp, 1)));
3356 res = chkp_join_splitted_slot (res, bnd);
3358 return res;
3362 /* Expand a call EXP to the mempcpy builtin.
3363 Return NULL_RTX if we failed; the caller should emit a normal call,
3364 otherwise try to get the result in TARGET, if convenient (and in
3365 mode MODE if that's convenient). If ENDP is 0 return the
3366 destination pointer, if ENDP is 1 return the end pointer ala
3367 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3368 stpcpy. */
3370 static rtx
3371 expand_builtin_mempcpy (tree exp, rtx target)
3373 if (!validate_arglist (exp,
3374 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3375 return NULL_RTX;
3377 tree dest = CALL_EXPR_ARG (exp, 0);
3378 tree src = CALL_EXPR_ARG (exp, 1);
3379 tree len = CALL_EXPR_ARG (exp, 2);
3381 /* Avoid expanding mempcpy into memcpy when the call is determined
3382 to overflow the buffer. This also prevents the same overflow
3383 from being diagnosed again when expanding memcpy. */
3384 if (!check_memop_sizes (exp, dest, src, len))
3385 return NULL_RTX;
3387 return expand_builtin_mempcpy_args (dest, src, len,
3388 target, exp, /*endp=*/ 1);
3391 /* Expand an instrumented call EXP to the mempcpy builtin.
3392 Return NULL_RTX if we failed; the caller should emit a normal call,
3393 otherwise try to get the result in TARGET, if convenient (and in
3394 mode MODE if that's convenient). */
3396 static rtx
3397 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3399 if (!validate_arglist (exp,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3402 INTEGER_TYPE, VOID_TYPE))
3403 return NULL_RTX;
3404 else
3406 tree dest = CALL_EXPR_ARG (exp, 0);
3407 tree src = CALL_EXPR_ARG (exp, 2);
3408 tree len = CALL_EXPR_ARG (exp, 4);
3409 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3410 exp, 1);
3412 /* Return src bounds with the result. */
3413 if (res)
3415 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3416 expand_normal (CALL_EXPR_ARG (exp, 1)));
3417 res = chkp_join_splitted_slot (res, bnd);
3419 return res;
3423 /* Helper function to do the actual work for expand of memory copy family
3424 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3425 of memory from SRC to DEST and assign to TARGET if convenient.
3426 If ENDP is 0 return the
3427 destination pointer, if ENDP is 1 return the end pointer ala
3428 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3429 stpcpy. */
3431 static rtx
3432 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3433 rtx target, tree exp, int endp)
3435 const char *src_str;
3436 unsigned int src_align = get_pointer_alignment (src);
3437 unsigned int dest_align = get_pointer_alignment (dest);
3438 rtx dest_mem, src_mem, dest_addr, len_rtx;
3439 HOST_WIDE_INT expected_size = -1;
3440 unsigned int expected_align = 0;
3441 unsigned HOST_WIDE_INT min_size;
3442 unsigned HOST_WIDE_INT max_size;
3443 unsigned HOST_WIDE_INT probable_max_size;
3445 /* If DEST is not a pointer type, call the normal function. */
3446 if (dest_align == 0)
3447 return NULL_RTX;
3449 /* If either SRC is not a pointer type, don't do this
3450 operation in-line. */
3451 if (src_align == 0)
3452 return NULL_RTX;
3454 if (currently_expanding_gimple_stmt)
3455 stringop_block_profile (currently_expanding_gimple_stmt,
3456 &expected_align, &expected_size);
3458 if (expected_align < dest_align)
3459 expected_align = dest_align;
3460 dest_mem = get_memory_rtx (dest, len);
3461 set_mem_align (dest_mem, dest_align);
3462 len_rtx = expand_normal (len);
3463 determine_block_size (len, len_rtx, &min_size, &max_size,
3464 &probable_max_size);
3465 src_str = c_getstr (src);
3467 /* If SRC is a string constant and block move would be done
3468 by pieces, we can avoid loading the string from memory
3469 and store only the computed constants. */
3470 if (src_str
3471 && CONST_INT_P (len_rtx)
3472 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3473 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3474 CONST_CAST (char *, src_str),
3475 dest_align, false))
3477 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3478 builtin_memcpy_read_str,
3479 CONST_CAST (char *, src_str),
3480 dest_align, false, endp);
3481 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3482 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3483 return dest_mem;
3486 src_mem = get_memory_rtx (src, len);
3487 set_mem_align (src_mem, src_align);
3489 /* Copy word part most expediently. */
3490 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3491 CALL_EXPR_TAILCALL (exp)
3492 && (endp == 0 || target == const0_rtx)
3493 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3494 expected_align, expected_size,
3495 min_size, max_size, probable_max_size);
3497 if (dest_addr == 0)
3499 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3500 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3503 if (endp && target != const0_rtx)
3505 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3506 /* For stpcpy, point at the last byte (the terminating nul). */
3507 if (endp == 2)
3508 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3511 return dest_addr;
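/* An illustration of the ENDP convention (an editorial sketch, not
   part of the original source): given

     char buf[8];
     memcpy (buf, "abcd", 5);     ENDP == 0: returns buf
     mempcpy (buf, "abcd", 5);    ENDP == 1: returns buf + 5
     stpcpy (buf, "abcd");        ENDP == 2: returns buf + 4,
                                  the address of the terminating nul.  */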
3514 static rtx
3515 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3516 rtx target, tree orig_exp, int endp)
3518 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3519 endp);
3522 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3523 we failed, the caller should emit a normal call, otherwise try to
3524 get the result in TARGET, if convenient. If ENDP is 0 return the
3525 destination pointer, if ENDP is 1 return the end pointer ala
3526 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3527 stpcpy. */
3529 static rtx
3530 expand_movstr (tree dest, tree src, rtx target, int endp)
3532 struct expand_operand ops[3];
3533 rtx dest_mem;
3534 rtx src_mem;
3536 if (!targetm.have_movstr ())
3537 return NULL_RTX;
3539 dest_mem = get_memory_rtx (dest, NULL);
3540 src_mem = get_memory_rtx (src, NULL);
3541 if (!endp)
3543 target = force_reg (Pmode, XEXP (dest_mem, 0));
3544 dest_mem = replace_equiv_address (dest_mem, target);
3547 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3548 create_fixed_operand (&ops[1], dest_mem);
3549 create_fixed_operand (&ops[2], src_mem);
3550 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3551 return NULL_RTX;
3553 if (endp && target != const0_rtx)
3555 target = ops[0].value;
3556 /* movstr is supposed to set end to the address of the NUL
3557 terminator. If the caller requested a mempcpy-like return value,
3558 adjust it. */
3559 if (endp == 1)
3561 rtx tem = plus_constant (GET_MODE (target),
3562 gen_lowpart (GET_MODE (target), target), 1);
3563 emit_move_insn (target, force_operand (tem, NULL_RTX));
3566 return target;
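/* Illustration (example only): movstr leaves END pointing at the
   terminating nul, which is already the stpcpy (ENDP == 2) result;
   for the mempcpy convention (ENDP == 1) the adjustment above adds 1
   so the result points one past the nul.  */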
3569 /* Do some very basic size validation of a call to the strcat builtin
3570 given by EXP. Return NULL_RTX to have the built-in expand to a call
3571 to the library function. */
3573 static rtx
3574 expand_builtin_strcat (tree exp, rtx)
3576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3577 || !warn_stringop_overflow)
3578 return NULL_RTX;
3580 tree dest = CALL_EXPR_ARG (exp, 0);
3581 tree src = CALL_EXPR_ARG (exp, 1);
3583 /* There is no way here to determine the length of the string in
3584 the destination to which the SRC string is being appended, so
3585 just diagnose cases when the source string is longer than
3586 the destination object. */
3588 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3590 check_sizes (OPT_Wstringop_overflow_,
3591 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3593 return NULL_RTX;
3596 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3597 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3598 try to get the result in TARGET, if convenient (and in mode MODE if that's
3599 convenient). */
3601 static rtx
3602 expand_builtin_strcpy (tree exp, rtx target)
3604 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3605 return NULL_RTX;
3607 tree dest = CALL_EXPR_ARG (exp, 0);
3608 tree src = CALL_EXPR_ARG (exp, 1);
3610 if (warn_stringop_overflow)
3612 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3613 check_sizes (OPT_Wstringop_overflow_,
3614 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3617 return expand_builtin_strcpy_args (dest, src, target);
3620 /* Helper function to do the actual work for expand_builtin_strcpy. The
3621 arguments to the builtin_strcpy call DEST and SRC are broken out
3622 so that this can also be called without constructing an actual CALL_EXPR.
3623 The other arguments and return value are the same as for
3624 expand_builtin_strcpy. */
3626 static rtx
3627 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3629 return expand_movstr (dest, src, target, /*endp=*/0);
3632 /* Expand a call EXP to the stpcpy builtin.
3633 Return NULL_RTX if we failed; the caller should emit a normal call,
3634 otherwise try to get the result in TARGET, if convenient (and in
3635 mode MODE if that's convenient). */
3637 static rtx
3638 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3640 tree dst, src;
3641 location_t loc = EXPR_LOCATION (exp);
3643 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3644 return NULL_RTX;
3646 dst = CALL_EXPR_ARG (exp, 0);
3647 src = CALL_EXPR_ARG (exp, 1);
3649 if (warn_stringop_overflow)
3651 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3652 check_sizes (OPT_Wstringop_overflow_,
3653 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3656 /* If return value is ignored, transform stpcpy into strcpy. */
3657 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3659 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3660 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3661 return expand_expr (result, target, mode, EXPAND_NORMAL);
3663 else
3665 tree len, lenp1;
3666 rtx ret;
3668 /* Ensure we get an actual string whose length can be evaluated at
3669 compile-time, not an expression containing a string. This is
3670 because the latter will potentially produce pessimized code
3671 when used to produce the return value. */
3672 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3673 return expand_movstr (dst, src, target, /*endp=*/2);
3675 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3676 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3677 target, exp, /*endp=*/2);
3679 if (ret)
3680 return ret;
3682 if (TREE_CODE (len) == INTEGER_CST)
3684 rtx len_rtx = expand_normal (len);
3686 if (CONST_INT_P (len_rtx))
3688 ret = expand_builtin_strcpy_args (dst, src, target);
3690 if (ret)
3692 if (! target)
3694 if (mode != VOIDmode)
3695 target = gen_reg_rtx (mode);
3696 else
3697 target = gen_reg_rtx (GET_MODE (ret));
3699 if (GET_MODE (target) != GET_MODE (ret))
3700 ret = gen_lowpart (GET_MODE (target), ret);
3702 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3703 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3704 gcc_assert (ret);
3706 return target;
3711 return expand_movstr (dst, src, target, /*endp=*/2);
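/* Example of the transformation above (a sketch, not from the
   source): when the result is unused,

     (void) stpcpy (d, s);

   is expanded as if it were

     strcpy (d, s);

   which avoids computing the return value d + strlen (s).  */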
3715 /* Check a call EXP to the stpncpy built-in for validity.
3716 Return NULL_RTX on both success and failure. */
3718 static rtx
3719 expand_builtin_stpncpy (tree exp, rtx)
3721 if (!validate_arglist (exp,
3722 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3723 || !warn_stringop_overflow)
3724 return NULL_RTX;
3726 /* The source and destination of the call. */
3727 tree dest = CALL_EXPR_ARG (exp, 0);
3728 tree src = CALL_EXPR_ARG (exp, 1);
3730 /* The exact number of bytes to write (not the maximum). */
3731 tree len = CALL_EXPR_ARG (exp, 2);
3733 /* The size of the destination object. */
3734 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3736 check_sizes (OPT_Wstringop_overflow_,
3737 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3739 return NULL_RTX;
3742 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3743 bytes from constant string DATA + OFFSET and return it as target
3744 constant. */
3746 static rtx
3747 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3748 scalar_int_mode mode)
3750 const char *str = (const char *) data;
3752 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3753 return const0_rtx;
3755 return c_readstr (str + offset, mode);
3758 /* Helper to check the sizes of sequences and the destination of calls
3759 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3760 success (no overflow or invalid sizes), false otherwise. */
3762 static bool
3763 check_strncat_sizes (tree exp, tree objsize)
3765 tree dest = CALL_EXPR_ARG (exp, 0);
3766 tree src = CALL_EXPR_ARG (exp, 1);
3767 tree maxlen = CALL_EXPR_ARG (exp, 2);
3769 /* Try to determine the range of lengths that the source expression
3770 refers to. */
3771 tree lenrange[2];
3772 get_range_strlen (src, lenrange);
3774 /* Try to verify that the destination is big enough for the shortest
3775 string. */
3777 if (!objsize && warn_stringop_overflow)
3779 /* If it hasn't been provided by __strncat_chk, try to determine
3780 the size of the destination object into which the source is
3781 being copied. */
3782 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3785 /* Add one for the terminating nul. */
3786 tree srclen = (lenrange[0]
3787 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3788 size_one_node)
3789 : NULL_TREE);
3791 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3792 nul so the specified upper bound should never be equal to (or greater
3793 than) the size of the destination. */
3794 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3795 && tree_int_cst_equal (objsize, maxlen))
3797 location_t loc = tree_nonartificial_location (exp);
3798 loc = expansion_point_location_if_in_system_header (loc);
3800 warning_at (loc, OPT_Wstringop_overflow_,
3801 "%K%qD specified bound %E equals destination size",
3802 exp, get_callee_fndecl (exp), maxlen);
3804 return false;
3807 if (!srclen
3808 || (maxlen && tree_fits_uhwi_p (maxlen)
3809 && tree_fits_uhwi_p (srclen)
3810 && tree_int_cst_lt (maxlen, srclen)))
3811 srclen = maxlen;
3813 /* The number of bytes to write is LEN but check_sizes will also
3814 check SRCLEN if LEN's value isn't known. */
3815 return check_sizes (OPT_Wstringop_overflow_,
3816 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3819 /* Similar to expand_builtin_strcat, do some very basic size validation
3820 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3821 the built-in expand to a call to the library function. */
3823 static rtx
3824 expand_builtin_strncat (tree exp, rtx)
3826 if (!validate_arglist (exp,
3827 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3828 || !warn_stringop_overflow)
3829 return NULL_RTX;
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree src = CALL_EXPR_ARG (exp, 1);
3833 /* The upper bound on the number of bytes to write. */
3834 tree maxlen = CALL_EXPR_ARG (exp, 2);
3835 /* The length of the source sequence. */
3836 tree slen = c_strlen (src, 1);
3838 /* Try to determine the range of lengths that the source expression
3839 refers to. */
3840 tree lenrange[2];
3841 if (slen)
3842 lenrange[0] = lenrange[1] = slen;
3843 else
3844 get_range_strlen (src, lenrange);
3846 /* Try to verify that the destination is big enough for the shortest
3847 string. First try to determine the size of the destination object
3848 into which the source is being copied. */
3849 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3851 /* Add one for the terminating nul. */
3852 tree srclen = (lenrange[0]
3853 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3854 size_one_node)
3855 : NULL_TREE);
3857 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3858 nul so the specified upper bound should never be equal to (or greater
3859 than) the size of the destination. */
3860 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3861 && tree_int_cst_equal (destsize, maxlen))
3863 location_t loc = tree_nonartificial_location (exp);
3864 loc = expansion_point_location_if_in_system_header (loc);
3866 warning_at (loc, OPT_Wstringop_overflow_,
3867 "%K%qD specified bound %E equals destination size",
3868 exp, get_callee_fndecl (exp), maxlen);
3870 return NULL_RTX;
3873 if (!srclen
3874 || (maxlen && tree_fits_uhwi_p (maxlen)
3875 && tree_fits_uhwi_p (srclen)
3876 && tree_int_cst_lt (maxlen, srclen)))
3877 srclen = maxlen;
3879 /* The number of bytes to write is LEN but check_sizes will also
3880 check SRCLEN if LEN's value isn't known. */
3881 check_sizes (OPT_Wstringop_overflow_,
3882 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3884 return NULL_RTX;
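/* Example of the diagnosed misuse (illustrative only): with

     char d[8];
     strncat (d, s, sizeof d);

   the bound 8 equals the destination size; since strncat copies up
   to MAXLEN bytes and then always appends the terminating nul, such
   a bound can overflow D, and -Wstringop-overflow reports
   "specified bound 8 equals destination size".  */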
3887 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3888 NULL_RTX if we failed; the caller should emit a normal call. */
3890 static rtx
3891 expand_builtin_strncpy (tree exp, rtx target)
3893 location_t loc = EXPR_LOCATION (exp);
3895 if (validate_arglist (exp,
3896 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 tree dest = CALL_EXPR_ARG (exp, 0);
3899 tree src = CALL_EXPR_ARG (exp, 1);
3900 /* The number of bytes to write (not the maximum). */
3901 tree len = CALL_EXPR_ARG (exp, 2);
3902 /* The length of the source sequence. */
3903 tree slen = c_strlen (src, 1);
3905 if (warn_stringop_overflow)
3907 tree destsize = compute_objsize (dest,
3908 warn_stringop_overflow - 1);
3910 /* The number of bytes to write is LEN but check_sizes will also
3911 check SLEN if LEN's value isn't known. */
3912 check_sizes (OPT_Wstringop_overflow_,
3913 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3916 /* We must be passed a constant len and src parameter. */
3917 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3918 return NULL_RTX;
3920 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3922 /* We're required to pad with trailing zeros if the requested
3923 len is greater than strlen(s2)+1. In that case try to
3924 use store_by_pieces; if it fails, punt. */
3925 if (tree_int_cst_lt (slen, len))
3927 unsigned int dest_align = get_pointer_alignment (dest);
3928 const char *p = c_getstr (src);
3929 rtx dest_mem;
3931 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3932 || !can_store_by_pieces (tree_to_uhwi (len),
3933 builtin_strncpy_read_str,
3934 CONST_CAST (char *, p),
3935 dest_align, false))
3936 return NULL_RTX;
3938 dest_mem = get_memory_rtx (dest, len);
3939 store_by_pieces (dest_mem, tree_to_uhwi (len),
3940 builtin_strncpy_read_str,
3941 CONST_CAST (char *, p), dest_align, false, 0);
3942 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3943 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3944 return dest_mem;
3947 return NULL_RTX;
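/* Illustration of the padding case above (example only):

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   must store 'a', 'b' and six nul bytes.  Because the source is a
   known constant shorter than LEN, builtin_strncpy_read_str supplies
   zero bytes past the end of the string and store_by_pieces can emit
   the whole 8-byte store inline.  */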
3950 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3951 bytes from constant string DATA + OFFSET and return it as target
3952 constant. */
3954 static rtx
3955 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3956 scalar_int_mode mode)
3958 const char *c = (const char *) data;
3959 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3961 memset (p, *c, GET_MODE_SIZE (mode));
3963 return c_readstr (p, mode);
3966 /* Callback routine for store_by_pieces. Return the RTL of a register
3967 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3968 char value given in the RTL register data. For example, if mode is
3969 4 bytes wide, return the RTL for 0x01010101*data. */
3971 static rtx
3972 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3973 scalar_int_mode mode)
3975 rtx target, coeff;
3976 size_t size;
3977 char *p;
3979 size = GET_MODE_SIZE (mode);
3980 if (size == 1)
3981 return (rtx) data;
3983 p = XALLOCAVEC (char, size);
3984 memset (p, 1, size);
3985 coeff = c_readstr (p, mode);
3987 target = convert_to_mode (mode, (rtx) data, 1);
3988 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3989 return force_reg (mode, target);
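/* Worked example of the multiplication trick above (illustrative):
   for a 4-byte mode the coefficient read from "\1\1\1\1" is
   0x01010101, so a byte value such as 0xAB is broadcast as

     0xAB * 0x01010101 == 0xABABABAB

   producing GET_MODE_SIZE (MODE) copies of the byte with a single
   multiply.  */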
3992 /* Expand expression EXP, which is a call to the memset builtin. Return
3993 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3994 try to get the result in TARGET, if convenient (and in mode MODE if that's
3995 convenient). */
3997 static rtx
3998 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4000 if (!validate_arglist (exp,
4001 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4002 return NULL_RTX;
4004 tree dest = CALL_EXPR_ARG (exp, 0);
4005 tree val = CALL_EXPR_ARG (exp, 1);
4006 tree len = CALL_EXPR_ARG (exp, 2);
4008 check_memop_sizes (exp, dest, NULL_TREE, len);
4010 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4013 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4014 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4015 try to get the result in TARGET, if convenient (and in mode MODE if that's
4016 convenient). */
4018 static rtx
4019 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4021 if (!validate_arglist (exp,
4022 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4023 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4024 return NULL_RTX;
4025 else
4027 tree dest = CALL_EXPR_ARG (exp, 0);
4028 tree val = CALL_EXPR_ARG (exp, 2);
4029 tree len = CALL_EXPR_ARG (exp, 3);
4030 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4032 /* Return src bounds with the result. */
4033 if (res)
4035 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4036 expand_normal (CALL_EXPR_ARG (exp, 1)));
4037 res = chkp_join_splitted_slot (res, bnd);
4039 return res;
4043 /* Helper function to do the actual work for expand_builtin_memset. The
4044 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4045 so that this can also be called without constructing an actual CALL_EXPR.
4046 The other arguments and return value are the same as for
4047 expand_builtin_memset. */
4049 static rtx
4050 expand_builtin_memset_args (tree dest, tree val, tree len,
4051 rtx target, machine_mode mode, tree orig_exp)
4053 tree fndecl, fn;
4054 enum built_in_function fcode;
4055 machine_mode val_mode;
4056 char c;
4057 unsigned int dest_align;
4058 rtx dest_mem, dest_addr, len_rtx;
4059 HOST_WIDE_INT expected_size = -1;
4060 unsigned int expected_align = 0;
4061 unsigned HOST_WIDE_INT min_size;
4062 unsigned HOST_WIDE_INT max_size;
4063 unsigned HOST_WIDE_INT probable_max_size;
4065 dest_align = get_pointer_alignment (dest);
4067 /* If DEST is not a pointer type, don't do this operation in-line. */
4068 if (dest_align == 0)
4069 return NULL_RTX;
4071 if (currently_expanding_gimple_stmt)
4072 stringop_block_profile (currently_expanding_gimple_stmt,
4073 &expected_align, &expected_size);
4075 if (expected_align < dest_align)
4076 expected_align = dest_align;
4078 /* If the LEN parameter is zero, return DEST. */
4079 if (integer_zerop (len))
4081 /* Evaluate and ignore VAL in case it has side-effects. */
4082 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4083 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4086 /* Stabilize the arguments in case we fail. */
4087 dest = builtin_save_expr (dest);
4088 val = builtin_save_expr (val);
4089 len = builtin_save_expr (len);
4091 len_rtx = expand_normal (len);
4092 determine_block_size (len, len_rtx, &min_size, &max_size,
4093 &probable_max_size);
4094 dest_mem = get_memory_rtx (dest, len);
4095 val_mode = TYPE_MODE (unsigned_char_type_node);
4097 if (TREE_CODE (val) != INTEGER_CST)
4099 rtx val_rtx;
4101 val_rtx = expand_normal (val);
4102 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4104 /* Assume that we can memset by pieces if we can store
4105 the coefficients by pieces (in the required modes).
4106 We can't pass builtin_memset_gen_str as that emits RTL. */
4107 c = 1;
4108 if (tree_fits_uhwi_p (len)
4109 && can_store_by_pieces (tree_to_uhwi (len),
4110 builtin_memset_read_str, &c, dest_align,
4111 true))
4113 val_rtx = force_reg (val_mode, val_rtx);
4114 store_by_pieces (dest_mem, tree_to_uhwi (len),
4115 builtin_memset_gen_str, val_rtx, dest_align,
4116 true, 0);
4118 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4119 dest_align, expected_align,
4120 expected_size, min_size, max_size,
4121 probable_max_size))
4122 goto do_libcall;
4124 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4125 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4126 return dest_mem;
4129 if (target_char_cast (val, &c))
4130 goto do_libcall;
4132 if (c)
4134 if (tree_fits_uhwi_p (len)
4135 && can_store_by_pieces (tree_to_uhwi (len),
4136 builtin_memset_read_str, &c, dest_align,
4137 true))
4138 store_by_pieces (dest_mem, tree_to_uhwi (len),
4139 builtin_memset_read_str, &c, dest_align, true, 0);
4140 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4141 gen_int_mode (c, val_mode),
4142 dest_align, expected_align,
4143 expected_size, min_size, max_size,
4144 probable_max_size))
4145 goto do_libcall;
4147 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4148 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4149 return dest_mem;
4152 set_mem_align (dest_mem, dest_align);
4153 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4154 CALL_EXPR_TAILCALL (orig_exp)
4155 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4156 expected_align, expected_size,
4157 min_size, max_size,
4158 probable_max_size);
4160 if (dest_addr == 0)
4162 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4163 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4166 return dest_addr;
4168 do_libcall:
4169 fndecl = get_callee_fndecl (orig_exp);
4170 fcode = DECL_FUNCTION_CODE (fndecl);
4171 if (fcode == BUILT_IN_MEMSET
4172 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4173 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4174 dest, val, len);
4175 else if (fcode == BUILT_IN_BZERO)
4176 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4177 dest, len);
4178 else
4179 gcc_unreachable ();
4180 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4181 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4182 return expand_call (fn, target, target == const0_rtx);
4185 /* Expand expression EXP, which is a call to the bzero builtin. Return
4186 NULL_RTX if we failed; the caller should emit a normal call. */
4188 static rtx
4189 expand_builtin_bzero (tree exp)
4191 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4192 return NULL_RTX;
4194 tree dest = CALL_EXPR_ARG (exp, 0);
4195 tree size = CALL_EXPR_ARG (exp, 1);
4197 check_memop_sizes (exp, dest, NULL_TREE, size);
4199 /* New argument list transforming bzero(ptr x, int y) to
4200 memset(ptr x, int 0, size_t y). This is done this way
4201 so that if it isn't expanded inline, we fall back to
4202 calling bzero instead of memset. */
4204 location_t loc = EXPR_LOCATION (exp);
4206 return expand_builtin_memset_args (dest, integer_zero_node,
4207 fold_convert_loc (loc,
4208 size_type_node, size),
4209 const0_rtx, VOIDmode, exp);
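/* Example of the argument rewrite above (illustrative only):

     bzero (p, n);

   is expanded as

     memset (p, 0, (size_t) n);

   but ORIG_EXP remains the bzero call, so if inline expansion fails
   the libcall path emits a call to bzero, not to memset.  */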
4212 /* Try to expand cmpstr operation ICODE with the given operands.
4213 Return the result rtx on success, otherwise return null. */
4215 static rtx
4216 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4217 HOST_WIDE_INT align)
4219 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4221 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4222 target = NULL_RTX;
4224 struct expand_operand ops[4];
4225 create_output_operand (&ops[0], target, insn_mode);
4226 create_fixed_operand (&ops[1], arg1_rtx);
4227 create_fixed_operand (&ops[2], arg2_rtx);
4228 create_integer_operand (&ops[3], align);
4229 if (maybe_expand_insn (icode, 4, ops))
4230 return ops[0].value;
4231 return NULL_RTX;
4234 /* Expand expression EXP, which is a call to the memcmp built-in function.
4235 Return NULL_RTX if we failed and the caller should emit a normal call,
4236 otherwise try to get the result in TARGET, if convenient.
4237 RESULT_EQ is true if we can relax the returned value to be either zero
4238 or nonzero, without caring about the sign. */
4240 static rtx
4241 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4243 if (!validate_arglist (exp,
4244 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4245 return NULL_RTX;
4247 tree arg1 = CALL_EXPR_ARG (exp, 0);
4248 tree arg2 = CALL_EXPR_ARG (exp, 1);
4249 tree len = CALL_EXPR_ARG (exp, 2);
4251 /* Diagnose calls where the specified length exceeds the size of either
4252 object. */
4253 if (warn_stringop_overflow)
4255 tree size = compute_objsize (arg1, 0);
4256 if (check_sizes (OPT_Wstringop_overflow_,
4257 exp, len, /*maxlen=*/NULL_TREE,
4258 size, /*objsize=*/NULL_TREE))
4260 size = compute_objsize (arg2, 0);
4261 check_sizes (OPT_Wstringop_overflow_,
4262 exp, len, /*maxlen=*/NULL_TREE,
4263 size, /*objsize=*/NULL_TREE);
4267 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4268 location_t loc = EXPR_LOCATION (exp);
4270 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4271 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4273 /* If we don't have POINTER_TYPE, call the function. */
4274 if (arg1_align == 0 || arg2_align == 0)
4275 return NULL_RTX;
4277 rtx arg1_rtx = get_memory_rtx (arg1, len);
4278 rtx arg2_rtx = get_memory_rtx (arg2, len);
4279 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4281 /* Set MEM_SIZE as appropriate. */
4282 if (CONST_INT_P (len_rtx))
4284 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4285 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4288 by_pieces_constfn constfn = NULL;
4290 const char *src_str = c_getstr (arg2);
4291 if (result_eq && src_str == NULL)
4293 src_str = c_getstr (arg1);
4294 if (src_str != NULL)
4295 std::swap (arg1_rtx, arg2_rtx);
4298 /* If SRC is a string constant and the block comparison would be
4299 done by pieces, we can avoid loading the string from memory
4300 and only store the computed constants. */
4301 if (src_str
4302 && CONST_INT_P (len_rtx)
4303 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4304 constfn = builtin_memcpy_read_str;
4306 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4307 TREE_TYPE (len), target,
4308 result_eq, constfn,
4309 CONST_CAST (char *, src_str));
4311 if (result)
4313 /* Return the value in the proper mode for this function. */
4314 if (GET_MODE (result) == mode)
4315 return result;
4317 if (target != 0)
4319 convert_move (target, result, 0);
4320 return target;
4323 return convert_to_mode (mode, result, 0);
4326 return NULL_RTX;
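/* Usage note (an illustrative example): RESULT_EQ is true for calls
   such as

     if (memcmp (a, b, n) == 0)

   where only equality is tested; the expansion may then return any
   nonzero value on mismatch instead of the ordered negative/zero/
   positive result, which allows cheaper block-compare sequences.  */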
4329 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4330 if we failed; the caller should emit a normal call, otherwise try to get
4331 the result in TARGET, if convenient. */
4333 static rtx
4334 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4336 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4337 return NULL_RTX;
4339 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4340 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4341 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4343 rtx arg1_rtx, arg2_rtx;
4344 tree fndecl, fn;
4345 tree arg1 = CALL_EXPR_ARG (exp, 0);
4346 tree arg2 = CALL_EXPR_ARG (exp, 1);
4347 rtx result = NULL_RTX;
4349 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4350 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4352 /* If we don't have POINTER_TYPE, call the function. */
4353 if (arg1_align == 0 || arg2_align == 0)
4354 return NULL_RTX;
4356 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4357 arg1 = builtin_save_expr (arg1);
4358 arg2 = builtin_save_expr (arg2);
4360 arg1_rtx = get_memory_rtx (arg1, NULL);
4361 arg2_rtx = get_memory_rtx (arg2, NULL);
4363 /* Try to call cmpstrsi. */
4364 if (cmpstr_icode != CODE_FOR_nothing)
4365 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4366 MIN (arg1_align, arg2_align));
4368 /* Try to determine at least one length and call cmpstrnsi. */
4369 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4371 tree len;
4372 rtx arg3_rtx;
4374 tree len1 = c_strlen (arg1, 1);
4375 tree len2 = c_strlen (arg2, 1);
4377 if (len1)
4378 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4379 if (len2)
4380 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4382 /* If we don't have a constant length for the first, use the length
4383 of the second, if we know it. We don't require a constant for
4384 this case; some cost analysis could be done if both are available
4385 but neither is constant. For now, assume they're equally cheap,
4386 unless one has side effects. If both strings have constant lengths,
4387 use the smaller. */
4389 if (!len1)
4390 len = len2;
4391 else if (!len2)
4392 len = len1;
4393 else if (TREE_SIDE_EFFECTS (len1))
4394 len = len2;
4395 else if (TREE_SIDE_EFFECTS (len2))
4396 len = len1;
4397 else if (TREE_CODE (len1) != INTEGER_CST)
4398 len = len2;
4399 else if (TREE_CODE (len2) != INTEGER_CST)
4400 len = len1;
4401 else if (tree_int_cst_lt (len1, len2))
4402 len = len1;
4403 else
4404 len = len2;
4406 /* If both arguments have side effects, we cannot optimize. */
4407 if (len && !TREE_SIDE_EFFECTS (len))
4409 arg3_rtx = expand_normal (len);
4410 result = expand_cmpstrn_or_cmpmem
4411 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4412 arg3_rtx, MIN (arg1_align, arg2_align));
4416 if (result)
4418 /* Return the value in the proper mode for this function. */
4419 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4420 if (GET_MODE (result) == mode)
4421 return result;
4422 if (target == 0)
4423 return convert_to_mode (mode, result, 0);
4424 convert_move (target, result, 0);
4425 return target;
4428 /* Expand the library call ourselves using a stabilized argument
4429 list to avoid evaluating the function's arguments twice. */
4430 fndecl = get_callee_fndecl (exp);
4431 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4432 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4433 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4434 return expand_call (fn, target, target == const0_rtx);
4436 return NULL_RTX;
4439 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4440 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4441 the result in TARGET, if convenient. */
4443 static rtx
4444 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4445 ATTRIBUTE_UNUSED machine_mode mode)
4447 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4449 if (!validate_arglist (exp,
4450 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4451 return NULL_RTX;
4453 /* If c_strlen can determine an expression for one of the string
4454 lengths, and it doesn't have side effects, then emit cmpstrnsi
4455 using length MIN(strlen(string)+1, arg3). */
4456 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4457 if (cmpstrn_icode != CODE_FOR_nothing)
4459 tree len, len1, len2, len3;
4460 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4461 rtx result;
4462 tree fndecl, fn;
4463 tree arg1 = CALL_EXPR_ARG (exp, 0);
4464 tree arg2 = CALL_EXPR_ARG (exp, 1);
4465 tree arg3 = CALL_EXPR_ARG (exp, 2);
4467 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4468 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4470 len1 = c_strlen (arg1, 1);
4471 len2 = c_strlen (arg2, 1);
4473 if (len1)
4474 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4475 if (len2)
4476 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4478 len3 = fold_convert_loc (loc, sizetype, arg3);
4480 /* If we don't have a constant length for the first, use the length
4481 of the second, if we know it. If neither string is constant length,
4482 use the given length argument. We don't require a constant for
4483 this case; some cost analysis could be done if both are available
4484 but neither is constant. For now, assume they're equally cheap,
4485 unless one has side effects. If both strings have constant lengths,
4486 use the smaller. */
4488 if (!len1 && !len2)
4489 len = len3;
4490 else if (!len1)
4491 len = len2;
4492 else if (!len2)
4493 len = len1;
4494 else if (TREE_SIDE_EFFECTS (len1))
4495 len = len2;
4496 else if (TREE_SIDE_EFFECTS (len2))
4497 len = len1;
4498 else if (TREE_CODE (len1) != INTEGER_CST)
4499 len = len2;
4500 else if (TREE_CODE (len2) != INTEGER_CST)
4501 len = len1;
4502 else if (tree_int_cst_lt (len1, len2))
4503 len = len1;
4504 else
4505 len = len2;
4507 /* If we are not using the given length, we must incorporate it here.
4508 The actual new length parameter will be MIN(len,arg3) in this case. */
4509 if (len != len3)
4510 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4511 arg1_rtx = get_memory_rtx (arg1, len);
4512 arg2_rtx = get_memory_rtx (arg2, len);
4513 arg3_rtx = expand_normal (len);
4514 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4515 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4516 MIN (arg1_align, arg2_align));
4517 if (result)
4519 /* Return the value in the proper mode for this function. */
4520 mode = TYPE_MODE (TREE_TYPE (exp));
4521 if (GET_MODE (result) == mode)
4522 return result;
4523 if (target == 0)
4524 return convert_to_mode (mode, result, 0);
4525 convert_move (target, result, 0);
4526 return target;
4529 /* Expand the library call ourselves using a stabilized argument
4530 list to avoid evaluating the function's arguments twice. */
4531 fndecl = get_callee_fndecl (exp);
4532 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4533 arg1, arg2, len);
4534 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4535 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4536 return expand_call (fn, target, target == const0_rtx);
4538 return NULL_RTX;
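/* Example of the length selection above (an illustrative sketch):
   for

     strncmp (s, "hello", 100)

   LEN2 is strlen ("hello") + 1 == 6, so cmpstrnsi is emitted with
   length MIN (6, 100) == 6; the bound 100 never has to be scanned
   because any difference is found at or before the nul.  */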
4541 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4542 if that's convenient. */
4544 rtx
4545 expand_builtin_saveregs (void)
4547 rtx val;
4548 rtx_insn *seq;
4550 /* Don't do __builtin_saveregs more than once in a function.
4551 Save the result of the first call and reuse it. */
4552 if (saveregs_value != 0)
4553 return saveregs_value;
4555 /* When this function is called, it means that registers must be
4556 saved on entry to this function. So we migrate the call to the
4557 first insn of this function. */
4559 start_sequence ();
4561 /* Do whatever the machine needs done in this case. */
4562 val = targetm.calls.expand_builtin_saveregs ();
4564 seq = get_insns ();
4565 end_sequence ();
4567 saveregs_value = val;
4569 /* Put the insns after the NOTE that starts the function. If this
4570 is inside a start_sequence, make the outer-level insn chain current, so
4571 the code is placed at the start of the function. */
4572 push_topmost_sequence ();
4573 emit_insn_after (seq, entry_of_function ());
4574 pop_topmost_sequence ();
4576 return val;
4579 /* Expand a call to __builtin_next_arg. */
4581 static rtx
4582 expand_builtin_next_arg (void)
4584 /* Checking arguments is already done in fold_builtin_next_arg
4585 that must be called before this function. */
4586 return expand_binop (ptr_mode, add_optab,
4587 crtl->args.internal_arg_pointer,
4588 crtl->args.arg_offset_rtx,
4589 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4592 /* Make it easier for the backends by protecting the valist argument
4593 from multiple evaluations. */
4595 static tree
4596 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4598 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4600 /* The current way of determining the type of valist is completely
4601 bogus. We should have the information on the va builtin instead. */
4602 if (!vatype)
4603 vatype = targetm.fn_abi_va_list (cfun->decl);
4605 if (TREE_CODE (vatype) == ARRAY_TYPE)
4607 if (TREE_SIDE_EFFECTS (valist))
4608 valist = save_expr (valist);
4610 /* For this case, the backends will be expecting a pointer to
4611 vatype, but it's possible we've actually been given an array
4612 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4613 So fix it. */
4614 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4616 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4617 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4620 else
4622 tree pt = build_pointer_type (vatype);
4624 if (! needs_lvalue)
4626 if (! TREE_SIDE_EFFECTS (valist))
4627 return valist;
4629 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4630 TREE_SIDE_EFFECTS (valist) = 1;
4633 if (TREE_SIDE_EFFECTS (valist))
4634 valist = save_expr (valist);
4635 valist = fold_build2_loc (loc, MEM_REF,
4636 vatype, valist, build_int_cst (pt, 0));
4639 return valist;
4642 /* The "standard" definition of va_list is void*. */
4644 tree
4645 std_build_builtin_va_list (void)
4647 return ptr_type_node;
4650 /* The "standard" abi va_list is va_list_type_node. */
4652 tree
4653 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4655 return va_list_type_node;
4658 /* The "standard" type of va_list is va_list_type_node. */
4660 tree
4661 std_canonical_va_list_type (tree type)
4663 tree wtype, htype;
4665 wtype = va_list_type_node;
4666 htype = type;
4668 if (TREE_CODE (wtype) == ARRAY_TYPE)
4670 /* If va_list is an array type, the argument may have decayed
4671 to a pointer type, e.g. by being passed to another function.
4672 In that case, unwrap both types so that we can compare the
4673 underlying records. */
4674 if (TREE_CODE (htype) == ARRAY_TYPE
4675 || POINTER_TYPE_P (htype))
4677 wtype = TREE_TYPE (wtype);
4678 htype = TREE_TYPE (htype);
4681 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4682 return va_list_type_node;
4684 return NULL_TREE;
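/* Example of the decay handled above (illustrative): a parameter

     void f (va_list ap);

   on a target whose va_list is the array type __va_list_tag[1] is
   seen here with the decayed type __va_list_tag *, so both WTYPE
   and HTYPE are unwrapped one level before their main variants are
   compared.  */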
4687 /* The "standard" implementation of va_start: just assign `nextarg' to
4688 the variable. */
4690 void
4691 std_expand_builtin_va_start (tree valist, rtx nextarg)
4693 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4694 convert_move (va_r, nextarg, 0);
4696 /* We do not have any valid bounds for the pointer, so
4697 just store zero bounds for it. */
4698 if (chkp_function_instrumented_p (current_function_decl))
4699 chkp_expand_bounds_reset_for_mem (valist,
4700 make_tree (TREE_TYPE (valist),
4701 nextarg));
4704 /* Expand EXP, a call to __builtin_va_start. */
4706 static rtx
4707 expand_builtin_va_start (tree exp)
4709 rtx nextarg;
4710 tree valist;
4711 location_t loc = EXPR_LOCATION (exp);
4713 if (call_expr_nargs (exp) < 2)
4715 error_at (loc, "too few arguments to function %<va_start%>");
4716 return const0_rtx;
4719 if (fold_builtin_next_arg (exp, true))
4720 return const0_rtx;
4722 nextarg = expand_builtin_next_arg ();
4723 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4725 if (targetm.expand_builtin_va_start)
4726 targetm.expand_builtin_va_start (valist, nextarg);
4727 else
4728 std_expand_builtin_va_start (valist, nextarg);
4730 return const0_rtx;
4733 /* Expand EXP, a call to __builtin_va_end. */
4735 static rtx
4736 expand_builtin_va_end (tree exp)
4738 tree valist = CALL_EXPR_ARG (exp, 0);
4740 /* Evaluate for side effects, if needed. I hate macros that don't
4741 do that. */
4742 if (TREE_SIDE_EFFECTS (valist))
4743 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4745 return const0_rtx;
4748 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4749 builtin rather than just as an assignment in stdarg.h because of the
4750 nastiness of array-type va_list types. */
4752 static rtx
4753 expand_builtin_va_copy (tree exp)
4755 tree dst, src, t;
4756 location_t loc = EXPR_LOCATION (exp);
4758 dst = CALL_EXPR_ARG (exp, 0);
4759 src = CALL_EXPR_ARG (exp, 1);
4761 dst = stabilize_va_list_loc (loc, dst, 1);
4762 src = stabilize_va_list_loc (loc, src, 0);
4764 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4766 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4768 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4769 TREE_SIDE_EFFECTS (t) = 1;
4770 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4772 else
4774 rtx dstb, srcb, size;
4776 /* Evaluate to pointers. */
4777 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4778 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4779 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4780 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4782 dstb = convert_memory_address (Pmode, dstb);
4783 srcb = convert_memory_address (Pmode, srcb);
4785 /* "Dereference" to BLKmode memories. */
4786 dstb = gen_rtx_MEM (BLKmode, dstb);
4787 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4788 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4789 srcb = gen_rtx_MEM (BLKmode, srcb);
4790 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4791 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4793 /* Copy. */
4794 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4797 return const0_rtx;
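/* Illustration (example, not source): on targets such as x86_64,
   va_list is the array type __va_list_tag[1], so the else branch
   above copies the whole record with a block move; on targets where
   va_list is a plain pointer, the first branch reduces va_copy to
   the assignment DST = SRC.  */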
4800 /* Expand a call to one of the builtin functions __builtin_frame_address or
4801 __builtin_return_address. */
4803 static rtx
4804 expand_builtin_frame_address (tree fndecl, tree exp)
4806 /* The argument must be a nonnegative integer constant.
4807 It counts the number of frames to scan up the stack.
4808 The value is either the frame pointer value or the return
4809 address saved in that frame. */
4810 if (call_expr_nargs (exp) == 0)
4811 /* Warning about missing arg was already issued. */
4812 return const0_rtx;
4813 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4815 error ("invalid argument to %qD", fndecl);
4816 return const0_rtx;
4818 else
4820 /* Number of frames to scan up the stack. */
4821 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4823 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4825 /* Some ports cannot access arbitrary stack frames. */
4826 if (tem == NULL)
4828 warning (0, "unsupported argument to %qD", fndecl);
4829 return const0_rtx;
4832 if (count)
4834 /* Warn since no effort is made to ensure that any frame
4835 beyond the current one exists or can be safely reached. */
4836 warning (OPT_Wframe_address, "calling %qD with "
4837 "a nonzero argument is unsafe", fndecl);
4840 /* For __builtin_frame_address, return what we've got. */
4841 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4842 return tem;
4844 if (!REG_P (tem)
4845 && ! CONSTANT_P (tem))
4846 tem = copy_addr_to_reg (tem);
4847 return tem;
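/* Usage example (illustrative): __builtin_return_address (0) yields
   the return address of the current frame, while a nonzero count
   such as __builtin_return_address (2) walks up the stack and
   triggers the -Wframe-address warning above, since nothing
   guarantees that outer frames exist or can be reached safely.  */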
4851 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4852 failed and the caller should emit a normal call. */
4854 static rtx
4855 expand_builtin_alloca (tree exp)
4857 rtx op0;
4858 rtx result;
4859 unsigned int align;
4860 tree fndecl = get_callee_fndecl (exp);
4861 HOST_WIDE_INT max_size;
4862 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4863 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4864 bool valid_arglist
4865 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4866 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4867 VOID_TYPE)
4868 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4869 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4870 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4872 if (!valid_arglist)
4873 return NULL_RTX;
4875 if ((alloca_for_var && !warn_vla_limit)
4876 || (!alloca_for_var && !warn_alloca_limit))
4878 /* -Walloca-larger-than and -Wvla-larger-than settings override
4879 the more general -Walloc-size-larger-than so unless either of
4880 the former options is specified check the alloca arguments for
4881 overflow. */
4882 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4883 int idx[] = { 0, -1 };
4884 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4887 /* Compute the argument. */
4888 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4890 /* Compute the alignment. */
4891 align = (fcode == BUILT_IN_ALLOCA
4892 ? BIGGEST_ALIGNMENT
4893 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4895 /* Compute the maximum size. */
4896 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4897 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4898 : -1);
4900 /* Allocate the desired space. If the allocation stems from the declaration
4901 of a variable-sized object, it cannot accumulate. */
4902 result
4903 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4904 result = convert_memory_address (ptr_mode, result);
4906 return result;
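/* The three accepted forms, for illustration (the alignment argument
   is in bits, since it is used directly as ALIGN above):

     __builtin_alloca (size)
     __builtin_alloca_with_align (size, align)
     __builtin_alloca_with_align_and_max (size, align, max)

   e.g. __builtin_alloca_with_align (n, 256) requests N bytes aligned
   to 32 bytes, with no maximum size (MAX_SIZE == -1).  */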
4909 /* Emit the call to __asan_allocas_unpoison in EXP. Replace the second
4910 argument of the call with virtual_stack_dynamic_rtx: the asan pass
4911 emits a dummy value for that parameter and relies on this function
4912 to substitute the real one. See the motivation in the comment to
4913 the handle_builtin_stack_restore function. */
4915 static rtx
4916 expand_asan_emit_allocas_unpoison (tree exp)
4918 tree arg0 = CALL_EXPR_ARG (exp, 0);
4919 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4920 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
4921 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4922 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4923 top, ptr_mode, bot, ptr_mode);
4924 return ret;
4927 /* Expand a call to bswap builtin in EXP.
4928 Return NULL_RTX if a normal call should be emitted rather than expanding the
4929 function in-line. If convenient, the result should be placed in TARGET.
4930 SUBTARGET may be used as the target for computing one of EXP's operands. */
4932 static rtx
4933 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4934 rtx subtarget)
4936 tree arg;
4937 rtx op0;
4939 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4940 return NULL_RTX;
4942 arg = CALL_EXPR_ARG (exp, 0);
4943 op0 = expand_expr (arg,
4944 subtarget && GET_MODE (subtarget) == target_mode
4945 ? subtarget : NULL_RTX,
4946 target_mode, EXPAND_NORMAL);
4947 if (GET_MODE (op0) != target_mode)
4948 op0 = convert_to_mode (target_mode, op0, 1);
4950 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4952 gcc_assert (target);
4954 return convert_to_mode (target_mode, target, 1);
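/* Semantics illustration (example, not source):

     __builtin_bswap32 (0x12345678) == 0x78563412

   i.e. the bswap_optab expansion reverses the byte order of the
   integer argument.  */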
4957 /* Expand a call to a unary builtin in EXP.
4958 Return NULL_RTX if a normal call should be emitted rather than expanding the
4959 function in-line. If convenient, the result should be placed in TARGET.
4960 SUBTARGET may be used as the target for computing one of EXP's operands. */
4962 static rtx
4963 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4964 rtx subtarget, optab op_optab)
4966 rtx op0;
4968 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4969 return NULL_RTX;
4971 /* Compute the argument. */
4972 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4973 (subtarget
4974 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4975 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4976 VOIDmode, EXPAND_NORMAL);
4977 /* Compute op, into TARGET if possible.
4978 Set TARGET to wherever the result comes back. */
4979 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4980 op_optab, op0, target, op_optab != clrsb_optab);
4981 gcc_assert (target);
4983 return convert_to_mode (target_mode, target, 0);
4986 /* Expand a call to __builtin_expect. We just return our argument
4987 as the builtin_expect semantics should already have been handled by
4988 the tree branch prediction pass. */
4990 static rtx
4991 expand_builtin_expect (tree exp, rtx target)
4993 tree arg;
4995 if (call_expr_nargs (exp) < 2)
4996 return const0_rtx;
4997 arg = CALL_EXPR_ARG (exp, 0);
4999 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5000 /* When guessing was done, the hints should be already stripped away. */
5001 gcc_assert (!flag_guess_branch_prob
5002 || optimize == 0 || seen_error ());
5003 return target;
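/* Typical use (illustrative):

     if (__builtin_expect (x == 0, 0))
       rare_path ();

   The probability hint was consumed earlier by the tree branch
   prediction pass; by the time we expand here only the bare value
   of the first argument remains to be returned.  */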
5006 /* Expand a call to __builtin_assume_aligned. We just return our first
5007 argument as the builtin_assume_aligned semantics should already have
5008 been handled by CCP. */
5010 static rtx
5011 expand_builtin_assume_aligned (tree exp, rtx target)
5013 if (call_expr_nargs (exp) < 2)
5014 return const0_rtx;
5015 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5016 EXPAND_NORMAL);
5017 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5018 && (call_expr_nargs (exp) < 3
5019 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5020 return target;
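/* Typical use (illustrative):

     p = __builtin_assume_aligned (p, 64);

   CCP has already propagated the alignment into the pointer's SSA
   info, so expansion simply returns the first argument; the assert
   above only checks that the discarded alignment operands have no
   side effects.  */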
5023 void
5024 expand_builtin_trap (void)
5026 if (targetm.have_trap ())
5028 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5029 /* For trap insns when not accumulating outgoing args force
5030 REG_ARGS_SIZE note to prevent crossjumping of calls with
5031 different args sizes. */
5032 if (!ACCUMULATE_OUTGOING_ARGS)
5033 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5035 else
5037 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5038 tree call_expr = build_call_expr (fn, 0);
5039 expand_call (call_expr, NULL_RTX, false);
5042 emit_barrier ();
5045 /* Expand a call to __builtin_unreachable. We do nothing except emit
5046 a barrier saying that control flow will not pass here.
5048 It is the responsibility of the program being compiled to ensure
5049 that control flow never reaches __builtin_unreachable. */
5050 static void
5051 expand_builtin_unreachable (void)
5053 emit_barrier ();
5056 /* Expand EXP, a call to fabs, fabsf or fabsl.
5057 Return NULL_RTX if a normal call should be emitted rather than expanding
5058 the function inline. If convenient, the result should be placed
5059 in TARGET. SUBTARGET may be used as the target for computing
5060 the operand. */
5062 static rtx
5063 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5065 machine_mode mode;
5066 tree arg;
5067 rtx op0;
5069 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5070 return NULL_RTX;
5072 arg = CALL_EXPR_ARG (exp, 0);
5073 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5074 mode = TYPE_MODE (TREE_TYPE (arg));
5075 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5076 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5079 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5080 Return NULL if a normal call should be emitted rather than expanding the
5081 function inline. If convenient, the result should be placed in TARGET.
5082 SUBTARGET may be used as the target for computing the operand. */
5084 static rtx
5085 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5087 rtx op0, op1;
5088 tree arg;
5090 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5091 return NULL_RTX;
5093 arg = CALL_EXPR_ARG (exp, 0);
5094 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5096 arg = CALL_EXPR_ARG (exp, 1);
5097 op1 = expand_normal (arg);
5099 return expand_copysign (op0, op1, target);
5102 /* Expand a call to __builtin___clear_cache. */
5104 static rtx
5105 expand_builtin___clear_cache (tree exp)
5107 if (!targetm.code_for_clear_cache)
5109 #ifdef CLEAR_INSN_CACHE
5110 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5111 does something. Just do the default expansion to a call to
5112 __clear_cache(). */
5113 return NULL_RTX;
5114 #else
5115 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5116 does nothing. There is no need to call it. Do nothing. */
5117 return const0_rtx;
5118 #endif /* CLEAR_INSN_CACHE */
5121 /* We have a "clear_cache" insn, and it will handle everything. */
5122 tree begin, end;
5123 rtx begin_rtx, end_rtx;
5125 /* We must not expand to a library call. If we did, any
5126 fallback library function in libgcc that might contain a call to
5127 __builtin___clear_cache() would recurse infinitely. */
5128 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5130 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5131 return const0_rtx;
5134 if (targetm.have_clear_cache ())
5136 struct expand_operand ops[2];
5138 begin = CALL_EXPR_ARG (exp, 0);
5139 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5141 end = CALL_EXPR_ARG (exp, 1);
5142 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5144 create_address_operand (&ops[0], begin_rtx);
5145 create_address_operand (&ops[1], end_rtx);
5146 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5147 return const0_rtx;
5149 return const0_rtx;
5152 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5154 static rtx
5155 round_trampoline_addr (rtx tramp)
5157 rtx temp, addend, mask;
5159 /* If we don't need too much alignment, we'll have been guaranteed
5160 proper alignment by get_trampoline_type. */
5161 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5162 return tramp;
5164 /* Round address up to desired boundary. */
5165 temp = gen_reg_rtx (Pmode);
5166 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5167 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5169 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5170 temp, 0, OPTAB_LIB_WIDEN);
5171 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5172 temp, 0, OPTAB_LIB_WIDEN);
5174 return tramp;
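/* The rounding above is the usual align-up sequence; as a formula
   (illustrative, with A = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT):

     tramp = (tramp + (A - 1)) & -A;

   where ADDEND supplies A - 1 and MASK supplies -A.  */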
5177 static rtx
5178 expand_builtin_init_trampoline (tree exp, bool onstack)
5180 tree t_tramp, t_func, t_chain;
5181 rtx m_tramp, r_tramp, r_chain, tmp;
5183 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5184 POINTER_TYPE, VOID_TYPE))
5185 return NULL_RTX;
5187 t_tramp = CALL_EXPR_ARG (exp, 0);
5188 t_func = CALL_EXPR_ARG (exp, 1);
5189 t_chain = CALL_EXPR_ARG (exp, 2);
5191 r_tramp = expand_normal (t_tramp);
5192 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5193 MEM_NOTRAP_P (m_tramp) = 1;
5195 /* If ONSTACK, the TRAMP argument should be the address of a field
5196 within the local function's FRAME decl. Either way, let's see if
5197 we can fill in the MEM_ATTRs for this memory. */
5198 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5199 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5201 /* Creator of a heap trampoline is responsible for making sure the
5202 address is aligned to at least STACK_BOUNDARY. Normally malloc
5203 will ensure this anyhow. */
5204 tmp = round_trampoline_addr (r_tramp);
5205 if (tmp != r_tramp)
5207 m_tramp = change_address (m_tramp, BLKmode, tmp);
5208 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5209 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5212 /* The FUNC argument should be the address of the nested function.
5213 Extract the actual function decl to pass to the hook. */
5214 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5215 t_func = TREE_OPERAND (t_func, 0);
5216 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5218 r_chain = expand_normal (t_chain);
5220 /* Generate insns to initialize the trampoline. */
5221 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5223 if (onstack)
5225 trampolines_created = 1;
5227 if (targetm.calls.custom_function_descriptors != 0)
5228 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5229 "trampoline generated for nested function %qD", t_func);
5232 return const0_rtx;
5235 static rtx
5236 expand_builtin_adjust_trampoline (tree exp)
5238 rtx tramp;
5240 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5241 return NULL_RTX;
5243 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5244 tramp = round_trampoline_addr (tramp);
5245 if (targetm.calls.trampoline_adjust_address)
5246 tramp = targetm.calls.trampoline_adjust_address (tramp);
5248 return tramp;
5251 /* Expand a call to the builtin descriptor initialization routine.
5252 A descriptor is made up of a couple of pointers to the static
5253 chain and the code entry in this order. */
5255 static rtx
5256 expand_builtin_init_descriptor (tree exp)
5258 tree t_descr, t_func, t_chain;
5259 rtx m_descr, r_descr, r_func, r_chain;
5261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5262 VOID_TYPE))
5263 return NULL_RTX;
5265 t_descr = CALL_EXPR_ARG (exp, 0);
5266 t_func = CALL_EXPR_ARG (exp, 1);
5267 t_chain = CALL_EXPR_ARG (exp, 2);
5269 r_descr = expand_normal (t_descr);
5270 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5271 MEM_NOTRAP_P (m_descr) = 1;
5273 r_func = expand_normal (t_func);
5274 r_chain = expand_normal (t_chain);
5276 /* Generate insns to initialize the descriptor. */
5277 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5278 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5279 POINTER_SIZE / BITS_PER_UNIT), r_func);
5281 return const0_rtx;
5284 /* Expand a call to the builtin descriptor adjustment routine. */
5286 static rtx
5287 expand_builtin_adjust_descriptor (tree exp)
5289 rtx tramp;
5291 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5292 return NULL_RTX;
5294 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5296 /* Unalign the descriptor to allow runtime identification. */
5297 tramp = plus_constant (ptr_mode, tramp,
5298 targetm.calls.custom_function_descriptors);
5300 return force_operand (tramp, NULL_RTX);
5303 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5304 function. The function first checks whether the back end provides
5305 an insn to implement signbit for the respective mode. If not, it
5306 checks whether the floating point format of the value is such that
5307 the sign bit can be extracted. If not, implement signbit as "ARG < 0.0".
5308 EXP is the expression that is a call to the builtin function; if
5309 convenient, the result should be placed in TARGET. */
5310 static rtx
5311 expand_builtin_signbit (tree exp, rtx target)
5313 const struct real_format *fmt;
5314 scalar_float_mode fmode;
5315 scalar_int_mode rmode, imode;
5316 tree arg;
5317 int word, bitpos;
5318 enum insn_code icode;
5319 rtx temp;
5320 location_t loc = EXPR_LOCATION (exp);
5322 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5323 return NULL_RTX;
5325 arg = CALL_EXPR_ARG (exp, 0);
5326 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5327 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5328 fmt = REAL_MODE_FORMAT (fmode);
5330 arg = builtin_save_expr (arg);
5333 /* Expand the argument yielding an RTX expression. */
5333 temp = expand_normal (arg);
5335 /* Check if the back end provides an insn that handles signbit for the
5336 argument's mode. */
5337 icode = optab_handler (signbit_optab, fmode);
5338 if (icode != CODE_FOR_nothing)
5340 rtx_insn *last = get_last_insn ();
5341 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5342 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5343 return target;
5344 delete_insns_since (last);
5347 /* For floating point formats without a sign bit, implement signbit
5348 as "ARG < 0.0". */
5349 bitpos = fmt->signbit_ro;
5350 if (bitpos < 0)
5352 /* But we can't do this if the format supports signed zero. */
5353 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5355 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5356 build_real (TREE_TYPE (arg), dconst0));
5357 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5360 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5362 imode = int_mode_for_mode (fmode).require ();
5363 temp = gen_lowpart (imode, temp);
5365 else
5367 imode = word_mode;
5368 /* Handle targets with different FP word orders. */
5369 if (FLOAT_WORDS_BIG_ENDIAN)
5370 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5371 else
5372 word = bitpos / BITS_PER_WORD;
5373 temp = operand_subword_force (temp, word, fmode);
5374 bitpos = bitpos % BITS_PER_WORD;
5377 /* Force the intermediate word_mode (or narrower) result into a
5378 register. This avoids attempting to create paradoxical SUBREGs
5379 of floating point modes below. */
5380 temp = force_reg (imode, temp);
5382 /* If the bitpos is within the "result mode" lowpart, the operation
5383 can be implemented with a single bitwise AND. Otherwise, we need
5384 a right shift and an AND. */
5386 if (bitpos < GET_MODE_BITSIZE (rmode))
5388 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5390 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5391 temp = gen_lowpart (rmode, temp);
5392 temp = expand_binop (rmode, and_optab, temp,
5393 immed_wide_int_const (mask, rmode),
5394 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5396 else
5398 /* Perform a logical right shift to place the signbit in the least
5399 significant bit, then truncate the result to the desired mode
5400 and mask just this bit. */
5401 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5402 temp = gen_lowpart (rmode, temp);
5403 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5404 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5407 return temp;
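/* Worked example (illustrative sketch, not from the original source):
   for IEEE double on a 64-bit little-endian target, fmt->signbit_ro
   is 63. The value fits in one word, so IMODE is DImode and, since
   bit 63 lies outside a 32-bit result mode, the shift-and-mask path
   is taken, computing roughly

       signbit (x)  ==>  (int) ((x_bits >> 63) & 1)

   where x_bits stands for the bit representation of X. On a 32-bit
   target the high word is selected first, leaving bitpos 31, and the
   single-AND path applies instead.  */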
5410 /* Expand fork or exec calls. TARGET is the desired target of the
5411 call. EXP is the call. FN is the identifier
5412 of the actual function. IGNORE is nonzero if the
5413 value is to be ignored. */
5415 static rtx
5416 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5418 tree id, decl;
5419 tree call;
5421 /* If we are not profiling, just call the function. */
5422 if (!profile_arc_flag)
5423 return NULL_RTX;
5425 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5426 compiler, so the code does not diverge, and the wrapper may run the
5427 code necessary for keeping the profiling sane. */
5429 switch (DECL_FUNCTION_CODE (fn))
5431 case BUILT_IN_FORK:
5432 id = get_identifier ("__gcov_fork");
5433 break;
5435 case BUILT_IN_EXECL:
5436 id = get_identifier ("__gcov_execl");
5437 break;
5439 case BUILT_IN_EXECV:
5440 id = get_identifier ("__gcov_execv");
5441 break;
5443 case BUILT_IN_EXECLP:
5444 id = get_identifier ("__gcov_execlp");
5445 break;
5447 case BUILT_IN_EXECLE:
5448 id = get_identifier ("__gcov_execle");
5449 break;
5451 case BUILT_IN_EXECVP:
5452 id = get_identifier ("__gcov_execvp");
5453 break;
5455 case BUILT_IN_EXECVE:
5456 id = get_identifier ("__gcov_execve");
5457 break;
5459 default:
5460 gcc_unreachable ();
5463 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5464 FUNCTION_DECL, id, TREE_TYPE (fn));
5465 DECL_EXTERNAL (decl) = 1;
5466 TREE_PUBLIC (decl) = 1;
5467 DECL_ARTIFICIAL (decl) = 1;
5468 TREE_NOTHROW (decl) = 1;
5469 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5470 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5471 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5472 return expand_call (call, target, ignore);
5477 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5478 the pointer in these functions is void*, the tree optimizers may remove
5479 casts. The mode computed in expand_builtin isn't reliable either, due
5480 to __sync_bool_compare_and_swap.
5482 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5483 group of builtins. This gives us log2 of the mode size. */
5485 static inline machine_mode
5486 get_builtin_sync_mode (int fcode_diff)
5488 /* The size is not negotiable, so ask not to get BLKmode in return
5489 if the target indicates that a smaller size would be better. */
5490 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
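/* For example (illustrative, assuming BITS_PER_UNIT == 8): for the _4
   variant of a __sync builtin, FCODE_DIFF is 2, so this returns
   int_mode_for_size (8 << 2, 0), i.e. the 32-bit integer mode (SImode
   on typical targets).  */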
5493 /* Expand the memory expression LOC and return the appropriate memory operand
5494 for the builtin_sync operations. */
5496 static rtx
5497 get_builtin_sync_mem (tree loc, machine_mode mode)
5499 rtx addr, mem;
5501 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5502 addr = convert_memory_address (Pmode, addr);
5504 /* Note that we explicitly do not want any alias information for this
5505 memory, so that we kill all other live memories. Otherwise we don't
5506 satisfy the full barrier semantics of the intrinsic. */
5507 mem = validize_mem (gen_rtx_MEM (mode, addr));
5509 /* The alignment needs to be at least that of the mode. */
5510 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5511 get_pointer_alignment (loc)));
5512 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5513 MEM_VOLATILE_P (mem) = 1;
5515 return mem;
5518 /* Make sure an argument is in the right mode.
5519 EXP is the tree argument.
5520 MODE is the mode it should be in. */
5522 static rtx
5523 expand_expr_force_mode (tree exp, machine_mode mode)
5525 rtx val;
5526 machine_mode old_mode;
5528 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5529 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5530 of CONST_INTs, where we know the old_mode only from the call argument. */
5532 old_mode = GET_MODE (val);
5533 if (old_mode == VOIDmode)
5534 old_mode = TYPE_MODE (TREE_TYPE (exp));
5535 val = convert_modes (mode, old_mode, val, 1);
5536 return val;
5540 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5541 EXP is the CALL_EXPR. CODE is the rtx code
5542 that corresponds to the arithmetic or logical operation from the name;
5543 an exception here is that NOT actually means NAND. TARGET is an optional
5544 place for us to store the results; AFTER is true if this is the
5545 fetch_and_xxx form. */
5547 static rtx
5548 expand_builtin_sync_operation (machine_mode mode, tree exp,
5549 enum rtx_code code, bool after,
5550 rtx target)
5552 rtx val, mem;
5553 location_t loc = EXPR_LOCATION (exp);
5555 if (code == NOT && warn_sync_nand)
5557 tree fndecl = get_callee_fndecl (exp);
5558 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5560 static bool warned_f_a_n, warned_n_a_f;
5562 switch (fcode)
5564 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5565 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5566 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5567 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5568 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5569 if (warned_f_a_n)
5570 break;
5572 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5573 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5574 warned_f_a_n = true;
5575 break;
5577 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5578 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5579 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5580 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5581 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5582 if (warned_n_a_f)
5583 break;
5585 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5586 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5587 warned_n_a_f = true;
5588 break;
5590 default:
5591 gcc_unreachable ();
5595 /* Expand the operands. */
5596 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5597 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5599 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5600 after);
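/* Semantics illustration (per the GCC manual, sketched here): the
   GCC 4.4 change warned about above altered __sync_fetch_and_nand from

       tmp = *ptr; *ptr = ~tmp & val; return tmp;      -- GCC <= 4.3

   to

       tmp = *ptr; *ptr = ~(tmp & val); return tmp;    -- GCC >= 4.4

   which is why CODE == NOT is handed to expand_atomic_fetch_op as a
   true NAND.  */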
5603 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5604 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5605 true if this is the boolean form. TARGET is a place for us to store the
5606 results; this is NOT optional if IS_BOOL is true. */
5608 static rtx
5609 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5610 bool is_bool, rtx target)
5612 rtx old_val, new_val, mem;
5613 rtx *pbool, *poval;
5615 /* Expand the operands. */
5616 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5617 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5618 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5620 pbool = poval = NULL;
5621 if (target != const0_rtx)
5623 if (is_bool)
5624 pbool = &target;
5625 else
5626 poval = &target;
5628 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5629 false, MEMMODEL_SYNC_SEQ_CST,
5630 MEMMODEL_SYNC_SEQ_CST))
5631 return NULL_RTX;
5633 return target;
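/* Usage sketch (illustrative):

       bool ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
       long was = __sync_val_compare_and_swap (&x, oldv, newv);

   Both forms funnel through this expander; IS_BOOL selects whether
   PBOOL or POVAL receives TARGET, i.e. whether the comparison result
   or the prior memory contents are produced.  */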
5636 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5637 general form is actually an atomic exchange, and some targets only
5638 support a reduced form with the second argument being a constant 1.
5639 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5640 the results. */
5642 static rtx
5643 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5644 rtx target)
5646 rtx val, mem;
5648 /* Expand the operands. */
5649 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5650 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5652 return expand_sync_lock_test_and_set (target, mem, val);
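/* Usage sketch (illustrative): per the GCC manual this is an acquire
   barrier, commonly used as a spinlock:

       while (__sync_lock_test_and_set (&lock, 1))
         ;
       ... critical section ...
       __sync_lock_release (&lock);    -- expanded just below  */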
5655 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5657 static void
5658 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5660 rtx mem;
5662 /* Expand the operands. */
5663 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5665 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5668 /* Given an integer representing an ``enum memmodel'', verify its
5669 correctness and return the memory model enum. */
5671 static enum memmodel
5672 get_memmodel (tree exp)
5674 rtx op;
5675 unsigned HOST_WIDE_INT val;
5676 source_location loc
5677 = expansion_point_location_if_in_system_header (input_location);
5679 /* If the parameter is not a constant, it's a run time value so we'll just
5680 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5681 if (TREE_CODE (exp) != INTEGER_CST)
5682 return MEMMODEL_SEQ_CST;
5684 op = expand_normal (exp);
5686 val = INTVAL (op);
5687 if (targetm.memmodel_check)
5688 val = targetm.memmodel_check (val);
5689 else if (val & ~MEMMODEL_MASK)
5691 warning_at (loc, OPT_Winvalid_memory_model,
5692 "unknown architecture specifier in memory model to builtin");
5693 return MEMMODEL_SEQ_CST;
5696 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5697 if (memmodel_base (val) >= MEMMODEL_LAST)
5699 warning_at (loc, OPT_Winvalid_memory_model,
5700 "invalid memory model argument to builtin");
5701 return MEMMODEL_SEQ_CST;
5704 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5705 be conservative and promote consume to acquire. */
5706 if (val == MEMMODEL_CONSUME)
5707 val = MEMMODEL_ACQUIRE;
5709 return (enum memmodel) val;
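/* For example (illustrative; MODEL_VARIABLE is a stand-in for any
   run-time value):

       __atomic_load_n (&x, __ATOMIC_CONSUME);    -- INTEGER_CST; promoted
                                                     to acquire by the
                                                     PR59448 workaround above
       __atomic_load_n (&x, model_variable);      -- not a constant; treated
                                                     as seq_cst at the
                                                     TREE_CODE check above  */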
5712 /* Expand the __atomic_exchange intrinsic:
5713 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5714 EXP is the CALL_EXPR.
5715 TARGET is an optional place for us to store the results. */
5717 static rtx
5718 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5720 rtx val, mem;
5721 enum memmodel model;
5723 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5725 if (!flag_inline_atomics)
5726 return NULL_RTX;
5728 /* Expand the operands. */
5729 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5730 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5732 return expand_atomic_exchange (target, mem, val, model);
5735 /* Expand the __atomic_compare_exchange intrinsic:
5736 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5737 TYPE desired, BOOL weak,
5738 enum memmodel success,
5739 enum memmodel failure)
5740 EXP is the CALL_EXPR.
5741 TARGET is an optional place for us to store the results. */
5743 static rtx
5744 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5745 rtx target)
5747 rtx expect, desired, mem, oldval;
5748 rtx_code_label *label;
5749 enum memmodel success, failure;
5750 tree weak;
5751 bool is_weak;
5752 source_location loc
5753 = expansion_point_location_if_in_system_header (input_location);
5755 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5756 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5758 if (failure > success)
5760 warning_at (loc, OPT_Winvalid_memory_model,
5761 "failure memory model cannot be stronger than success "
5762 "memory model for %<__atomic_compare_exchange%>");
5763 success = MEMMODEL_SEQ_CST;
5766 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5768 warning_at (loc, OPT_Winvalid_memory_model,
5769 "invalid failure memory model for "
5770 "%<__atomic_compare_exchange%>");
5771 failure = MEMMODEL_SEQ_CST;
5772 success = MEMMODEL_SEQ_CST;
5776 if (!flag_inline_atomics)
5777 return NULL_RTX;
5779 /* Expand the operands. */
5780 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5782 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5783 expect = convert_memory_address (Pmode, expect);
5784 expect = gen_rtx_MEM (mode, expect);
5785 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5787 weak = CALL_EXPR_ARG (exp, 3);
5788 is_weak = false;
5789 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5790 is_weak = true;
5792 if (target == const0_rtx)
5793 target = NULL;
5795 /* Lest the rtl backend create a race condition with an improper store
5796 to memory, always create a new pseudo for OLDVAL. */
5797 oldval = NULL;
5799 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5800 is_weak, success, failure))
5801 return NULL_RTX;
5803 /* Conditionally store back to EXPECT, lest we create a race condition
5804 with an improper store to memory. */
5805 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5806 the normal case where EXPECT is totally private, i.e. a register. At
5807 which point the store can be unconditional. */
5808 label = gen_label_rtx ();
5809 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5810 GET_MODE (target), 1, label);
5811 emit_move_insn (expect, oldval);
5812 emit_label (label);
5814 return target;
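/* For example (illustrative), the latter two of these user calls draw
   the -Winvalid-memory-model warnings issued above:

       __atomic_compare_exchange_n (&v, &e, d, 0,
                                    __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);  -- OK
       __atomic_compare_exchange_n (&v, &e, d, 0,
                                    __ATOMIC_RELAXED, __ATOMIC_SEQ_CST);  -- failure stronger
                                                                             than success
       __atomic_compare_exchange_n (&v, &e, d, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);  -- release failure
                                                                             model is invalid  */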
5817 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5818 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5819 call. The weak parameter must be dropped to match the expected parameter
5820 list and the expected argument changed from value to pointer to memory
5821 slot. */
5823 static void
5824 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5826 unsigned int z;
5827 vec<tree, va_gc> *vec;
5829 vec_alloc (vec, 5);
5830 vec->quick_push (gimple_call_arg (call, 0));
5831 tree expected = gimple_call_arg (call, 1);
5832 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5833 TREE_TYPE (expected));
5834 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5835 if (expd != x)
5836 emit_move_insn (x, expd);
5837 tree v = make_tree (TREE_TYPE (expected), x);
5838 vec->quick_push (build1 (ADDR_EXPR,
5839 build_pointer_type (TREE_TYPE (expected)), v));
5840 vec->quick_push (gimple_call_arg (call, 2));
5841 /* Skip the boolean weak parameter. */
5842 for (z = 4; z < 6; z++)
5843 vec->quick_push (gimple_call_arg (call, z));
5844 built_in_function fncode
5845 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5846 + exact_log2 (GET_MODE_SIZE (mode)));
5847 tree fndecl = builtin_decl_explicit (fncode);
5848 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5849 fndecl);
5850 tree exp = build_call_vec (boolean_type_node, fn, vec);
5851 tree lhs = gimple_call_lhs (call);
5852 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5853 if (lhs)
5855 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5856 if (GET_MODE (boolret) != mode)
5857 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5858 x = force_reg (mode, x);
5859 write_complex_part (target, boolret, true);
5860 write_complex_part (target, x, false);
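/* Mapping sketch (illustrative): an internal call of the shape

       ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, 4 | weak << 8,
                                success, failure)

   is rewritten by the function above into the library form

       __atomic_compare_exchange_4 (ptr, &expected_tmp, desired,
                                    success, failure)

   where EXPECTED_TMP is the stack temporary created so the value's
   address can be taken, and the weak flag is dropped because the _N
   entry points take none.  */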
5864 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5866 void
5867 expand_ifn_atomic_compare_exchange (gcall *call)
5869 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5870 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5871 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5872 rtx expect, desired, mem, oldval, boolret;
5873 enum memmodel success, failure;
5874 tree lhs;
5875 bool is_weak;
5876 source_location loc
5877 = expansion_point_location_if_in_system_header (gimple_location (call));
5879 success = get_memmodel (gimple_call_arg (call, 4));
5880 failure = get_memmodel (gimple_call_arg (call, 5));
5882 if (failure > success)
5884 warning_at (loc, OPT_Winvalid_memory_model,
5885 "failure memory model cannot be stronger than success "
5886 "memory model for %<__atomic_compare_exchange%>");
5887 success = MEMMODEL_SEQ_CST;
5890 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5892 warning_at (loc, OPT_Winvalid_memory_model,
5893 "invalid failure memory model for "
5894 "%<__atomic_compare_exchange%>");
5895 failure = MEMMODEL_SEQ_CST;
5896 success = MEMMODEL_SEQ_CST;
5899 if (!flag_inline_atomics)
5901 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5902 return;
5905 /* Expand the operands. */
5906 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5908 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5909 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5911 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5913 boolret = NULL;
5914 oldval = NULL;
5916 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5917 is_weak, success, failure))
5919 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5920 return;
5923 lhs = gimple_call_lhs (call);
5924 if (lhs)
5926 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5927 if (GET_MODE (boolret) != mode)
5928 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5929 write_complex_part (target, boolret, true);
5930 write_complex_part (target, oldval, false);
5934 /* Expand the __atomic_load intrinsic:
5935 TYPE __atomic_load (TYPE *object, enum memmodel)
5936 EXP is the CALL_EXPR.
5937 TARGET is an optional place for us to store the results. */
5939 static rtx
5940 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5942 rtx mem;
5943 enum memmodel model;
5945 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5946 if (is_mm_release (model) || is_mm_acq_rel (model))
5948 source_location loc
5949 = expansion_point_location_if_in_system_header (input_location);
5950 warning_at (loc, OPT_Winvalid_memory_model,
5951 "invalid memory model for %<__atomic_load%>");
5952 model = MEMMODEL_SEQ_CST;
5955 if (!flag_inline_atomics)
5956 return NULL_RTX;
5958 /* Expand the operand. */
5959 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5961 return expand_atomic_load (target, mem, model);
5965 /* Expand the __atomic_store intrinsic:
5966 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5967 EXP is the CALL_EXPR.
5968 TARGET is an optional place for us to store the results. */
5970 static rtx
5971 expand_builtin_atomic_store (machine_mode mode, tree exp)
5973 rtx mem, val;
5974 enum memmodel model;
5976 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5977 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5978 || is_mm_release (model)))
5980 source_location loc
5981 = expansion_point_location_if_in_system_header (input_location);
5982 warning_at (loc, OPT_Winvalid_memory_model,
5983 "invalid memory model for %<__atomic_store%>");
5984 model = MEMMODEL_SEQ_CST;
5987 if (!flag_inline_atomics)
5988 return NULL_RTX;
5990 /* Expand the operands. */
5991 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5992 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5994 return expand_atomic_store (mem, val, model, false);
5997 /* Expand the __atomic_fetch_XXX intrinsic:
5998 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5999 EXP is the CALL_EXPR.
6000 TARGET is an optional place for us to store the results.
6001 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6002 FETCH_AFTER is true if returning the result of the operation.
6003 FETCH_AFTER is false if returning the value before the operation.
6004 IGNORE is true if the result is not used.
6005 EXT_CALL is the correct builtin for an external call if this cannot be
6006 resolved to an instruction sequence. */
6008 static rtx
6009 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6010 enum rtx_code code, bool fetch_after,
6011 bool ignore, enum built_in_function ext_call)
6013 rtx val, mem, ret;
6014 enum memmodel model;
6015 tree fndecl;
6016 tree addr;
6018 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6020 /* Expand the operands. */
6021 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6022 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6024 /* Only try generating instructions if inlining is turned on. */
6025 if (flag_inline_atomics)
6027 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6028 if (ret)
6029 return ret;
6032 /* Return if a different routine isn't needed for the library call. */
6033 if (ext_call == BUILT_IN_NONE)
6034 return NULL_RTX;
6036 /* Change the call to the specified function. */
6037 fndecl = get_callee_fndecl (exp);
6038 addr = CALL_EXPR_FN (exp);
6039 STRIP_NOPS (addr);
6041 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6042 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6044 /* If we will emit code after the call, the call cannot be a tail call.
6045 If it is emitted as a tail call, a barrier is emitted after it, and
6046 then all trailing code is removed. */
6047 if (!ignore)
6048 CALL_EXPR_TAILCALL (exp) = 0;
6050 /* Expand the call here so we can emit trailing code. */
6051 ret = expand_call (exp, target, ignore);
6053 /* Replace the original function just in case it matters. */
6054 TREE_OPERAND (addr, 0) = fndecl;
6056 /* Then issue the arithmetic correction to return the right result. */
6057 if (!ignore)
6059 if (code == NOT)
6061 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6062 OPTAB_LIB_WIDEN);
6063 ret = expand_simple_unop (mode, NOT, ret, target, true);
6065 else
6066 ret = expand_simple_binop (mode, code, ret, val, target, true,
6067 OPTAB_LIB_WIDEN);
6069 return ret;
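/* Correction example (illustrative): if __atomic_add_fetch_4 cannot
   be expanded inline, EXT_CALL names __atomic_fetch_add_4 and the
   fixup above recovers the post-operation value:

       ret = __atomic_fetch_add_4 (ptr, val, model);
       ret = ret + val;                          -- CODE == PLUS

   For NAND (CODE == NOT) the fixup is ret = ~(ret & val), matching
   the binop/unop pair emitted above.  */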
6072 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6074 void
6075 expand_ifn_atomic_bit_test_and (gcall *call)
6077 tree ptr = gimple_call_arg (call, 0);
6078 tree bit = gimple_call_arg (call, 1);
6079 tree flag = gimple_call_arg (call, 2);
6080 tree lhs = gimple_call_lhs (call);
6081 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6082 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6083 enum rtx_code code;
6084 optab optab;
6085 struct expand_operand ops[5];
6087 gcc_assert (flag_inline_atomics);
6089 if (gimple_call_num_args (call) == 4)
6090 model = get_memmodel (gimple_call_arg (call, 3));
6092 rtx mem = get_builtin_sync_mem (ptr, mode);
6093 rtx val = expand_expr_force_mode (bit, mode);
6095 switch (gimple_call_internal_fn (call))
6097 case IFN_ATOMIC_BIT_TEST_AND_SET:
6098 code = IOR;
6099 optab = atomic_bit_test_and_set_optab;
6100 break;
6101 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6102 code = XOR;
6103 optab = atomic_bit_test_and_complement_optab;
6104 break;
6105 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6106 code = AND;
6107 optab = atomic_bit_test_and_reset_optab;
6108 break;
6109 default:
6110 gcc_unreachable ();
6113 if (lhs == NULL_TREE)
6115 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6116 val, NULL_RTX, true, OPTAB_DIRECT);
6117 if (code == AND)
6118 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6119 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6120 return;
6123 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6124 enum insn_code icode = direct_optab_handler (optab, mode);
6125 gcc_assert (icode != CODE_FOR_nothing);
6126 create_output_operand (&ops[0], target, mode);
6127 create_fixed_operand (&ops[1], mem);
6128 create_convert_operand_to (&ops[2], val, mode, true);
6129 create_integer_operand (&ops[3], model);
6130 create_integer_operand (&ops[4], integer_onep (flag));
6131 if (maybe_expand_insn (icode, 5, ops))
6132 return;
6134 rtx bitval = val;
6135 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6136 val, NULL_RTX, true, OPTAB_DIRECT);
6137 rtx maskval = val;
6138 if (code == AND)
6139 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6140 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6141 code, model, false);
6142 if (integer_onep (flag))
6144 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6145 NULL_RTX, true, OPTAB_DIRECT);
6146 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6147 true, OPTAB_DIRECT);
6149 else
6150 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6151 OPTAB_DIRECT);
6152 if (result != target)
6153 emit_move_insn (target, result);
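/* Fallback sketch (illustrative): without a direct
   atomic_bit_test_and_* pattern, IFN_ATOMIC_BIT_TEST_AND_SET with a
   used result is emulated above roughly as

       mask   = 1 << bit;
       old    = __atomic_fetch_or (ptr, mask, model);
       result = flag ? (old >> bit) & 1 : old & mask;

   with XOR and AND-of-complement standing in for the complement and
   reset variants.  */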
6156 /* Expand an atomic clear operation.
6157 void __atomic_clear (BOOL *obj, enum memmodel)
6158 EXP is the call expression. */
6160 static rtx
6161 expand_builtin_atomic_clear (tree exp)
6163 machine_mode mode;
6164 rtx mem, ret;
6165 enum memmodel model;
6167 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6168 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6169 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6171 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6173 source_location loc
6174 = expansion_point_location_if_in_system_header (input_location);
6175 warning_at (loc, OPT_Winvalid_memory_model,
6176 "invalid memory model for %<__atomic_store%>");
6177 model = MEMMODEL_SEQ_CST;
6180 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6181 Failing that, a plain store is issued below. The only way this can
6182 fail is if the bool type is larger than a word size. Unlikely, but
6183 handle it anyway for completeness. Assume a single threaded model since
6184 there is no atomic support in this case, and no barriers are required. */
6185 ret = expand_atomic_store (mem, const0_rtx, model, true);
6186 if (!ret)
6187 emit_move_insn (mem, const0_rtx);
6188 return const0_rtx;
6191 /* Expand an atomic test_and_set operation.
6192 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6193 EXP is the call expression. */
6195 static rtx
6196 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6198 rtx mem;
6199 enum memmodel model;
6200 machine_mode mode;
6202 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6204 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6206 return expand_atomic_test_and_set (target, mem, model);
6210 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6211 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6213 static tree
6214 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6216 int size;
6217 machine_mode mode;
6218 unsigned int mode_align, type_align;
6220 if (TREE_CODE (arg0) != INTEGER_CST)
6221 return NULL_TREE;
6223 /* We need a corresponding integer mode for the access to be lock-free. */
6224 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6225 if (!int_mode_for_size (size, 0).exists (&mode))
6226 return boolean_false_node;
6228 mode_align = GET_MODE_ALIGNMENT (mode);
6230 if (TREE_CODE (arg1) == INTEGER_CST)
6232 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6234 /* Either this argument is null, or it's a fake pointer encoding
6235 the alignment of the object. */
6236 val = least_bit_hwi (val);
6237 val *= BITS_PER_UNIT;
6239 if (val == 0 || mode_align < val)
6240 type_align = mode_align;
6241 else
6242 type_align = val;
6244 else
6246 tree ttype = TREE_TYPE (arg1);
6248 /* This function is usually invoked and folded immediately by the front
6249 end before anything else has a chance to look at it. The pointer
6250 parameter at this point is usually cast to a void *, so check for that
6251 and look past the cast. */
6252 if (CONVERT_EXPR_P (arg1)
6253 && POINTER_TYPE_P (ttype)
6254 && VOID_TYPE_P (TREE_TYPE (ttype))
6255 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6256 arg1 = TREE_OPERAND (arg1, 0);
6258 ttype = TREE_TYPE (arg1);
6259 gcc_assert (POINTER_TYPE_P (ttype));
6261 /* Get the underlying type of the object. */
6262 ttype = TREE_TYPE (ttype);
6263 type_align = TYPE_ALIGN (ttype);
6266 /* If the object has smaller alignment, the lock free routines cannot
6267 be used. */
6268 if (type_align < mode_align)
6269 return boolean_false_node;
6271 /* Check if a compare_and_swap pattern exists for the mode which represents
6272 the required size. The pattern is not allowed to fail, so the existence
6273 of the pattern indicates support is present. Also require that an
6274 atomic load exists for the required size. */
6275 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6276 return boolean_true_node;
6277 else
6278 return boolean_false_node;
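/* For example (illustrative), on a typical target with lock-free
   32-bit compare-and-swap and loads:

       __atomic_always_lock_free (4, 0);    -- true: a null object pointer
                                               means assume the mode's
                                               natural alignment
       __atomic_always_lock_free (4, pc);   -- false when PC is a char *,
                                               since char alignment may be
                                               below the mode alignment

   A non-constant size never folds; the expander just below reports an
   error for that case.  */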
6281 /* Return true if the parameters to call EXP represent an object which will
6282 always generate lock free instructions. The first argument represents the
6283 size of the object, and the second parameter is a pointer to the object
6284 itself. If NULL is passed for the object, then the result is based on
6285 typical alignment for an object of the specified size. Otherwise return
6286 false. */
6288 static rtx
6289 expand_builtin_atomic_always_lock_free (tree exp)
6291 tree size;
6292 tree arg0 = CALL_EXPR_ARG (exp, 0);
6293 tree arg1 = CALL_EXPR_ARG (exp, 1);
6295 if (TREE_CODE (arg0) != INTEGER_CST)
6297 error ("non-constant argument 1 to __atomic_always_lock_free");
6298 return const0_rtx;
6301 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6302 if (size == boolean_true_node)
6303 return const1_rtx;
6304 return const0_rtx;
6307 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6308 is lock free on this architecture. */
6310 static tree
6311 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6313 if (!flag_inline_atomics)
6314 return NULL_TREE;
6316 /* If it is always lock free, fold to true; otherwise generate no result. */
6317 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6318 return boolean_true_node;
6320 return NULL_TREE;
6323 /* Return true if the parameters to call EXP represent an object which will
6324 always generate lock free instructions. The first argument represents the
6325 size of the object, and the second parameter is a pointer to the object
6326 itself. If NULL is passed for the object, then the result is based on
6327 typical alignment for an object of the specified size. Otherwise return
6328 NULL. */
6330 static rtx
6331 expand_builtin_atomic_is_lock_free (tree exp)
6333 tree size;
6334 tree arg0 = CALL_EXPR_ARG (exp, 0);
6335 tree arg1 = CALL_EXPR_ARG (exp, 1);
6337 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6339 error ("non-integer argument 1 to __atomic_is_lock_free");
6340 return NULL_RTX;
6343 if (!flag_inline_atomics)
6344 return NULL_RTX;
6346 /* If the value is known at compile time, return the RTX for it. */
6347 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6348 if (size == boolean_true_node)
6349 return const1_rtx;
6351 return NULL_RTX;
6354 /* Expand the __atomic_thread_fence intrinsic:
6355 void __atomic_thread_fence (enum memmodel)
6356 EXP is the CALL_EXPR. */
6358 static void
6359 expand_builtin_atomic_thread_fence (tree exp)
6361 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6362 expand_mem_thread_fence (model);
6365 /* Expand the __atomic_signal_fence intrinsic:
6366 void __atomic_signal_fence (enum memmodel)
6367 EXP is the CALL_EXPR. */
6369 static void
6370 expand_builtin_atomic_signal_fence (tree exp)
6372 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6373 expand_mem_signal_fence (model);
6376 /* Expand the __sync_synchronize intrinsic. */
6378 static void
6379 expand_builtin_sync_synchronize (void)
6381 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6384 static rtx
6385 expand_builtin_thread_pointer (tree exp, rtx target)
6387 enum insn_code icode;
6388 if (!validate_arglist (exp, VOID_TYPE))
6389 return const0_rtx;
6390 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6391 if (icode != CODE_FOR_nothing)
6393 struct expand_operand op;
6394 /* If the target is not suitable then create a new target. */
6395 if (target == NULL_RTX
6396 || !REG_P (target)
6397 || GET_MODE (target) != Pmode)
6398 target = gen_reg_rtx (Pmode);
6399 create_output_operand (&op, target, Pmode);
6400 expand_insn (icode, 1, &op);
6401 return target;
6403 error ("__builtin_thread_pointer is not supported on this target");
6404 return const0_rtx;
6407 static void
6408 expand_builtin_set_thread_pointer (tree exp)
6410 enum insn_code icode;
6411 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6412 return;
6413 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6414 if (icode != CODE_FOR_nothing)
6416 struct expand_operand op;
6417 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6418 Pmode, EXPAND_NORMAL);
6419 create_input_operand (&op, val, Pmode);
6420 expand_insn (icode, 1, &op);
6421 return;
6423 error ("__builtin_set_thread_pointer is not supported on this target");
6427 /* Emit code to restore the current value of the stack. */
6429 static void
6430 expand_stack_restore (tree var)
6432 rtx_insn *prev;
6433 rtx sa = expand_normal (var);
6435 sa = convert_memory_address (Pmode, sa);
6437 prev = get_last_insn ();
6438 emit_stack_restore (SAVE_BLOCK, sa);
6440 record_new_stack_level ();
6442 fixup_args_size_notes (prev, get_last_insn (), 0);
6445 /* Emit code to save the current value of the stack. */
6447 static rtx
6448 expand_stack_save (void)
6450 rtx ret = NULL_RTX;
6452 emit_stack_save (SAVE_BLOCK, &ret);
6453 return ret;
6457 /* Expand an expression EXP that calls a built-in function,
6458 with result going to TARGET if that's convenient
6459 (and in mode MODE if that's convenient).
6460 SUBTARGET may be used as the target for computing one of EXP's operands.
6461 IGNORE is nonzero if the value is to be ignored. */
6463 rtx
6464 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6465 int ignore)
6467 tree fndecl = get_callee_fndecl (exp);
6468 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6469 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6470 int flags;
6472 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6473 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6475 /* When ASan is enabled, we don't want to expand some memory/string
6476 builtins and rely on libsanitizer's hooks. This allows us to avoid
6477 redundant checks and be sure that a possible overflow will be detected
6478 by ASan. */
6480 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6481 return expand_call (exp, target, ignore);
6483 /* When not optimizing, generate calls to library functions for a certain
6484 set of builtins. */
6485 if (!optimize
6486 && !called_as_built_in (fndecl)
6487 && fcode != BUILT_IN_FORK
6488 && fcode != BUILT_IN_EXECL
6489 && fcode != BUILT_IN_EXECV
6490 && fcode != BUILT_IN_EXECLP
6491 && fcode != BUILT_IN_EXECLE
6492 && fcode != BUILT_IN_EXECVP
6493 && fcode != BUILT_IN_EXECVE
6494 && !ALLOCA_FUNCTION_CODE_P (fcode)
6495 && fcode != BUILT_IN_FREE
6496 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6497 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6498 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6499 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6500 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6501 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6502 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6503 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6504 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6505 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6506 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6507 && fcode != BUILT_IN_CHKP_BNDRET)
6508 return expand_call (exp, target, ignore);
6510 /* The built-in function expanders test for target == const0_rtx
6511 to determine whether the function's result will be ignored. */
6512 if (ignore)
6513 target = const0_rtx;
6515 /* If the result of a pure or const built-in function is ignored, and
6516 none of its arguments are volatile, we can avoid expanding the
6517 built-in call and just evaluate the arguments for side-effects. */
6518 if (target == const0_rtx
6519 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6520 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6522 bool volatilep = false;
6523 tree arg;
6524 call_expr_arg_iterator iter;
6526 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6527 if (TREE_THIS_VOLATILE (arg))
6529 volatilep = true;
6530 break;
6533 if (! volatilep)
6535 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6536 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6537 return const0_rtx;
6541 /* expand_builtin_with_bounds is supposed to be used for
6542 instrumented builtin calls. */
6543 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6545 switch (fcode)
6547 CASE_FLT_FN (BUILT_IN_FABS):
6548 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6549 case BUILT_IN_FABSD32:
6550 case BUILT_IN_FABSD64:
6551 case BUILT_IN_FABSD128:
6552 target = expand_builtin_fabs (exp, target, subtarget);
6553 if (target)
6554 return target;
6555 break;
6557 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6558 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6559 target = expand_builtin_copysign (exp, target, subtarget);
6560 if (target)
6561 return target;
6562 break;
6564 /* Just do a normal library call if we were unable to fold
6565 the values. */
6566 CASE_FLT_FN (BUILT_IN_CABS):
6567 break;
6569 CASE_FLT_FN (BUILT_IN_FMA):
6570 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6571 if (target)
6572 return target;
6573 break;
6575 CASE_FLT_FN (BUILT_IN_ILOGB):
6576 if (! flag_unsafe_math_optimizations)
6577 break;
6578 gcc_fallthrough ();
6579 CASE_FLT_FN (BUILT_IN_ISINF):
6580 CASE_FLT_FN (BUILT_IN_FINITE):
6581 case BUILT_IN_ISFINITE:
6582 case BUILT_IN_ISNORMAL:
6583 target = expand_builtin_interclass_mathfn (exp, target);
6584 if (target)
6585 return target;
6586 break;
6588 CASE_FLT_FN (BUILT_IN_ICEIL):
6589 CASE_FLT_FN (BUILT_IN_LCEIL):
6590 CASE_FLT_FN (BUILT_IN_LLCEIL):
6591 CASE_FLT_FN (BUILT_IN_LFLOOR):
6592 CASE_FLT_FN (BUILT_IN_IFLOOR):
6593 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6594 target = expand_builtin_int_roundingfn (exp, target);
6595 if (target)
6596 return target;
6597 break;
6599 CASE_FLT_FN (BUILT_IN_IRINT):
6600 CASE_FLT_FN (BUILT_IN_LRINT):
6601 CASE_FLT_FN (BUILT_IN_LLRINT):
6602 CASE_FLT_FN (BUILT_IN_IROUND):
6603 CASE_FLT_FN (BUILT_IN_LROUND):
6604 CASE_FLT_FN (BUILT_IN_LLROUND):
6605 target = expand_builtin_int_roundingfn_2 (exp, target);
6606 if (target)
6607 return target;
6608 break;
6610 CASE_FLT_FN (BUILT_IN_POWI):
6611 target = expand_builtin_powi (exp, target);
6612 if (target)
6613 return target;
6614 break;
6616 CASE_FLT_FN (BUILT_IN_CEXPI):
6617 target = expand_builtin_cexpi (exp, target);
6618 gcc_assert (target);
6619 return target;
6621 CASE_FLT_FN (BUILT_IN_SIN):
6622 CASE_FLT_FN (BUILT_IN_COS):
6623 if (! flag_unsafe_math_optimizations)
6624 break;
6625 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6626 if (target)
6627 return target;
6628 break;
6630 CASE_FLT_FN (BUILT_IN_SINCOS):
6631 if (! flag_unsafe_math_optimizations)
6632 break;
6633 target = expand_builtin_sincos (exp);
6634 if (target)
6635 return target;
6636 break;
6638 case BUILT_IN_APPLY_ARGS:
6639 return expand_builtin_apply_args ();
6641 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6642 FUNCTION with a copy of the parameters described by
6643 ARGUMENTS, and ARGSIZE. It returns a block of memory
6644 allocated on the stack into which are stored all the registers
6645 that might possibly be used for returning the result of a
6646 function. ARGUMENTS is the value returned by
6647 __builtin_apply_args. ARGSIZE is the number of bytes of
6648 arguments that must be copied. ??? How should this value be
6649 computed? We'll also need a safe worst case value for varargs
6650 functions. */
6651 case BUILT_IN_APPLY:
6652 if (!validate_arglist (exp, POINTER_TYPE,
6653 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6654 && !validate_arglist (exp, REFERENCE_TYPE,
6655 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6656 return const0_rtx;
6657 else
6659 rtx ops[3];
6661 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6662 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6663 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6665 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6668 /* __builtin_return (RESULT) causes the function to return the
6669 value described by RESULT. RESULT is address of the block of
6670 memory returned by __builtin_apply. */
6671 case BUILT_IN_RETURN:
6672 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6673 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6674 return const0_rtx;
6676 case BUILT_IN_SAVEREGS:
6677 return expand_builtin_saveregs ();
6679 case BUILT_IN_VA_ARG_PACK:
6680 /* All valid uses of __builtin_va_arg_pack () are removed during
6681 inlining. */
6682 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6683 return const0_rtx;
6685 case BUILT_IN_VA_ARG_PACK_LEN:
6686 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6687 inlining. */
6688 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6689 return const0_rtx;
6691 /* Return the address of the first anonymous stack arg. */
6692 case BUILT_IN_NEXT_ARG:
6693 if (fold_builtin_next_arg (exp, false))
6694 return const0_rtx;
6695 return expand_builtin_next_arg ();
6697 case BUILT_IN_CLEAR_CACHE:
6698 target = expand_builtin___clear_cache (exp);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_CLASSIFY_TYPE:
6704 return expand_builtin_classify_type (exp);
6706 case BUILT_IN_CONSTANT_P:
6707 return const0_rtx;
6709 case BUILT_IN_FRAME_ADDRESS:
6710 case BUILT_IN_RETURN_ADDRESS:
6711 return expand_builtin_frame_address (fndecl, exp);
6713 /* Returns the address of the area where the structure is returned,
6714 or 0 otherwise. */
6715 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6716 if (call_expr_nargs (exp) != 0
6717 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6718 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6719 return const0_rtx;
6720 else
6721 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6723 CASE_BUILT_IN_ALLOCA:
6724 target = expand_builtin_alloca (exp);
6725 if (target)
6726 return target;
6727 break;
6729 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6730 return expand_asan_emit_allocas_unpoison (exp);
6732 case BUILT_IN_STACK_SAVE:
6733 return expand_stack_save ();
6735 case BUILT_IN_STACK_RESTORE:
6736 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6737 return const0_rtx;
6739 case BUILT_IN_BSWAP16:
6740 case BUILT_IN_BSWAP32:
6741 case BUILT_IN_BSWAP64:
6742 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6743 if (target)
6744 return target;
6745 break;
6747 CASE_INT_FN (BUILT_IN_FFS):
6748 target = expand_builtin_unop (target_mode, exp, target,
6749 subtarget, ffs_optab);
6750 if (target)
6751 return target;
6752 break;
6754 CASE_INT_FN (BUILT_IN_CLZ):
6755 target = expand_builtin_unop (target_mode, exp, target,
6756 subtarget, clz_optab);
6757 if (target)
6758 return target;
6759 break;
6761 CASE_INT_FN (BUILT_IN_CTZ):
6762 target = expand_builtin_unop (target_mode, exp, target,
6763 subtarget, ctz_optab);
6764 if (target)
6765 return target;
6766 break;
6768 CASE_INT_FN (BUILT_IN_CLRSB):
6769 target = expand_builtin_unop (target_mode, exp, target,
6770 subtarget, clrsb_optab);
6771 if (target)
6772 return target;
6773 break;
6775 CASE_INT_FN (BUILT_IN_POPCOUNT):
6776 target = expand_builtin_unop (target_mode, exp, target,
6777 subtarget, popcount_optab);
6778 if (target)
6779 return target;
6780 break;
6782 CASE_INT_FN (BUILT_IN_PARITY):
6783 target = expand_builtin_unop (target_mode, exp, target,
6784 subtarget, parity_optab);
6785 if (target)
6786 return target;
6787 break;
6789 case BUILT_IN_STRLEN:
6790 target = expand_builtin_strlen (exp, target, target_mode);
6791 if (target)
6792 return target;
6793 break;
6795 case BUILT_IN_STRCAT:
6796 target = expand_builtin_strcat (exp, target);
6797 if (target)
6798 return target;
6799 break;
6801 case BUILT_IN_STRCPY:
6802 target = expand_builtin_strcpy (exp, target);
6803 if (target)
6804 return target;
6805 break;
6807 case BUILT_IN_STRNCAT:
6808 target = expand_builtin_strncat (exp, target);
6809 if (target)
6810 return target;
6811 break;
6813 case BUILT_IN_STRNCPY:
6814 target = expand_builtin_strncpy (exp, target);
6815 if (target)
6816 return target;
6817 break;
6819 case BUILT_IN_STPCPY:
6820 target = expand_builtin_stpcpy (exp, target, mode);
6821 if (target)
6822 return target;
6823 break;
6825 case BUILT_IN_STPNCPY:
6826 target = expand_builtin_stpncpy (exp, target);
6827 if (target)
6828 return target;
6829 break;
6831 case BUILT_IN_MEMCHR:
6832 target = expand_builtin_memchr (exp, target);
6833 if (target)
6834 return target;
6835 break;
6837 case BUILT_IN_MEMCPY:
6838 target = expand_builtin_memcpy (exp, target);
6839 if (target)
6840 return target;
6841 break;
6843 case BUILT_IN_MEMMOVE:
6844 target = expand_builtin_memmove (exp, target);
6845 if (target)
6846 return target;
6847 break;
6849 case BUILT_IN_MEMPCPY:
6850 target = expand_builtin_mempcpy (exp, target);
6851 if (target)
6852 return target;
6853 break;
6855 case BUILT_IN_MEMSET:
6856 target = expand_builtin_memset (exp, target, mode);
6857 if (target)
6858 return target;
6859 break;
6861 case BUILT_IN_BZERO:
6862 target = expand_builtin_bzero (exp);
6863 if (target)
6864 return target;
6865 break;
6867 case BUILT_IN_STRCMP:
6868 target = expand_builtin_strcmp (exp, target);
6869 if (target)
6870 return target;
6871 break;
6873 case BUILT_IN_STRNCMP:
6874 target = expand_builtin_strncmp (exp, target, mode);
6875 if (target)
6876 return target;
6877 break;
6879 case BUILT_IN_BCMP:
6880 case BUILT_IN_MEMCMP:
6881 case BUILT_IN_MEMCMP_EQ:
6882 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6883 if (target)
6884 return target;
6885 if (fcode == BUILT_IN_MEMCMP_EQ)
6887 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6888 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6890 break;
6892 case BUILT_IN_SETJMP:
6893 /* This should have been lowered to the builtins below. */
6894 gcc_unreachable ();
6896 case BUILT_IN_SETJMP_SETUP:
6897 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6898 and the receiver label. */
6899 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6901 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6902 VOIDmode, EXPAND_NORMAL);
6903 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6904 rtx_insn *label_r = label_rtx (label);
6906 /* This is copied from the handling of non-local gotos. */
6907 expand_builtin_setjmp_setup (buf_addr, label_r);
6908 nonlocal_goto_handler_labels
6909 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6910 nonlocal_goto_handler_labels);
6911 /* ??? Do not let expand_label treat us as such since we would
6912 not want to be both on the list of non-local labels and on
6913 the list of forced labels. */
6914 FORCED_LABEL (label) = 0;
6915 return const0_rtx;
6917 break;
6919 case BUILT_IN_SETJMP_RECEIVER:
6920 /* __builtin_setjmp_receiver is passed the receiver label. */
6921 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6923 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6924 rtx_insn *label_r = label_rtx (label);
6926 expand_builtin_setjmp_receiver (label_r);
6927 return const0_rtx;
6929 break;
6931 /* __builtin_longjmp is passed a pointer to an array of five words.
6932 It's similar to the C library longjmp function but works with
6933 __builtin_setjmp above. */
6934 case BUILT_IN_LONGJMP:
6935 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6937 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6938 VOIDmode, EXPAND_NORMAL);
6939 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6941 if (value != const1_rtx)
6943 error ("%<__builtin_longjmp%> second argument must be 1");
6944 return const0_rtx;
6947 expand_builtin_longjmp (buf_addr, value);
6948 return const0_rtx;
6950 break;
6952 case BUILT_IN_NONLOCAL_GOTO:
6953 target = expand_builtin_nonlocal_goto (exp);
6954 if (target)
6955 return target;
6956 break;
6958 /* This updates the setjmp buffer that is its argument with the value
6959 of the current stack pointer. */
6960 case BUILT_IN_UPDATE_SETJMP_BUF:
6961 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6963 rtx buf_addr
6964 = expand_normal (CALL_EXPR_ARG (exp, 0));
6966 expand_builtin_update_setjmp_buf (buf_addr);
6967 return const0_rtx;
6969 break;
6971 case BUILT_IN_TRAP:
6972 expand_builtin_trap ();
6973 return const0_rtx;
6975 case BUILT_IN_UNREACHABLE:
6976 expand_builtin_unreachable ();
6977 return const0_rtx;
6979 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6980 case BUILT_IN_SIGNBITD32:
6981 case BUILT_IN_SIGNBITD64:
6982 case BUILT_IN_SIGNBITD128:
6983 target = expand_builtin_signbit (exp, target);
6984 if (target)
6985 return target;
6986 break;
6988 /* Various hooks for the DWARF 2 __throw routine. */
6989 case BUILT_IN_UNWIND_INIT:
6990 expand_builtin_unwind_init ();
6991 return const0_rtx;
6992 case BUILT_IN_DWARF_CFA:
6993 return virtual_cfa_rtx;
6994 #ifdef DWARF2_UNWIND_INFO
6995 case BUILT_IN_DWARF_SP_COLUMN:
6996 return expand_builtin_dwarf_sp_column ();
6997 case BUILT_IN_INIT_DWARF_REG_SIZES:
6998 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6999 return const0_rtx;
7000 #endif
7001 case BUILT_IN_FROB_RETURN_ADDR:
7002 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7003 case BUILT_IN_EXTRACT_RETURN_ADDR:
7004 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7005 case BUILT_IN_EH_RETURN:
7006 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7007 CALL_EXPR_ARG (exp, 1));
7008 return const0_rtx;
7009 case BUILT_IN_EH_RETURN_DATA_REGNO:
7010 return expand_builtin_eh_return_data_regno (exp);
7011 case BUILT_IN_EXTEND_POINTER:
7012 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7013 case BUILT_IN_EH_POINTER:
7014 return expand_builtin_eh_pointer (exp);
7015 case BUILT_IN_EH_FILTER:
7016 return expand_builtin_eh_filter (exp);
7017 case BUILT_IN_EH_COPY_VALUES:
7018 return expand_builtin_eh_copy_values (exp);
7020 case BUILT_IN_VA_START:
7021 return expand_builtin_va_start (exp);
7022 case BUILT_IN_VA_END:
7023 return expand_builtin_va_end (exp);
7024 case BUILT_IN_VA_COPY:
7025 return expand_builtin_va_copy (exp);
7026 case BUILT_IN_EXPECT:
7027 return expand_builtin_expect (exp, target);
7028 case BUILT_IN_ASSUME_ALIGNED:
7029 return expand_builtin_assume_aligned (exp, target);
7030 case BUILT_IN_PREFETCH:
7031 expand_builtin_prefetch (exp);
7032 return const0_rtx;
7034 case BUILT_IN_INIT_TRAMPOLINE:
7035 return expand_builtin_init_trampoline (exp, true);
7036 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7037 return expand_builtin_init_trampoline (exp, false);
7038 case BUILT_IN_ADJUST_TRAMPOLINE:
7039 return expand_builtin_adjust_trampoline (exp);
7041 case BUILT_IN_INIT_DESCRIPTOR:
7042 return expand_builtin_init_descriptor (exp);
7043 case BUILT_IN_ADJUST_DESCRIPTOR:
7044 return expand_builtin_adjust_descriptor (exp);
7046 case BUILT_IN_FORK:
7047 case BUILT_IN_EXECL:
7048 case BUILT_IN_EXECV:
7049 case BUILT_IN_EXECLP:
7050 case BUILT_IN_EXECLE:
7051 case BUILT_IN_EXECVP:
7052 case BUILT_IN_EXECVE:
7053 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7054 if (target)
7055 return target;
7056 break;
7058 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7059 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7060 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7061 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7062 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7063 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7064 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7065 if (target)
7066 return target;
7067 break;
7069 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7070 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7071 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7072 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7073 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7074 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7075 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7076 if (target)
7077 return target;
7078 break;
7080 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7081 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7082 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7083 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7084 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7085 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7086 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7087 if (target)
7088 return target;
7089 break;
7091 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7092 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7093 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7094 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7095 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7096 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7097 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7098 if (target)
7099 return target;
7100 break;
7102 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7103 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7104 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7105 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7106 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7108 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7109 if (target)
7110 return target;
7111 break;
7113 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7114 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7115 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7116 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7117 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7118 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7119 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7120 if (target)
7121 return target;
7122 break;
7124 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7125 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7126 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7127 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7128 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7129 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7130 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7131 if (target)
7132 return target;
7133 break;
7135 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7136 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7137 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7138 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7139 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7140 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7141 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7142 if (target)
7143 return target;
7144 break;
7146 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7147 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7148 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7149 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7150 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7151 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7152 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7153 if (target)
7154 return target;
7155 break;
7157 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7158 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7159 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7160 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7161 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7162 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7163 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7164 if (target)
7165 return target;
7166 break;
7168 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7169 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7170 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7171 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7172 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7173 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7174 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7175 if (target)
7176 return target;
7177 break;
7179 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7180 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7181 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7182 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7183 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7184 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7185 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7186 if (target)
7187 return target;
7188 break;
7190 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7191 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7192 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7193 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7194 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7195 if (mode == VOIDmode)
7196 mode = TYPE_MODE (boolean_type_node);
7197 if (!target || !register_operand (target, mode))
7198 target = gen_reg_rtx (mode);
7200 mode = get_builtin_sync_mode
7201 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7202 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7203 if (target)
7204 return target;
7205 break;
7207 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7208 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7209 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7210 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7211 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7212 mode = get_builtin_sync_mode
7213 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7214 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7215 if (target)
7216 return target;
7217 break;
7219 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7220 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7221 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7222 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7223 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7224 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7225 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7226 if (target)
7227 return target;
7228 break;
7230 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7231 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7232 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7233 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7234 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7235 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7236 expand_builtin_sync_lock_release (mode, exp);
7237 return const0_rtx;
7239 case BUILT_IN_SYNC_SYNCHRONIZE:
7240 expand_builtin_sync_synchronize ();
7241 return const0_rtx;
7243 case BUILT_IN_ATOMIC_EXCHANGE_1:
7244 case BUILT_IN_ATOMIC_EXCHANGE_2:
7245 case BUILT_IN_ATOMIC_EXCHANGE_4:
7246 case BUILT_IN_ATOMIC_EXCHANGE_8:
7247 case BUILT_IN_ATOMIC_EXCHANGE_16:
7248 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7249 target = expand_builtin_atomic_exchange (mode, exp, target);
7250 if (target)
7251 return target;
7252 break;
7254 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7255 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7256 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7257 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7258 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7260 unsigned int nargs, z;
7261 vec<tree, va_gc> *vec;
7263 mode =
7264 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7265 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7266 if (target)
7267 return target;
7269 /* If this is turned into an external library call, the weak parameter
7270 must be dropped to match the expected parameter list. */
7271 nargs = call_expr_nargs (exp);
7272 vec_alloc (vec, nargs - 1);
7273 for (z = 0; z < 3; z++)
7274 vec->quick_push (CALL_EXPR_ARG (exp, z));
7275 /* Skip the boolean weak parameter. */
7276 for (z = 4; z < 6; z++)
7277 vec->quick_push (CALL_EXPR_ARG (exp, z));
7278 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7279 break;
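/* For illustration (argument positions taken from the loops above):
   the builtin's operands are
     0: ptr  1: expected  2: desired  3: weak  4: success  5: failure
   while the out-of-line __atomic_compare_exchange library routine
   takes no "weak" argument, so indices 0-2 and 4-5 are kept and
   index 3 is dropped before falling through to the normal call
   expansion at the end of this function.  */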
7282 case BUILT_IN_ATOMIC_LOAD_1:
7283 case BUILT_IN_ATOMIC_LOAD_2:
7284 case BUILT_IN_ATOMIC_LOAD_4:
7285 case BUILT_IN_ATOMIC_LOAD_8:
7286 case BUILT_IN_ATOMIC_LOAD_16:
7287 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7288 target = expand_builtin_atomic_load (mode, exp, target);
7289 if (target)
7290 return target;
7291 break;
7293 case BUILT_IN_ATOMIC_STORE_1:
7294 case BUILT_IN_ATOMIC_STORE_2:
7295 case BUILT_IN_ATOMIC_STORE_4:
7296 case BUILT_IN_ATOMIC_STORE_8:
7297 case BUILT_IN_ATOMIC_STORE_16:
7298 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7299 target = expand_builtin_atomic_store (mode, exp);
7300 if (target)
7301 return const0_rtx;
7302 break;
7304 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7305 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7306 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7307 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7308 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7310 enum built_in_function lib;
7311 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7312 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7313 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7314 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7315 ignore, lib);
7316 if (target)
7317 return target;
7318 break;
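/* A sketch of the fallback encoded by LIB above: when the target
   cannot expand the "op then fetch" form directly,
   expand_builtin_atomic_fetch_op may emit the "fetch then op" form
   and recompute the after-value, or emit a call to the library
   function named by LIB, conceptually

     __atomic_add_fetch_4 (p, v, order)
       ~~>  __atomic_fetch_add_4 (p, v, order) + v

   The consecutive enum layout makes LIB a simple offset away.  */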
7320 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7321 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7322 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7323 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7324 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7326 enum built_in_function lib;
7327 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7328 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7329 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7330 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7331 ignore, lib);
7332 if (target)
7333 return target;
7334 break;
7336 case BUILT_IN_ATOMIC_AND_FETCH_1:
7337 case BUILT_IN_ATOMIC_AND_FETCH_2:
7338 case BUILT_IN_ATOMIC_AND_FETCH_4:
7339 case BUILT_IN_ATOMIC_AND_FETCH_8:
7340 case BUILT_IN_ATOMIC_AND_FETCH_16:
7342 enum built_in_function lib;
7343 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7344 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7345 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7346 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7347 ignore, lib);
7348 if (target)
7349 return target;
7350 break;
7352 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7353 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7354 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7355 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7356 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7358 enum built_in_function lib;
7359 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7360 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7361 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7362 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7363 ignore, lib);
7364 if (target)
7365 return target;
7366 break;
7368 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7369 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7370 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7371 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7372 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7374 enum built_in_function lib;
7375 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7376 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7377 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7378 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7379 ignore, lib);
7380 if (target)
7381 return target;
7382 break;
7384 case BUILT_IN_ATOMIC_OR_FETCH_1:
7385 case BUILT_IN_ATOMIC_OR_FETCH_2:
7386 case BUILT_IN_ATOMIC_OR_FETCH_4:
7387 case BUILT_IN_ATOMIC_OR_FETCH_8:
7388 case BUILT_IN_ATOMIC_OR_FETCH_16:
7390 enum built_in_function lib;
7391 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7392 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7393 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7394 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7395 ignore, lib);
7396 if (target)
7397 return target;
7398 break;
7400 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7401 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7402 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7403 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7404 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7406 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7407 ignore, BUILT_IN_NONE);
7408 if (target)
7409 return target;
7410 break;
7412 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7413 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7414 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7415 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7416 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7417 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7418 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7419 ignore, BUILT_IN_NONE);
7420 if (target)
7421 return target;
7422 break;
7424 case BUILT_IN_ATOMIC_FETCH_AND_1:
7425 case BUILT_IN_ATOMIC_FETCH_AND_2:
7426 case BUILT_IN_ATOMIC_FETCH_AND_4:
7427 case BUILT_IN_ATOMIC_FETCH_AND_8:
7428 case BUILT_IN_ATOMIC_FETCH_AND_16:
7429 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7430 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7431 ignore, BUILT_IN_NONE);
7432 if (target)
7433 return target;
7434 break;
7436 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7437 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7438 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7439 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7440 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7441 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7442 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7443 ignore, BUILT_IN_NONE);
7444 if (target)
7445 return target;
7446 break;
7448 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7449 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7450 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7451 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7452 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7453 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7454 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7455 ignore, BUILT_IN_NONE);
7456 if (target)
7457 return target;
7458 break;
7460 case BUILT_IN_ATOMIC_FETCH_OR_1:
7461 case BUILT_IN_ATOMIC_FETCH_OR_2:
7462 case BUILT_IN_ATOMIC_FETCH_OR_4:
7463 case BUILT_IN_ATOMIC_FETCH_OR_8:
7464 case BUILT_IN_ATOMIC_FETCH_OR_16:
7465 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7466 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7467 ignore, BUILT_IN_NONE);
7468 if (target)
7469 return target;
7470 break;
7472 case BUILT_IN_ATOMIC_TEST_AND_SET:
7473 return expand_builtin_atomic_test_and_set (exp, target);
7475 case BUILT_IN_ATOMIC_CLEAR:
7476 return expand_builtin_atomic_clear (exp);
7478 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7479 return expand_builtin_atomic_always_lock_free (exp);
7481 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7482 target = expand_builtin_atomic_is_lock_free (exp);
7483 if (target)
7484 return target;
7485 break;
7487 case BUILT_IN_ATOMIC_THREAD_FENCE:
7488 expand_builtin_atomic_thread_fence (exp);
7489 return const0_rtx;
7491 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7492 expand_builtin_atomic_signal_fence (exp);
7493 return const0_rtx;
7495 case BUILT_IN_OBJECT_SIZE:
7496 return expand_builtin_object_size (exp);
7498 case BUILT_IN_MEMCPY_CHK:
7499 case BUILT_IN_MEMPCPY_CHK:
7500 case BUILT_IN_MEMMOVE_CHK:
7501 case BUILT_IN_MEMSET_CHK:
7502 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7503 if (target)
7504 return target;
7505 break;
7507 case BUILT_IN_STRCPY_CHK:
7508 case BUILT_IN_STPCPY_CHK:
7509 case BUILT_IN_STRNCPY_CHK:
7510 case BUILT_IN_STPNCPY_CHK:
7511 case BUILT_IN_STRCAT_CHK:
7512 case BUILT_IN_STRNCAT_CHK:
7513 case BUILT_IN_SNPRINTF_CHK:
7514 case BUILT_IN_VSNPRINTF_CHK:
7515 maybe_emit_chk_warning (exp, fcode);
7516 break;
7518 case BUILT_IN_SPRINTF_CHK:
7519 case BUILT_IN_VSPRINTF_CHK:
7520 maybe_emit_sprintf_chk_warning (exp, fcode);
7521 break;
7523 case BUILT_IN_FREE:
7524 if (warn_free_nonheap_object)
7525 maybe_emit_free_warning (exp);
7526 break;
7528 case BUILT_IN_THREAD_POINTER:
7529 return expand_builtin_thread_pointer (exp, target);
7531 case BUILT_IN_SET_THREAD_POINTER:
7532 expand_builtin_set_thread_pointer (exp);
7533 return const0_rtx;
7535 case BUILT_IN_CILK_DETACH:
7536 expand_builtin_cilk_detach (exp);
7537 return const0_rtx;
7539 case BUILT_IN_CILK_POP_FRAME:
7540 expand_builtin_cilk_pop_frame (exp);
7541 return const0_rtx;
7543 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7544 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7545 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7546 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7547 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7548 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7549 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7550 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7551 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7552 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7553 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7554 /* We allow user CHKP builtins if Pointer Bounds
7555 Checker is off. */
7556 if (!chkp_function_instrumented_p (current_function_decl))
7558 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7559 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7560 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7561 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7562 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7563 return expand_normal (CALL_EXPR_ARG (exp, 0));
7564 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7565 return expand_normal (size_zero_node);
7566 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7567 return expand_normal (size_int (-1));
7568 else
7569 return const0_rtx;
7571 /* FALLTHROUGH */
7573 case BUILT_IN_CHKP_BNDMK:
7574 case BUILT_IN_CHKP_BNDSTX:
7575 case BUILT_IN_CHKP_BNDCL:
7576 case BUILT_IN_CHKP_BNDCU:
7577 case BUILT_IN_CHKP_BNDLDX:
7578 case BUILT_IN_CHKP_BNDRET:
7579 case BUILT_IN_CHKP_INTERSECT:
7580 case BUILT_IN_CHKP_NARROW:
7581 case BUILT_IN_CHKP_EXTRACT_LOWER:
7582 case BUILT_IN_CHKP_EXTRACT_UPPER:
7583 /* Software implementation of Pointer Bounds Checker is NYI.
7584 Target support is required. */
7585 error ("Your target platform does not support -fcheck-pointer-bounds");
7586 break;
7588 case BUILT_IN_ACC_ON_DEVICE:
7589 /* Fall back to a library call if we failed to expand the
7590 builtin when folding. */
7591 break;
7593 default: /* just do library call, if unknown builtin */
7594 break;
7597 /* The switch statement above can drop through to cause the function
7598 to be called normally. */
7599 return expand_call (exp, target, ignore);
7602 /* Similar to expand_builtin but is used for instrumented calls. */
7604 static rtx
7605 expand_builtin_with_bounds (tree exp, rtx target,
7606 rtx subtarget ATTRIBUTE_UNUSED,
7607 machine_mode mode, int ignore)
7609 tree fndecl = get_callee_fndecl (exp);
7610 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7612 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7614 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7615 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7617 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7618 && fcode < END_CHKP_BUILTINS);
7620 switch (fcode)
7622 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7623 target = expand_builtin_memcpy_with_bounds (exp, target);
7624 if (target)
7625 return target;
7626 break;
7628 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7629 target = expand_builtin_mempcpy_with_bounds (exp, target);
7630 if (target)
7631 return target;
7632 break;
7634 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7635 target = expand_builtin_memset_with_bounds (exp, target, mode);
7636 if (target)
7637 return target;
7638 break;
7640 default:
7641 break;
7644 /* The switch statement above can drop through to cause the function
7645 to be called normally. */
7646 return expand_call (exp, target, ignore);
7649 /* Determine whether a tree node represents a call to a built-in
7650 function. If the tree T is a call to a built-in function with
7651 the right number of arguments of the appropriate types, return
7652 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7653 Otherwise the return value is END_BUILTINS. */
7655 enum built_in_function
7656 builtin_mathfn_code (const_tree t)
7658 const_tree fndecl, arg, parmlist;
7659 const_tree argtype, parmtype;
7660 const_call_expr_arg_iterator iter;
7662 if (TREE_CODE (t) != CALL_EXPR
7663 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7664 return END_BUILTINS;
7666 fndecl = get_callee_fndecl (t);
7667 if (fndecl == NULL_TREE
7668 || TREE_CODE (fndecl) != FUNCTION_DECL
7669 || ! DECL_BUILT_IN (fndecl)
7670 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7671 return END_BUILTINS;
7673 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7674 init_const_call_expr_arg_iterator (t, &iter);
7675 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7677 /* If a function doesn't take a variable number of arguments,
7678 the last element in the list will have type `void'. */
7679 parmtype = TREE_VALUE (parmlist);
7680 if (VOID_TYPE_P (parmtype))
7682 if (more_const_call_expr_args_p (&iter))
7683 return END_BUILTINS;
7684 return DECL_FUNCTION_CODE (fndecl);
7687 if (! more_const_call_expr_args_p (&iter))
7688 return END_BUILTINS;
7690 arg = next_const_call_expr_arg (&iter);
7691 argtype = TREE_TYPE (arg);
7693 if (SCALAR_FLOAT_TYPE_P (parmtype))
7695 if (! SCALAR_FLOAT_TYPE_P (argtype))
7696 return END_BUILTINS;
7698 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7700 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7701 return END_BUILTINS;
7703 else if (POINTER_TYPE_P (parmtype))
7705 if (! POINTER_TYPE_P (argtype))
7706 return END_BUILTINS;
7708 else if (INTEGRAL_TYPE_P (parmtype))
7710 if (! INTEGRAL_TYPE_P (argtype))
7711 return END_BUILTINS;
7713 else
7714 return END_BUILTINS;
7717 /* Variable-length argument list. */
7718 return DECL_FUNCTION_CODE (fndecl);
7721 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7722 evaluate to a constant. */
7724 static tree
7725 fold_builtin_constant_p (tree arg)
7727 /* We return 1 for a numeric type that's known to be a constant
7728 value at compile-time or for an aggregate type that's a
7729 literal constant. */
7730 STRIP_NOPS (arg);
7732 /* If we know this is a constant, return the constant one. */
7733 if (CONSTANT_CLASS_P (arg)
7734 || (TREE_CODE (arg) == CONSTRUCTOR
7735 && TREE_CONSTANT (arg)))
7736 return integer_one_node;
7737 if (TREE_CODE (arg) == ADDR_EXPR)
7739 tree op = TREE_OPERAND (arg, 0);
7740 if (TREE_CODE (op) == STRING_CST
7741 || (TREE_CODE (op) == ARRAY_REF
7742 && integer_zerop (TREE_OPERAND (op, 1))
7743 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7744 return integer_one_node;
7747 /* If this expression has side effects, show we don't know it to be a
7748 constant. Likewise if it's a pointer or aggregate type since in
7749 those cases we only want literals, since those are only optimized
7750 when generating RTL, not later.
7751 And finally, if we are compiling an initializer, not code, we
7752 need to return a definite result now; there's not going to be any
7753 more optimization done. */
7754 if (TREE_SIDE_EFFECTS (arg)
7755 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7756 || POINTER_TYPE_P (TREE_TYPE (arg))
7757 || cfun == 0
7758 || folding_initializer
7759 || force_folding_builtin_constant_p)
7760 return integer_zero_node;
7762 return NULL_TREE;
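/* Illustrative examples of the cases handled above:
     __builtin_constant_p (3)      -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0  (side effects)
   Everything else stays unfolded (NULL_TREE) so that later
   optimizations still get a chance to prove the argument constant.  */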
7765 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7766 return it as a truthvalue. */
7768 static tree
7769 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7770 tree predictor)
7772 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7774 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7775 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7776 ret_type = TREE_TYPE (TREE_TYPE (fn));
7777 pred_type = TREE_VALUE (arg_types);
7778 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7780 pred = fold_convert_loc (loc, pred_type, pred);
7781 expected = fold_convert_loc (loc, expected_type, expected);
7782 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7783 predictor);
7785 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7786 build_int_cst (ret_type, 0));
7789 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7790 NULL_TREE if no simplification is possible. */
7792 tree
7793 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7795 tree inner, fndecl, inner_arg0;
7796 enum tree_code code;
7798 /* Distribute the expected value over short-circuiting operators.
7799 See through the cast from truthvalue_type_node to long. */
7800 inner_arg0 = arg0;
7801 while (CONVERT_EXPR_P (inner_arg0)
7802 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7803 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7804 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7806 /* If this is a builtin_expect within a builtin_expect keep the
7807 inner one. See through a comparison against a constant. It
7808 might have been added to create a truthvalue. */
7809 inner = inner_arg0;
7811 if (COMPARISON_CLASS_P (inner)
7812 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7813 inner = TREE_OPERAND (inner, 0);
7815 if (TREE_CODE (inner) == CALL_EXPR
7816 && (fndecl = get_callee_fndecl (inner))
7817 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7818 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7819 return arg0;
7821 inner = inner_arg0;
7822 code = TREE_CODE (inner);
7823 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7825 tree op0 = TREE_OPERAND (inner, 0);
7826 tree op1 = TREE_OPERAND (inner, 1);
7828 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7829 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7830 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7832 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7835 /* If the argument isn't invariant then there's nothing else we can do. */
7836 if (!TREE_CONSTANT (inner_arg0))
7837 return NULL_TREE;
7839 /* If we expect that a comparison against the argument will fold to
7840 a constant return the constant. In practice, this means a true
7841 constant or the address of a non-weak symbol. */
7842 inner = inner_arg0;
7843 STRIP_NOPS (inner);
7844 if (TREE_CODE (inner) == ADDR_EXPR)
7848 inner = TREE_OPERAND (inner, 0);
7850 while (TREE_CODE (inner) == COMPONENT_REF
7851 || TREE_CODE (inner) == ARRAY_REF);
7852 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7853 return NULL_TREE;
7856 /* Otherwise, ARG0 already has the proper type for the return value. */
7857 return arg0;
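/* A sketch of the short-circuit distribution above, written as C
   source for clarity (the transform really works on trees):

     __builtin_expect (a && b, 1)
       ~~>  (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit carries its own prediction.  */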
7860 /* Fold a call to __builtin_classify_type with argument ARG. */
7862 static tree
7863 fold_builtin_classify_type (tree arg)
7865 if (arg == 0)
7866 return build_int_cst (integer_type_node, no_type_class);
7868 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7871 /* Fold a call to __builtin_strlen with argument ARG. */
7873 static tree
7874 fold_builtin_strlen (location_t loc, tree type, tree arg)
7876 if (!validate_arg (arg, POINTER_TYPE))
7877 return NULL_TREE;
7878 else
7880 tree len = c_strlen (arg, 0);
7882 if (len)
7883 return fold_convert_loc (loc, type, len);
7885 return NULL_TREE;
7889 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7891 static tree
7892 fold_builtin_inf (location_t loc, tree type, int warn)
7894 REAL_VALUE_TYPE real;
7896 /* __builtin_inff is intended to be usable to define INFINITY on all
7897 targets. If an infinity is not available, INFINITY expands "to a
7898 positive constant of type float that overflows at translation
7899 time", footnote "In this case, using INFINITY will violate the
7900 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7901 Thus we pedwarn to ensure this constraint violation is
7902 diagnosed. */
7903 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7904 pedwarn (loc, 0, "target format does not support infinity");
7906 real_inf (&real);
7907 return build_real (type, real);
7910 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7911 NULL_TREE if no simplification can be made. */
7913 static tree
7914 fold_builtin_sincos (location_t loc,
7915 tree arg0, tree arg1, tree arg2)
7917 tree type;
7918 tree fndecl, call = NULL_TREE;
7920 if (!validate_arg (arg0, REAL_TYPE)
7921 || !validate_arg (arg1, POINTER_TYPE)
7922 || !validate_arg (arg2, POINTER_TYPE))
7923 return NULL_TREE;
7925 type = TREE_TYPE (arg0);
7927 /* Canonicalize sincos to cexpi. */
7928 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7929 if (fn == END_BUILTINS)
7930 return NULL_TREE;
7932 /* Calculate the result when the argument is a constant. */
7933 if (TREE_CODE (arg0) == REAL_CST)
7935 tree complex_type = build_complex_type (type);
7936 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7938 if (!call)
7940 if (!targetm.libc_has_function (function_c99_math_complex)
7941 || !builtin_decl_implicit_p (fn))
7942 return NULL_TREE;
7943 fndecl = builtin_decl_explicit (fn);
7944 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7945 call = builtin_save_expr (call);
7948 return build2 (COMPOUND_EXPR, void_type_node,
7949 build2 (MODIFY_EXPR, void_type_node,
7950 build_fold_indirect_ref_loc (loc, arg1),
7951 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7952 build2 (MODIFY_EXPR, void_type_node,
7953 build_fold_indirect_ref_loc (loc, arg2),
7954 fold_build1_loc (loc, REALPART_EXPR, type, call)));
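/* The rewrite relies on cexpi (x) == cos (x) + i * sin (x); roughly,
   in C terms (ignoring sequencing details):

     sincos (x, &s, &c)
       ~~>  tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   which is why ARG1 receives the IMAGPART and ARG2 the REALPART.  */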
7957 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7958 Return NULL_TREE if no simplification can be made. */
7960 static tree
7961 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7963 if (!validate_arg (arg1, POINTER_TYPE)
7964 || !validate_arg (arg2, POINTER_TYPE)
7965 || !validate_arg (len, INTEGER_TYPE))
7966 return NULL_TREE;
7968 /* If the LEN parameter is zero, return zero. */
7969 if (integer_zerop (len))
7970 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7971 arg1, arg2);
7973 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7974 if (operand_equal_p (arg1, arg2, 0))
7975 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7977 /* If the LEN parameter is one, return an expression corresponding to
7978 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7979 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7981 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7982 tree cst_uchar_ptr_node
7983 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7985 tree ind1
7986 = fold_convert_loc (loc, integer_type_node,
7987 build1 (INDIRECT_REF, cst_uchar_node,
7988 fold_convert_loc (loc,
7989 cst_uchar_ptr_node,
7990 arg1)));
7991 tree ind2
7992 = fold_convert_loc (loc, integer_type_node,
7993 build1 (INDIRECT_REF, cst_uchar_node,
7994 fold_convert_loc (loc,
7995 cst_uchar_ptr_node,
7996 arg2)));
7997 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8000 return NULL_TREE;
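/* Summary of the folds above, in source terms:
     memcmp (a, b, 0)  -> 0  (a and b still evaluated for effects)
     memcmp (a, a, n)  -> 0  (n still evaluated for effects)
     memcmp (a, b, 1)  -> *(const unsigned char *) a
                          - *(const unsigned char *) b  */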
8003 /* Fold a call to builtin isascii with argument ARG. */
8005 static tree
8006 fold_builtin_isascii (location_t loc, tree arg)
8008 if (!validate_arg (arg, INTEGER_TYPE))
8009 return NULL_TREE;
8010 else
8012 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8013 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8014 build_int_cst (integer_type_node,
8015 ~ (unsigned HOST_WIDE_INT) 0x7f));
8016 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8017 arg, integer_zero_node);
8021 /* Fold a call to builtin toascii with argument ARG. */
8023 static tree
8024 fold_builtin_toascii (location_t loc, tree arg)
8026 if (!validate_arg (arg, INTEGER_TYPE))
8027 return NULL_TREE;
8029 /* Transform toascii(c) -> (c & 0x7f). */
8030 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8031 build_int_cst (integer_type_node, 0x7f));
8034 /* Fold a call to builtin isdigit with argument ARG. */
8036 static tree
8037 fold_builtin_isdigit (location_t loc, tree arg)
8039 if (!validate_arg (arg, INTEGER_TYPE))
8040 return NULL_TREE;
8041 else
8043 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8044 /* According to the C standard, isdigit is unaffected by locale.
8045 However, it definitely is affected by the target character set. */
8046 unsigned HOST_WIDE_INT target_digit0
8047 = lang_hooks.to_target_charset ('0');
8049 if (target_digit0 == 0)
8050 return NULL_TREE;
8052 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8053 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8054 build_int_cst (unsigned_type_node, target_digit0));
8055 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8056 build_int_cst (unsigned_type_node, 9));
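/* The unsigned subtraction above folds two range checks into one:
   when c < '0' the value (unsigned) c - '0' wraps to a huge number,
   so the single <= 9 test rejects c < '0' as well as c > '9'.  This
   depends on '0'..'9' being contiguous, which the C standard
   guarantees for every execution character set.  */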
8060 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8062 static tree
8063 fold_builtin_fabs (location_t loc, tree arg, tree type)
8065 if (!validate_arg (arg, REAL_TYPE))
8066 return NULL_TREE;
8068 arg = fold_convert_loc (loc, type, arg);
8069 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8072 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8074 static tree
8075 fold_builtin_abs (location_t loc, tree arg, tree type)
8077 if (!validate_arg (arg, INTEGER_TYPE))
8078 return NULL_TREE;
8080 arg = fold_convert_loc (loc, type, arg);
8081 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8084 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8086 static tree
8087 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8089 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8090 if (validate_arg (arg0, REAL_TYPE)
8091 && validate_arg (arg1, REAL_TYPE)
8092 && validate_arg (arg2, REAL_TYPE)
8093 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8094 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8096 return NULL_TREE;
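/* The guard above means a * b + c is only rewritten to FMA_EXPR when
   the target has a fused instruction; expanding FMA_EXPR as a
   separate multiply and add would introduce an extra rounding step
   and change results.  */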
8099 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8101 static tree
8102 fold_builtin_carg (location_t loc, tree arg, tree type)
8104 if (validate_arg (arg, COMPLEX_TYPE)
8105 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8107 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8109 if (atan2_fn)
8111 tree new_arg = builtin_save_expr (arg);
8112 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8113 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8114 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8118 return NULL_TREE;
8121 /* Fold a call to builtin frexp, we can assume the base is 2. */
8123 static tree
8124 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8126 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8127 return NULL_TREE;
8129 STRIP_NOPS (arg0);
8131 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8132 return NULL_TREE;
8134 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8136 /* Proceed if a valid pointer type was passed in. */
8137 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8139 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8140 tree frac, exp;
8142 switch (value->cl)
8144 case rvc_zero:
8145 /* For +-0, return (*exp = 0, +-0). */
8146 exp = integer_zero_node;
8147 frac = arg0;
8148 break;
8149 case rvc_nan:
8150 case rvc_inf:
8151 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8152 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8153 case rvc_normal:
8155 /* Since the frexp function always expects base 2, and in
8156 GCC normalized significands are already in the range
8157 [0.5, 1.0), we have exactly what frexp wants. */
8158 REAL_VALUE_TYPE frac_rvt = *value;
8159 SET_REAL_EXP (&frac_rvt, 0);
8160 frac = build_real (rettype, frac_rvt);
8161 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8163 break;
8164 default:
8165 gcc_unreachable ();
8168 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8169 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8170 TREE_SIDE_EFFECTS (arg1) = 1;
8171 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8174 return NULL_TREE;
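/* A worked example of the rvc_normal case: 8.0 is represented with
   significand 0.5 and exponent 4 (8.0 == 0.5 * 2**4), so zeroing the
   exponent via SET_REAL_EXP yields the fraction 0.5 directly and
   REAL_EXP supplies the stored exponent; frexp (8.0, &e) therefore
   folds to (e = 4, 0.5).  */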
8177 /* Fold a call to builtin modf. */
8179 static tree
8180 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8182 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8183 return NULL_TREE;
8185 STRIP_NOPS (arg0);
8187 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8188 return NULL_TREE;
8190 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8192 /* Proceed if a valid pointer type was passed in. */
8193 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8195 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8196 REAL_VALUE_TYPE trunc, frac;
8198 switch (value->cl)
8200 case rvc_nan:
8201 case rvc_zero:
8202 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8203 trunc = frac = *value;
8204 break;
8205 case rvc_inf:
8206 /* For +-Inf, return (*arg1 = arg0, +-0). */
8207 frac = dconst0;
8208 frac.sign = value->sign;
8209 trunc = *value;
8210 break;
8211 case rvc_normal:
8212 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8213 real_trunc (&trunc, VOIDmode, value);
8214 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8215 /* If the original number was negative and already
8216 integral, then the fractional part is -0.0. */
8217 if (value->sign && frac.cl == rvc_zero)
8218 frac.sign = value->sign;
8219 break;
8222 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8223 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8224 build_real (rettype, trunc));
8225 TREE_SIDE_EFFECTS (arg1) = 1;
8226 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8227 build_real (rettype, frac));
8230 return NULL_TREE;
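/* Worked examples of the constant folding above:
     modf (3.25, &ip)  -> (ip = 3.0, 0.25)
     modf (-2.0, &ip)  -> (ip = -2.0, -0.0)  (sign copied onto the
                                              zero fractional part)
     modf (inf, &ip)   -> (ip = inf, 0.0 carrying inf's sign)  */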
8233 /* Given a location LOC, an interclass builtin function decl FNDECL
8234 and its single argument ARG, return a folded expression computing
8235 the same, or NULL_TREE if we either couldn't or didn't want to fold
8236 (the latter happens if there's an RTL instruction available). */
8238 static tree
8239 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8241 machine_mode mode;
8243 if (!validate_arg (arg, REAL_TYPE))
8244 return NULL_TREE;
8246 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8247 return NULL_TREE;
8249 mode = TYPE_MODE (TREE_TYPE (arg));
8251 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8253 /* If there is no optab, try generic code. */
8254 switch (DECL_FUNCTION_CODE (fndecl))
8256 tree result;
8258 CASE_FLT_FN (BUILT_IN_ISINF):
8260 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8261 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8262 tree type = TREE_TYPE (arg);
8263 REAL_VALUE_TYPE r;
8264 char buf[128];
8266 if (is_ibm_extended)
8268 /* NaN and Inf are encoded in the high-order double value
8269 only. The low-order value is not significant. */
8270 type = double_type_node;
8271 mode = DFmode;
8272 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8274 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8275 real_from_string (&r, buf);
8276 result = build_call_expr (isgr_fn, 2,
8277 fold_build1_loc (loc, ABS_EXPR, type, arg),
8278 build_real (type, r));
8279 return result;
8281 CASE_FLT_FN (BUILT_IN_FINITE):
8282 case BUILT_IN_ISFINITE:
8284 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8285 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8286 tree type = TREE_TYPE (arg);
8287 REAL_VALUE_TYPE r;
8288 char buf[128];
8290 if (is_ibm_extended)
8292 /* NaN and Inf are encoded in the high-order double value
8293 only. The low-order value is not significant. */
8294 type = double_type_node;
8295 mode = DFmode;
8296 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8298 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8299 real_from_string (&r, buf);
8300 result = build_call_expr (isle_fn, 2,
8301 fold_build1_loc (loc, ABS_EXPR, type, arg),
8302 build_real (type, r));
8303 /*result = fold_build2_loc (loc, UNGT_EXPR,
8304 TREE_TYPE (TREE_TYPE (fndecl)),
8305 fold_build1_loc (loc, ABS_EXPR, type, arg),
8306 build_real (type, r));
8307 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8308 TREE_TYPE (TREE_TYPE (fndecl)),
8309 result);*/
8310 return result;
8312 case BUILT_IN_ISNORMAL:
8314 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8315 islessequal(fabs(x),DBL_MAX). */
8316 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8317 tree type = TREE_TYPE (arg);
8318 tree orig_arg, max_exp, min_exp;
8319 machine_mode orig_mode = mode;
8320 REAL_VALUE_TYPE rmax, rmin;
8321 char buf[128];
8323 orig_arg = arg = builtin_save_expr (arg);
8324 if (is_ibm_extended)
8326 /* Use double to test the normal range of IBM extended
8327 precision. Emin for IBM extended precision is
8328 different to emin for IEEE double, being 53 higher
8329 since the low double exponent is at least 53 lower
8330 than the high double exponent. */
8331 type = double_type_node;
8332 mode = DFmode;
8333 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8335 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8337 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8338 real_from_string (&rmax, buf);
8339 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8340 real_from_string (&rmin, buf);
8341 max_exp = build_real (type, rmax);
8342 min_exp = build_real (type, rmin);
8344 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8345 if (is_ibm_extended)
8347 /* Testing the high end of the range is done just using
8348 the high double, using the same test as isfinite().
8349 For the subnormal end of the range we first test the
8350 high double, then if its magnitude is equal to the
8351 limit of 0x1p-969, we test whether the low double is
8352 non-zero and opposite sign to the high double. */
8353 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8354 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8355 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8356 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8357 arg, min_exp);
8358 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8359 complex_double_type_node, orig_arg);
8360 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8361 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8362 tree zero = build_real (type, dconst0);
8363 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8364 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8365 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8366 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8367 fold_build3 (COND_EXPR,
8368 integer_type_node,
8369 hilt, logt, lolt));
8370 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8371 eq_min, ok_lo);
8372 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8373 gt_min, eq_min);
8375 else
8377 tree const isge_fn
8378 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8379 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8381 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8382 max_exp, min_exp);
8383 return result;
8385 default:
8386 break;
8389 return NULL_TREE;
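/* The generic expansions above compare against the largest finite
   value of the type, produced by get_max_float as a string and read
   back with real_from_string; conceptually

     isinf (x)     ~~>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ~~>  islessequal (fabs (x), DBL_MAX)

   The isgreater/islessequal forms are quiet comparisons, so a NaN
   argument simply compares false instead of raising FE_INVALID.  */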
8392 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8393 ARG is the argument for the call. */
8395 static tree
8396 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8398 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8400 if (!validate_arg (arg, REAL_TYPE))
8401 return NULL_TREE;
8403 switch (builtin_index)
8405 case BUILT_IN_ISINF:
8406 if (!HONOR_INFINITIES (arg))
8407 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8409 return NULL_TREE;
8411 case BUILT_IN_ISINF_SIGN:
8413 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8414 /* In a boolean context, GCC will fold the inner COND_EXPR to
8415 1. So e.g. "if (isinf_sign(x))" would be folded to just
8416 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8417 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8418 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8419 tree tmp = NULL_TREE;
8421 arg = builtin_save_expr (arg);
8423 if (signbit_fn && isinf_fn)
8425 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8426 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8428 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8429 signbit_call, integer_zero_node);
8430 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8431 isinf_call, integer_zero_node);
8433 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8434 integer_minus_one_node, integer_one_node);
8435 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8436 isinf_call, tmp,
8437 integer_zero_node);
8440 return tmp;
8443 case BUILT_IN_ISFINITE:
8444 if (!HONOR_NANS (arg)
8445 && !HONOR_INFINITIES (arg))
8446 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8448 return NULL_TREE;
8450 case BUILT_IN_ISNAN:
8451 if (!HONOR_NANS (arg))
8452 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8455 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8456 if (is_ibm_extended)
8458 /* NaN and Inf are encoded in the high-order double value
8459 only. The low-order value is not significant. */
8460 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8463 arg = builtin_save_expr (arg);
8464 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8466 default:
8467 gcc_unreachable ();
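/* The UNORDERED_EXPR built above exploits the IEEE rule that a NaN
   is unordered with everything, itself included: it is true exactly
   when the argument is a NaN, like x != x in C, and
   builtin_save_expr keeps ARG from being evaluated twice.  */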
8471 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8472 This builtin will generate code to return the appropriate floating
8473 point classification depending on the value of the floating point
8474 number passed in. The possible return values must be supplied as
8475 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8476 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8477 one floating point argument which is "type generic". */
8479 static tree
8480 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8482 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8483 arg, type, res, tmp;
8484 machine_mode mode;
8485 REAL_VALUE_TYPE r;
8486 char buf[128];
8488 /* Verify the required arguments in the original call. */
8489 if (nargs != 6
8490 || !validate_arg (args[0], INTEGER_TYPE)
8491 || !validate_arg (args[1], INTEGER_TYPE)
8492 || !validate_arg (args[2], INTEGER_TYPE)
8493 || !validate_arg (args[3], INTEGER_TYPE)
8494 || !validate_arg (args[4], INTEGER_TYPE)
8495 || !validate_arg (args[5], REAL_TYPE))
8496 return NULL_TREE;
8498 fp_nan = args[0];
8499 fp_infinite = args[1];
8500 fp_normal = args[2];
8501 fp_subnormal = args[3];
8502 fp_zero = args[4];
8503 arg = args[5];
8504 type = TREE_TYPE (arg);
8505 mode = TYPE_MODE (type);
8506 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8508 /* fpclassify(x) ->
8509 isnan(x) ? FP_NAN :
8510 (fabs(x) == Inf ? FP_INFINITE :
8511 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8512 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8514 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8515 build_real (type, dconst0));
8516 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8517 tmp, fp_zero, fp_subnormal);
8519 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8520 real_from_string (&r, buf);
8521 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8522 arg, build_real (type, r));
8523 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8525 if (HONOR_INFINITIES (mode))
8527 real_inf (&r);
8528 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8529 build_real (type, r));
8530 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8531 fp_infinite, res);
8534 if (HONOR_NANS (mode))
8536 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8537 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8540 return res;
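/* Note on construction order: the COND_EXPR chain above is built
   inside out, starting from the zero/subnormal test and wrapping the
   normal, infinite and NaN tests around it, so the tests for classes
   the mode cannot produce (no infinities, no NaNs) are simply never
   added.  */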
8543 /* Fold a call to an unordered comparison function such as
8544 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8545 being called and ARG0 and ARG1 are the arguments for the call.
8546 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8547 the opposite of the desired result. UNORDERED_CODE is used
8548 for modes that can hold NaNs and ORDERED_CODE is used for
8549 the rest. */
8551 static tree
8552 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8553 enum tree_code unordered_code,
8554 enum tree_code ordered_code)
8556 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8557 enum tree_code code;
8558 tree type0, type1;
8559 enum tree_code code0, code1;
8560 tree cmp_type = NULL_TREE;
8562 type0 = TREE_TYPE (arg0);
8563 type1 = TREE_TYPE (arg1);
8565 code0 = TREE_CODE (type0);
8566 code1 = TREE_CODE (type1);
8568 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8569 /* Choose the wider of two real types. */
8570 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8571 ? type0 : type1;
8572 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8573 cmp_type = type0;
8574 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8575 cmp_type = type1;
8577 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8578 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8580 if (unordered_code == UNORDERED_EXPR)
8582 if (!HONOR_NANS (arg0))
8583 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8584 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8587 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8588 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8589 fold_build2_loc (loc, code, type, arg0, arg1));
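/* Sketch of the inversion scheme above: each type-generic macro is
   implemented as the negation of the opposite, unordered-aware
   comparison, e.g.

     isgreater (x, y)  ~~>  !(x UNLE y)

   UNLE ("unordered or less-or-equal") is true exactly when x > y is
   false under IEEE semantics, and unlike a plain >, the UN* codes do
   not raise FE_INVALID on quiet NaN operands.  */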
8592 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8593 arithmetic if it can never overflow, or into internal functions that
8594 return both the result of the arithmetic and an overflow flag in
8595 a complex integer result, or some other check for overflow.
8596 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8597 checking part of that. */
8599 static tree
8600 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8601 tree arg0, tree arg1, tree arg2)
8603 enum internal_fn ifn = IFN_LAST;
8604 /* The code of the expression corresponding to the type-generic
8605 built-in, or ERROR_MARK for the type-specific ones. */
8606 enum tree_code opcode = ERROR_MARK;
8607 bool ovf_only = false;
8609 switch (fcode)
8611 case BUILT_IN_ADD_OVERFLOW_P:
8612 ovf_only = true;
8613 /* FALLTHRU */
8614 case BUILT_IN_ADD_OVERFLOW:
8615 opcode = PLUS_EXPR;
8616 /* FALLTHRU */
8617 case BUILT_IN_SADD_OVERFLOW:
8618 case BUILT_IN_SADDL_OVERFLOW:
8619 case BUILT_IN_SADDLL_OVERFLOW:
8620 case BUILT_IN_UADD_OVERFLOW:
8621 case BUILT_IN_UADDL_OVERFLOW:
8622 case BUILT_IN_UADDLL_OVERFLOW:
8623 ifn = IFN_ADD_OVERFLOW;
8624 break;
8625 case BUILT_IN_SUB_OVERFLOW_P:
8626 ovf_only = true;
8627 /* FALLTHRU */
8628 case BUILT_IN_SUB_OVERFLOW:
8629 opcode = MINUS_EXPR;
8630 /* FALLTHRU */
8631 case BUILT_IN_SSUB_OVERFLOW:
8632 case BUILT_IN_SSUBL_OVERFLOW:
8633 case BUILT_IN_SSUBLL_OVERFLOW:
8634 case BUILT_IN_USUB_OVERFLOW:
8635 case BUILT_IN_USUBL_OVERFLOW:
8636 case BUILT_IN_USUBLL_OVERFLOW:
8637 ifn = IFN_SUB_OVERFLOW;
8638 break;
8639 case BUILT_IN_MUL_OVERFLOW_P:
8640 ovf_only = true;
8641 /* FALLTHRU */
8642 case BUILT_IN_MUL_OVERFLOW:
8643 opcode = MULT_EXPR;
8644 /* FALLTHRU */
8645 case BUILT_IN_SMUL_OVERFLOW:
8646 case BUILT_IN_SMULL_OVERFLOW:
8647 case BUILT_IN_SMULLL_OVERFLOW:
8648 case BUILT_IN_UMUL_OVERFLOW:
8649 case BUILT_IN_UMULL_OVERFLOW:
8650 case BUILT_IN_UMULLL_OVERFLOW:
8651 ifn = IFN_MUL_OVERFLOW;
8652 break;
8653 default:
8654 gcc_unreachable ();
8657 /* For the "generic" overloads, the first two arguments can have different
8658 types and the last argument determines the target type to use to check
8659 for overflow. The arguments of the other overloads all have the same
8660 type. */
8661 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8663 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8664 arguments are constant, attempt to fold the built-in call into a constant
8665 expression indicating whether or not it detected an overflow. */
8666 if (ovf_only
8667 && TREE_CODE (arg0) == INTEGER_CST
8668 && TREE_CODE (arg1) == INTEGER_CST)
8669 /* Perform the computation in the target type and check for overflow. */
8670 return omit_one_operand_loc (loc, boolean_type_node,
8671 arith_overflowed_p (opcode, type, arg0, arg1)
8672 ? boolean_true_node : boolean_false_node,
8673 arg2);
8675 tree ctype = build_complex_type (type);
8676 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8677 2, arg0, arg1);
8678 tree tgt = save_expr (call);
8679 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8680 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8681 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8683 if (ovf_only)
8684 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8686 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8687 tree store
8688 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8689 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
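/* Shape of the folded result in C-like pseudocode (.ADD_OVERFLOW is
   GCC's internal function, not user-callable):

     __builtin_add_overflow (a, b, &r)
       ~~>  tmp = .ADD_OVERFLOW (a, b);
            r = __real__ tmp, ovf = (_Bool) __imag__ tmp;

   For the _overflow_p variants with constant operands the whole call
   folds to a compile-time constant, e.g.
   __builtin_add_overflow_p (INT_MAX, 1, (int) 0) -> true.  */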
8692 /* Fold a call to __builtin_FILE to a constant string. */
8694 static inline tree
8695 fold_builtin_FILE (location_t loc)
8697 if (const char *fname = LOCATION_FILE (loc))
8698 return build_string_literal (strlen (fname) + 1, fname);
8700 return build_string_literal (1, "");
8703 /* Fold a call to __builtin_FUNCTION to a constant string. */
8705 static inline tree
8706 fold_builtin_FUNCTION ()
8708 const char *name = "";
8710 if (current_function_decl)
8711 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8713 return build_string_literal (strlen (name) + 1, name);
8716 /* Fold a call to __builtin_LINE to an integer constant. */
8718 static inline tree
8719 fold_builtin_LINE (location_t loc, tree type)
8721 return build_int_cst (type, LOCATION_LINE (loc));
8724 /* Fold a call to built-in function FNDECL with 0 arguments.
8725 This function returns NULL_TREE if no simplification was possible. */
8727 static tree
8728 fold_builtin_0 (location_t loc, tree fndecl)
8730 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8731 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8732 switch (fcode)
8734 case BUILT_IN_FILE:
8735 return fold_builtin_FILE (loc);
8737 case BUILT_IN_FUNCTION:
8738 return fold_builtin_FUNCTION ();
8740 case BUILT_IN_LINE:
8741 return fold_builtin_LINE (loc, type);
8743 CASE_FLT_FN (BUILT_IN_INF):
8744 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8745 case BUILT_IN_INFD32:
8746 case BUILT_IN_INFD64:
8747 case BUILT_IN_INFD128:
8748 return fold_builtin_inf (loc, type, true);
8750 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8751 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8752 return fold_builtin_inf (loc, type, false);
8754 case BUILT_IN_CLASSIFY_TYPE:
8755 return fold_builtin_classify_type (NULL_TREE);
8757 default:
8758 break;
8760 return NULL_TREE;
8763 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8764 This function returns NULL_TREE if no simplification was possible. */
8766 static tree
8767 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8769 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8772 if (TREE_CODE (arg0) == ERROR_MARK)
8773 return NULL_TREE;
8775 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8776 return ret;
8778 switch (fcode)
8780 case BUILT_IN_CONSTANT_P:
8782 tree val = fold_builtin_constant_p (arg0);
8784 /* Gimplification will pull the CALL_EXPR for the builtin out of
8785 an if condition. When not optimizing, we'll not CSE it back.
8786 To avoid regressions that show up as link errors, return false now. */
8787 if (!val && !optimize)
8788 val = integer_zero_node;
8790 return val;
8793 case BUILT_IN_CLASSIFY_TYPE:
8794 return fold_builtin_classify_type (arg0);
8796 case BUILT_IN_STRLEN:
8797 return fold_builtin_strlen (loc, type, arg0);
8799 CASE_FLT_FN (BUILT_IN_FABS):
8800 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8801 case BUILT_IN_FABSD32:
8802 case BUILT_IN_FABSD64:
8803 case BUILT_IN_FABSD128:
8804 return fold_builtin_fabs (loc, arg0, type);
8806 case BUILT_IN_ABS:
8807 case BUILT_IN_LABS:
8808 case BUILT_IN_LLABS:
8809 case BUILT_IN_IMAXABS:
8810 return fold_builtin_abs (loc, arg0, type);
8812 CASE_FLT_FN (BUILT_IN_CONJ):
8813 if (validate_arg (arg0, COMPLEX_TYPE)
8814 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8815 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8816 break;
8818 CASE_FLT_FN (BUILT_IN_CREAL):
8819 if (validate_arg (arg0, COMPLEX_TYPE)
8820 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8821 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8822 break;
8824 CASE_FLT_FN (BUILT_IN_CIMAG):
8825 if (validate_arg (arg0, COMPLEX_TYPE)
8826 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8827 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8828 break;
8830 CASE_FLT_FN (BUILT_IN_CARG):
8831 return fold_builtin_carg (loc, arg0, type);
8833 case BUILT_IN_ISASCII:
8834 return fold_builtin_isascii (loc, arg0);
8836 case BUILT_IN_TOASCII:
8837 return fold_builtin_toascii (loc, arg0);
8839 case BUILT_IN_ISDIGIT:
8840 return fold_builtin_isdigit (loc, arg0);
8842 CASE_FLT_FN (BUILT_IN_FINITE):
8843 case BUILT_IN_FINITED32:
8844 case BUILT_IN_FINITED64:
8845 case BUILT_IN_FINITED128:
8846 case BUILT_IN_ISFINITE:
8848 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8849 if (ret)
8850 return ret;
8851 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8854 CASE_FLT_FN (BUILT_IN_ISINF):
8855 case BUILT_IN_ISINFD32:
8856 case BUILT_IN_ISINFD64:
8857 case BUILT_IN_ISINFD128:
8859 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8860 if (ret)
8861 return ret;
8862 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8865 case BUILT_IN_ISNORMAL:
8866 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8868 case BUILT_IN_ISINF_SIGN:
8869 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8871 CASE_FLT_FN (BUILT_IN_ISNAN):
8872 case BUILT_IN_ISNAND32:
8873 case BUILT_IN_ISNAND64:
8874 case BUILT_IN_ISNAND128:
8875 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8877 case BUILT_IN_FREE:
8878 if (integer_zerop (arg0))
8879 return build_empty_stmt (loc);
8880 break;
8882 default:
8883 break;
8886 return NULL_TREE;
8890 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8891 This function returns NULL_TREE if no simplification was possible. */
8893 static tree
8894 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8896 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8897 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8899 if (TREE_CODE (arg0) == ERROR_MARK
8900 || TREE_CODE (arg1) == ERROR_MARK)
8901 return NULL_TREE;
8903 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8904 return ret;
8906 switch (fcode)
8908 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8909 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8910 if (validate_arg (arg0, REAL_TYPE)
8911 && validate_arg (arg1, POINTER_TYPE))
8912 return do_mpfr_lgamma_r (arg0, arg1, type);
8913 break;
8915 CASE_FLT_FN (BUILT_IN_FREXP):
8916 return fold_builtin_frexp (loc, arg0, arg1, type);
8918 CASE_FLT_FN (BUILT_IN_MODF):
8919 return fold_builtin_modf (loc, arg0, arg1, type);
8921 case BUILT_IN_STRSPN:
8922 return fold_builtin_strspn (loc, arg0, arg1);
8924 case BUILT_IN_STRCSPN:
8925 return fold_builtin_strcspn (loc, arg0, arg1);
8927 case BUILT_IN_STRPBRK:
8928 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8930 case BUILT_IN_EXPECT:
8931 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8933 case BUILT_IN_ISGREATER:
8934 return fold_builtin_unordered_cmp (loc, fndecl,
8935 arg0, arg1, UNLE_EXPR, LE_EXPR);
8936 case BUILT_IN_ISGREATEREQUAL:
8937 return fold_builtin_unordered_cmp (loc, fndecl,
8938 arg0, arg1, UNLT_EXPR, LT_EXPR);
8939 case BUILT_IN_ISLESS:
8940 return fold_builtin_unordered_cmp (loc, fndecl,
8941 arg0, arg1, UNGE_EXPR, GE_EXPR);
8942 case BUILT_IN_ISLESSEQUAL:
8943 return fold_builtin_unordered_cmp (loc, fndecl,
8944 arg0, arg1, UNGT_EXPR, GT_EXPR);
8945 case BUILT_IN_ISLESSGREATER:
8946 return fold_builtin_unordered_cmp (loc, fndecl,
8947 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8948 case BUILT_IN_ISUNORDERED:
8949 return fold_builtin_unordered_cmp (loc, fndecl,
8950 arg0, arg1, UNORDERED_EXPR,
8951 NOP_EXPR);
8953 /* We do the folding for va_start in the expander. */
8954 case BUILT_IN_VA_START:
8955 break;
8957 case BUILT_IN_OBJECT_SIZE:
8958 return fold_builtin_object_size (arg0, arg1);
8960 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8961 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8963 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8964 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8966 default:
8967 break;
8969 return NULL_TREE;
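/* Illustrative aside, not part of the original source: the unordered
   comparison folding above relies on the C99 identity that, e.g.,
   isgreater (x, y) is equivalent to !(x <= y) computed without raising
   an invalid-operand exception on quiet NaN.  A minimal standalone
   sketch:  */

#include <math.h>
#include <assert.h>

static void
unordered_cmp_demo (void)
{
  double nan = NAN;
  assert (!__builtin_isgreater (1.0, nan));   /* unordered => false */
  assert (__builtin_isunordered (1.0, nan));  /* unordered => true */
  assert (__builtin_isgreater (2.0, 1.0));    /* ordered and greater */
}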
8972 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8973 and ARG2.
8974 This function returns NULL_TREE if no simplification was possible. */
8976 static tree
8977 fold_builtin_3 (location_t loc, tree fndecl,
8978 tree arg0, tree arg1, tree arg2)
8980 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8981 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8983 if (TREE_CODE (arg0) == ERROR_MARK
8984 || TREE_CODE (arg1) == ERROR_MARK
8985 || TREE_CODE (arg2) == ERROR_MARK)
8986 return NULL_TREE;
8988 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8989 arg0, arg1, arg2))
8990 return ret;
8992 switch (fcode)
8995 CASE_FLT_FN (BUILT_IN_SINCOS):
8996 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8998 CASE_FLT_FN (BUILT_IN_FMA):
8999 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9001 CASE_FLT_FN (BUILT_IN_REMQUO):
9002 if (validate_arg (arg0, REAL_TYPE)
9003 && validate_arg (arg1, REAL_TYPE)
9004 && validate_arg (arg2, POINTER_TYPE))
9005 return do_mpfr_remquo (arg0, arg1, arg2);
9006 break;
9008 case BUILT_IN_MEMCMP:
9009 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9011 case BUILT_IN_EXPECT:
9012 return fold_builtin_expect (loc, arg0, arg1, arg2);
9014 case BUILT_IN_ADD_OVERFLOW:
9015 case BUILT_IN_SUB_OVERFLOW:
9016 case BUILT_IN_MUL_OVERFLOW:
9017 case BUILT_IN_ADD_OVERFLOW_P:
9018 case BUILT_IN_SUB_OVERFLOW_P:
9019 case BUILT_IN_MUL_OVERFLOW_P:
9020 case BUILT_IN_SADD_OVERFLOW:
9021 case BUILT_IN_SADDL_OVERFLOW:
9022 case BUILT_IN_SADDLL_OVERFLOW:
9023 case BUILT_IN_SSUB_OVERFLOW:
9024 case BUILT_IN_SSUBL_OVERFLOW:
9025 case BUILT_IN_SSUBLL_OVERFLOW:
9026 case BUILT_IN_SMUL_OVERFLOW:
9027 case BUILT_IN_SMULL_OVERFLOW:
9028 case BUILT_IN_SMULLL_OVERFLOW:
9029 case BUILT_IN_UADD_OVERFLOW:
9030 case BUILT_IN_UADDL_OVERFLOW:
9031 case BUILT_IN_UADDLL_OVERFLOW:
9032 case BUILT_IN_USUB_OVERFLOW:
9033 case BUILT_IN_USUBL_OVERFLOW:
9034 case BUILT_IN_USUBLL_OVERFLOW:
9035 case BUILT_IN_UMUL_OVERFLOW:
9036 case BUILT_IN_UMULL_OVERFLOW:
9037 case BUILT_IN_UMULLL_OVERFLOW:
9038 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9040 default:
9041 break;
9043 return NULL_TREE;
9046 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9047 arguments. IGNORE is true if the result of the
9048 function call is ignored. This function returns NULL_TREE if no
9049 simplification was possible. */
9051 tree
9052 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9054 tree ret = NULL_TREE;
9056 switch (nargs)
9058 case 0:
9059 ret = fold_builtin_0 (loc, fndecl);
9060 break;
9061 case 1:
9062 ret = fold_builtin_1 (loc, fndecl, args[0]);
9063 break;
9064 case 2:
9065 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9066 break;
9067 case 3:
9068 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9069 break;
9070 default:
9071 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9072 break;
9074 if (ret)
9076 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9077 SET_EXPR_LOCATION (ret, loc);
9078 TREE_NO_WARNING (ret) = 1;
9079 return ret;
9081 return NULL_TREE;
9084 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9085 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9086 of arguments in ARGS to be omitted. OLDNARGS is the number of
9087 elements in ARGS. */
9089 static tree
9090 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9091 int skip, tree fndecl, int n, va_list newargs)
9093 int nargs = oldnargs - skip + n;
9094 tree *buffer;
9096 if (n > 0)
9098 int i, j;
9100 buffer = XALLOCAVEC (tree, nargs);
9101 for (i = 0; i < n; i++)
9102 buffer[i] = va_arg (newargs, tree);
9103 for (j = skip; j < oldnargs; j++, i++)
9104 buffer[i] = args[j];
9106 else
9107 buffer = args + skip;
9109 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9112 /* Return true if FNDECL shouldn't be folded right now.
9113 If a built-in function has an inline attribute always_inline
9114 wrapper, defer folding it after always_inline functions have
9115 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9116 might not be performed. */
9118 bool
9119 avoid_folding_inline_builtin (tree fndecl)
9121 return (DECL_DECLARED_INLINE_P (fndecl)
9122 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9123 && cfun
9124 && !cfun->always_inline_functions_inlined
9125 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9128 /* A wrapper function for builtin folding that prevents warnings for
9129 "statement without effect" and the like, caused by removing the
9130 call node earlier than the warning is generated. */
9132 tree
9133 fold_call_expr (location_t loc, tree exp, bool ignore)
9135 tree ret = NULL_TREE;
9136 tree fndecl = get_callee_fndecl (exp);
9137 if (fndecl
9138 && TREE_CODE (fndecl) == FUNCTION_DECL
9139 && DECL_BUILT_IN (fndecl)
9140 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9141 yet. Defer folding until we see all the arguments
9142 (after inlining). */
9143 && !CALL_EXPR_VA_ARG_PACK (exp))
9145 int nargs = call_expr_nargs (exp);
9147 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9148 instead last argument is __builtin_va_arg_pack (). Defer folding
9149 even in that case, until arguments are finalized. */
9150 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9152 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9153 if (fndecl2
9154 && TREE_CODE (fndecl2) == FUNCTION_DECL
9155 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9156 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9157 return NULL_TREE;
9160 if (avoid_folding_inline_builtin (fndecl))
9161 return NULL_TREE;
9163 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9164 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9165 CALL_EXPR_ARGP (exp), ignore);
9166 else
9168 tree *args = CALL_EXPR_ARGP (exp);
9169 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9170 if (ret)
9171 return ret;
9174 return NULL_TREE;
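/* Illustrative aside, not part of the original source: the kind of
   always_inline wrapper the two deferrals above protect.  Folding must
   wait until __builtin_va_arg_pack () has been replaced by the actual
   arguments during inlining; this is the pattern the _FORTIFY_SOURCE
   wrappers use.  my_sprintf is a hypothetical function name.  */

extern int my_sprintf (char *, const char *, ...);

extern __inline__ __attribute__ ((__always_inline__, __gnu_inline__)) int
my_sprintf_wrapper (char *buf, const char *fmt, ...)
{
  /* Stands for all the remaining arguments of the wrapper.  */
  return my_sprintf (buf, fmt, __builtin_va_arg_pack ());
}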
9177 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9178 N arguments are passed in the array ARGARRAY. Return a folded
9179 expression or NULL_TREE if no simplification was possible. */
9181 tree
9182 fold_builtin_call_array (location_t loc, tree,
9183 tree fn,
9184 int n,
9185 tree *argarray)
9187 if (TREE_CODE (fn) != ADDR_EXPR)
9188 return NULL_TREE;
9190 tree fndecl = TREE_OPERAND (fn, 0);
9191 if (TREE_CODE (fndecl) == FUNCTION_DECL
9192 && DECL_BUILT_IN (fndecl))
9194 /* If last argument is __builtin_va_arg_pack (), arguments to this
9195 function are not finalized yet. Defer folding until they are. */
9196 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9198 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9199 if (fndecl2
9200 && TREE_CODE (fndecl2) == FUNCTION_DECL
9201 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9202 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9203 return NULL_TREE;
9205 if (avoid_folding_inline_builtin (fndecl))
9206 return NULL_TREE;
9207 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9208 return targetm.fold_builtin (fndecl, n, argarray, false);
9209 else
9210 return fold_builtin_n (loc, fndecl, argarray, n, false);
9213 return NULL_TREE;
9216 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9217 along with N new arguments specified as the "..." parameters. SKIP
9218 is the number of arguments in EXP to be omitted. This function is used
9219 to do varargs-to-varargs transformations. */
9221 static tree
9222 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9224 va_list ap;
9225 tree t;
9227 va_start (ap, n);
9228 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9229 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9230 va_end (ap);
9232 return t;
9235 /* Validate a single argument ARG against a tree code CODE representing
9236 a type. Return true when argument is valid. */
9238 static bool
9239 validate_arg (const_tree arg, enum tree_code code)
9241 if (!arg)
9242 return false;
9243 else if (code == POINTER_TYPE)
9244 return POINTER_TYPE_P (TREE_TYPE (arg));
9245 else if (code == INTEGER_TYPE)
9246 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9247 return code == TREE_CODE (TREE_TYPE (arg));
9250 /* This function validates the types of a function call argument list
9251 against a specified list of tree_codes. If the last specifier is a 0,
9252 that represents an ellipsis, otherwise the last specifier must be a
9253 VOID_TYPE.
9255 This is the GIMPLE version of validate_arglist. Eventually we want to
9256 completely convert builtins.c to work from GIMPLEs and the tree based
9257 validate_arglist will then be removed. */
9259 bool
9260 validate_gimple_arglist (const gcall *call, ...)
9262 enum tree_code code;
9263 bool res = false;
9264 va_list ap;
9265 const_tree arg;
9266 size_t i;
9268 va_start (ap, call);
9269 i = 0;
9273 code = (enum tree_code) va_arg (ap, int);
9274 switch (code)
9276 case 0:
9277 /* This signifies an ellipsis; any further arguments are all ok. */
9278 res = true;
9279 goto end;
9280 case VOID_TYPE:
9281 /* This signifies an endlink, if no arguments remain, return
9282 true, otherwise return false. */
9283 res = (i == gimple_call_num_args (call));
9284 goto end;
9285 default:
9286 /* If no parameters remain or the parameter's code does not
9287 match the specified code, return false. Otherwise continue
9288 checking any remaining arguments. */
9289 arg = gimple_call_arg (call, i++);
9290 if (!validate_arg (arg, code))
9291 goto end;
9292 break;
9295 while (1);
9297 /* We need gotos here since we can only have one VA_CLOSE in a
9298 function. */
9299 end: ;
9300 va_end (ap);
9302 return res;
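/* Illustrative aside, not part of the original source: a typical caller
   checks a call's signature like

       if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				     VOID_TYPE))
	 return NULL_RTX;

   where the trailing VOID_TYPE demands that no further arguments remain,
   while a trailing 0 would instead accept any further arguments.  */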
9305 /* Default target-specific builtin expander that does nothing. */
9307 rtx
9308 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9309 rtx target ATTRIBUTE_UNUSED,
9310 rtx subtarget ATTRIBUTE_UNUSED,
9311 machine_mode mode ATTRIBUTE_UNUSED,
9312 int ignore ATTRIBUTE_UNUSED)
9314 return NULL_RTX;
9317 /* Returns true if EXP represents data that would potentially reside
9318 in a readonly section. */
9320 bool
9321 readonly_data_expr (tree exp)
9323 STRIP_NOPS (exp);
9325 if (TREE_CODE (exp) != ADDR_EXPR)
9326 return false;
9328 exp = get_base_address (TREE_OPERAND (exp, 0));
9329 if (!exp)
9330 return false;
9332 /* Make sure we call decl_readonly_section only for trees it
9333 can handle (since it returns true for everything it doesn't
9334 understand). */
9335 if (TREE_CODE (exp) == STRING_CST
9336 || TREE_CODE (exp) == CONSTRUCTOR
9337 || (VAR_P (exp) && TREE_STATIC (exp)))
9338 return decl_readonly_section (exp, 0);
9339 else
9340 return false;
9343 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9344 to the call, and TYPE is its return type.
9346 Return NULL_TREE if no simplification was possible, otherwise return the
9347 simplified form of the call as a tree.
9349 The simplified form may be a constant or other expression which
9350 computes the same value, but in a more efficient manner (including
9351 calls to other builtin functions).
9353 The call may contain arguments which need to be evaluated, but
9354 which are not useful to determine the result of the call. In
9355 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9356 COMPOUND_EXPR will be an argument which must be evaluated.
9357 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9358 COMPOUND_EXPR in the chain will contain the tree for the simplified
9359 form of the builtin function call. */
9361 static tree
9362 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9364 if (!validate_arg (s1, POINTER_TYPE)
9365 || !validate_arg (s2, POINTER_TYPE))
9366 return NULL_TREE;
9367 else
9369 tree fn;
9370 const char *p1, *p2;
9372 p2 = c_getstr (s2);
9373 if (p2 == NULL)
9374 return NULL_TREE;
9376 p1 = c_getstr (s1);
9377 if (p1 != NULL)
9379 const char *r = strpbrk (p1, p2);
9380 tree tem;
9382 if (r == NULL)
9383 return build_int_cst (TREE_TYPE (s1), 0);
9385 /* Return an offset into the constant string argument. */
9386 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9387 return fold_convert_loc (loc, type, tem);
9390 if (p2[0] == '\0')
9391 /* strpbrk(x, "") == NULL.
9392 Evaluate and ignore s1 in case it had side-effects. */
9393 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9395 if (p2[1] != '\0')
9396 return NULL_TREE; /* Really call strpbrk. */
9398 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9399 if (!fn)
9400 return NULL_TREE;
9402 /* New argument list transforming strpbrk(s1, s2) to
9403 strchr(s1, s2[0]). */
9404 return build_call_expr_loc (loc, fn, 2, s1,
9405 build_int_cst (integer_type_node, p2[0]));
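/* Illustrative aside, not part of the original source: the strpbrk
   identities used above, checked against the C library.  */

#include <string.h>
#include <assert.h>

static void
strpbrk_fold_demo (void)
{
  const char *s = "hello";
  assert (strpbrk (s, "") == NULL);             /* empty set => null */
  assert (strpbrk (s, "l") == strchr (s, 'l')); /* one char => strchr */
}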
9409 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9410 to the call.
9412 Return NULL_TREE if no simplification was possible, otherwise return the
9413 simplified form of the call as a tree.
9415 The simplified form may be a constant or other expression which
9416 computes the same value, but in a more efficient manner (including
9417 calls to other builtin functions).
9419 The call may contain arguments which need to be evaluated, but
9420 which are not useful to determine the result of the call. In
9421 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9422 COMPOUND_EXPR will be an argument which must be evaluated.
9423 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9424 COMPOUND_EXPR in the chain will contain the tree for the simplified
9425 form of the builtin function call. */
9427 static tree
9428 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9430 if (!validate_arg (s1, POINTER_TYPE)
9431 || !validate_arg (s2, POINTER_TYPE))
9432 return NULL_TREE;
9433 else
9435 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9437 /* If either argument is "", return NULL_TREE. */
9438 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9439 /* Evaluate and ignore both arguments in case either one has
9440 side-effects. */
9441 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9442 s1, s2);
9443 return NULL_TREE;
9447 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9448 to the call.
9450 Return NULL_TREE if no simplification was possible, otherwise return the
9451 simplified form of the call as a tree.
9453 The simplified form may be a constant or other expression which
9454 computes the same value, but in a more efficient manner (including
9455 calls to other builtin functions).
9457 The call may contain arguments which need to be evaluated, but
9458 which are not useful to determine the result of the call. In
9459 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9460 COMPOUND_EXPR will be an argument which must be evaluated.
9461 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9462 COMPOUND_EXPR in the chain will contain the tree for the simplified
9463 form of the builtin function call. */
9465 static tree
9466 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9468 if (!validate_arg (s1, POINTER_TYPE)
9469 || !validate_arg (s2, POINTER_TYPE))
9470 return NULL_TREE;
9471 else
9473 /* If the first argument is "", return NULL_TREE. */
9474 const char *p1 = c_getstr (s1);
9475 if (p1 && *p1 == '\0')
9477 /* Evaluate and ignore argument s2 in case it has
9478 side-effects. */
9479 return omit_one_operand_loc (loc, size_type_node,
9480 size_zero_node, s2);
9483 /* If the second argument is "", return __builtin_strlen(s1). */
9484 const char *p2 = c_getstr (s2);
9485 if (p2 && *p2 == '\0')
9487 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9489 /* If the replacement _DECL isn't initialized, don't do the
9490 transformation. */
9491 if (!fn)
9492 return NULL_TREE;
9494 return build_call_expr_loc (loc, fn, 1, s1);
9496 return NULL_TREE;
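/* Illustrative aside, not part of the original source: the empty-string
   identities the two folders above rely on, checked against the C
   library.  */

#include <string.h>
#include <assert.h>

static void
strspn_strcspn_fold_demo (void)
{
  const char *s = "abc123";
  assert (strspn ("", s) == 0);            /* empty subject => 0 */
  assert (strspn (s, "") == 0);            /* empty accept set => 0 */
  assert (strcspn ("", s) == 0);           /* empty subject => 0 */
  assert (strcspn (s, "") == strlen (s));  /* empty reject => strlen */
}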
9500 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9501 produced, false otherwise. This is done so that we don't output the error
9502 or warning twice or three times. */
9504 bool
9505 fold_builtin_next_arg (tree exp, bool va_start_p)
9507 tree fntype = TREE_TYPE (current_function_decl);
9508 int nargs = call_expr_nargs (exp);
9509 tree arg;
9510 /* There is a good chance the current input_location points inside the
9511 definition of the va_start macro (perhaps on the token for the
9512 builtin) in a system header, so warnings will not be emitted.
9513 Use the location in real source code. */
9514 source_location current_location =
9515 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9516 NULL);
9518 if (!stdarg_p (fntype))
9520 error ("%<va_start%> used in function with fixed args");
9521 return true;
9524 if (va_start_p)
9526 if (nargs != 2)
9528 error ("wrong number of arguments to function %<va_start%>");
9529 return true;
9531 arg = CALL_EXPR_ARG (exp, 1);
9533 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9534 when we checked the arguments and if needed issued a warning. */
9535 else
9537 if (nargs == 0)
9539 /* Evidently an out of date version of <stdarg.h>; can't validate
9540 va_start's second argument, but can still work as intended. */
9541 warning_at (current_location,
9542 OPT_Wvarargs,
9543 "%<__builtin_next_arg%> called without an argument");
9544 return true;
9546 else if (nargs > 1)
9548 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9549 return true;
9551 arg = CALL_EXPR_ARG (exp, 0);
9554 if (TREE_CODE (arg) == SSA_NAME)
9555 arg = SSA_NAME_VAR (arg);
9557 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9558 or __builtin_next_arg (0) the first time we see it, after checking
9559 the arguments and if needed issuing a warning. */
9560 if (!integer_zerop (arg))
9562 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9564 /* Strip off all nops for the sake of the comparison. This
9565 is not quite the same as STRIP_NOPS. It does more.
9566 We must also strip off INDIRECT_EXPR for C++ reference
9567 parameters. */
9568 while (CONVERT_EXPR_P (arg)
9569 || TREE_CODE (arg) == INDIRECT_REF)
9570 arg = TREE_OPERAND (arg, 0);
9571 if (arg != last_parm)
9573 /* FIXME: Sometimes the tree optimizers can hand us something
9574 that is not the last named argument even though the user did
9575 pass the last one. For now we just warn and leave the
9576 argument as is, so we may end up generating wrong code
9577 because of it. */
9578 warning_at (current_location,
9579 OPT_Wvarargs,
9580 "second parameter of %<va_start%> not last named argument");
9583 /* Undefined by C99 7.15.1.4p4 (va_start):
9584 "If the parameter parmN is declared with the register storage
9585 class, with a function or array type, or with a type that is
9586 not compatible with the type that results after application of
9587 the default argument promotions, the behavior is undefined."
9589 else if (DECL_REGISTER (arg))
9591 warning_at (current_location,
9592 OPT_Wvarargs,
9593 "undefined behavior when second parameter of "
9594 "%<va_start%> is declared with %<register%> storage");
9597 /* We want to verify the second parameter just once before the tree
9598 optimizers are run and then avoid keeping it in the tree,
9599 as otherwise we could warn even for correct code like:
9600 void foo (int i, ...)
9601 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9602 if (va_start_p)
9603 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9604 else
9605 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9607 return false;
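/* Illustrative aside, not part of the original source: the va_start
   shape the checks above accept without warning; the second argument
   must be the last named parameter.  */

#include <stdarg.h>

static int
va_start_demo (int count, ...)
{
  va_list ap;
  int i, sum = 0;
  va_start (ap, count);		/* COUNT is the last named parameter */
  for (i = 0; i < count; i++)
    sum += va_arg (ap, int);
  va_end (ap);
  return sum;
}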
9611 /* Expand a call EXP to __builtin_object_size. */
9613 static rtx
9614 expand_builtin_object_size (tree exp)
9616 tree ost;
9617 int object_size_type;
9618 tree fndecl = get_callee_fndecl (exp);
9620 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9622 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9623 exp, fndecl);
9624 expand_builtin_trap ();
9625 return const0_rtx;
9628 ost = CALL_EXPR_ARG (exp, 1);
9629 STRIP_NOPS (ost);
9631 if (TREE_CODE (ost) != INTEGER_CST
9632 || tree_int_cst_sgn (ost) < 0
9633 || compare_tree_int (ost, 3) > 0)
9635 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9636 exp, fndecl);
9637 expand_builtin_trap ();
9638 return const0_rtx;
9641 object_size_type = tree_to_shwi (ost);
9643 return object_size_type < 2 ? constm1_rtx : const0_rtx;
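/* Illustrative aside, not part of the original source: when the object
   size is unknown at expansion time, the constants returned above are
   exactly what the user-level builtin evaluates to.  */

#include <stddef.h>

static size_t
object_size_demo (void *p)
{
  /* P points to an object of unknown size, so: */
  size_t max = __builtin_object_size (p, 0);  /* (size_t) -1 */
  size_t min = __builtin_object_size (p, 2);  /* 0 */
  return max - min;
}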
9646 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9647 FCODE is the BUILT_IN_* to use.
9648 Return NULL_RTX if we failed; the caller should emit a normal call,
9649 otherwise try to get the result in TARGET, if convenient (and in
9650 mode MODE if that's convenient). */
9652 static rtx
9653 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9654 enum built_in_function fcode)
9656 tree dest, src, len, size;
9658 if (!validate_arglist (exp,
9659 POINTER_TYPE,
9660 fcode == BUILT_IN_MEMSET_CHK
9661 ? INTEGER_TYPE : POINTER_TYPE,
9662 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9663 return NULL_RTX;
9665 dest = CALL_EXPR_ARG (exp, 0);
9666 src = CALL_EXPR_ARG (exp, 1);
9667 len = CALL_EXPR_ARG (exp, 2);
9668 size = CALL_EXPR_ARG (exp, 3);
9670 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9671 exp, len, /*maxlen=*/NULL_TREE,
9672 /*str=*/NULL_TREE, size);
9674 if (!tree_fits_uhwi_p (size))
9675 return NULL_RTX;
9677 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9679 /* Avoid transforming the checking call to an ordinary one when
9680 an overflow has been detected or when the call couldn't be
9681 validated because the size is not constant. */
9682 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9683 return NULL_RTX;
9685 tree fn = NULL_TREE;
9686 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9687 mem{cpy,pcpy,move,set} is available. */
9688 switch (fcode)
9690 case BUILT_IN_MEMCPY_CHK:
9691 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9692 break;
9693 case BUILT_IN_MEMPCPY_CHK:
9694 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9695 break;
9696 case BUILT_IN_MEMMOVE_CHK:
9697 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9698 break;
9699 case BUILT_IN_MEMSET_CHK:
9700 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9701 break;
9702 default:
9703 break;
9706 if (! fn)
9707 return NULL_RTX;
9709 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9710 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9711 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9712 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9714 else if (fcode == BUILT_IN_MEMSET_CHK)
9715 return NULL_RTX;
9716 else
9718 unsigned int dest_align = get_pointer_alignment (dest);
9720 /* If DEST is not a pointer type, call the normal function. */
9721 if (dest_align == 0)
9722 return NULL_RTX;
9724 /* If SRC and DEST are the same (and not volatile), do nothing. */
9725 if (operand_equal_p (src, dest, 0))
9727 tree expr;
9729 if (fcode != BUILT_IN_MEMPCPY_CHK)
9731 /* Evaluate and ignore LEN in case it has side-effects. */
9732 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9733 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9736 expr = fold_build_pointer_plus (dest, len);
9737 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9740 /* __memmove_chk special case. */
9741 if (fcode == BUILT_IN_MEMMOVE_CHK)
9743 unsigned int src_align = get_pointer_alignment (src);
9745 if (src_align == 0)
9746 return NULL_RTX;
9748 /* If src is categorized for a readonly section we can use
9749 normal __memcpy_chk. */
9750 if (readonly_data_expr (src))
9752 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9753 if (!fn)
9754 return NULL_RTX;
9755 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9756 dest, src, len, size);
9757 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9758 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9759 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9762 return NULL_RTX;
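/* Illustrative aside, not part of the original source: the source-level
   shape of a checking call handled above.  When LEN provably fits in
   the destination, or the object size is unknown (i.e. (size_t) -1),
   the call is expanded as a plain memcpy; otherwise the __memcpy_chk
   runtime check is kept.  */

#include <stddef.h>

static void *
memcpy_chk_demo (void *dst, const void *src, size_t n)
{
  return __builtin___memcpy_chk (dst, src, n,
				 __builtin_object_size (dst, 0));
}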
9766 /* Emit warning if a buffer overflow is detected at compile time. */
9768 static void
9769 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9771 /* The source string. */
9772 tree srcstr = NULL_TREE;
9773 /* The size of the destination object. */
9774 tree objsize = NULL_TREE;
9775 /* The string that is being concatenated with (as in __strcat_chk)
9776 or null if it isn't. */
9777 tree catstr = NULL_TREE;
9778 /* The maximum length of the source sequence in a bounded operation
9779 (such as __strncat_chk) or null if the operation isn't bounded
9780 (such as __strcat_chk). */
9781 tree maxlen = NULL_TREE;
9783 switch (fcode)
9785 case BUILT_IN_STRCPY_CHK:
9786 case BUILT_IN_STPCPY_CHK:
9787 srcstr = CALL_EXPR_ARG (exp, 1);
9788 objsize = CALL_EXPR_ARG (exp, 2);
9789 break;
9791 case BUILT_IN_STRCAT_CHK:
9792 /* For __strcat_chk the warning will be emitted only if overflowing
9793 by at least strlen (dest) + 1 bytes. */
9794 catstr = CALL_EXPR_ARG (exp, 0);
9795 srcstr = CALL_EXPR_ARG (exp, 1);
9796 objsize = CALL_EXPR_ARG (exp, 2);
9797 break;
9799 case BUILT_IN_STRNCAT_CHK:
9800 catstr = CALL_EXPR_ARG (exp, 0);
9801 srcstr = CALL_EXPR_ARG (exp, 1);
9802 maxlen = CALL_EXPR_ARG (exp, 2);
9803 objsize = CALL_EXPR_ARG (exp, 3);
9804 break;
9806 case BUILT_IN_STRNCPY_CHK:
9807 case BUILT_IN_STPNCPY_CHK:
9808 srcstr = CALL_EXPR_ARG (exp, 1);
9809 maxlen = CALL_EXPR_ARG (exp, 2);
9810 objsize = CALL_EXPR_ARG (exp, 3);
9811 break;
9813 case BUILT_IN_SNPRINTF_CHK:
9814 case BUILT_IN_VSNPRINTF_CHK:
9815 maxlen = CALL_EXPR_ARG (exp, 1);
9816 objsize = CALL_EXPR_ARG (exp, 3);
9817 break;
9818 default:
9819 gcc_unreachable ();
9822 if (catstr && maxlen)
9824 /* Check __strncat_chk. There is no way to determine the length
9825 of the string to which the source string is being appended so
9826 just warn when the length of the source string is not known. */
9827 check_strncat_sizes (exp, objsize);
9828 return;
9831 check_sizes (OPT_Wstringop_overflow_, exp,
9832 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9835 /* Emit warning if a buffer overflow is detected at compile time
9836 in __sprintf_chk/__vsprintf_chk calls. */
9838 static void
9839 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9841 tree size, len, fmt;
9842 const char *fmt_str;
9843 int nargs = call_expr_nargs (exp);
9845 /* Verify the required arguments in the original call. */
9847 if (nargs < 4)
9848 return;
9849 size = CALL_EXPR_ARG (exp, 2);
9850 fmt = CALL_EXPR_ARG (exp, 3);
9852 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9853 return;
9855 /* Check whether the format is a literal string constant. */
9856 fmt_str = c_getstr (fmt);
9857 if (fmt_str == NULL)
9858 return;
9860 if (!init_target_chars ())
9861 return;
9863 /* If the format doesn't contain % args or %%, we know its size. */
9864 if (strchr (fmt_str, target_percent) == 0)
9865 len = build_int_cstu (size_type_node, strlen (fmt_str));
9866 /* If the format is "%s" and first ... argument is a string literal,
9867 we know it too. */
9868 else if (fcode == BUILT_IN_SPRINTF_CHK
9869 && strcmp (fmt_str, target_percent_s) == 0)
9871 tree arg;
9873 if (nargs < 5)
9874 return;
9875 arg = CALL_EXPR_ARG (exp, 4);
9876 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9877 return;
9879 len = c_strlen (arg, 1);
9880 if (!len || ! tree_fits_uhwi_p (len))
9881 return;
9883 else
9884 return;
9886 /* Add one for the terminating nul. */
9887 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9888 check_sizes (OPT_Wstringop_overflow_,
9889 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9892 /* Emit warning if a free is called with address of a variable. */
9894 static void
9895 maybe_emit_free_warning (tree exp)
9897 tree arg = CALL_EXPR_ARG (exp, 0);
9899 STRIP_NOPS (arg);
9900 if (TREE_CODE (arg) != ADDR_EXPR)
9901 return;
9903 arg = get_base_address (TREE_OPERAND (arg, 0));
9904 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9905 return;
9907 if (SSA_VAR_P (arg))
9908 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9909 "%Kattempt to free a non-heap object %qD", exp, arg);
9910 else
9911 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9912 "%Kattempt to free a non-heap object", exp);
9915 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9916 if possible. */
9918 static tree
9919 fold_builtin_object_size (tree ptr, tree ost)
9921 unsigned HOST_WIDE_INT bytes;
9922 int object_size_type;
9924 if (!validate_arg (ptr, POINTER_TYPE)
9925 || !validate_arg (ost, INTEGER_TYPE))
9926 return NULL_TREE;
9928 STRIP_NOPS (ost);
9930 if (TREE_CODE (ost) != INTEGER_CST
9931 || tree_int_cst_sgn (ost) < 0
9932 || compare_tree_int (ost, 3) > 0)
9933 return NULL_TREE;
9935 object_size_type = tree_to_shwi (ost);
9937 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9938 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9939 and (size_t) 0 for types 2 and 3. */
9940 if (TREE_SIDE_EFFECTS (ptr))
9941 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9943 if (TREE_CODE (ptr) == ADDR_EXPR)
9945 compute_builtin_object_size (ptr, object_size_type, &bytes);
9946 if (wi::fits_to_tree_p (bytes, size_type_node))
9947 return build_int_cstu (size_type_node, bytes);
9949 else if (TREE_CODE (ptr) == SSA_NAME)
9951 /* If object size is not known yet, delay folding until
9952 later. Maybe subsequent passes will help determining
9953 it. */
9954 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9955 && wi::fits_to_tree_p (bytes, size_type_node))
9956 return build_int_cstu (size_type_node, bytes);
9959 return NULL_TREE;
9962 /* Builtins with folding operations that operate on "..." arguments
9963 need special handling; we need to store the arguments in a convenient
9964 data structure before attempting any folding. Fortunately there are
9965 only a few builtins that fall into this category. FNDECL is the
9966 function, EXP is the CALL_EXPR for the call. */
9968 static tree
9969 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9971 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9972 tree ret = NULL_TREE;
9974 switch (fcode)
9976 case BUILT_IN_FPCLASSIFY:
9977 ret = fold_builtin_fpclassify (loc, args, nargs);
9978 break;
9980 default:
9981 break;
9983 if (ret)
9985 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9986 SET_EXPR_LOCATION (ret, loc);
9987 TREE_NO_WARNING (ret) = 1;
9988 return ret;
9990 return NULL_TREE;
9993 /* Initialize format string characters in the target charset. */
9995 bool
9996 init_target_chars (void)
9998 static bool init;
9999 if (!init)
10001 target_newline = lang_hooks.to_target_charset ('\n');
10002 target_percent = lang_hooks.to_target_charset ('%');
10003 target_c = lang_hooks.to_target_charset ('c');
10004 target_s = lang_hooks.to_target_charset ('s');
10005 if (target_newline == 0 || target_percent == 0 || target_c == 0
10006 || target_s == 0)
10007 return false;
10009 target_percent_c[0] = target_percent;
10010 target_percent_c[1] = target_c;
10011 target_percent_c[2] = '\0';
10013 target_percent_s[0] = target_percent;
10014 target_percent_s[1] = target_s;
10015 target_percent_s[2] = '\0';
10017 target_percent_s_newline[0] = target_percent;
10018 target_percent_s_newline[1] = target_s;
10019 target_percent_s_newline[2] = target_newline;
10020 target_percent_s_newline[3] = '\0';
10022 init = true;
10024 return true;
10027 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10028 and no overflow/underflow occurred. INEXACT is true if M was not
10029 exactly calculated. TYPE is the tree type for the result. This
10030 function assumes that you cleared the MPFR flags and then
10031 calculated M to see if anything subsequently set a flag prior to
10032 entering this function. Return NULL_TREE if any checks fail. */
10034 static tree
10035 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10037 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10038 overflow/underflow occurred. If -frounding-math, proceed iff the
10039 result of calling FUNC was exact. */
10040 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10041 && (!flag_rounding_math || !inexact))
10043 REAL_VALUE_TYPE rr;
10045 real_from_mpfr (&rr, m, type, GMP_RNDN);
10046 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10047 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10048 but the mpfr_t is not, then we underflowed in the
10049 conversion. */
10050 if (real_isfinite (&rr)
10051 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10053 REAL_VALUE_TYPE rmode;
10055 real_convert (&rmode, TYPE_MODE (type), &rr);
10056 /* Proceed iff the specified mode can hold the value. */
10057 if (real_identical (&rmode, &rr))
10058 return build_real (type, rmode);
10061 return NULL_TREE;
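/* Illustrative aside, not part of the original source: the clear-flags /
   compute / inspect-flags protocol that do_mpfr_ckconv assumes, in a
   minimal standalone form.  */

#include <mpfr.h>

static int
mpfr_protocol_demo (void)
{
  mpfr_t m;
  int inexact, ok;

  mpfr_init2 (m, 53);			/* IEEE double precision */
  mpfr_clear_flags ();			/* reset sticky flags first */
  mpfr_set_ui (m, 1, GMP_RNDN);
  inexact = mpfr_div_ui (m, m, 3, GMP_RNDN);	/* 1/3 is inexact */
  /* The same acceptance test as above, with flag_rounding_math
     assumed false.  */
  ok = (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ());
  mpfr_clear (m);
  return ok && inexact != 0;
}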
10064 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10065 number and no overflow/underflow occurred. INEXACT is true if M
10066 was not exactly calculated. TYPE is the tree type for the result.
10067 This function assumes that you cleared the MPFR flags and then
10068 calculated M to see if anything subsequently set a flag prior to
10069 entering this function. Return NULL_TREE if any checks fail, if
10070 FORCE_CONVERT is true, then bypass the checks. */
10072 static tree
10073 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10075 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10076 overflow/underflow occurred. If -frounding-math, proceed iff the
10077 result of calling FUNC was exact. */
10078 if (force_convert
10079 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10080 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10081 && (!flag_rounding_math || !inexact)))
10083 REAL_VALUE_TYPE re, im;
10085 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10086 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10087 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10088 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10090 but the mpfr_t is not, then we underflowed in the
10090 conversion. */
10091 if (force_convert
10092 || (real_isfinite (&re) && real_isfinite (&im)
10093 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10094 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10096 REAL_VALUE_TYPE re_mode, im_mode;
10098 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10099 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10100 /* Proceed iff the specified mode can hold the value. */
10101 if (force_convert
10102 || (real_identical (&re_mode, &re)
10103 && real_identical (&im_mode, &im)))
10104 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10105 build_real (TREE_TYPE (type), im_mode));
10108 return NULL_TREE;
10111 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10112 the pointer *(ARG_QUO) and return the result. The type is taken
10113 from the type of ARG0 and is used for setting the precision of the
10114 calculation and results. */
10116 static tree
10117 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10119 tree const type = TREE_TYPE (arg0);
10120 tree result = NULL_TREE;
10122 STRIP_NOPS (arg0);
10123 STRIP_NOPS (arg1);
10125 /* To proceed, MPFR must exactly represent the target floating point
10126 format, which only happens when the target base equals two. */
10127 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10128 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10129 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10131 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10132 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10134 if (real_isfinite (ra0) && real_isfinite (ra1))
10136 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10137 const int prec = fmt->p;
10138 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10139 tree result_rem;
10140 long integer_quo;
10141 mpfr_t m0, m1;
10143 mpfr_inits2 (prec, m0, m1, NULL);
10144 mpfr_from_real (m0, ra0, GMP_RNDN);
10145 mpfr_from_real (m1, ra1, GMP_RNDN);
10146 mpfr_clear_flags ();
10147 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10148 /* Remquo is independent of the rounding mode, so pass
10149 inexact=0 to do_mpfr_ckconv(). */
10150 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10151 mpfr_clears (m0, m1, NULL);
10152 if (result_rem)
10154 /* MPFR calculates quo in the host's long so it may
10155 return more bits in quo than the target int can hold
10156 if sizeof(host long) > sizeof(target int). This can
10157 happen even for native compilers in LP64 mode. In
10158 these cases, modulo the quo value with the largest
10159 number that the target int can hold while leaving one
10160 bit for the sign. */
10161 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10162 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10164 /* Dereference the quo pointer argument. */
10165 arg_quo = build_fold_indirect_ref (arg_quo);
10166 /* Proceed iff a valid pointer type was passed in. */
10167 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10169 /* Set the value. */
10170 tree result_quo
10171 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10172 build_int_cst (TREE_TYPE (arg_quo),
10173 integer_quo));
10174 TREE_SIDE_EFFECTS (result_quo) = 1;
10175 /* Combine the quo assignment with the rem. */
10176 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10177 result_quo, result_rem));
10182 return result;
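/* Illustrative aside, not part of the original source: the remquo
   semantics being constant-folded above, checked against the C library.
   C99 only guarantees the sign and at least three low-order bits of the
   quotient stored through the pointer.  */

#include <math.h>
#include <assert.h>

static void
remquo_demo (void)
{
  int quo;
  double rem = remquo (7.0, 2.0, &quo);
  /* 7/2 = 3.5 rounds to 4 (ties to even), so rem = 7 - 4*2 = -1.  */
  assert (rem == -1.0);
  assert ((quo & 7) == 4);	/* low three bits of the quotient */
}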
10185 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10186 resulting value as a tree with type TYPE. The mpfr precision is
10187 set to the precision of TYPE. We assume that this mpfr function
10188 returns zero if the result could be calculated exactly within the
10189 requested precision. In addition, the integer pointer represented
10190 by ARG_SG will be dereferenced and set to the appropriate signgam
10191 (-1,1) value. */
10193 static tree
10194 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10196 tree result = NULL_TREE;
10198 STRIP_NOPS (arg);
10200 /* To proceed, MPFR must exactly represent the target floating point
10201 format, which only happens when the target base equals two. Also
10202 verify ARG is a constant and that ARG_SG is an int pointer. */
10203 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10204 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10205 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10206 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10208 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10210 /* In addition to NaN and Inf, the argument cannot be zero or a
10211 negative integer. */
10212 if (real_isfinite (ra)
10213 && ra->cl != rvc_zero
10214 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10216 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10217 const int prec = fmt->p;
10218 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10219 int inexact, sg;
10220 mpfr_t m;
10221 tree result_lg;
10223 mpfr_init2 (m, prec);
10224 mpfr_from_real (m, ra, GMP_RNDN);
10225 mpfr_clear_flags ();
10226 inexact = mpfr_lgamma (m, &sg, m, rnd);
10227 result_lg = do_mpfr_ckconv (m, type, inexact);
10228 mpfr_clear (m);
10229 if (result_lg)
10231 tree result_sg;
10233 /* Dereference the arg_sg pointer argument. */
10234 arg_sg = build_fold_indirect_ref (arg_sg);
10235 /* Assign the signgam value into *arg_sg. */
10236 result_sg = fold_build2 (MODIFY_EXPR,
10237 TREE_TYPE (arg_sg), arg_sg,
10238 build_int_cst (TREE_TYPE (arg_sg), sg));
10239 TREE_SIDE_EFFECTS (result_sg) = 1;
10240 /* Combine the signgam assignment with the lgamma result. */
10241 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10242 result_sg, result_lg));
10247 return result;
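/* Illustrative aside, not part of the original source: the lgamma_r
   contract assumed above, shown with the common libc extension of the
   same name; the sign of Gamma(x) comes back through the int pointer.  */

#include <math.h>

extern double lgamma_r (double, int *);	/* common POSIX extension */

static double
lgamma_r_demo (void)
{
  int sg;
  double lg = lgamma_r (-2.5, &sg);	/* Gamma(-2.5) < 0, so sg == -1 */
  return sg * exp (lg);			/* reconstructs Gamma(-2.5) */
}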
10250 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10251 mpc function FUNC on it and return the resulting value as a tree
10252 with type TYPE. The mpfr precision is set to the precision of
10253 TYPE. We assume that function FUNC returns zero if the result
10254 could be calculated exactly within the requested precision. If
10255 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10256 in the arguments and/or results. */
10258 tree
10259 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10260 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10262 tree result = NULL_TREE;
10264 STRIP_NOPS (arg0);
10265 STRIP_NOPS (arg1);
10267 /* To proceed, MPFR must exactly represent the target floating point
10268 format, which only happens when the target base equals two. */
10269 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10270 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10271 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10272 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10273 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10275 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10276 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10277 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10278 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10280 if (do_nonfinite
10281 || (real_isfinite (re0) && real_isfinite (im0)
10282 && real_isfinite (re1) && real_isfinite (im1)))
10284 const struct real_format *const fmt =
10285 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10286 const int prec = fmt->p;
10287 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10288 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10289 int inexact;
10290 mpc_t m0, m1;
10292 mpc_init2 (m0, prec);
10293 mpc_init2 (m1, prec);
10294 mpfr_from_real (mpc_realref (m0), re0, rnd);
10295 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10296 mpfr_from_real (mpc_realref (m1), re1, rnd);
10297 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10298 mpfr_clear_flags ();
10299 inexact = func (m0, m0, m1, crnd);
10300 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10301 mpc_clear (m0);
10302 mpc_clear (m1);
10306 return result;
10309 /* A wrapper function for builtin folding that prevents warnings for
10310 "statement without effect" and the like, caused by removing the
10311 call node earlier than the warning is generated. */
10313 tree
10314 fold_call_stmt (gcall *stmt, bool ignore)
10316 tree ret = NULL_TREE;
10317 tree fndecl = gimple_call_fndecl (stmt);
10318 location_t loc = gimple_location (stmt);
10319 if (fndecl
10320 && TREE_CODE (fndecl) == FUNCTION_DECL
10321 && DECL_BUILT_IN (fndecl)
10322 && !gimple_call_va_arg_pack_p (stmt))
10324 int nargs = gimple_call_num_args (stmt);
10325 tree *args = (nargs > 0
10326 ? gimple_call_arg_ptr (stmt, 0)
10327 : &error_mark_node);
10329 if (avoid_folding_inline_builtin (fndecl))
10330 return NULL_TREE;
10331 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10333 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10335 else
10337 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10338 if (ret)
10340 /* Propagate location information from original call to
10341 expansion of builtin. Otherwise things like
10342 maybe_emit_chk_warning, that operate on the expansion
10343 of a builtin, will use the wrong location information. */
10344 if (gimple_has_location (stmt))
10346 tree realret = ret;
10347 if (TREE_CODE (ret) == NOP_EXPR)
10348 realret = TREE_OPERAND (ret, 0);
10349 if (CAN_HAVE_LOCATION_P (realret)
10350 && !EXPR_HAS_LOCATION (realret))
10351 SET_EXPR_LOCATION (realret, loc);
10352 return realret;
10354 return ret;
10358 return NULL_TREE;
10361 /* Look up the function in builtin_decl that corresponds to DECL
10362 and set ASMSPEC as its user assembler name. DECL must be a
10363 function decl that declares a builtin. */
10365 void
10366 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10368 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10369 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10370 && asmspec != 0);
10372 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10373 set_user_assembler_name (builtin, asmspec);
10375 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10376 && INT_TYPE_SIZE < BITS_PER_WORD)
10378 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10379 set_user_assembler_libfunc ("ffs", asmspec);
10380 set_optab_libfunc (ffs_optab, mode, "ffs");
10384 /* Return true if DECL is a builtin that expands to a constant or similarly
10385 simple code. */
10386 bool
10387 is_simple_builtin (tree decl)
10389 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10390 switch (DECL_FUNCTION_CODE (decl))
10392 /* Builtins that expand to constants. */
10393 case BUILT_IN_CONSTANT_P:
10394 case BUILT_IN_EXPECT:
10395 case BUILT_IN_OBJECT_SIZE:
10396 case BUILT_IN_UNREACHABLE:
10397 /* Simple register moves or loads from stack. */
10398 case BUILT_IN_ASSUME_ALIGNED:
10399 case BUILT_IN_RETURN_ADDRESS:
10400 case BUILT_IN_EXTRACT_RETURN_ADDR:
10401 case BUILT_IN_FROB_RETURN_ADDR:
10402 case BUILT_IN_RETURN:
10403 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10404 case BUILT_IN_FRAME_ADDRESS:
10405 case BUILT_IN_VA_END:
10406 case BUILT_IN_STACK_SAVE:
10407 case BUILT_IN_STACK_RESTORE:
10408 /* Exception state returns or moves registers around. */
10409 case BUILT_IN_EH_FILTER:
10410 case BUILT_IN_EH_POINTER:
10411 case BUILT_IN_EH_COPY_VALUES:
10412 return true;
10414 default:
10415 return false;
10418 return false;
10421 /* Return true if DECL is a builtin that is not expensive, i.e., one
10422 that is most probably expanded inline into reasonably simple code. This is a
10423 superset of is_simple_builtin. */
10424 bool
10425 is_inexpensive_builtin (tree decl)
10427 if (!decl)
10428 return false;
10429 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10430 return true;
10431 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10432 switch (DECL_FUNCTION_CODE (decl))
10434 case BUILT_IN_ABS:
10435 CASE_BUILT_IN_ALLOCA:
10436 case BUILT_IN_BSWAP16:
10437 case BUILT_IN_BSWAP32:
10438 case BUILT_IN_BSWAP64:
10439 case BUILT_IN_CLZ:
10440 case BUILT_IN_CLZIMAX:
10441 case BUILT_IN_CLZL:
10442 case BUILT_IN_CLZLL:
10443 case BUILT_IN_CTZ:
10444 case BUILT_IN_CTZIMAX:
10445 case BUILT_IN_CTZL:
10446 case BUILT_IN_CTZLL:
10447 case BUILT_IN_FFS:
10448 case BUILT_IN_FFSIMAX:
10449 case BUILT_IN_FFSL:
10450 case BUILT_IN_FFSLL:
10451 case BUILT_IN_IMAXABS:
10452 case BUILT_IN_FINITE:
10453 case BUILT_IN_FINITEF:
10454 case BUILT_IN_FINITEL:
10455 case BUILT_IN_FINITED32:
10456 case BUILT_IN_FINITED64:
10457 case BUILT_IN_FINITED128:
10458 case BUILT_IN_FPCLASSIFY:
10459 case BUILT_IN_ISFINITE:
10460 case BUILT_IN_ISINF_SIGN:
10461 case BUILT_IN_ISINF:
10462 case BUILT_IN_ISINFF:
10463 case BUILT_IN_ISINFL:
10464 case BUILT_IN_ISINFD32:
10465 case BUILT_IN_ISINFD64:
10466 case BUILT_IN_ISINFD128:
10467 case BUILT_IN_ISNAN:
10468 case BUILT_IN_ISNANF:
10469 case BUILT_IN_ISNANL:
10470 case BUILT_IN_ISNAND32:
10471 case BUILT_IN_ISNAND64:
10472 case BUILT_IN_ISNAND128:
10473 case BUILT_IN_ISNORMAL:
10474 case BUILT_IN_ISGREATER:
10475 case BUILT_IN_ISGREATEREQUAL:
10476 case BUILT_IN_ISLESS:
10477 case BUILT_IN_ISLESSEQUAL:
10478 case BUILT_IN_ISLESSGREATER:
10479 case BUILT_IN_ISUNORDERED:
10480 case BUILT_IN_VA_ARG_PACK:
10481 case BUILT_IN_VA_ARG_PACK_LEN:
10482 case BUILT_IN_VA_COPY:
10483 case BUILT_IN_TRAP:
10484 case BUILT_IN_SAVEREGS:
10485 case BUILT_IN_POPCOUNTL:
10486 case BUILT_IN_POPCOUNTLL:
10487 case BUILT_IN_POPCOUNTIMAX:
10488 case BUILT_IN_POPCOUNT:
10489 case BUILT_IN_PARITYL:
10490 case BUILT_IN_PARITYLL:
10491 case BUILT_IN_PARITYIMAX:
10492 case BUILT_IN_PARITY:
10493 case BUILT_IN_LABS:
10494 case BUILT_IN_LLABS:
10495 case BUILT_IN_PREFETCH:
10496 case BUILT_IN_ACC_ON_DEVICE:
10497 return true;
10499 default:
10500 return is_simple_builtin (decl);
10503 return false;
10506 /* Return true if T is a constant and the value cast to a target char
10507 can be represented by a host char.
10508 Store the cast char constant in *P if so. */
10510 bool
10511 target_char_cst_p (tree t, char *p)
10513 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10514 return false;
10516 *p = (char)tree_to_uhwi (t);
10517 return true;