/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
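
/* Illustration: is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") both return true, while
   is_builtin_name ("memcpy") returns false.  */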
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
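
/* Illustration of the contract above: if get_object_alignment_1 reports
   align == 64 and bitpos == 32, the address is known to be 32 mod 64
   (in bits), so the guaranteed alignment is least_bit_hwi (32) == 32
   bits, i.e. 4 bytes.  */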
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
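
/* Illustration: string_length ("ab\0cd", 1, 5) returns 2, since the third
   byte is the first zero element; string_length ("abcd", 1, 4) returns 4,
   since no zero element occurs within MAXELTS elements.  */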
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument that is a
   character array with no terminating NUL, declared at DECL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* For a call EXPR (which may be null) that expects a string argument
   and SRC as the argument, returns false if SRC is a character array
   with no terminating NUL.  When nonnull, BOUND is the number of
   characters in which to expect the terminating NUL.
   When EXPR is nonnull also issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
  tree size;
  bool exact;
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  if (bound)
    {
      wide_int min, max;
      if (TREE_CODE (bound) == INTEGER_CST)
	min = max = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, &min, &max);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (wi::leu_p (min, wi::to_wide (size)))
	return true;
    }

  if (expr && !TREE_NO_WARNING (expr))
    {
      tree fndecl = get_callee_fndecl (expr);
      const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
    }

  return false;
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
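
/* Illustration of the cases above: for a constant argument "foobar",
   c_strlen returns ssize_int (6); for "foo\0bar" with a known offset of 4
   it returns ssize_int (3); for an unterminated array it returns NULL_TREE
   and records the declaration and minimum length in *DATA.  */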
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
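
/* Illustration, assuming a little-endian target with 8-bit units:
   c_readstr ("abc", SImode) yields the SImode constant 0x00636261,
   i.e. the bytes 'a', 'b', 'c' followed by the implied terminating
   zero in the most significant byte.  */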
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
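
/* Illustration: target_char_cast fails (returns 1) when CST is not an
   INTEGER_CST or when its value, reduced to the target character width,
   does not fit in a host char; otherwise it stores the value in *P and
   returns 0.  */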
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
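
  /* Sketch of the buffer layout implied by the stores below, in
     Pmode-sized slots (the stack save area's size and contents are
     machine-dependent):
	 buf[0]  hard frame pointer
	 buf[1]  address of RECEIVER_LABEL
	 buf[2]  ... stack save area  */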
  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
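
/* Return true if ITER has not yet reached the end of its argument list.  */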
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
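
/* Illustration, using calls that appear later in this file:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts one pointer argument followed by anything.  */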
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
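
/* Usage illustration:
     __builtin_prefetch (p);        read prefetch, locality 3
     __builtin_prefetch (p, 1, 0);  write prefetch, no temporal locality
   Both optional arguments must be compile-time constants, as checked
   above.  */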
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
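
/* Sketch of the block laid out by apply_args_size above: one Pmode word
   for the incoming arg-pointer, then (unless it is passed as an
   "invisible" first argument) one for the structure value address, then
   one suitably aligned slot per register that can carry function
   arguments.  */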
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1696 /* __builtin_apply_args returns a block of memory allocated on
1697 the stack into which are stored the arg pointer, structure
1698 value address, static chain, and all the registers that might
1699 possibly be used in performing a function call. The code is
1700 moved to the start of the function so the incoming values are
1701 saved. */
1703 static rtx
1704 expand_builtin_apply_args (void)
1706 /* Don't do __builtin_apply_args more than once in a function.
1707 Save the result of the first call and reuse it. */
1708 if (apply_args_value != 0)
1709 return apply_args_value;
1711 /* When this function is called, it means that registers must be
1712 saved on entry to this function. So we migrate the
1713 call to the first insn of this function. */
1714 rtx temp;
1716 start_sequence ();
1717 temp = expand_builtin_apply_args_1 ();
1718 rtx_insn *seq = get_insns ();
1719 end_sequence ();
1721 apply_args_value = temp;
1723 /* Put the insns after the NOTE that starts the function.
1724 If this is inside a start_sequence, make the outer-level insn
1725 chain current, so the code is placed at the start of the
1726 function. If internal_arg_pointer is a non-virtual pseudo,
1727 it needs to be placed after the function that initializes
1728 that pseudo. */
1729 push_topmost_sequence ();
1730 if (REG_P (crtl->args.internal_arg_pointer)
1731 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1732 emit_insn_before (seq, parm_birth_insn);
1733 else
1734 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1735 pop_topmost_sequence ();
1736 return temp;
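/* A usage sketch of the builtin expanded above (hypothetical user code,
   not part of this file): a wrapper that forwards its own incoming
   arguments to another function with the same signature and returns
   whatever that function returned:

     double target_fn (int, double);
     double wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (ret);
     }

   Here 64 is an assumed, caller-chosen upper bound on the size of the
   pushed argument block; nothing in this file computes it. */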
1740 /* Perform an untyped call and save the state required to perform an
1741 untyped return of whatever value was returned by the given function. */
1743 static rtx
1744 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1746 int size, align, regno;
1747 fixed_size_mode mode;
1748 rtx incoming_args, result, reg, dest, src;
1749 rtx_call_insn *call_insn;
1750 rtx old_stack_level = 0;
1751 rtx call_fusage = 0;
1752 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1754 arguments = convert_memory_address (Pmode, arguments);
1756 /* Create a block where the return registers can be saved. */
1757 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1759 /* Fetch the arg pointer from the ARGUMENTS block. */
1760 incoming_args = gen_reg_rtx (Pmode);
1761 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1762 if (!STACK_GROWS_DOWNWARD)
1763 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1764 incoming_args, 0, OPTAB_LIB_WIDEN);
1766 /* Push a new argument block and copy the arguments. Do not allow
1767 the (potential) memcpy call below to interfere with our stack
1768 manipulations. */
1769 do_pending_stack_adjust ();
1770 NO_DEFER_POP;
1772 /* Save the stack pointer using the nonlocal mechanism if available. */
1773 if (targetm.have_save_stack_nonlocal ())
1774 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1775 else
1776 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1778 /* Allocate a block of memory onto the stack and copy the memory
1779 arguments to the outgoing arguments address. We can pass TRUE
1780 as the 4th argument because we just saved the stack pointer
1781 and will restore it right after the call. */
1782 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1784 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1785 may have already set current_function_calls_alloca to true.
1786 current_function_calls_alloca won't be set if argsize is zero,
1787 so we have to guarantee need_drap is true here. */
1788 if (SUPPORTS_STACK_ALIGNMENT)
1789 crtl->need_drap = true;
1791 dest = virtual_outgoing_args_rtx;
1792 if (!STACK_GROWS_DOWNWARD)
1794 if (CONST_INT_P (argsize))
1795 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1796 else
1797 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1799 dest = gen_rtx_MEM (BLKmode, dest);
1800 set_mem_align (dest, PARM_BOUNDARY);
1801 src = gen_rtx_MEM (BLKmode, incoming_args);
1802 set_mem_align (src, PARM_BOUNDARY);
1803 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1805 /* Refer to the argument block. */
1806 apply_args_size ();
1807 arguments = gen_rtx_MEM (BLKmode, arguments);
1808 set_mem_align (arguments, PARM_BOUNDARY);
1810 /* Walk past the arg-pointer and structure value address. */
1811 size = GET_MODE_SIZE (Pmode);
1812 if (struct_value)
1813 size += GET_MODE_SIZE (Pmode);
1815 /* Restore each of the registers previously saved. Make USE insns
1816 for each of these registers for use in making the call. */
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_args_mode[regno]) != VOIDmode)
1820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1821 if (size % align != 0)
1822 size = CEIL (size, align) * align;
1823 reg = gen_rtx_REG (mode, regno);
1824 emit_move_insn (reg, adjust_address (arguments, mode, size));
1825 use_reg (&call_fusage, reg);
1826 size += GET_MODE_SIZE (mode);
1829 /* Restore the structure value address unless this is passed as an
1830 "invisible" first argument. */
1831 size = GET_MODE_SIZE (Pmode);
1832 if (struct_value)
1834 rtx value = gen_reg_rtx (Pmode);
1835 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1836 emit_move_insn (struct_value, value);
1837 if (REG_P (struct_value))
1838 use_reg (&call_fusage, struct_value);
1841 /* All arguments and registers used for the call are set up by now! */
1842 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1844 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
1845 is needed, and we don't want to load it into a register as an optimization,
1846 because prepare_call_address already did it if it should be done. */
1847 if (GET_CODE (function) != SYMBOL_REF)
1848 function = memory_address (FUNCTION_MODE, function);
1850 /* Generate the actual call instruction and save the return value. */
1851 if (targetm.have_untyped_call ())
1853 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1854 emit_call_insn (targetm.gen_untyped_call (mem, result,
1855 result_vector (1, result)));
1857 else if (targetm.have_call_value ())
1859 rtx valreg = 0;
1861 /* Locate the unique return register. It is not possible to
1862 express a call that sets more than one return register using
1863 call_value; use untyped_call for that. In fact, untyped_call
1864 only needs to save the return registers in the given block. */
1865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1866 if ((mode = apply_result_mode[regno]) != VOIDmode)
1868 gcc_assert (!valreg); /* have_untyped_call required. */
1870 valreg = gen_rtx_REG (mode, regno);
1873 emit_insn (targetm.gen_call_value (valreg,
1874 gen_rtx_MEM (FUNCTION_MODE, function),
1875 const0_rtx, NULL_RTX, const0_rtx));
1877 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1879 else
1880 gcc_unreachable ();
1882 /* Find the CALL insn we just emitted, and attach the register usage
1883 information. */
1884 call_insn = last_call_insn ();
1885 add_function_usage_to (call_insn, call_fusage);
1887 /* Restore the stack. */
1888 if (targetm.have_save_stack_nonlocal ())
1889 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1890 else
1891 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1892 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1894 OK_DEFER_POP;
1896 /* Return the address of the result block. */
1897 result = copy_addr_to_reg (XEXP (result, 0));
1898 return convert_memory_address (ptr_mode, result);
1901 /* Perform an untyped return. */
1903 static void
1904 expand_builtin_return (rtx result)
1906 int size, align, regno;
1907 fixed_size_mode mode;
1908 rtx reg;
1909 rtx_insn *call_fusage = 0;
1911 result = convert_memory_address (Pmode, result);
1913 apply_result_size ();
1914 result = gen_rtx_MEM (BLKmode, result);
1916 if (targetm.have_untyped_return ())
1918 rtx vector = result_vector (0, result);
1919 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1920 emit_barrier ();
1921 return;
1924 /* Restore the return value and note that each value is used. */
1925 size = 0;
1926 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1927 if ((mode = apply_result_mode[regno]) != VOIDmode)
1929 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1930 if (size % align != 0)
1931 size = CEIL (size, align) * align;
1932 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1933 emit_move_insn (reg, adjust_address (result, mode, size));
1935 push_to_sequence (call_fusage);
1936 emit_use (reg);
1937 call_fusage = get_insns ();
1938 end_sequence ();
1939 size += GET_MODE_SIZE (mode);
1942 /* Put the USE insns before the return. */
1943 emit_insn (call_fusage);
1945 /* Return whatever values were restored by jumping directly to the end
1946 of the function. */
1947 expand_naked_return ();
1950 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1952 static enum type_class
1953 type_to_class (tree type)
1955 switch (TREE_CODE (type))
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1978 /* Expand a call EXP to __builtin_classify_type. */
1980 static rtx
1981 expand_builtin_classify_type (tree exp)
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1985 return GEN_INT (no_type_class);
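/* For example, __builtin_classify_type (3.14) folds to real_type_class
   and __builtin_classify_type (42) to integer_type_class, following the
   switch in type_to_class above. */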
1988 /* This helper macro, meant to be used in mathfn_built_in below, determines
1989 which among a set of builtin math functions is appropriate for a given type
1990 mode. The `F' (float) and `L' (long double) are automatically generated
1991 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1992 types, there are additional types that are considered with 'F32', 'F64',
1993 'F128', etc. suffixes. */
1994 #define CASE_MATHFN(MATHFN) \
1995 CASE_CFN_##MATHFN: \
1996 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1997 fcodel = BUILT_IN_##MATHFN##L ; break;
1998 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1999 types. */
2000 #define CASE_MATHFN_FLOATN(MATHFN) \
2001 CASE_CFN_##MATHFN: \
2002 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2003 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2004 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2005 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2006 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2007 break;
2008 /* Similar to above, but appends _R after any F/L suffix. */
2009 #define CASE_MATHFN_REENT(MATHFN) \
2010 case CFN_BUILT_IN_##MATHFN##_R: \
2011 case CFN_BUILT_IN_##MATHFN##F_R: \
2012 case CFN_BUILT_IN_##MATHFN##L_R: \
2013 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2014 fcodel = BUILT_IN_##MATHFN##L_R ; break;
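/* As a worked example, CASE_MATHFN (ATAN) expands to the case labels
   that CASE_CFN_ATAN provides, followed by

     fcode = BUILT_IN_ATAN; fcodef = BUILT_IN_ATANF;
     fcodel = BUILT_IN_ATANL; break;

   so the switch below maps any atan-family combined_fn to the builtin
   codes for double, float and long double in one place. */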
2016 /* Return a function equivalent to FN but operating on floating-point
2017 values of type TYPE, or END_BUILTINS if no such function exists.
2018 This is purely an operation on function codes; it does not guarantee
2019 that the target actually has an implementation of the function. */
2021 static built_in_function
2022 mathfn_built_in_2 (tree type, combined_fn fn)
2024 tree mtype;
2025 built_in_function fcode, fcodef, fcodel;
2026 built_in_function fcodef16 = END_BUILTINS;
2027 built_in_function fcodef32 = END_BUILTINS;
2028 built_in_function fcodef64 = END_BUILTINS;
2029 built_in_function fcodef128 = END_BUILTINS;
2030 built_in_function fcodef32x = END_BUILTINS;
2031 built_in_function fcodef64x = END_BUILTINS;
2032 built_in_function fcodef128x = END_BUILTINS;
2034 switch (fn)
2036 CASE_MATHFN (ACOS)
2037 CASE_MATHFN (ACOSH)
2038 CASE_MATHFN (ASIN)
2039 CASE_MATHFN (ASINH)
2040 CASE_MATHFN (ATAN)
2041 CASE_MATHFN (ATAN2)
2042 CASE_MATHFN (ATANH)
2043 CASE_MATHFN (CBRT)
2044 CASE_MATHFN_FLOATN (CEIL)
2045 CASE_MATHFN (CEXPI)
2046 CASE_MATHFN_FLOATN (COPYSIGN)
2047 CASE_MATHFN (COS)
2048 CASE_MATHFN (COSH)
2049 CASE_MATHFN (DREM)
2050 CASE_MATHFN (ERF)
2051 CASE_MATHFN (ERFC)
2052 CASE_MATHFN (EXP)
2053 CASE_MATHFN (EXP10)
2054 CASE_MATHFN (EXP2)
2055 CASE_MATHFN (EXPM1)
2056 CASE_MATHFN (FABS)
2057 CASE_MATHFN (FDIM)
2058 CASE_MATHFN_FLOATN (FLOOR)
2059 CASE_MATHFN_FLOATN (FMA)
2060 CASE_MATHFN_FLOATN (FMAX)
2061 CASE_MATHFN_FLOATN (FMIN)
2062 CASE_MATHFN (FMOD)
2063 CASE_MATHFN (FREXP)
2064 CASE_MATHFN (GAMMA)
2065 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2066 CASE_MATHFN (HUGE_VAL)
2067 CASE_MATHFN (HYPOT)
2068 CASE_MATHFN (ILOGB)
2069 CASE_MATHFN (ICEIL)
2070 CASE_MATHFN (IFLOOR)
2071 CASE_MATHFN (INF)
2072 CASE_MATHFN (IRINT)
2073 CASE_MATHFN (IROUND)
2074 CASE_MATHFN (ISINF)
2075 CASE_MATHFN (J0)
2076 CASE_MATHFN (J1)
2077 CASE_MATHFN (JN)
2078 CASE_MATHFN (LCEIL)
2079 CASE_MATHFN (LDEXP)
2080 CASE_MATHFN (LFLOOR)
2081 CASE_MATHFN (LGAMMA)
2082 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2083 CASE_MATHFN (LLCEIL)
2084 CASE_MATHFN (LLFLOOR)
2085 CASE_MATHFN (LLRINT)
2086 CASE_MATHFN (LLROUND)
2087 CASE_MATHFN (LOG)
2088 CASE_MATHFN (LOG10)
2089 CASE_MATHFN (LOG1P)
2090 CASE_MATHFN (LOG2)
2091 CASE_MATHFN (LOGB)
2092 CASE_MATHFN (LRINT)
2093 CASE_MATHFN (LROUND)
2094 CASE_MATHFN (MODF)
2095 CASE_MATHFN (NAN)
2096 CASE_MATHFN (NANS)
2097 CASE_MATHFN_FLOATN (NEARBYINT)
2098 CASE_MATHFN (NEXTAFTER)
2099 CASE_MATHFN (NEXTTOWARD)
2100 CASE_MATHFN (POW)
2101 CASE_MATHFN (POWI)
2102 CASE_MATHFN (POW10)
2103 CASE_MATHFN (REMAINDER)
2104 CASE_MATHFN (REMQUO)
2105 CASE_MATHFN_FLOATN (RINT)
2106 CASE_MATHFN_FLOATN (ROUND)
2107 CASE_MATHFN_FLOATN (ROUNDEVEN)
2108 CASE_MATHFN (SCALB)
2109 CASE_MATHFN (SCALBLN)
2110 CASE_MATHFN (SCALBN)
2111 CASE_MATHFN (SIGNBIT)
2112 CASE_MATHFN (SIGNIFICAND)
2113 CASE_MATHFN (SIN)
2114 CASE_MATHFN (SINCOS)
2115 CASE_MATHFN (SINH)
2116 CASE_MATHFN_FLOATN (SQRT)
2117 CASE_MATHFN (TAN)
2118 CASE_MATHFN (TANH)
2119 CASE_MATHFN (TGAMMA)
2120 CASE_MATHFN_FLOATN (TRUNC)
2121 CASE_MATHFN (Y0)
2122 CASE_MATHFN (Y1)
2123 CASE_MATHFN (YN)
2125 default:
2126 return END_BUILTINS;
2129 mtype = TYPE_MAIN_VARIANT (type);
2130 if (mtype == double_type_node)
2131 return fcode;
2132 else if (mtype == float_type_node)
2133 return fcodef;
2134 else if (mtype == long_double_type_node)
2135 return fcodel;
2136 else if (mtype == float16_type_node)
2137 return fcodef16;
2138 else if (mtype == float32_type_node)
2139 return fcodef32;
2140 else if (mtype == float64_type_node)
2141 return fcodef64;
2142 else if (mtype == float128_type_node)
2143 return fcodef128;
2144 else if (mtype == float32x_type_node)
2145 return fcodef32x;
2146 else if (mtype == float64x_type_node)
2147 return fcodef64x;
2148 else if (mtype == float128x_type_node)
2149 return fcodef128x;
2150 else
2151 return END_BUILTINS;
2154 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2159 static tree
2160 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
2164 return NULL_TREE;
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2169 return builtin_decl_explicit (fcode2);
2172 /* Like mathfn_built_in_1, but always use the implicit array. */
2174 tree
2175 mathfn_built_in (tree type, combined_fn fn)
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
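/* Usage sketch: mathfn_built_in (float_type_node, CFN_SQRT) returns the
   decl for sqrtf, provided the front end has marked BUILT_IN_SQRTF as
   implicitly available; otherwise it returns NULL_TREE. */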
2180 /* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit array. */
2183 tree
2184 mathfn_built_in (tree type, enum built_in_function fn)
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2189 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internal-fn.def, not whether
2192 it is actually available on the target. */
2194 internal_fn
2195 associated_internal_fn (tree fndecl)
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2202 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2203 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2204 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2205 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208 #include "internal-fn.def"
2210 CASE_FLT_FN (BUILT_IN_POW10):
2211 return IFN_EXP10;
2213 CASE_FLT_FN (BUILT_IN_DREM):
2214 return IFN_REMAINDER;
2216 CASE_FLT_FN (BUILT_IN_SCALBN):
2217 CASE_FLT_FN (BUILT_IN_SCALBLN):
2218 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2219 return IFN_LDEXP;
2220 return IFN_LAST;
2222 default:
2223 return IFN_LAST;
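/* For example BUILT_IN_SQRTF maps to IFN_SQRT through the
   internal-fn.def expansion above, while BUILT_IN_SCALBN maps to
   IFN_LDEXP only when the return type's format has radix 2, per the
   explicit check above. */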
2227 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2235 internal_fn
2236 replacement_internal_fn (gcall *call)
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2243 tree_pair types = direct_internal_fn_types (ifn, call);
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2246 return ifn;
2249 return IFN_LAST;
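/* E.g. a GIMPLE call to sqrtf may be reported as replaceable by IFN_SQRT
   when the SFmode optab is supported for the block's optimization type;
   as noted above, the caller still owns any errno side-effect. */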
2252 /* Expand a call to the builtin ternary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands. */
2259 static rtx
2260 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2262 optab builtin_optab;
2263 rtx op0, op1, op2, result;
2264 rtx_insn *insns;
2265 tree fndecl = get_callee_fndecl (exp);
2266 tree arg0, arg1, arg2;
2267 machine_mode mode;
2269 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2270 return NULL_RTX;
2272 arg0 = CALL_EXPR_ARG (exp, 0);
2273 arg1 = CALL_EXPR_ARG (exp, 1);
2274 arg2 = CALL_EXPR_ARG (exp, 2);
2276 switch (DECL_FUNCTION_CODE (fndecl))
2278 CASE_FLT_FN (BUILT_IN_FMA):
2279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2280 builtin_optab = fma_optab; break;
2281 default:
2282 gcc_unreachable ();
2285 /* Make a suitable register to place result in. */
2286 mode = TYPE_MODE (TREE_TYPE (exp));
2288 /* Before working hard, check whether the instruction is available. */
2289 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2290 return NULL_RTX;
2292 result = gen_reg_rtx (mode);
2294 /* Always stabilize the argument list. */
2295 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2296 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2297 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2299 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2300 op1 = expand_normal (arg1);
2301 op2 = expand_normal (arg2);
2303 start_sequence ();
2305 /* Compute into RESULT.
2306 Set RESULT to wherever the result comes back. */
2307 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2308 result, 0);
2310 /* If we were unable to expand via the builtin, stop the sequence
2311 (without outputting the insns) and call to the library function
2312 with the stabilized argument list. */
2313 if (result == 0)
2315 end_sequence ();
2316 return expand_call (exp, target, target == const0_rtx);
2319 /* Output the entire sequence. */
2320 insns = get_insns ();
2321 end_sequence ();
2322 emit_insn (insns);
2324 return result;
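/* Thus __builtin_fma (a, b, c) becomes a single fused multiply-add
   insn whenever fma_optab has a handler for the result mode, and
   otherwise turns into an ordinary call to fma with the
   SAVE_EXPR-stabilized arguments. */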
2327 /* Expand a call to the builtin sin and cos math functions.
2328 Return NULL_RTX if a normal call should be emitted rather than expanding the
2329 function in-line. EXP is the expression that is a call to the builtin
2330 function; if convenient, the result should be placed in TARGET.
2331 SUBTARGET may be used as the target for computing one of EXP's
2332 operands. */
2334 static rtx
2335 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2337 optab builtin_optab;
2338 rtx op0;
2339 rtx_insn *insns;
2340 tree fndecl = get_callee_fndecl (exp);
2341 machine_mode mode;
2342 tree arg;
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
2347 arg = CALL_EXPR_ARG (exp, 0);
2349 switch (DECL_FUNCTION_CODE (fndecl))
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = sincos_optab; break;
2354 default:
2355 gcc_unreachable ();
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2361 /* Check if the sincos insn is available; otherwise fall back
2362 to the sin or cos insn. */
2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2364 switch (DECL_FUNCTION_CODE (fndecl))
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 builtin_optab = sin_optab; break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = cos_optab; break;
2370 default:
2371 gcc_unreachable ();
2374 /* Before working hard, check whether the instruction is available. */
2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2377 rtx result = gen_reg_rtx (mode);
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side-effects more than once. */
2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2386 start_sequence ();
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
2390 if (builtin_optab == sincos_optab)
2392 int ok;
2394 switch (DECL_FUNCTION_CODE (fndecl))
2396 CASE_FLT_FN (BUILT_IN_SIN):
2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2398 break;
2399 CASE_FLT_FN (BUILT_IN_COS):
2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2401 break;
2402 default:
2403 gcc_unreachable ();
2405 gcc_assert (ok);
2407 else
2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
2410 if (result != 0)
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
2416 return result;
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call to the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2425 return expand_call (exp, target, target == const0_rtx);
2428 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available return CODE_FOR_nothing. */
2432 static enum insn_code
2433 interclass_mathfn_icode (tree arg, tree fndecl)
2435 bool errno_set = false;
2436 optab builtin_optab = unknown_optab;
2437 machine_mode mode;
2439 switch (DECL_FUNCTION_CODE (fndecl))
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
2455 break;
2456 default:
2457 gcc_unreachable ();
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
2462 return CODE_FOR_nothing;
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2467 if (builtin_optab)
2468 return optab_handler (builtin_optab, mode);
2469 return CODE_FOR_nothing;
2472 /* Expand a call to one of the builtin math functions that operate on
2473 a floating-point argument and produce an integer result (ilogb, isinf,
2474 isnan, etc.).
2475 Return 0 if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function; if convenient, the result should be placed in TARGET. */
2479 static rtx
2480 expand_builtin_interclass_mathfn (tree exp, rtx target)
2482 enum insn_code icode = CODE_FOR_nothing;
2483 rtx op0;
2484 tree fndecl = get_callee_fndecl (exp);
2485 machine_mode mode;
2486 tree arg;
2488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2489 return NULL_RTX;
2491 arg = CALL_EXPR_ARG (exp, 0);
2492 icode = interclass_mathfn_icode (arg, fndecl);
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2495 if (icode != CODE_FOR_nothing)
2497 class expand_operand ops[1];
2498 rtx_insn *last = get_last_insn ();
2499 tree orig_arg = arg;
2501 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2502 need to expand the argument again. This way, we will not perform
2503 side-effects more than once. */
2504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2506 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2508 if (mode != GET_MODE (op0))
2509 op0 = convert_to_mode (mode, op0, 0);
2511 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2512 if (maybe_legitimize_operands (icode, 0, 1, ops)
2513 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2514 return ops[0].value;
2516 delete_insns_since (last);
2517 CALL_EXPR_ARG (exp, 0) = orig_arg;
2520 return NULL_RTX;
2523 /* Expand a call to the builtin sincos math function.
2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2528 static rtx
2529 expand_builtin_sincos (tree exp)
2531 rtx op0, op1, op2, target1, target2;
2532 machine_mode mode;
2533 tree arg, sinp, cosp;
2534 int result;
2535 location_t loc = EXPR_LOCATION (exp);
2536 tree alias_type, alias_off;
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2549 /* Check if the sincos insn is available; otherwise emit the call. */
2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2551 return NULL_RTX;
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2556 op0 = expand_normal (arg);
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2574 return const0_rtx;
2577 /* Expand a call to the internal cexpi builtin to the sincos math function.
2578 EXP is the expression that is a call to the builtin function; if convenient,
2579 the result should be placed in TARGET. */
2581 static rtx
2582 expand_builtin_cexpi (tree exp, rtx target)
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, type;
2586 machine_mode mode;
2587 rtx op0, op1, op2;
2588 location_t loc = EXPR_LOCATION (exp);
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 return NULL_RTX;
2593 arg = CALL_EXPR_ARG (exp, 0);
2594 type = TREE_TYPE (arg);
2595 mode = TYPE_MODE (TREE_TYPE (arg));
2597 /* Try expanding via a sincos optab; fall back to emitting a libcall
2598 to sincos or cexp. We are sure one of the two exists because cexpi
2599 is only generated from sincos or cexp, or when either is available. */
2600 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2602 op1 = gen_reg_rtx (mode);
2603 op2 = gen_reg_rtx (mode);
2605 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2607 /* Compute into op1 and op2. */
2608 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2610 else if (targetm.libc_has_function (function_sincos))
2612 tree call, fn = NULL_TREE;
2613 tree top1, top2;
2614 rtx op1a, op2a;
2616 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2622 else
2623 gcc_unreachable ();
2625 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2626 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op1a = copy_addr_to_reg (XEXP (op1, 0));
2628 op2a = copy_addr_to_reg (XEXP (op2, 0));
2629 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2630 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2632 /* Make sure not to fold the sincos call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2635 call, 3, arg, top1, top2));
2637 else
2639 tree call, fn = NULL_TREE, narg;
2640 tree ctype = build_complex_type (type);
2642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2648 else
2649 gcc_unreachable ();
2651 /* If we don't have a decl for cexp create one. This is the
2652 friendliest fallback if the user calls __builtin_cexpi
2653 without full target C99 function support. */
2654 if (fn == NULL_TREE)
2656 tree fntype;
2657 const char *name = NULL;
2659 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2660 name = "cexpf";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2662 name = "cexp";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2664 name = "cexpl";
2666 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2667 fn = build_fn_decl (name, fntype);
2670 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2671 build_real (type, dconst0), arg);
2673 /* Make sure not to fold the cexp call again. */
2674 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2675 return expand_expr (build_call_nary (ctype, call, 1, narg),
2676 target, VOIDmode, EXPAND_NORMAL);
2679 /* Now build the proper return type. */
2680 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2681 make_tree (TREE_TYPE (arg), op2),
2682 make_tree (TREE_TYPE (arg), op1)),
2683 target, VOIDmode, EXPAND_NORMAL);
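/* Net effect, sketched in source form: __builtin_cexpi (x) becomes
   roughly

     sincos (x, &s, &c);  res = c + s * I;

   on sincos targets, or cexp (CMPLX (0.0, x)) otherwise; the final
   COMPLEX_EXPR above pairs the cosine as the real part with the sine
   as the imaginary part. */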
2686 /* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_expr
2689 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2691 static tree
2692 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
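/* E.g. build_call_nofold_loc (loc, fndecl, 1, arg) yields the plain
   CALL_EXPR fndecl (arg); the fallback paths below rely on this to emit
   library calls such as floor or lround without folding them back into
   the builtin being expanded. */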
2705 /* Expand a call to one of the builtin rounding functions gcc defines
2706 as an extension (lfloor and lceil). As these are gcc extensions we
2707 do not need to worry about setting errno to EDOM.
2708 If expanding via optab fails, lower expression to (int)(floor(x)).
2709 EXP is the expression that is a call to the builtin function;
2710 if convenient, the result should be placed in TARGET. */
2712 static rtx
2713 expand_builtin_int_roundingfn (tree exp, rtx target)
2715 convert_optab builtin_optab;
2716 rtx op0, tmp;
2717 rtx_insn *insns;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 machine_mode mode;
2722 tree arg;
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 return NULL_RTX;
2727 arg = CALL_EXPR_ARG (exp, 0);
2729 switch (DECL_FUNCTION_CODE (fndecl))
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2745 default:
2746 gcc_unreachable ();
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2752 target = gen_reg_rtx (mode);
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 side-effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2761 start_sequence ();
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2777 /* Fall back to floating point rounding optab. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should result
2783 in the best user experience for targets without full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2786 tree fntype;
2787 const char *name = NULL;
2789 switch (DECL_FUNCTION_CODE (fndecl))
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2835 /* Truncate the result of the floating-point optab to an integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2840 return target;
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2852 convert_optab builtin_optab;
2853 rtx op0;
2854 rtx_insn *insns;
2855 tree fndecl = get_callee_fndecl (exp);
2856 tree arg;
2857 machine_mode mode;
2858 enum built_in_function fallback_fn = BUILT_IN_NONE;
2860 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2861 return NULL_RTX;
2863 arg = CALL_EXPR_ARG (exp, 0);
2865 switch (DECL_FUNCTION_CODE (fndecl))
2867 CASE_FLT_FN (BUILT_IN_IRINT):
2868 fallback_fn = BUILT_IN_LRINT;
2869 gcc_fallthrough ();
2870 CASE_FLT_FN (BUILT_IN_LRINT):
2871 CASE_FLT_FN (BUILT_IN_LLRINT):
2872 builtin_optab = lrint_optab;
2873 break;
2875 CASE_FLT_FN (BUILT_IN_IROUND):
2876 fallback_fn = BUILT_IN_LROUND;
2877 gcc_fallthrough ();
2878 CASE_FLT_FN (BUILT_IN_LROUND):
2879 CASE_FLT_FN (BUILT_IN_LLROUND):
2880 builtin_optab = lround_optab;
2881 break;
2883 default:
2884 gcc_unreachable ();
2887 /* There's no easy way to detect the case we need to set EDOM. */
2888 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2889 return NULL_RTX;
2891 /* Make a suitable register to place result in. */
2892 mode = TYPE_MODE (TREE_TYPE (exp));
2894 /* Expand inline only when the call is not required to set errno. */
2895 if (!flag_errno_math)
2897 rtx result = gen_reg_rtx (mode);
2899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2900 need to expand the argument again. This way, we will not perform
2901 side-effects more than once. */
2902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2904 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2906 start_sequence ();
2908 if (expand_sfix_optab (result, op0, builtin_optab))
2910 /* Output the entire sequence. */
2911 insns = get_insns ();
2912 end_sequence ();
2913 emit_insn (insns);
2914 return result;
2917 /* If we were unable to expand via the builtin, stop the sequence
2918 (without outputting the insns) and call to the library function
2919 with the stabilized argument list. */
2920 end_sequence ();
2923 if (fallback_fn != BUILT_IN_NONE)
2925 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2926 targets, (int) round (x) should never be transformed into
2927 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2928 a call to lround in the hope that the target provides at least some
2929 C99 functions. This should result in the best user experience for
2930 targets without full C99 support. */
2931 tree fallback_fndecl = mathfn_built_in_1
2932 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2934 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2935 fallback_fndecl, 1, arg);
2937 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2938 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2939 return convert_to_mode (mode, target, 0);
2942 return expand_call (exp, target, target == const0_rtx);
2945 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2950 static rtx
2951 expand_builtin_powi (tree exp, rtx target)
2953 tree arg0, arg1;
2954 rtx op0, op1;
2955 machine_mode mode;
2956 machine_mode mode2;
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2965 /* Emit a libcall to libgcc. */
2967 /* Mode of the 2nd argument must match that of an int. */
2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2981 target, LCT_CONST, mode,
2982 op0, mode, op1, mode2);
2984 return target;
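/* The libcall emitted above is whatever optab_libfunc (powi_optab, mode)
   returns; with libgcc this is typically __powisf2, __powidf2 and
   friends, though the exact symbol is target- and mode-dependent. */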
2987 /* Expand expression EXP which is a call to the strlen builtin. Return
2988 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2989 try to get the result in TARGET, if convenient. */
2991 static rtx
2992 expand_builtin_strlen (tree exp, rtx target,
2993 machine_mode target_mode)
2995 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2996 return NULL_RTX;
2998 class expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg;
3003 rtx_insn *before_strlen;
3004 machine_mode insn_mode;
3005 enum insn_code icode = CODE_FOR_nothing;
3006 unsigned int align;
3008 /* If the length can be computed at compile-time, return it. */
3009 len = c_strlen (src, 0);
3010 if (len)
3011 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013 /* If the length can be computed at compile-time and is a constant
3014 integer, but there are side-effects in src, evaluate
3015 src for side-effects, then return len.
3016 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3017 can be optimized into: i++; x = 3; */
3018 len = c_strlen (src, 1);
3019 if (len && TREE_CODE (len) == INTEGER_CST)
3021 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3022 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3025 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3027 /* If SRC is not a pointer type, don't do this operation inline. */
3028 if (align == 0)
3029 return NULL_RTX;
3031 /* Bail out if we can't compute strlen in the right mode. */
3032 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3034 icode = optab_handler (strlen_optab, insn_mode);
3035 if (icode != CODE_FOR_nothing)
3036 break;
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3057 /* Check to see if the argument was declared attribute nonstring
3058 and if so, issue a warning since at this point it's not known
3059 to be nul-terminated. */
3060 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3074 pat = get_insns ();
3075 end_sequence ();
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3090 return target;
3093 /* Expand call EXP to the strnlen built-in, returning the result
3094 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3096 static rtx
3097 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3099 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3100 return NULL_RTX;
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 tree bound = CALL_EXPR_ARG (exp, 1);
3105 if (!bound)
3106 return NULL_RTX;
3108 location_t loc = UNKNOWN_LOCATION;
3109 if (EXPR_HAS_LOCATION (exp))
3110 loc = EXPR_LOCATION (exp);
3112 tree maxobjsize = max_object_size ();
3113 tree func = get_callee_fndecl (exp);
3115 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3116 so these conversions aren't necessary. */
3117 c_strlen_data lendata = { };
3118 tree len = c_strlen (src, 0, &lendata, 1);
3119 if (len)
3120 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3122 if (TREE_CODE (bound) == INTEGER_CST)
3124 if (!TREE_NO_WARNING (exp)
3125 && tree_int_cst_lt (maxobjsize, bound)
3126 && warning_at (loc, OPT_Wstringop_overflow_,
3127 "%K%qD specified bound %E "
3128 "exceeds maximum object size %E",
3129 exp, func, bound, maxobjsize))
3130 TREE_NO_WARNING (exp) = true;
3132 bool exact = true;
3133 if (!len || TREE_CODE (len) != INTEGER_CST)
3135 /* Clear EXACT if LEN may be less than SRC suggests,
3136 such as in
3137 strnlen (&a[i], sizeof a)
3138 where the value of i is unknown. Unless i's value is
3139 zero, the call is unsafe because the bound is greater. */
3140 lendata.decl = unterminated_array (src, &len, &exact);
3141 if (!lendata.decl)
3142 return NULL_RTX;
3145 if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3147 location_t warnloc
3148 = expansion_point_location_if_in_system_header (loc);
3150 if (!TREE_NO_WARNING (exp)
3151 && warning_at (warnloc, OPT_Wstringop_overflow_,
3152 exact
3153 ? G_("%K%qD specified bound %E exceeds the size "
3154 "%E of unterminated array")
3155 : G_("%K%qD specified bound %E may exceed the "
3156 "size of at most %E of unterminated array"),
3157 exp, func, bound, len))
3159 inform (DECL_SOURCE_LOCATION (lendata.decl),
3160 "referenced argument declared here");
3161 TREE_NO_WARNING (exp) = true;
3163 return NULL_RTX;
3166 if (!len)
3167 return NULL_RTX;
3169 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3170 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3173 if (TREE_CODE (bound) != SSA_NAME)
3174 return NULL_RTX;
3176 wide_int min, max;
3177 enum value_range_kind rng = get_range_info (bound, &min, &max);
3178 if (rng != VR_RANGE)
3179 return NULL_RTX;
3181 if (!TREE_NO_WARNING (exp)
3182 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3183 && warning_at (loc, OPT_Wstringop_overflow_,
3184 "%K%qD specified bound [%wu, %wu] "
3185 "exceeds maximum object size %E",
3186 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3187 TREE_NO_WARNING (exp) = true;
3189 bool exact = true;
3190 if (!len || TREE_CODE (len) != INTEGER_CST)
3192 lendata.decl = unterminated_array (src, &len, &exact);
3193 if (!lendata.decl)
3194 return NULL_RTX;
3197 if (lendata.decl
3198 && !TREE_NO_WARNING (exp)
3199 && (wi::ltu_p (wi::to_wide (len), min)
3200 || !exact))
3202 location_t warnloc
3203 = expansion_point_location_if_in_system_header (loc);
3205 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3206 exact
3207 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3208 "the size %E of unterminated array")
3209 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3210 "the size of at most %E of unterminated array"),
3211 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3213 inform (DECL_SOURCE_LOCATION (lendata.decl),
3214 "referenced argument declared here");
3215 TREE_NO_WARNING (exp) = true;
3219 if (lendata.decl)
3220 return NULL_RTX;
3222 if (wi::gtu_p (min, wi::to_wide (len)))
3223 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3225 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3226 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3229 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3230 bytes from constant string DATA + OFFSET and return it as target
3231 constant. */
3233 static rtx
3234 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3235 scalar_int_mode mode)
3237 const char *str = (const char *) data;
3239 gcc_assert (offset >= 0
3240 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3241 <= strlen (str) + 1));
3243 return c_readstr (str + offset, mode);
3246 /* LEN specifies the length of the block for the memcpy/memset operation.
3247 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3248 In some cases we can make a very likely guess at the maximum size,
3249 which we then store in PROBABLE_MAX_SIZE. */
3251 static void
3252 determine_block_size (tree len, rtx len_rtx,
3253 unsigned HOST_WIDE_INT *min_size,
3254 unsigned HOST_WIDE_INT *max_size,
3255 unsigned HOST_WIDE_INT *probable_max_size)
3257 if (CONST_INT_P (len_rtx))
3259 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3260 return;
3262 else
3264 wide_int min, max;
3265 enum value_range_kind range_type = VR_UNDEFINED;
3267 /* Determine bounds from the type. */
3268 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3269 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3270 else
3271 *min_size = 0;
3272 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3273 *probable_max_size = *max_size
3274 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3275 else
3276 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3278 if (TREE_CODE (len) == SSA_NAME)
3279 range_type = get_range_info (len, &min, &max);
3280 if (range_type == VR_RANGE)
3282 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3283 *min_size = min.to_uhwi ();
3284 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3285 *probable_max_size = *max_size = max.to_uhwi ();
3287 else if (range_type == VR_ANTI_RANGE)
3289 /* An anti range 0...N lets us determine that the minimum size is N+1. */
3290 if (min == 0)
3292 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3293 *min_size = max.to_uhwi () + 1;
3295 /* Code like
3297 int n;
3298 if (n < 100)
3299 memcpy (a, b, n)
3301 produces an anti range allowing negative values of N. We can still
3302 use the information and make a guess that N is not negative.
3303 */
3304 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3305 *probable_max_size = min.to_uhwi () - 1;
3308 gcc_checking_assert (*max_size <=
3309 (unsigned HOST_WIDE_INT)
3310 GET_MODE_MASK (GET_MODE (len_rtx)));
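/* Worked example of the logic above: for memcpy (a, b, n) with n known
   to be in VR_RANGE [8, 120], this sets *min_size = 8 and *max_size =
   *probable_max_size = 120; for the anti range ~[0, 0] (n known to be
   nonzero), *min_size becomes 1. */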
3313 /* For an expression EXP, issue an access warning controlled by option OPT
3314 for an access in the RANGE of sizes to a region SLEN bytes in size. */
3316 static bool
3317 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3318 tree slen, bool access)
3320 bool warned = false;
3322 if (access)
3324 if (tree_int_cst_equal (range[0], range[1]))
3325 warned = (func
3326 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3327 "%K%qD reading %E byte from a region of size %E",
3328 "%K%qD reading %E bytes from a region of size %E",
3329 exp, func, range[0], slen)
3330 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3331 "%Kreading %E byte from a region of size %E",
3332 "%Kreading %E bytes from a region of size %E",
3333 exp, range[0], slen));
3334 else if (tree_int_cst_sign_bit (range[1]))
3336 /* Avoid printing the upper bound if it's invalid. */
3337 warned = (func
3338 ? warning_at (loc, opt,
3339 "%K%qD reading %E or more bytes from a region "
3340 "of size %E",
3341 exp, func, range[0], slen)
3342 : warning_at (loc, opt,
3343 "%Kreading %E or more bytes from a region "
3344 "of size %E",
3345 exp, range[0], slen));
3347 else
3348 warned = (func
3349 ? warning_at (loc, opt,
3350 "%K%qD reading between %E and %E bytes from "
3351 "a region of size %E",
3352 exp, func, range[0], range[1], slen)
3353 : warning_at (loc, opt,
3354 "%Kreading between %E and %E bytes from "
3355 "a region of size %E",
3356 exp, range[0], range[1], slen));
3358 return warned;
3361 if (tree_int_cst_equal (range[0], range[1]))
3362 warned = (func
3363 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3364 "%K%qD epecting %E byte in a region of size %E",
3365 "%K%qD expecting %E bytes in a region of size %E",
3366 exp, func, range[0], slen)
3367 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3368 "%Kexpecting %E byte in a region of size %E",
3369 "%Kexpecting %E bytes in a region of size %E",
3370 exp, range[0], slen));
3371 else if (tree_int_cst_sign_bit (range[1]))
3373 /* Avoid printing the upper bound if it's invalid. */
3374 warned = (func
3375 ? warning_at (loc, opt,
3376 "%K%qD expecting %E or more bytes in a region "
3377 "of size %E",
3378 exp, func, range[0], slen)
3379 : warning_at (loc, opt,
3380 "%Kexpecting %E or more bytes in a region "
3381 "of size %E",
3382 exp, range[0], slen));
3384 else
3385 warned = (func
3386 ? warning_at (loc, opt,
3387 "%K%qD expecting between %E and %E bytes in "
3388 "a region of size %E",
3389 exp, func, range[0], range[1], slen)
3390 : warning_at (loc, opt,
3391 "%Kexpectting between %E and %E bytes in "
3392 "a region of size %E",
3393 exp, range[0], range[1], slen));
3394 return warned;
3397 /* Issue an inform message describing the target of an access REF.
3398 WRITE is set for a write access and clear for a read access. */
3400 static void
3401 inform_access (const access_ref &ref, bool write)
3403 if (!ref.ref)
3404 return;
3406 /* Convert offset range and avoid including a zero range since it isn't
3407 necessarily meaningful. */
3408 long long minoff = 0, maxoff = 0;
3409 if (wi::fits_shwi_p (ref.offrng[0])
3410 && wi::fits_shwi_p (ref.offrng[1]))
3412 minoff = ref.offrng[0].to_shwi ();
3413 maxoff = ref.offrng[1].to_shwi ();
3416 /* Convert size range and always include it since all sizes are
3417 meaningful. */
3418 unsigned long long minsize = 0, maxsize = 0;
3419 if (wi::fits_shwi_p (ref.sizrng[0])
3420 && wi::fits_shwi_p (ref.sizrng[1]))
3422 minsize = ref.sizrng[0].to_shwi ();
3423 maxsize = ref.sizrng[1].to_shwi ();
3426 char sizestr[80];
3427 location_t loc;
3428 tree allocfn = NULL_TREE;
3429 if (TREE_CODE (ref.ref) == SSA_NAME)
3431 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3432 gcc_assert (is_gimple_call (stmt));
3433 loc = gimple_location (stmt);
3434 allocfn = gimple_call_fndecl (stmt);
3435 if (!allocfn)
3436 /* Handle calls through pointers to functions. */
3437 allocfn = gimple_call_fn (stmt);
3439 /* SIZRNG doesn't necessarily have the same range as the allocation
3440 size determined by gimple_call_alloc_size (). */
3442 if (minsize == maxsize)
3443 sprintf (sizestr, "%llu", minsize);
3444 else
3445 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3448 else
3449 loc = DECL_SOURCE_LOCATION (ref.ref);
3451 if (write)
3453 if (DECL_P (ref.ref))
3455 if (minoff == maxoff)
3457 if (minoff == 0)
3458 inform (loc, "destination object %qD", ref.ref);
3459 else
3460 inform (loc, "at offset %lli into destination object %qD",
3461 minoff, ref.ref);
3463 else
3464 inform (loc, "at offset [%lli, %lli] into destination object %qD",
3465 minoff, maxoff, ref.ref);
3466 return;
3469 if (minoff == maxoff)
3471 if (minoff == 0)
3472 inform (loc, "destination object of size %s allocated by %qE",
3473 sizestr, allocfn);
3474 else
3475 inform (loc,
3476 "at offset %lli into destination object of size %s "
3477 "allocated by %qE", minoff, sizestr, allocfn);
3479 else
3480 inform (loc,
3481 "at offset [%lli, %lli] into destination object of size %s "
3482 "allocated by %qE",
3483 minoff, maxoff, sizestr, allocfn);
3485 return;
3488 if (DECL_P (ref.ref))
3490 if (minoff == maxoff)
3492 if (minoff == 0)
3493 inform (loc, "source object %qD", ref.ref);
3494 else
3495 inform (loc, "at offset %lli into source object %qD",
3496 minoff, ref.ref);
3498 else
3499 inform (loc, "at offset [%lli, %lli] into source object %qD",
3500 minoff, maxoff, ref.ref);
3501 return;
3504 if (minoff == maxoff)
3506 if (minoff == 0)
3507 inform (loc, "source object of size %s allocated by %qE",
3508 sizestr, allocfn);
3509 else
3510 inform (loc,
3511 "at offset %lli into source object of size %s "
3512 "allocated by %qE", minoff, sizestr, allocfn);
3514 else
3515 inform (loc,
3516 "at offset [%lli, %lli] into source object of size %s "
3517 "allocated by %qE",
3518 minoff, maxoff, sizestr, allocfn);
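/* Illustrative example (not part of the original source): for a dynamically
   allocated destination, the note emitted above follows the overflow
   warning and points at the allocation site, e.g.:

     char *p = (char *) malloc (4);
     memset (p, 0, 8);  // warning: writing 8 bytes into a region of
                        //   size 4 [-Wstringop-overflow=]
                        // note: destination object of size 4 allocated
                        //   by malloc  */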
3521 /* Try to verify that the sizes and lengths of the arguments to a string
3522 manipulation function given by EXP are within valid bounds and that
3523 the operation does not lead to buffer overflow or read past the end.
3524 Arguments other than EXP may be null. When non-null, the arguments
3525 have the following meaning:
3526 DST is the destination of a copy call or NULL otherwise.
3527 SRC is the source of a copy call or NULL otherwise.
3528 DSTWRITE is the number of bytes written into the destination obtained
3529 from the user-supplied size argument to the function (such as in
3530 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3531 MAXREAD is the user-supplied bound on the length of the source sequence
3532 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3533 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3534 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3535 expression EXP is a string function call (as opposed to a memory call
3536 like memcpy). As an exception, SRCSTR can also be an integer denoting
3537 the precomputed size of the source string or object (for functions like
3538 memcpy).
3539 DSTSIZE is the size of the destination object specified by the last
3540 argument to the _chk builtins, typically resulting from the expansion
3541 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3542 DSTSIZE)).
3544 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3545 SIZE_MAX.
3547 ACCESS is true for accesses, false for simple size checks in calls
3548 to functions that neither read from nor write to the region.
3550 When nonnull, PAD points to a more detailed description of the access.
3552 If the call is successfully verified as safe return true, otherwise
3553 return false. */
3555 bool
3556 check_access (tree exp, tree, tree, tree dstwrite,
3557 tree maxread, tree srcstr, tree dstsize,
3558 bool access /* = true */,
3559 const access_data *pad /* = NULL */)
3561 int opt = OPT_Wstringop_overflow_;
3563 /* The size of the largest object is half the address space, or
3564 PTRDIFF_MAX. (This is way too permissive.) */
3565 tree maxobjsize = max_object_size ();
3567 /* Either the length of the source string for string functions or
3568 the size of the source object for raw memory functions. */
3569 tree slen = NULL_TREE;
3571 tree range[2] = { NULL_TREE, NULL_TREE };
3573 /* Set to true when the exact number of bytes written by a string
3574 function like strcpy is not known and the only thing that is
3575 known is that it must be at least one (for the terminating nul). */
3576 bool at_least_one = false;
3577 if (srcstr)
3579 /* SRCSTR is normally a pointer to string but as a special case
3580 it can be an integer denoting the length of a string. */
3581 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3583 /* Try to determine the range of lengths the source string
3584 refers to. If it can be determined and is less than
3585 the upper bound given by MAXREAD add one to it for
3586 the terminating nul. Otherwise, set it to one for
3587 the same reason, or to MAXREAD as appropriate. */
3588 c_strlen_data lendata = { };
3589 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3590 range[0] = lendata.minlen;
3591 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3592 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3594 if (maxread && tree_int_cst_le (maxread, range[0]))
3595 range[0] = range[1] = maxread;
3596 else
3597 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3598 range[0], size_one_node);
3600 if (maxread && tree_int_cst_le (maxread, range[1]))
3601 range[1] = maxread;
3602 else if (!integer_all_onesp (range[1]))
3603 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3604 range[1], size_one_node);
3606 slen = range[0];
3608 else
3610 at_least_one = true;
3611 slen = size_one_node;
3614 else
3615 slen = srcstr;
3618 if (!dstwrite && !maxread)
3620 /* When the only available piece of data is the object size
3621 there is nothing to do. */
3622 if (!slen)
3623 return true;
3625 /* Otherwise, when the length of the source sequence is known
3626 (as with strlen), set DSTWRITE to it. */
3627 if (!range[0])
3628 dstwrite = slen;
3631 if (!dstsize)
3632 dstsize = maxobjsize;
3634 if (dstwrite)
3635 get_size_range (dstwrite, range);
3637 tree func = get_callee_fndecl (exp);
3639 /* First check the number of bytes to be written against the maximum
3640 object size. */
3641 if (range[0]
3642 && TREE_CODE (range[0]) == INTEGER_CST
3643 && tree_int_cst_lt (maxobjsize, range[0]))
3645 if (TREE_NO_WARNING (exp))
3646 return false;
3648 location_t loc = tree_nonartificial_location (exp);
3649 loc = expansion_point_location_if_in_system_header (loc);
3651 bool warned;
3652 if (range[0] == range[1])
3653 warned = (func
3654 ? warning_at (loc, opt,
3655 "%K%qD specified size %E "
3656 "exceeds maximum object size %E",
3657 exp, func, range[0], maxobjsize)
3658 : warning_at (loc, opt,
3659 "%Kspecified size %E "
3660 "exceeds maximum object size %E",
3661 exp, range[0], maxobjsize));
3662 else
3663 warned = (func
3664 ? warning_at (loc, opt,
3665 "%K%qD specified size between %E and %E "
3666 "exceeds maximum object size %E",
3667 exp, func,
3668 range[0], range[1], maxobjsize)
3669 : warning_at (loc, opt,
3670 "%Kspecified size between %E and %E "
3671 "exceeds maximum object size %E",
3672 exp, range[0], range[1], maxobjsize));
3673 if (warned)
3674 TREE_NO_WARNING (exp) = true;
3676 return false;
3679 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3680 constant, and in range of unsigned HOST_WIDE_INT. */
3681 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3683 /* Next check the number of bytes to be written against the destination
3684 object size. */
3685 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3687 if (range[0]
3688 && TREE_CODE (range[0]) == INTEGER_CST
3689 && ((tree_fits_uhwi_p (dstsize)
3690 && tree_int_cst_lt (dstsize, range[0]))
3691 || (dstwrite
3692 && tree_fits_uhwi_p (dstwrite)
3693 && tree_int_cst_lt (dstwrite, range[0]))))
3695 if (TREE_NO_WARNING (exp))
3696 return false;
3698 location_t loc = tree_nonartificial_location (exp);
3699 loc = expansion_point_location_if_in_system_header (loc);
3701 bool warned = false;
3702 if (dstwrite == slen && at_least_one)
3704 /* This is a call to strcpy with a destination of 0 size
3705 and a source of unknown length. The call will write
3706 at least one byte past the end of the destination. */
3707 warned = (func
3708 ? warning_at (loc, opt,
3709 "%K%qD writing %E or more bytes into "
3710 "a region of size %E overflows "
3711 "the destination",
3712 exp, func, range[0], dstsize)
3713 : warning_at (loc, opt,
3714 "%Kwriting %E or more bytes into "
3715 "a region of size %E overflows "
3716 "the destination",
3717 exp, range[0], dstsize));
3719 else if (tree_int_cst_equal (range[0], range[1]))
3720 warned = (func
3721 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3722 "%K%qD writing %E byte into a region "
3723 "of size %E overflows the destination",
3724 "%K%qD writing %E bytes into a region "
3725 "of size %E overflows the destination",
3726 exp, func, range[0], dstsize)
3727 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3728 "%Kwriting %E byte into a region "
3729 "of size %E overflows the destination",
3730 "%Kwriting %E bytes into a region "
3731 "of size %E overflows the destination",
3732 exp, range[0], dstsize));
3733 else if (tree_int_cst_sign_bit (range[1]))
3735 /* Avoid printing the upper bound if it's invalid. */
3736 warned = (func
3737 ? warning_at (loc, opt,
3738 "%K%qD writing %E or more bytes into "
3739 "a region of size %E overflows "
3740 "the destination",
3741 exp, func, range[0], dstsize)
3742 : warning_at (loc, opt,
3743 "%Kwriting %E or more bytes into "
3744 "a region of size %E overflows "
3745 "the destination",
3746 exp, range[0], dstsize));
3748 else
3749 warned = (func
3750 ? warning_at (loc, opt,
3751 "%K%qD writing between %E and %E bytes "
3752 "into a region of size %E overflows "
3753 "the destination",
3754 exp, func, range[0], range[1],
3755 dstsize)
3756 : warning_at (loc, opt,
3757 "%Kwriting between %E and %E bytes "
3758 "into a region of size %E overflows "
3759 "the destination",
3760 exp, range[0], range[1],
3761 dstsize));
3762 if (warned)
3764 TREE_NO_WARNING (exp) = true;
3765 if (pad)
3766 inform_access (pad->dst, true);
3769 /* Return error when an overflow has been detected. */
3770 return false;
3774 /* Check the maximum length of the source sequence against the size
3775 of the destination object if known, or against the maximum size
3776 of an object. */
3777 if (maxread)
3779 get_size_range (maxread, range);
3780 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3782 location_t loc = tree_nonartificial_location (exp);
3783 loc = expansion_point_location_if_in_system_header (loc);
3785 if (tree_int_cst_lt (maxobjsize, range[0]))
3787 if (TREE_NO_WARNING (exp))
3788 return false;
3790 bool warned = false;
3792 /* Warn about crazy big sizes first since that's more
3793 likely to be meaningful than saying that the bound
3794 is greater than the object size if both are big. */
3795 if (range[0] == range[1])
3796 warned = (func
3797 ? warning_at (loc, opt,
3798 "%K%qD specified bound %E "
3799 "exceeds maximum object size %E",
3800 exp, func, range[0], maxobjsize)
3801 : warning_at (loc, opt,
3802 "%Kspecified bound %E "
3803 "exceeds maximum object size %E",
3804 exp, range[0], maxobjsize));
3805 else
3806 warned = (func
3807 ? warning_at (loc, opt,
3808 "%K%qD specified bound between "
3809 "%E and %E exceeds maximum object "
3810 "size %E",
3811 exp, func,
3812 range[0], range[1], maxobjsize)
3813 : warning_at (loc, opt,
3814 "%Kspecified bound between "
3815 "%E and %E exceeds maximum object "
3816 "size %E",
3817 exp, range[0], range[1], maxobjsize));
3818 if (warned)
3819 TREE_NO_WARNING (exp) = true;
3821 return false;
3824 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3826 if (TREE_NO_WARNING (exp))
3827 return false;
3829 bool warned = false;
3831 if (tree_int_cst_equal (range[0], range[1]))
3832 warned = (func
3833 ? warning_at (loc, opt,
3834 "%K%qD specified bound %E "
3835 "exceeds destination size %E",
3836 exp, func,
3837 range[0], dstsize)
3838 : warning_at (loc, opt,
3839 "%Kspecified bound %E "
3840 "exceeds destination size %E",
3841 exp, range[0], dstsize));
3842 else
3843 warned = (func
3844 ? warning_at (loc, opt,
3845 "%K%qD specified bound between %E "
3846 "and %E exceeds destination size %E",
3847 exp, func,
3848 range[0], range[1], dstsize)
3849 : warning_at (loc, opt,
3850 "%Kspecified bound between %E "
3851 "and %E exceeds destination size %E",
3852 exp,
3853 range[0], range[1], dstsize));
3854 if (warned)
3855 TREE_NO_WARNING (exp) = true;
3857 return false;
3862 /* Check for reading past the end of SRC. */
3863 if (slen
3864 && slen == srcstr
3865 && dstwrite && range[0]
3866 && tree_int_cst_lt (slen, range[0]))
3868 if (TREE_NO_WARNING (exp))
3869 return false;
3871 location_t loc = tree_nonartificial_location (exp);
3872 loc = expansion_point_location_if_in_system_header (loc);
3874 if (warn_for_access (loc, func, exp, opt, range, slen, access))
3876 TREE_NO_WARNING (exp) = true;
3877 if (pad)
3878 inform_access (pad->src, false);
3880 return false;
3883 return true;
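/* A minimal sketch of a call the checks above diagnose (illustrative,
   not from the original source):

     char d[4];
     strcpy (d, "overflow");  // warning: writing 9 bytes into a region
                              //   of size 4 [-Wstringop-overflow=]

   Here the length range of SRCSTR is [8, 8], one is added for the
   terminating nul, DSTSIZE is 4, and check_access reports the overflow
   and returns false.  */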
3886 /* If STMT is a call to an allocation function, returns the constant
3887 size of the object allocated by the call represented as sizetype.
3888 If nonnull, sets RNG1[] to the range of the size. */
3890 tree
3891 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3892 const vr_values *rvals /* = NULL */)
3894 if (!stmt)
3895 return NULL_TREE;
3897 tree allocfntype;
3898 if (tree fndecl = gimple_call_fndecl (stmt))
3899 allocfntype = TREE_TYPE (fndecl);
3900 else
3901 allocfntype = gimple_call_fntype (stmt);
3903 if (!allocfntype)
3904 return NULL_TREE;
3906 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3907 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3908 if (!at)
3910 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3911 return NULL_TREE;
3913 argidx1 = 0;
3916 unsigned nargs = gimple_call_num_args (stmt);
3918 if (argidx1 == UINT_MAX)
3920 tree atval = TREE_VALUE (at);
3921 if (!atval)
3922 return NULL_TREE;
3924 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3925 if (nargs <= argidx1)
3926 return NULL_TREE;
3928 atval = TREE_CHAIN (atval);
3929 if (atval)
3931 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3932 if (nargs <= argidx2)
3933 return NULL_TREE;
3937 tree size = gimple_call_arg (stmt, argidx1);
3939 wide_int rng1_buf[2];
3940 /* If RNG1 is not set, use the buffer. */
3941 if (!rng1)
3942 rng1 = rng1_buf;
3944 if (!get_range (size, rng1, rvals))
3945 return NULL_TREE;
3947 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3948 return fold_convert (sizetype, size);
3950 /* To handle ranges do the math in wide_int and return the product
3951 of the upper bounds as a constant. Ignore anti-ranges. */
3952 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3953 wide_int rng2[2];
3954 if (!get_range (n, rng2, rvals))
3955 return NULL_TREE;
3957 /* Extend to the maximum precision to avoid overflow. */
3958 const int prec = ADDR_MAX_PRECISION;
3959 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3960 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3961 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3962 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3964 /* Compute products of both bounds for the caller but return the lesser
3965 of SIZE_MAX and the product of the upper bounds as a constant. */
3966 rng1[0] = rng1[0] * rng2[0];
3967 rng1[1] = rng1[1] * rng2[1];
3968 tree size_max = TYPE_MAX_VALUE (sizetype);
3969 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3971 rng1[1] = wi::to_wide (size_max);
3972 return size_max;
3975 return wide_int_to_tree (sizetype, rng1[1]);
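/* Illustrative example (an assumption, not from the original source):
   given a declaration such as

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   a call to my_calloc (4, 8) yields the constant 32; for nonconstant
   arguments RNG1[] is set to the products of the per-argument bounds,
   with the upper bound capped at SIZE_MAX as computed above.  */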
3978 /* Wrapper around the wide_int overload of get_range. Returns the same
3979 result but accepts offset_int instead. */
3981 static bool
3982 get_range (tree x, signop sgn, offset_int r[2],
3983 const vr_values *rvals /* = NULL */)
3985 wide_int wr[2];
3986 if (!get_range (x, wr, rvals))
3987 return false;
3989 r[0] = offset_int::from (wr[0], sgn);
3990 r[1] = offset_int::from (wr[1], sgn);
3991 return true;
3994 /* Helper to compute the size of the object referenced by the PTR
3995 expression which must have pointer type, using Object Size type
3996 OSTYPE (only the least significant 2 bits are used).
3997 On success, sets PREF->REF to the DECL of the referenced object
3998 if it's unique, otherwise to null, PREF->OFFRNG to the range of
3999 offsets into it, and PREF->SIZRNG to the range of sizes of
4000 the object(s).
4001 VISITED is used to avoid visiting the same PHI operand multiple
4002 times, and, when nonnull, RVALS to determine range information.
4003 Returns true on success, false when the size cannot be determined.
4005 The function is intended for diagnostics and should not be used
4006 to influence code generation or optimization. */
4008 static bool
4009 compute_objsize (tree ptr, int ostype, access_ref *pref,
4010 bitmap *visited, const vr_values *rvals /* = NULL */)
4012 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4013 if (addr)
4014 ptr = TREE_OPERAND (ptr, 0);
4016 if (DECL_P (ptr))
4018 /* Bail if the reference is to the pointer itself (as opposed
4019 to what it points to). */
4020 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4021 return false;
4023 tree size = decl_init_size (ptr, false);
4024 if (!size || TREE_CODE (size) != INTEGER_CST)
4025 return false;
4027 pref->ref = ptr;
4028 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4029 return true;
4032 const tree_code code = TREE_CODE (ptr);
4034 if (code == COMPONENT_REF)
4036 tree field = TREE_OPERAND (ptr, 1);
4038 if (ostype == 0)
4040 /* For raw memory functions like memcpy bail if the size
4041 of the enclosing object cannot be determined. */
4042 tree ref = TREE_OPERAND (ptr, 0);
4043 if (!compute_objsize (ref, ostype, pref, visited, rvals)
4044 || !pref->ref)
4045 return false;
4047 /* Otherwise, use the size of the enclosing object and add
4048 the offset of the member to the offset computed so far. */
4049 tree offset = byte_position (field);
4050 if (TREE_CODE (offset) != INTEGER_CST)
4051 return false;
4052 offset_int off = wi::to_offset (offset);
4053 pref->offrng[0] += off;
4054 pref->offrng[1] += off;
4055 return true;
4058 /* Bail if the reference is to the pointer itself (as opposed
4059 to what it points to). */
4060 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4061 return false;
4063 pref->ref = field;
4064 /* Only return constant sizes for now while callers depend
4065 on it. INT0LEN is true for interior zero-length arrays. */
4066 bool int0len = false;
4067 tree size = component_ref_size (ptr, &int0len);
4068 if (int0len)
4070 pref->sizrng[0] = pref->sizrng[1] = 0;
4071 return true;
4074 if (!size || TREE_CODE (size) != INTEGER_CST)
4075 return false;
4077 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4078 return true;
4081 if (code == ARRAY_REF || code == MEM_REF)
4083 tree ref = TREE_OPERAND (ptr, 0);
4084 tree reftype = TREE_TYPE (ref);
4085 if (code == ARRAY_REF
4086 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4087 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4088 of known bound. */
4089 return false;
4091 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4093 /* Give up for MEM_REFs of vector types; those may be synthesized
4094 from multiple assignments to consecutive data members. See PR
4095 93200.
4096 FIXME: Deal with this more generally, e.g., by marking up such
4097 MEM_REFs at the time they're created. */
4098 reftype = TREE_TYPE (reftype);
4099 if (TREE_CODE (reftype) == VECTOR_TYPE)
4100 return false;
4103 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4104 return false;
4106 offset_int orng[2];
4107 tree off = TREE_OPERAND (ptr, 1);
4108 if (!get_range (off, SIGNED, orng, rvals))
4109 /* Fail unless the size of the object is zero. */
4110 return pref->sizrng[0] == 0 && pref->sizrng[0] == pref->sizrng[1];
4112 if (TREE_CODE (ptr) == ARRAY_REF)
4114 /* Convert the array index range determined above to a byte
4115 offset. */
4116 tree lowbnd = array_ref_low_bound (ptr);
4117 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4119 /* Adjust the index by the low bound of the array domain
4120 (normally zero but 1 in Fortran). */
4121 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4122 orng[0] -= lb;
4123 orng[1] -= lb;
4126 tree eltype = TREE_TYPE (ptr);
4127 tree tpsize = TYPE_SIZE_UNIT (eltype);
4128 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4129 return false;
4131 offset_int sz = wi::to_offset (tpsize);
4132 orng[0] *= sz;
4133 orng[1] *= sz;
4135 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4137 /* Except for the permissive raw memory functions which
4138 use the size of the whole object determined above,
4139 use the size of the referenced array. */
4140 pref->sizrng[0] = pref->offrng[0] + orng[0] + sz;
4141 pref->sizrng[1] = pref->offrng[1] + orng[1] + sz;
4145 pref->offrng[0] += orng[0];
4146 pref->offrng[1] += orng[1];
4148 return true;
4151 if (TREE_CODE (ptr) == SSA_NAME)
4153 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4154 if (is_gimple_call (stmt))
4156 /* If STMT is a call to an allocation function get the size
4157 from its argument(s). If successful, also set *PDECL to
4158 PTR for the caller to include in diagnostics. */
4159 wide_int wr[2];
4160 if (gimple_call_alloc_size (stmt, wr, rvals))
4162 pref->ref = ptr;
4163 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4164 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4165 return true;
4167 return false;
4170 /* TODO: Handle PHI. */
4172 if (!is_gimple_assign (stmt))
4173 return false;
4175 ptr = gimple_assign_rhs1 (stmt);
4177 tree_code code = gimple_assign_rhs_code (stmt);
4178 if (TREE_CODE (TREE_TYPE (ptr)) != POINTER_TYPE)
4179 /* Avoid conversions from non-pointers. */
4180 return false;
4182 if (code == POINTER_PLUS_EXPR)
4184 /* If the offset in the expression can be determined, use
4185 it to adjust the overall offset. Otherwise, set the overall
4186 offset to the maximum. */
4187 offset_int orng[2];
4188 tree off = gimple_assign_rhs2 (stmt);
4189 if (!get_range (off, SIGNED, orng, rvals)
4190 || !wi::les_p (orng[0], orng[1]))
4192 orng[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node));
4193 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4196 pref->offrng[0] += orng[0];
4197 pref->offrng[1] += orng[1];
4199 else if (code != ADDR_EXPR)
4200 return false;
4202 return compute_objsize (ptr, ostype, pref, visited, rvals);
4205 tree type = TREE_TYPE (ptr);
4206 type = TYPE_MAIN_VARIANT (type);
4207 if (TREE_CODE (ptr) == ADDR_EXPR)
4208 ptr = TREE_OPERAND (ptr, 0);
4210 if (TREE_CODE (type) == ARRAY_TYPE
4211 && !array_at_struct_end_p (ptr))
4213 if (tree size = TYPE_SIZE_UNIT (type))
4214 return get_range (size, UNSIGNED, pref->sizrng, rvals);
4217 return false;
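/* Illustrative example (an assumption, not from the original source):
   for

     struct S { char a[4]; char b[8]; } s;

   evaluating &s.b[2] with OSTYPE 1 sets PREF->REF to the FIELD_DECL
   for b, PREF->SIZRNG to [8, 8], and PREF->OFFRNG to [2, 2] via the
   COMPONENT_REF and ARRAY_REF cases above; with OSTYPE 0 the size of
   the whole object (12 bytes) is used and the byte position of b is
   folded into the offset instead.  */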
4220 /* Convenience wrapper around the above. */
4222 static tree
4223 compute_objsize (tree ptr, int ostype, access_ref *pref,
4224 const vr_values *rvals = NULL)
4226 bitmap visited = NULL;
4228 bool success
4229 = compute_objsize (ptr, ostype, pref, &visited, rvals);
4231 if (visited)
4232 BITMAP_FREE (visited);
4234 if (!success)
4235 return NULL_TREE;
4237 if (pref->offrng[0] < 0)
4239 if (pref->offrng[1] < 0)
4240 return size_zero_node;
4242 pref->offrng[0] = 0;
4245 if (pref->sizrng[1] < pref->offrng[0])
4246 return size_zero_node;
4248 return wide_int_to_tree (sizetype, pref->sizrng[1] - pref->offrng[0]);
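/* Continuing the &s.b[2] example above (assuming OSTYPE 1), this
   wrapper would return SIZRNG[1] - OFFRNG[0] = 8 - 2 = 6, the space
   remaining in the member array past the offset; negative or excessive
   offsets yield size_zero_node as handled above.  */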
4251 /* Transitional wrapper around the above. The function should be removed
4252 once callers transition to one of the two above. */
4254 tree
4255 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
4256 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
4258 /* Set the initial offsets to zero and size to negative to indicate
4259 none has been computed yet. */
4260 access_ref ref;
4261 tree size = compute_objsize (ptr, ostype, &ref, rvals);
4262 if (!size)
4263 return NULL_TREE;
4265 if (pdecl)
4266 *pdecl = ref.ref;
4268 if (poff)
4269 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
4271 return size;
4274 /* Helper to determine and check the sizes of the source and the destination
4275 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
4276 call expression, DEST is the destination argument, SRC is the source
4277 argument or null, and LEN is the number of bytes. Use Object Size type-0
4278 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4279 (no overflow or invalid sizes), false otherwise. */
4281 static bool
4282 check_memop_access (tree exp, tree dest, tree src, tree size)
4284 /* For functions like memset and memcpy that operate on raw memory
4285 try to determine the size of the largest source and destination
4286 object using type-0 Object Size regardless of the object size
4287 type specified by the option. */
4288 access_data data;
4289 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
4290 tree dstsize = compute_objsize (dest, 0, &data.dst);
4292 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4293 srcsize, dstsize, true, &data);
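/* Illustrative consequence of using type-0 Object Size (an assumption,
   not from the original source):

     struct S { char a[4]; char b[8]; } s;
     memset (s.a, 0, 8);  // not diagnosed: 8 <= sizeof s

   The write extends past s.a but stays within the 12-byte enclosing
   object, which is what type-0 measures for raw memory functions.  */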
4296 /* Validate memchr arguments without performing any expansion.
4297 Return NULL_RTX. */
4299 static rtx
4300 expand_builtin_memchr (tree exp, rtx)
4302 if (!validate_arglist (exp,
4303 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4304 return NULL_RTX;
4306 tree arg1 = CALL_EXPR_ARG (exp, 0);
4307 tree len = CALL_EXPR_ARG (exp, 2);
4309 /* Diagnose calls where the specified length exceeds the size
4310 of the object. */
4311 if (warn_stringop_overflow)
4313 access_data data;
4314 tree size = compute_objsize (arg1, 0, &data.src);
4315 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4316 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE,
4317 true, &data);
4320 return NULL_RTX;
4323 /* Expand a call EXP to the memcpy builtin.
4324 Return NULL_RTX if we failed; the caller should emit a normal call,
4325 otherwise try to get the result in TARGET, if convenient (and in
4326 mode MODE if that's convenient). */
4328 static rtx
4329 expand_builtin_memcpy (tree exp, rtx target)
4331 if (!validate_arglist (exp,
4332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4333 return NULL_RTX;
4335 tree dest = CALL_EXPR_ARG (exp, 0);
4336 tree src = CALL_EXPR_ARG (exp, 1);
4337 tree len = CALL_EXPR_ARG (exp, 2);
4339 check_memop_access (exp, dest, src, len);
4341 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4342 /*retmode=*/ RETURN_BEGIN, false);
4345 /* Check a call EXP to the memmove built-in for validity.
4346 Return NULL_RTX on both success and failure. */
4348 static rtx
4349 expand_builtin_memmove (tree exp, rtx target)
4351 if (!validate_arglist (exp,
4352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4353 return NULL_RTX;
4355 tree dest = CALL_EXPR_ARG (exp, 0);
4356 tree src = CALL_EXPR_ARG (exp, 1);
4357 tree len = CALL_EXPR_ARG (exp, 2);
4359 check_memop_access (exp, dest, src, len);
4361 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4362 /*retmode=*/ RETURN_BEGIN, true);
4365 /* Expand a call EXP to the mempcpy builtin.
4366 Return NULL_RTX if we failed; the caller should emit a normal call,
4367 otherwise try to get the result in TARGET, if convenient (and in
4368 mode MODE if that's convenient). */
4370 static rtx
4371 expand_builtin_mempcpy (tree exp, rtx target)
4373 if (!validate_arglist (exp,
4374 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4375 return NULL_RTX;
4377 tree dest = CALL_EXPR_ARG (exp, 0);
4378 tree src = CALL_EXPR_ARG (exp, 1);
4379 tree len = CALL_EXPR_ARG (exp, 2);
4381 /* Policy does not generally allow using compute_objsize (which
4382 is used internally by check_memop_access) to change code generation
4383 or drive optimization decisions.
4385 In this instance it is safe because the code we generate has
4386 the same semantics regardless of the return value of
4387 check_memop_access. Exactly the same amount of data is copied
4388 and the return value is exactly the same in both cases.
4390 Furthermore, check_memop_access always uses mode 0 for the call to
4391 compute_objsize, so the imprecise nature of compute_objsize is
4392 avoided. */
4394 /* Avoid expanding mempcpy into memcpy when the call is determined
4395 to overflow the buffer. This also prevents the same overflow
4396 from being diagnosed again when expanding memcpy. */
4397 if (!check_memop_access (exp, dest, src, len))
4398 return NULL_RTX;
4400 return expand_builtin_mempcpy_args (dest, src, len,
4401 target, exp, /*retmode=*/ RETURN_END);
4404 /* Helper function to do the actual work for expand of memory copy family
4405 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
4406 of memory from SRC to DEST and assign to TARGET if convenient. Return
4407 value is based on RETMODE argument. */
4409 static rtx
4410 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4411 rtx target, tree exp, memop_ret retmode,
4412 bool might_overlap)
4414 const char *src_str;
4415 unsigned int src_align = get_pointer_alignment (src);
4416 unsigned int dest_align = get_pointer_alignment (dest);
4417 rtx dest_mem, src_mem, dest_addr, len_rtx;
4418 HOST_WIDE_INT expected_size = -1;
4419 unsigned int expected_align = 0;
4420 unsigned HOST_WIDE_INT min_size;
4421 unsigned HOST_WIDE_INT max_size;
4422 unsigned HOST_WIDE_INT probable_max_size;
4424 bool is_move_done;
4426 /* If DEST is not a pointer type, call the normal function. */
4427 if (dest_align == 0)
4428 return NULL_RTX;
4430 /* If either SRC is not a pointer type, don't do this
4431 operation in-line. */
4432 if (src_align == 0)
4433 return NULL_RTX;
4435 if (currently_expanding_gimple_stmt)
4436 stringop_block_profile (currently_expanding_gimple_stmt,
4437 &expected_align, &expected_size);
4439 if (expected_align < dest_align)
4440 expected_align = dest_align;
4441 dest_mem = get_memory_rtx (dest, len);
4442 set_mem_align (dest_mem, dest_align);
4443 len_rtx = expand_normal (len);
4444 determine_block_size (len, len_rtx, &min_size, &max_size,
4445 &probable_max_size);
4446 src_str = c_getstr (src);
4448 /* If SRC is a string constant and block move would be done by
4449 pieces, we can avoid loading the string from memory and only
4450 store the computed constants. This works in the overlap
4451 (memmove) case as well because store_by_pieces just generates a
4452 series of stores of constants from the string constant returned
4453 by c_getstr(). */
4454 if (src_str
4455 && CONST_INT_P (len_rtx)
4456 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
4457 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4458 CONST_CAST (char *, src_str),
4459 dest_align, false))
4461 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4462 builtin_memcpy_read_str,
4463 CONST_CAST (char *, src_str),
4464 dest_align, false, retmode);
4465 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4466 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4467 return dest_mem;
4470 src_mem = get_memory_rtx (src, len);
4471 set_mem_align (src_mem, src_align);
4473 /* Copy word part most expediently. */
4474 enum block_op_methods method = BLOCK_OP_NORMAL;
4475 if (CALL_EXPR_TAILCALL (exp)
4476 && (retmode == RETURN_BEGIN || target == const0_rtx))
4477 method = BLOCK_OP_TAILCALL;
4478 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4479 && retmode == RETURN_END
4480 && !might_overlap
4481 && target != const0_rtx);
4482 if (use_mempcpy_call)
4483 method = BLOCK_OP_NO_LIBCALL_RET;
4484 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4485 expected_align, expected_size,
4486 min_size, max_size, probable_max_size,
4487 use_mempcpy_call, &is_move_done, might_overlap);
4489 /* Bail out when a mempcpy call would be expanded as a libcall and
4490 the target provides a fast implementation of the mempcpy
4491 routine. */
4492 if (!is_move_done)
4493 return NULL_RTX;
4495 if (dest_addr == pc_rtx)
4496 return NULL_RTX;
4498 if (dest_addr == 0)
4500 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4501 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4504 if (retmode != RETURN_BEGIN && target != const0_rtx)
4506 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4507 /* stpcpy returns a pointer to the last byte written (the nul). */
4508 if (retmode == RETURN_END_MINUS_ONE)
4509 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4512 return dest_addr;
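/* Illustrative example (an assumption, not from the original source):
   with a constant source and a known small length, the store_by_pieces
   path above turns

     memcpy (buf, "abc", 4);

   into direct stores of the constant bytes { 'a', 'b', 'c', '\0' }
   (e.g. a single 32-bit immediate store on a suitable target) instead
   of a block move or libcall.  */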
4515 static rtx
4516 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4517 rtx target, tree orig_exp, memop_ret retmode)
4519 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4520 retmode, false);
4523 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4524 we failed, the caller should emit a normal call, otherwise try to
4525 get the result in TARGET, if convenient.
4526 Return value is based on RETMODE argument. */
4528 static rtx
4529 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4531 class expand_operand ops[3];
4532 rtx dest_mem;
4533 rtx src_mem;
4535 if (!targetm.have_movstr ())
4536 return NULL_RTX;
4538 dest_mem = get_memory_rtx (dest, NULL);
4539 src_mem = get_memory_rtx (src, NULL);
4540 if (retmode == RETURN_BEGIN)
4542 target = force_reg (Pmode, XEXP (dest_mem, 0));
4543 dest_mem = replace_equiv_address (dest_mem, target);
4546 create_output_operand (&ops[0],
4547 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4548 create_fixed_operand (&ops[1], dest_mem);
4549 create_fixed_operand (&ops[2], src_mem);
4550 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4551 return NULL_RTX;
4553 if (retmode != RETURN_BEGIN && target != const0_rtx)
4555 target = ops[0].value;
4556 /* movstr is supposed to set end to the address of the NUL
4557 terminator. If the caller requested a mempcpy-like return value,
4558 adjust it. */
4559 if (retmode == RETURN_END)
4561 rtx tem = plus_constant (GET_MODE (target),
4562 gen_lowpart (GET_MODE (target), target), 1);
4563 emit_move_insn (target, force_operand (tem, NULL_RTX));
4566 return target;
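/* Clarifying note (illustrative, not from the original source): the
   movstr pattern leaves its result pointing at the terminating nul,
   which already matches RETURN_END_MINUS_ONE (stpcpy semantics);
   RETURN_END callers expect the address one past the nul, hence the
   plus_constant adjustment by 1 above.  */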
4569 /* Do some very basic size validation of a call to the strcat builtin
4570 given by EXP. Return NULL_RTX to have the built-in expand to a call
4571 to the library function. */
4573 static rtx
4574 expand_builtin_strcat (tree exp)
4576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4577 || !warn_stringop_overflow)
4578 return NULL_RTX;
4580 tree dest = CALL_EXPR_ARG (exp, 0);
4581 tree src = CALL_EXPR_ARG (exp, 1);
4583 /* Detect unterminated source (only). */
4584 if (!check_nul_terminated_array (exp, src))
4585 return NULL_RTX;
4587 /* There is no way here to determine the length of the string in
4588 the destination to which the SRC string is being appended so
4589 just diagnose cases when the source string is longer than
4590 the destination object. */
4592 access_data data;
4593 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4595 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4596 destsize, true, &data);
4598 return NULL_RTX;
4601 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4602 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4603 try to get the result in TARGET, if convenient (and in mode MODE if that's
4604 convenient). */
4606 static rtx
4607 expand_builtin_strcpy (tree exp, rtx target)
4609 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4610 return NULL_RTX;
4612 tree dest = CALL_EXPR_ARG (exp, 0);
4613 tree src = CALL_EXPR_ARG (exp, 1);
4615 if (warn_stringop_overflow)
4617 access_data data;
4618 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
4619 &data.dst);
4620 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4621 src, destsize, true, &data);
4624 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4626 /* Check to see if the argument was declared attribute nonstring
4627 and if so, issue a warning since at this point it's not known
4628 to be nul-terminated. */
4629 tree fndecl = get_callee_fndecl (exp);
4630 maybe_warn_nonstring_arg (fndecl, exp);
4631 return ret;
4634 return NULL_RTX;
4637 /* Helper function to do the actual work for expand_builtin_strcpy. The
4638 arguments to the builtin_strcpy call DEST and SRC are broken out
4639 so that this can also be called without constructing an actual CALL_EXPR.
4640 The other arguments and return value are the same as for
4641 expand_builtin_strcpy. */
4643 static rtx
4644 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4646 /* Detect strcpy calls with unterminated arrays. */
4647 if (tree nonstr = unterminated_array (src))
4649 /* NONSTR refers to the non-nul terminated constant array. */
4650 if (!TREE_NO_WARNING (exp))
4651 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4652 return NULL_RTX;
4655 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4658 /* Expand a call EXP to the stpcpy builtin.
4659 Return NULL_RTX if we failed; the caller should emit a normal call,
4660 otherwise try to get the result in TARGET, if convenient (and in
4661 mode MODE if that's convenient). */
4663 static rtx
4664 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4666 tree dst, src;
4667 location_t loc = EXPR_LOCATION (exp);
4669 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4670 return NULL_RTX;
4672 dst = CALL_EXPR_ARG (exp, 0);
4673 src = CALL_EXPR_ARG (exp, 1);
4675 if (warn_stringop_overflow)
4677 access_data data;
4678 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
4679 &data.dst);
4680 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4681 src, destsize, true, &data);
4684 /* If return value is ignored, transform stpcpy into strcpy. */
4685 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4687 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4688 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4689 return expand_expr (result, target, mode, EXPAND_NORMAL);
4691 else
4693 tree len, lenp1;
4694 rtx ret;
4696 /* Ensure we get an actual string whose length can be evaluated at
4697 compile-time, not an expression containing a string. This is
4698 because the latter will potentially produce pessimized code
4699 when used to produce the return value. */
4700 c_strlen_data lendata = { };
4701 if (!c_getstr (src, NULL)
4702 || !(len = c_strlen (src, 0, &lendata, 1)))
4703 return expand_movstr (dst, src, target,
4704 /*retmode=*/ RETURN_END_MINUS_ONE);
4706 if (lendata.decl && !TREE_NO_WARNING (exp))
4707 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4709 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4710 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4711 target, exp,
4712 /*retmode=*/ RETURN_END_MINUS_ONE);
4714 if (ret)
4715 return ret;
4717 if (TREE_CODE (len) == INTEGER_CST)
4719 rtx len_rtx = expand_normal (len);
4721 if (CONST_INT_P (len_rtx))
4723 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4725 if (ret)
4727 if (! target)
4729 if (mode != VOIDmode)
4730 target = gen_reg_rtx (mode);
4731 else
4732 target = gen_reg_rtx (GET_MODE (ret));
4734 if (GET_MODE (target) != GET_MODE (ret))
4735 ret = gen_lowpart (GET_MODE (target), ret);
4737 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4738 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4739 gcc_assert (ret);
4741 return target;
4746 return expand_movstr (dst, src, target,
4747 /*retmode=*/ RETURN_END_MINUS_ONE);
4751 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4752 arguments while being careful to avoid duplicate warnings (which could
4753 be issued if the expander were to expand the call, resulting in it
4754 being emitted in expand_call()). */
4756 static rtx
4757 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4759 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4761 /* The call has been successfully expanded. Check for nonstring
4762 arguments and issue warnings as appropriate. */
4763 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4764 return ret;
4767 return NULL_RTX;
4770 /* Check a call EXP to the stpncpy built-in for validity.
4771 Return NULL_RTX on both success and failure. */
4773 static rtx
4774 expand_builtin_stpncpy (tree exp, rtx)
4776 if (!validate_arglist (exp,
4777 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4778 || !warn_stringop_overflow)
4779 return NULL_RTX;
4781 /* The source and destination of the call. */
4782 tree dest = CALL_EXPR_ARG (exp, 0);
4783 tree src = CALL_EXPR_ARG (exp, 1);
4785 /* The exact number of bytes to write (not the maximum). */
4786 tree len = CALL_EXPR_ARG (exp, 2);
4787 if (!check_nul_terminated_array (exp, src, len))
4788 return NULL_RTX;
4790 access_data data;
4791 /* The size of the destination object. */
4792 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4794 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize,
4795 true, &data);
4797 return NULL_RTX;
4800 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4801 bytes from constant string DATA + OFFSET and return it as target
4802 constant. */
4805 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4806 scalar_int_mode mode)
4808 const char *str = (const char *) data;
4810 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4811 return const0_rtx;
4813 return c_readstr (str + offset, mode);
4816 /* Helper to check the sizes of sequences and the destination of calls
4817 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4818 success (no overflow or invalid sizes), false otherwise. */
4820 static bool
4821 check_strncat_sizes (tree exp, tree objsize)
4823 tree dest = CALL_EXPR_ARG (exp, 0);
4824 tree src = CALL_EXPR_ARG (exp, 1);
4825 tree maxread = CALL_EXPR_ARG (exp, 2);
4827 /* Try to determine the range of lengths that the source expression
4828 refers to. */
4829 c_strlen_data lendata = { };
4830 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4832 /* Try to verify that the destination is big enough for the shortest
4833 string. */
4835 access_data data;
4836 if (!objsize && warn_stringop_overflow)
4838 /* If it hasn't been provided by __strncat_chk, try to determine
4839 the size of the destination object into which the source is
4840 being copied. */
4841 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4844 /* Add one for the terminating nul. */
4845 tree srclen = (lendata.minlen
4846 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4847 size_one_node)
4848 : NULL_TREE);
4850 /* The strncat function copies at most MAXREAD bytes and always appends
4851 the terminating nul so the specified upper bound should never be equal
4852 to (or greater than) the size of the destination. */
4853 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4854 && tree_int_cst_equal (objsize, maxread))
4856 location_t loc = tree_nonartificial_location (exp);
4857 loc = expansion_point_location_if_in_system_header (loc);
4859 warning_at (loc, OPT_Wstringop_overflow_,
4860 "%K%qD specified bound %E equals destination size",
4861 exp, get_callee_fndecl (exp), maxread);
4863 return false;
4866 if (!srclen
4867 || (maxread && tree_fits_uhwi_p (maxread)
4868 && tree_fits_uhwi_p (srclen)
4869 && tree_int_cst_lt (maxread, srclen)))
4870 srclen = maxread;
4872 /* The number of bytes to write is LEN, but check_access will also
4873 check SRCLEN if LEN's value isn't known. */
4874 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4875 objsize, true, &data);
4878 /* Similar to expand_builtin_strcat, do some very basic size validation
4879 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4880 the built-in expand to a call to the library function. */
4882 static rtx
4883 expand_builtin_strncat (tree exp, rtx)
4885 if (!validate_arglist (exp,
4886 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4887 || !warn_stringop_overflow)
4888 return NULL_RTX;
4890 tree dest = CALL_EXPR_ARG (exp, 0);
4891 tree src = CALL_EXPR_ARG (exp, 1);
4892 /* The upper bound on the number of bytes to write. */
4893 tree maxread = CALL_EXPR_ARG (exp, 2);
4895 /* Detect unterminated source (only). */
4896 if (!check_nul_terminated_array (exp, src, maxread))
4897 return NULL_RTX;
4899 /* The length of the source sequence. */
4900 tree slen = c_strlen (src, 1);
4902 /* Try to determine the range of lengths that the source expression
4903 refers to. Since the lengths are only used for warning and not
4904 for code generation disable strict mode below. */
4905 tree maxlen = slen;
4906 if (!maxlen)
4908 c_strlen_data lendata = { };
4909 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4910 maxlen = lendata.maxbound;
4913 access_data data;
4914 /* Try to verify that the destination is big enough for the shortest
4915 string. First try to determine the size of the destination object
4916 into which the source is being copied. */
4917 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4919 /* Add one for the terminating nul. */
4920 tree srclen = (maxlen
4921 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4922 size_one_node)
4923 : NULL_TREE);
4925 /* The strncat function copies at most MAXREAD bytes and always appends
4926 the terminating nul so the specified upper bound should never be equal
4927 to (or greater than) the size of the destination. */
4928 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4929 && tree_int_cst_equal (destsize, maxread))
4931 location_t loc = tree_nonartificial_location (exp);
4932 loc = expansion_point_location_if_in_system_header (loc);
4934 warning_at (loc, OPT_Wstringop_overflow_,
4935 "%K%qD specified bound %E equals destination size",
4936 exp, get_callee_fndecl (exp), maxread);
4938 return NULL_RTX;
4941 if (!srclen
4942 || (maxread && tree_fits_uhwi_p (maxread)
4943 && tree_fits_uhwi_p (srclen)
4944 && tree_int_cst_lt (maxread, srclen)))
4945 srclen = maxread;
4947 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize,
4948 true, &data);
4950 return NULL_RTX;
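/* Illustrative example of the bound check above (an assumption, not
   from the original source):

     char d[8] = "ab";
     strncat (d, s, sizeof d);  // warning: specified bound 8 equals
                                //   destination size

   Because strncat appends a nul after copying up to the bound, a bound
   equal to the destination size can overflow it by one byte.  */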
4953 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4954 NULL_RTX if we failed; the caller should emit a normal call. */
4956 static rtx
4957 expand_builtin_strncpy (tree exp, rtx target)
4959 location_t loc = EXPR_LOCATION (exp);
4961 if (!validate_arglist (exp,
4962 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4963 return NULL_RTX;
4964 tree dest = CALL_EXPR_ARG (exp, 0);
4965 tree src = CALL_EXPR_ARG (exp, 1);
4966 /* The number of bytes to write (not the maximum). */
4967 tree len = CALL_EXPR_ARG (exp, 2);
4969 if (!check_nul_terminated_array (exp, src, len))
4970 return NULL_RTX;
4972 /* The length of the source sequence. */
4973 tree slen = c_strlen (src, 1);
4975 if (warn_stringop_overflow)
4977 access_data data;
4978 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
4979 &data.dst);
4981 /* The number of bytes to write is LEN but check_access will also
4982 check SLEN if LEN's value isn't known. */
4983 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4984 destsize, true, &data);
4987 /* We must be passed a constant len and src parameter. */
4988 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4989 return NULL_RTX;
4991 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4993 /* We're required to pad with trailing zeros if the requested
4994 len is greater than strlen(s2)+1. In that case try to
4995 use store_by_pieces, if it fails, punt. */
4996 if (tree_int_cst_lt (slen, len))
4998 unsigned int dest_align = get_pointer_alignment (dest);
4999 const char *p = c_getstr (src);
5000 rtx dest_mem;
5002 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5003 || !can_store_by_pieces (tree_to_uhwi (len),
5004 builtin_strncpy_read_str,
5005 CONST_CAST (char *, p),
5006 dest_align, false))
5007 return NULL_RTX;
5009 dest_mem = get_memory_rtx (dest, len);
5010 store_by_pieces (dest_mem, tree_to_uhwi (len),
5011 builtin_strncpy_read_str,
5012 CONST_CAST (char *, p), dest_align, false,
5013 RETURN_BEGIN);
5014 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5015 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5016 return dest_mem;
5019 return NULL_RTX;
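/* Illustrative example (an assumption, not from the original source):
   the padding path above handles calls like

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   where LEN (8) exceeds strlen (SRC) + 1 (3); store_by_pieces emits
   the constant bytes "ab\0\0\0\0\0\0" directly, providing the required
   zero padding without a library call.  */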
5022 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
5023 bytes from constant string DATA + OFFSET and return it as target
5024 constant. */
5027 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5028 scalar_int_mode mode)
5030 const char *c = (const char *) data;
5031 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5033 memset (p, *c, GET_MODE_SIZE (mode));
5035 return c_readstr (p, mode);
5038 /* Callback routine for store_by_pieces. Return the RTL of a register
5039 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5040 char value given in the RTL register data. For example, if mode is
5041 4 bytes wide, return the RTL for 0x01010101*data. */
5043 static rtx
5044 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5045 scalar_int_mode mode)
5047 rtx target, coeff;
5048 size_t size;
5049 char *p;
5051 size = GET_MODE_SIZE (mode);
5052 if (size == 1)
5053 return (rtx) data;
5055 p = XALLOCAVEC (char, size);
5056 memset (p, 1, size);
5057 coeff = c_readstr (p, mode);
5059 target = convert_to_mode (mode, (rtx) data, 1);
5060 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5061 return force_reg (mode, target);
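/* Worked example for the expansion above (illustrative): with a 4-byte
   MODE the coefficient read from "\1\1\1\1" is 0x01010101, so a runtime
   byte value V expands to V * 0x01010101, replicating V into all four
   bytes (V == 0xAB yields 0xABABABAB).  */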
5064 /* Expand expression EXP, which is a call to the memset builtin. Return
5065 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5066 try to get the result in TARGET, if convenient (and in mode MODE if that's
5067 convenient). */
5069 static rtx
5070 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5072 if (!validate_arglist (exp,
5073 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5074 return NULL_RTX;
5076 tree dest = CALL_EXPR_ARG (exp, 0);
5077 tree val = CALL_EXPR_ARG (exp, 1);
5078 tree len = CALL_EXPR_ARG (exp, 2);
5080 check_memop_access (exp, dest, NULL_TREE, len);
5082 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5085 /* Helper function to do the actual work for expand_builtin_memset. The
5086 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5087 so that this can also be called without constructing an actual CALL_EXPR.
5088 The other arguments and return value are the same as for
5089 expand_builtin_memset. */
5091 static rtx
5092 expand_builtin_memset_args (tree dest, tree val, tree len,
5093 rtx target, machine_mode mode, tree orig_exp)
5095 tree fndecl, fn;
5096 enum built_in_function fcode;
5097 machine_mode val_mode;
5098 char c;
5099 unsigned int dest_align;
5100 rtx dest_mem, dest_addr, len_rtx;
5101 HOST_WIDE_INT expected_size = -1;
5102 unsigned int expected_align = 0;
5103 unsigned HOST_WIDE_INT min_size;
5104 unsigned HOST_WIDE_INT max_size;
5105 unsigned HOST_WIDE_INT probable_max_size;
5107 dest_align = get_pointer_alignment (dest);
5109 /* If DEST is not a pointer type, don't do this operation in-line. */
5110 if (dest_align == 0)
5111 return NULL_RTX;
5113 if (currently_expanding_gimple_stmt)
5114 stringop_block_profile (currently_expanding_gimple_stmt,
5115 &expected_align, &expected_size);
5117 if (expected_align < dest_align)
5118 expected_align = dest_align;
5120 /* If the LEN parameter is zero, return DEST. */
5121 if (integer_zerop (len))
5123 /* Evaluate and ignore VAL in case it has side-effects. */
5124 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5125 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5128 /* Stabilize the arguments in case we fail. */
5129 dest = builtin_save_expr (dest);
5130 val = builtin_save_expr (val);
5131 len = builtin_save_expr (len);
5133 len_rtx = expand_normal (len);
5134 determine_block_size (len, len_rtx, &min_size, &max_size,
5135 &probable_max_size);
5136 dest_mem = get_memory_rtx (dest, len);
5137 val_mode = TYPE_MODE (unsigned_char_type_node);
5139 if (TREE_CODE (val) != INTEGER_CST)
5141 rtx val_rtx;
5143 val_rtx = expand_normal (val);
5144 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5146 /* Assume that we can memset by pieces if we can store
5147 the coefficients by pieces (in the required modes).
5148 We can't pass builtin_memset_gen_str as that emits RTL. */
5149 c = 1;
5150 if (tree_fits_uhwi_p (len)
5151 && can_store_by_pieces (tree_to_uhwi (len),
5152 builtin_memset_read_str, &c, dest_align,
5153 true))
5155 val_rtx = force_reg (val_mode, val_rtx);
5156 store_by_pieces (dest_mem, tree_to_uhwi (len),
5157 builtin_memset_gen_str, val_rtx, dest_align,
5158 true, RETURN_BEGIN);
5160 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5161 dest_align, expected_align,
5162 expected_size, min_size, max_size,
5163 probable_max_size))
5164 goto do_libcall;
5166 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5167 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5168 return dest_mem;
5171 if (target_char_cast (val, &c))
5172 goto do_libcall;
5174 if (c)
5176 if (tree_fits_uhwi_p (len)
5177 && can_store_by_pieces (tree_to_uhwi (len),
5178 builtin_memset_read_str, &c, dest_align,
5179 true))
5180 store_by_pieces (dest_mem, tree_to_uhwi (len),
5181 builtin_memset_read_str, &c, dest_align, true,
5182 RETURN_BEGIN);
5183 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5184 gen_int_mode (c, val_mode),
5185 dest_align, expected_align,
5186 expected_size, min_size, max_size,
5187 probable_max_size))
5188 goto do_libcall;
5190 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5191 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5192 return dest_mem;
5195 set_mem_align (dest_mem, dest_align);
5196 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5197 CALL_EXPR_TAILCALL (orig_exp)
5198 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5199 expected_align, expected_size,
5200 min_size, max_size,
5201 probable_max_size);
5203 if (dest_addr == 0)
5205 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5206 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5209 return dest_addr;
5211 do_libcall:
5212 fndecl = get_callee_fndecl (orig_exp);
5213 fcode = DECL_FUNCTION_CODE (fndecl);
5214 if (fcode == BUILT_IN_MEMSET)
5215 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5216 dest, val, len);
5217 else if (fcode == BUILT_IN_BZERO)
5218 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5219 dest, len);
5220 else
5221 gcc_unreachable ();
5222 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5223 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5224 return expand_call (fn, target, target == const0_rtx);
5227 /* Expand expression EXP, which is a call to the bzero builtin. Return
5228 NULL_RTX if we failed; the caller should emit a normal call. */
5230 static rtx
5231 expand_builtin_bzero (tree exp)
5233 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5234 return NULL_RTX;
5236 tree dest = CALL_EXPR_ARG (exp, 0);
5237 tree size = CALL_EXPR_ARG (exp, 1);
5239 check_memop_access (exp, dest, NULL_TREE, size);
5241 /* New argument list transforming bzero(ptr x, int y) to
5242 memset(ptr x, int 0, size_t y). This is done this way
5243 so that if it isn't expanded inline, we fall back to
5244 calling bzero instead of memset. */
5246 location_t loc = EXPR_LOCATION (exp);
5248 return expand_builtin_memset_args (dest, integer_zero_node,
5249 fold_convert_loc (loc,
5250 size_type_node, size),
5251 const0_rtx, VOIDmode, exp);
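/* Illustrative equivalence (not from the original source): the
   transformation above expands

     bzero (p, n);

   the same way as memset (p, 0, (size_t) n), but passes const0_rtx as
   the target so that a failed inline expansion falls back to a call to
   bzero itself rather than to memset.  */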
5254 /* Try to expand cmpstr operation ICODE with the given operands.
5255 Return the result rtx on success, otherwise return null. */
5257 static rtx
5258 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5259 HOST_WIDE_INT align)
5261 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5263 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5264 target = NULL_RTX;
5266 class expand_operand ops[4];
5267 create_output_operand (&ops[0], target, insn_mode);
5268 create_fixed_operand (&ops[1], arg1_rtx);
5269 create_fixed_operand (&ops[2], arg2_rtx);
5270 create_integer_operand (&ops[3], align);
5271 if (maybe_expand_insn (icode, 4, ops))
5272 return ops[0].value;
5273 return NULL_RTX;
5276 /* Expand expression EXP, which is a call to the memcmp built-in function.
5277 Return NULL_RTX if we failed and the caller should emit a normal call,
5278 otherwise try to get the result in TARGET, if convenient.
5279 RESULT_EQ is true if we can relax the returned value to be either zero
5280 or nonzero, without caring about the sign. */
5282 static rtx
5283 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5285 if (!validate_arglist (exp,
5286 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5287 return NULL_RTX;
5289 tree arg1 = CALL_EXPR_ARG (exp, 0);
5290 tree arg2 = CALL_EXPR_ARG (exp, 1);
5291 tree len = CALL_EXPR_ARG (exp, 2);
5292 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5293 bool no_overflow = true;
5295 /* Diagnose calls where the specified length exceeds the size of either
5296 object. */
5297 access_data data;
5298 tree size = compute_objsize (arg1, 0, &data.src);
5299 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5300 len, /*maxread=*/NULL_TREE, size,
5301 /*objsize=*/NULL_TREE, true, &data);
5302 if (no_overflow)
5304 access_data data;
5305 size = compute_objsize (arg2, 0, &data.src);
5306 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5307 len, /*maxread=*/NULL_TREE, size,
5308 /*objsize=*/NULL_TREE, true, &data);
5311 /* If the specified length exceeds the size of either object,
5312 call the function. */
5313 if (!no_overflow)
5314 return NULL_RTX;
5316 /* Due to the performance benefit, always inline the calls first
5317 when result_eq is false. */
5318 rtx result = NULL_RTX;
5320 if (!result_eq && fcode != BUILT_IN_BCMP)
5322 result = inline_expand_builtin_string_cmp (exp, target);
5323 if (result)
5324 return result;
5327 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5328 location_t loc = EXPR_LOCATION (exp);
5330 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5331 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5333 /* If we lack alignment information for either argument, call the function. */
5334 if (arg1_align == 0 || arg2_align == 0)
5335 return NULL_RTX;
5337 rtx arg1_rtx = get_memory_rtx (arg1, len);
5338 rtx arg2_rtx = get_memory_rtx (arg2, len);
5339 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5341 /* Set MEM_SIZE as appropriate. */
5342 if (CONST_INT_P (len_rtx))
5344 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5345 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5348 by_pieces_constfn constfn = NULL;
5350 const char *src_str = c_getstr (arg2);
5351 if (result_eq && src_str == NULL)
5353 src_str = c_getstr (arg1);
5354 if (src_str != NULL)
5355 std::swap (arg1_rtx, arg2_rtx);
5358 /* If SRC is a string constant and block move would be done
5359 by pieces, we can avoid loading the string from memory
5360 and use only the computed constants. */
5361 if (src_str
5362 && CONST_INT_P (len_rtx)
5363 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
5364 constfn = builtin_memcpy_read_str;
5366 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5367 TREE_TYPE (len), target,
5368 result_eq, constfn,
5369 CONST_CAST (char *, src_str));
5371 if (result)
5373 /* Return the value in the proper mode for this function. */
5374 if (GET_MODE (result) == mode)
5375 return result;
5377 if (target != 0)
5379 convert_move (target, result, 0);
5380 return target;
5383 return convert_to_mode (mode, result, 0);
5386 return NULL_RTX;
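/* Illustrative example (editorial sketch, not GCC source): RESULT_EQ
   covers callers that test only equality, where the sign of the result
   is irrelevant, as well as bcmp, which is specified to return only
   zero or nonzero:

       int same (const void *a, const void *b, size_t n)
       {
         return memcmp (a, b, n) == 0;
       }
*/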
5389 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5390 if we failed, in which case the caller should emit a normal call;
5391 otherwise try to get the result in TARGET, if convenient. */
5393 static rtx
5394 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5396 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5397 return NULL_RTX;
5399 tree arg1 = CALL_EXPR_ARG (exp, 0);
5400 tree arg2 = CALL_EXPR_ARG (exp, 1);
5402 if (!check_nul_terminated_array (exp, arg1)
5403 || !check_nul_terminated_array (exp, arg2))
5404 return NULL_RTX;
5406 /* Due to the performance benefit, always inline the calls first. */
5407 rtx result = NULL_RTX;
5408 result = inline_expand_builtin_string_cmp (exp, target);
5409 if (result)
5410 return result;
5412 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5413 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5414 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5415 return NULL_RTX;
5417 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5418 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5420 /* If we lack alignment information for either argument, call the function. */
5421 if (arg1_align == 0 || arg2_align == 0)
5422 return NULL_RTX;
5424 /* Stabilize the arguments in case gen_cmpstrsi or gen_cmpstrnsi fails. */
5425 arg1 = builtin_save_expr (arg1);
5426 arg2 = builtin_save_expr (arg2);
5428 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5429 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5431 /* Try to call cmpstrsi. */
5432 if (cmpstr_icode != CODE_FOR_nothing)
5433 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5434 MIN (arg1_align, arg2_align));
5436 /* Try to determine at least one length and call cmpstrnsi. */
5437 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5439 tree len;
5440 rtx arg3_rtx;
5442 tree len1 = c_strlen (arg1, 1);
5443 tree len2 = c_strlen (arg2, 1);
5445 if (len1)
5446 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5447 if (len2)
5448 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5450 /* If we don't have a constant length for the first, use the length
5451 of the second, if we know it. We don't require a constant for
5452 this case; some cost analysis could be done if both are available
5453 but neither is constant. For now, assume they're equally cheap,
5454 unless one has side effects. If both strings have constant lengths,
5455 use the smaller. */
5457 if (!len1)
5458 len = len2;
5459 else if (!len2)
5460 len = len1;
5461 else if (TREE_SIDE_EFFECTS (len1))
5462 len = len2;
5463 else if (TREE_SIDE_EFFECTS (len2))
5464 len = len1;
5465 else if (TREE_CODE (len1) != INTEGER_CST)
5466 len = len2;
5467 else if (TREE_CODE (len2) != INTEGER_CST)
5468 len = len1;
5469 else if (tree_int_cst_lt (len1, len2))
5470 len = len1;
5471 else
5472 len = len2;
5474 /* If both arguments have side effects, we cannot optimize. */
5475 if (len && !TREE_SIDE_EFFECTS (len))
5477 arg3_rtx = expand_normal (len);
5478 result = expand_cmpstrn_or_cmpmem
5479 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5480 arg3_rtx, MIN (arg1_align, arg2_align));
5484 tree fndecl = get_callee_fndecl (exp);
5485 if (result)
5487 /* Check to see if the argument was declared attribute nonstring
5488 and if so, issue a warning since at this point it's not known
5489 to be nul-terminated. */
5490 maybe_warn_nonstring_arg (fndecl, exp);
5492 /* Return the value in the proper mode for this function. */
5493 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5494 if (GET_MODE (result) == mode)
5495 return result;
5496 if (target == 0)
5497 return convert_to_mode (mode, result, 0);
5498 convert_move (target, result, 0);
5499 return target;
5502 /* Expand the library call ourselves using a stabilized argument
5503 list to avoid re-evaluating the function's arguments twice. */
5504 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5505 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5506 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5507 return expand_call (fn, target, target == const0_rtx);
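/* Illustrative example (editorial sketch, not GCC source): given

       int r = strcmp (s, "abc");

   c_strlen determines a length of 3 for the literal; after the +1 for
   the terminating NUL, the cmpstrn path above compares 4 bytes, so the
   comparison stops exactly where the library function would.  */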
5510 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5511 NULL_RTX if we failed, in which case the caller should emit a normal
5512 call; otherwise try to get the result in TARGET, if convenient. */
5514 static rtx
5515 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5516 ATTRIBUTE_UNUSED machine_mode mode)
5518 if (!validate_arglist (exp,
5519 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5520 return NULL_RTX;
5522 tree arg1 = CALL_EXPR_ARG (exp, 0);
5523 tree arg2 = CALL_EXPR_ARG (exp, 1);
5524 tree arg3 = CALL_EXPR_ARG (exp, 2);
5526 if (!check_nul_terminated_array (exp, arg1, arg3)
5527 || !check_nul_terminated_array (exp, arg2, arg3))
5528 return NULL_RTX;
5530 /* Due to the performance benefit, always inline the calls first. */
5531 rtx result = NULL_RTX;
5532 result = inline_expand_builtin_string_cmp (exp, target);
5533 if (result)
5534 return result;
5536 /* If c_strlen can determine an expression for one of the string
5537 lengths, and it doesn't have side effects, then emit cmpstrnsi
5538 using length MIN(strlen(string)+1, arg3). */
5539 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5540 if (cmpstrn_icode == CODE_FOR_nothing)
5541 return NULL_RTX;
5543 tree len;
5545 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5546 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5548 tree len1 = c_strlen (arg1, 1);
5549 tree len2 = c_strlen (arg2, 1);
5551 location_t loc = EXPR_LOCATION (exp);
5553 if (len1)
5554 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5555 if (len2)
5556 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5558 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5560 /* If we don't have a constant length for the first, use the length
5561 of the second, if we know it. If neither string is constant length,
5562 use the given length argument. We don't require a constant for
5563 this case; some cost analysis could be done if both are available
5564 but neither is constant. For now, assume they're equally cheap,
5565 unless one has side effects. If both strings have constant lengths,
5566 use the smaller. */
5568 if (!len1 && !len2)
5569 len = len3;
5570 else if (!len1)
5571 len = len2;
5572 else if (!len2)
5573 len = len1;
5574 else if (TREE_SIDE_EFFECTS (len1))
5575 len = len2;
5576 else if (TREE_SIDE_EFFECTS (len2))
5577 len = len1;
5578 else if (TREE_CODE (len1) != INTEGER_CST)
5579 len = len2;
5580 else if (TREE_CODE (len2) != INTEGER_CST)
5581 len = len1;
5582 else if (tree_int_cst_lt (len1, len2))
5583 len = len1;
5584 else
5585 len = len2;
5587 /* If we are not using the given length, we must incorporate it here.
5588 The actual new length parameter will be MIN(len,arg3) in this case. */
5589 if (len != len3)
5591 len = fold_convert_loc (loc, sizetype, len);
5592 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5594 rtx arg1_rtx = get_memory_rtx (arg1, len);
5595 rtx arg2_rtx = get_memory_rtx (arg2, len);
5596 rtx arg3_rtx = expand_normal (len);
5597 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5598 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5599 MIN (arg1_align, arg2_align));
5601 tree fndecl = get_callee_fndecl (exp);
5602 if (result)
5604 /* Check to see if the argument was declared attribute nonstring
5605 and if so, issue a warning since at this point it's not known
5606 to be nul-terminated. */
5607 maybe_warn_nonstring_arg (fndecl, exp);
5609 /* Return the value in the proper mode for this function. */
5610 mode = TYPE_MODE (TREE_TYPE (exp));
5611 if (GET_MODE (result) == mode)
5612 return result;
5613 if (target == 0)
5614 return convert_to_mode (mode, result, 0);
5615 convert_move (target, result, 0);
5616 return target;
5619 /* Expand the library call ourselves using a stabilized argument
5620 list to avoid re-evaluating the function's arguments twice. */
5621 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5622 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5623 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5624 return expand_call (fn, target, target == const0_rtx);
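/* Illustrative example (editorial sketch, not GCC source): given

       int r = strncmp (s, "abcd", 16);

   LEN2 = strlen ("abcd") + 1 = 5 is preferred over the bound, and since
   5 != LEN3 the final length becomes MIN (5, 16) = 5; the extra byte
   covers the NUL, preserving the library semantics even though 16 bytes
   were allowed.  */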
5627 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5628 if that's convenient. */
5630 rtx
5631 expand_builtin_saveregs (void)
5633 rtx val;
5634 rtx_insn *seq;
5636 /* Don't do __builtin_saveregs more than once in a function.
5637 Save the result of the first call and reuse it. */
5638 if (saveregs_value != 0)
5639 return saveregs_value;
5641 /* When this function is called, it means that registers must be
5642 saved on entry to this function. So we migrate the call to the
5643 first insn of this function. */
5645 start_sequence ();
5647 /* Do whatever the machine needs done in this case. */
5648 val = targetm.calls.expand_builtin_saveregs ();
5650 seq = get_insns ();
5651 end_sequence ();
5653 saveregs_value = val;
5655 /* Put the insns after the NOTE that starts the function. If this
5656 is inside a start_sequence, make the outer-level insn chain current, so
5657 the code is placed at the start of the function. */
5658 push_topmost_sequence ();
5659 emit_insn_after (seq, entry_of_function ());
5660 pop_topmost_sequence ();
5662 return val;
5665 /* Expand a call to __builtin_next_arg. */
5667 static rtx
5668 expand_builtin_next_arg (void)
5670 /* Checking arguments is already done in fold_builtin_next_arg
5671 which must be called before this function. */
5672 return expand_binop (ptr_mode, add_optab,
5673 crtl->args.internal_arg_pointer,
5674 crtl->args.arg_offset_rtx,
5675 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5678 /* Make it easier for the backends by protecting the valist argument
5679 from multiple evaluations. */
5681 static tree
5682 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5684 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5686 /* The current way of determining the type of valist is completely
5687 bogus. We should have the information on the va builtin instead. */
5688 if (!vatype)
5689 vatype = targetm.fn_abi_va_list (cfun->decl);
5691 if (TREE_CODE (vatype) == ARRAY_TYPE)
5693 if (TREE_SIDE_EFFECTS (valist))
5694 valist = save_expr (valist);
5696 /* For this case, the backends will be expecting a pointer to
5697 vatype, but it's possible we've actually been given an array
5698 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5699 So fix it. */
5700 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5702 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5703 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5706 else
5708 tree pt = build_pointer_type (vatype);
5710 if (! needs_lvalue)
5712 if (! TREE_SIDE_EFFECTS (valist))
5713 return valist;
5715 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5716 TREE_SIDE_EFFECTS (valist) = 1;
5719 if (TREE_SIDE_EFFECTS (valist))
5720 valist = save_expr (valist);
5721 valist = fold_build2_loc (loc, MEM_REF,
5722 vatype, valist, build_int_cst (pt, 0));
5725 return valist;
5728 /* The "standard" definition of va_list is void*. */
5730 tree
5731 std_build_builtin_va_list (void)
5733 return ptr_type_node;
5736 /* The "standard" abi va_list is va_list_type_node. */
5738 tree
5739 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5741 return va_list_type_node;
5744 /* The "standard" type of va_list is va_list_type_node. */
5746 tree
5747 std_canonical_va_list_type (tree type)
5749 tree wtype, htype;
5751 wtype = va_list_type_node;
5752 htype = type;
5754 if (TREE_CODE (wtype) == ARRAY_TYPE)
5756 /* If va_list is an array type, the argument may have decayed
5757 to a pointer type, e.g. by being passed to another function.
5758 In that case, unwrap both types so that we can compare the
5759 underlying records. */
5760 if (TREE_CODE (htype) == ARRAY_TYPE
5761 || POINTER_TYPE_P (htype))
5763 wtype = TREE_TYPE (wtype);
5764 htype = TREE_TYPE (htype);
5767 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5768 return va_list_type_node;
5770 return NULL_TREE;
5773 /* The "standard" implementation of va_start: just assign `nextarg' to
5774 the variable. */
5776 void
5777 std_expand_builtin_va_start (tree valist, rtx nextarg)
5779 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5780 convert_move (va_r, nextarg, 0);
5783 /* Expand EXP, a call to __builtin_va_start. */
5785 static rtx
5786 expand_builtin_va_start (tree exp)
5788 rtx nextarg;
5789 tree valist;
5790 location_t loc = EXPR_LOCATION (exp);
5792 if (call_expr_nargs (exp) < 2)
5794 error_at (loc, "too few arguments to function %<va_start%>");
5795 return const0_rtx;
5798 if (fold_builtin_next_arg (exp, true))
5799 return const0_rtx;
5801 nextarg = expand_builtin_next_arg ();
5802 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5804 if (targetm.expand_builtin_va_start)
5805 targetm.expand_builtin_va_start (valist, nextarg);
5806 else
5807 std_expand_builtin_va_start (valist, nextarg);
5809 return const0_rtx;
5812 /* Expand EXP, a call to __builtin_va_end. */
5814 static rtx
5815 expand_builtin_va_end (tree exp)
5817 tree valist = CALL_EXPR_ARG (exp, 0);
5819 /* Evaluate for side effects, if needed. I hate macros that don't
5820 do that. */
5821 if (TREE_SIDE_EFFECTS (valist))
5822 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5824 return const0_rtx;
5827 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5828 builtin rather than just as an assignment in stdarg.h because of the
5829 nastiness of array-type va_list types. */
5831 static rtx
5832 expand_builtin_va_copy (tree exp)
5834 tree dst, src, t;
5835 location_t loc = EXPR_LOCATION (exp);
5837 dst = CALL_EXPR_ARG (exp, 0);
5838 src = CALL_EXPR_ARG (exp, 1);
5840 dst = stabilize_va_list_loc (loc, dst, 1);
5841 src = stabilize_va_list_loc (loc, src, 0);
5843 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5845 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5847 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5848 TREE_SIDE_EFFECTS (t) = 1;
5849 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5851 else
5853 rtx dstb, srcb, size;
5855 /* Evaluate to pointers. */
5856 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5857 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5858 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5859 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5861 dstb = convert_memory_address (Pmode, dstb);
5862 srcb = convert_memory_address (Pmode, srcb);
5864 /* "Dereference" to BLKmode memories. */
5865 dstb = gen_rtx_MEM (BLKmode, dstb);
5866 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5867 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5868 srcb = gen_rtx_MEM (BLKmode, srcb);
5869 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5870 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5872 /* Copy. */
5873 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5876 return const0_rtx;
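/* Illustrative example (editorial sketch, not GCC source): on targets
   whose ABI declares an array va_list, e.g. the x86-64 psABI's

       typedef struct __va_list_tag va_list[1];

   a plain assignment "dst = src" would be ill-formed, so the block-copy
   branch above makes

       va_copy (dst, src);

   behave like memcpy (&dst, &src, sizeof (va_list)).  */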
5879 /* Expand a call to one of the builtin functions __builtin_frame_address or
5880 __builtin_return_address. */
5882 static rtx
5883 expand_builtin_frame_address (tree fndecl, tree exp)
5885 /* The argument must be a nonnegative integer constant.
5886 It counts the number of frames to scan up the stack.
5887 The value is either the frame pointer value or the return
5888 address saved in that frame. */
5889 if (call_expr_nargs (exp) == 0)
5890 /* Warning about missing arg was already issued. */
5891 return const0_rtx;
5892 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5894 error ("invalid argument to %qD", fndecl);
5895 return const0_rtx;
5897 else
5899 /* Number of frames to scan up the stack. */
5900 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5902 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5904 /* Some ports cannot access arbitrary stack frames. */
5905 if (tem == NULL)
5907 warning (0, "unsupported argument to %qD", fndecl);
5908 return const0_rtx;
5911 if (count)
5913 /* Warn since no effort is made to ensure that any frame
5914 beyond the current one exists or can be safely reached. */
5915 warning (OPT_Wframe_address, "calling %qD with "
5916 "a nonzero argument is unsafe", fndecl);
5919 /* For __builtin_frame_address, return what we've got. */
5920 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5921 return tem;
5923 if (!REG_P (tem)
5924 && ! CONSTANT_P (tem))
5925 tem = copy_addr_to_reg (tem);
5926 return tem;
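/* Illustrative example (editorial sketch, not GCC source): the argument
   must be an integer literal; level 0 is always safe, while anything
   above it triggers -Wframe-address because the outer frame may be
   unreachable:

       void *fp = __builtin_frame_address (0);
       void *ra = __builtin_return_address (0);
       void *up = __builtin_frame_address (1);   (warns)
*/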
5930 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5931 failed and the caller should emit a normal call. */
5933 static rtx
5934 expand_builtin_alloca (tree exp)
5936 rtx op0;
5937 rtx result;
5938 unsigned int align;
5939 tree fndecl = get_callee_fndecl (exp);
5940 HOST_WIDE_INT max_size;
5941 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5942 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5943 bool valid_arglist
5944 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5945 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5946 VOID_TYPE)
5947 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5948 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5949 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5951 if (!valid_arglist)
5952 return NULL_RTX;
5954 if ((alloca_for_var
5955 && warn_vla_limit >= HOST_WIDE_INT_MAX
5956 && warn_alloc_size_limit < warn_vla_limit)
5957 || (!alloca_for_var
5958 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5959 && warn_alloc_size_limit < warn_alloca_limit
5962 /* -Walloca-larger-than and -Wvla-larger-than settings of
5963 less than HOST_WIDE_INT_MAX override the more general
5964 -Walloc-size-larger-than so unless either of the former
5965 options is smaller than the last one (which would imply
5966 that the call was already checked), check the alloca
5967 arguments for overflow. */
5968 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5969 int idx[] = { 0, -1 };
5970 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5973 /* Compute the argument. */
5974 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5976 /* Compute the alignment. */
5977 align = (fcode == BUILT_IN_ALLOCA
5978 ? BIGGEST_ALIGNMENT
5979 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5981 /* Compute the maximum size. */
5982 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5983 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5984 : -1);
5986 /* Allocate the desired space. If the allocation stems from the declaration
5987 of a variable-sized object, it cannot accumulate. */
5988 result
5989 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5990 result = convert_memory_address (ptr_mode, result);
5992 /* Dynamic allocations for variables are recorded during gimplification. */
5993 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5994 record_dynamic_alloc (exp);
5996 return result;
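/* Illustrative example (editorial sketch, not GCC source): the three
   argument shapes validated above, with the alignment given in bits:

       p = __builtin_alloca (n);
       p = __builtin_alloca_with_align (n, 256);
       p = __builtin_alloca_with_align_and_max (n, 256, 512);

   The second form requests 32-byte alignment; the third additionally
   carries a maximum allocation size.  */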
5999 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
6000 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
6001 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
6002 in the comment for the handle_builtin_stack_restore function. */
6004 static rtx
6005 expand_asan_emit_allocas_unpoison (tree exp)
6007 tree arg0 = CALL_EXPR_ARG (exp, 0);
6008 tree arg1 = CALL_EXPR_ARG (exp, 1);
6009 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6010 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6011 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6012 stack_pointer_rtx, NULL_RTX, 0,
6013 OPTAB_LIB_WIDEN);
6014 off = convert_modes (ptr_mode, Pmode, off, 0);
6015 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6016 OPTAB_LIB_WIDEN);
6017 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6018 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6019 top, ptr_mode, bot, ptr_mode);
6020 return ret;
6023 /* Expand a call to the bswap builtin in EXP.
6024 Return NULL_RTX if a normal call should be emitted rather than expanding the
6025 function in-line. If convenient, the result should be placed in TARGET.
6026 SUBTARGET may be used as the target for computing one of EXP's operands. */
6028 static rtx
6029 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6030 rtx subtarget)
6032 tree arg;
6033 rtx op0;
6035 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6036 return NULL_RTX;
6038 arg = CALL_EXPR_ARG (exp, 0);
6039 op0 = expand_expr (arg,
6040 subtarget && GET_MODE (subtarget) == target_mode
6041 ? subtarget : NULL_RTX,
6042 target_mode, EXPAND_NORMAL);
6043 if (GET_MODE (op0) != target_mode)
6044 op0 = convert_to_mode (target_mode, op0, 1);
6046 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6048 gcc_assert (target);
6050 return convert_to_mode (target_mode, target, 1);
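/* Illustrative example (editorial sketch, not GCC source):

       #include <stdint.h>
       uint32_t x = 0x11223344u;
       uint32_t y = __builtin_bswap32 (x);   now 0x44332211

   The operand is widened to TARGET_MODE if needed and the result is
   converted back as an unsigned value.  */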
6053 /* Expand a call to a unary builtin in EXP.
6054 Return NULL_RTX if a normal call should be emitted rather than expanding the
6055 function in-line. If convenient, the result should be placed in TARGET.
6056 SUBTARGET may be used as the target for computing one of EXP's operands. */
6058 static rtx
6059 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6060 rtx subtarget, optab op_optab)
6062 rtx op0;
6064 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6065 return NULL_RTX;
6067 /* Compute the argument. */
6068 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6069 (subtarget
6070 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6071 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6072 VOIDmode, EXPAND_NORMAL);
6073 /* Compute op, into TARGET if possible.
6074 Set TARGET to wherever the result comes back. */
6075 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6076 op_optab, op0, target, op_optab != clrsb_optab);
6077 gcc_assert (target);
6079 return convert_to_mode (target_mode, target, 0);
6082 /* Expand a call to __builtin_expect. We just return our argument
6083 as the builtin_expect semantics should already have been applied by
6084 the tree branch prediction pass. */
6086 static rtx
6087 expand_builtin_expect (tree exp, rtx target)
6089 tree arg;
6091 if (call_expr_nargs (exp) < 2)
6092 return const0_rtx;
6093 arg = CALL_EXPR_ARG (exp, 0);
6095 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6096 /* When guessing was done, the hints should be already stripped away. */
6097 gcc_assert (!flag_guess_branch_prob
6098 || optimize == 0 || seen_error ());
6099 return target;
6102 /* Expand a call to __builtin_expect_with_probability. We just return our
6103 argument, as the builtin_expect semantics should already have been
6104 applied by the tree branch prediction pass. */
6106 static rtx
6107 expand_builtin_expect_with_probability (tree exp, rtx target)
6109 tree arg;
6111 if (call_expr_nargs (exp) < 3)
6112 return const0_rtx;
6113 arg = CALL_EXPR_ARG (exp, 0);
6115 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6116 /* When guessing was done, the hints should be already stripped away. */
6117 gcc_assert (!flag_guess_branch_prob
6118 || optimize == 0 || seen_error ());
6119 return target;
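/* Illustrative example (editorial sketch, not GCC source): both builtins
   degenerate to their first argument here because the earlier branch
   prediction pass has already consumed the hint:

       if (__builtin_expect (err != 0, 0))
         slow_path (err);
       if (__builtin_expect_with_probability (ready, 1, 0.9))
         fast_path ();
*/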
6123 /* Expand a call to __builtin_assume_aligned. We just return our first
6124 argument as the builtin_assume_aligned semantic should've been already
6125 executed by CCP. */
6127 static rtx
6128 expand_builtin_assume_aligned (tree exp, rtx target)
6130 if (call_expr_nargs (exp) < 2)
6131 return const0_rtx;
6132 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6133 EXPAND_NORMAL);
6134 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6135 && (call_expr_nargs (exp) < 3
6136 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6137 return target;
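/* Illustrative example (editorial sketch, not GCC source): the alignment
   facts were consumed earlier by CCP, so only argument 0 survives here:

       double *q = __builtin_assume_aligned (p, 64);
       double *r = __builtin_assume_aligned (p, 64, 16);

   The second form asserts that p is 16 bytes past a 64-byte boundary,
   i.e. that (char *) p - 16 is 64-byte aligned.  */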
6140 void
6141 expand_builtin_trap (void)
6143 if (targetm.have_trap ())
6145 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6146 /* For trap insns when not accumulating outgoing args force
6147 REG_ARGS_SIZE note to prevent crossjumping of calls with
6148 different args sizes. */
6149 if (!ACCUMULATE_OUTGOING_ARGS)
6150 add_args_size_note (insn, stack_pointer_delta);
6152 else
6154 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6155 tree call_expr = build_call_expr (fn, 0);
6156 expand_call (call_expr, NULL_RTX, false);
6159 emit_barrier ();
6162 /* Expand a call to __builtin_unreachable. We do nothing except emit
6163 a barrier saying that control flow will not pass here.
6165 It is the responsibility of the program being compiled to ensure
6166 that control flow never reaches __builtin_unreachable. */
6167 static void
6168 expand_builtin_unreachable (void)
6170 emit_barrier ();
6173 /* Expand EXP, a call to fabs, fabsf or fabsl.
6174 Return NULL_RTX if a normal call should be emitted rather than expanding
6175 the function inline. If convenient, the result should be placed
6176 in TARGET. SUBTARGET may be used as the target for computing
6177 the operand. */
6179 static rtx
6180 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6182 machine_mode mode;
6183 tree arg;
6184 rtx op0;
6186 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6187 return NULL_RTX;
6189 arg = CALL_EXPR_ARG (exp, 0);
6190 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6191 mode = TYPE_MODE (TREE_TYPE (arg));
6192 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6193 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6196 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6197 Return NULL if a normal call should be emitted rather than expanding the
6198 function inline. If convenient, the result should be placed in TARGET.
6199 SUBTARGET may be used as the target for computing the operand. */
6201 static rtx
6202 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6204 rtx op0, op1;
6205 tree arg;
6207 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6208 return NULL_RTX;
6210 arg = CALL_EXPR_ARG (exp, 0);
6211 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6213 arg = CALL_EXPR_ARG (exp, 1);
6214 op1 = expand_normal (arg);
6216 return expand_copysign (op0, op1, target);
6219 /* Expand a call to __builtin___clear_cache. */
6221 static rtx
6222 expand_builtin___clear_cache (tree exp)
6224 if (!targetm.code_for_clear_cache)
6226 #ifdef CLEAR_INSN_CACHE
6227 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6228 does something. Just do the default expansion to a call to
6229 __clear_cache(). */
6230 return NULL_RTX;
6231 #else
6232 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6233 does nothing. There is no need to call it. Do nothing. */
6234 return const0_rtx;
6235 #endif /* CLEAR_INSN_CACHE */
6238 /* We have a "clear_cache" insn, and it will handle everything. */
6239 tree begin, end;
6240 rtx begin_rtx, end_rtx;
6242 /* We must not expand to a library call. If we did, any
6243 fallback library function in libgcc that might contain a call to
6244 __builtin___clear_cache() would recurse infinitely. */
6245 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6247 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6248 return const0_rtx;
6251 if (targetm.have_clear_cache ())
6253 class expand_operand ops[2];
6255 begin = CALL_EXPR_ARG (exp, 0);
6256 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6258 end = CALL_EXPR_ARG (exp, 1);
6259 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6261 create_address_operand (&ops[0], begin_rtx);
6262 create_address_operand (&ops[1], end_rtx);
6263 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6264 return const0_rtx;
6266 return const0_rtx;
6269 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6271 static rtx
6272 round_trampoline_addr (rtx tramp)
6274 rtx temp, addend, mask;
6276 /* If we don't need too much alignment, we'll have been guaranteed
6277 proper alignment by get_trampoline_type. */
6278 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6279 return tramp;
6281 /* Round address up to desired boundary. */
6282 temp = gen_reg_rtx (Pmode);
6283 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6284 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6286 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6287 temp, 0, OPTAB_LIB_WIDEN);
6288 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6289 temp, 0, OPTAB_LIB_WIDEN);
6291 return tramp;
6294 static rtx
6295 expand_builtin_init_trampoline (tree exp, bool onstack)
6297 tree t_tramp, t_func, t_chain;
6298 rtx m_tramp, r_tramp, r_chain, tmp;
6300 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6301 POINTER_TYPE, VOID_TYPE))
6302 return NULL_RTX;
6304 t_tramp = CALL_EXPR_ARG (exp, 0);
6305 t_func = CALL_EXPR_ARG (exp, 1);
6306 t_chain = CALL_EXPR_ARG (exp, 2);
6308 r_tramp = expand_normal (t_tramp);
6309 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6310 MEM_NOTRAP_P (m_tramp) = 1;
6312 /* If ONSTACK, the TRAMP argument should be the address of a field
6313 within the local function's FRAME decl. Either way, let's see if
6314 we can fill in the MEM_ATTRs for this memory. */
6315 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6316 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6318 /* Creator of a heap trampoline is responsible for making sure the
6319 address is aligned to at least STACK_BOUNDARY. Normally malloc
6320 will ensure this anyhow. */
6321 tmp = round_trampoline_addr (r_tramp);
6322 if (tmp != r_tramp)
6324 m_tramp = change_address (m_tramp, BLKmode, tmp);
6325 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6326 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6329 /* The FUNC argument should be the address of the nested function.
6330 Extract the actual function decl to pass to the hook. */
6331 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6332 t_func = TREE_OPERAND (t_func, 0);
6333 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6335 r_chain = expand_normal (t_chain);
6337 /* Generate insns to initialize the trampoline. */
6338 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6340 if (onstack)
6342 trampolines_created = 1;
6344 if (targetm.calls.custom_function_descriptors != 0)
6345 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6346 "trampoline generated for nested function %qD", t_func);
6349 return const0_rtx;
6352 static rtx
6353 expand_builtin_adjust_trampoline (tree exp)
6355 rtx tramp;
6357 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6358 return NULL_RTX;
6360 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6361 tramp = round_trampoline_addr (tramp);
6362 if (targetm.calls.trampoline_adjust_address)
6363 tramp = targetm.calls.trampoline_adjust_address (tramp);
6365 return tramp;
6368 /* Expand a call to the builtin descriptor initialization routine.
6369 A descriptor is made up of a couple of pointers to the static
6370 chain and the code entry in this order. */
6372 static rtx
6373 expand_builtin_init_descriptor (tree exp)
6375 tree t_descr, t_func, t_chain;
6376 rtx m_descr, r_descr, r_func, r_chain;
6378 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6379 VOID_TYPE))
6380 return NULL_RTX;
6382 t_descr = CALL_EXPR_ARG (exp, 0);
6383 t_func = CALL_EXPR_ARG (exp, 1);
6384 t_chain = CALL_EXPR_ARG (exp, 2);
6386 r_descr = expand_normal (t_descr);
6387 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6388 MEM_NOTRAP_P (m_descr) = 1;
6389 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6391 r_func = expand_normal (t_func);
6392 r_chain = expand_normal (t_chain);
6394 /* Generate insns to initialize the descriptor. */
6395 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6396 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6397 POINTER_SIZE / BITS_PER_UNIT), r_func);
6399 return const0_rtx;
6402 /* Expand a call to the builtin descriptor adjustment routine. */
6404 static rtx
6405 expand_builtin_adjust_descriptor (tree exp)
6407 rtx tramp;
6409 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6410 return NULL_RTX;
6412 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6414 /* Unalign the descriptor to allow runtime identification. */
6415 tramp = plus_constant (ptr_mode, tramp,
6416 targetm.calls.custom_function_descriptors);
6418 return force_operand (tramp, NULL_RTX);
6421 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6422 function. The function first checks whether the back end provides
6423 an insn to implement signbit for the respective mode. If not, it
6424 checks whether the floating point format of the value is such that
6425 the sign bit can be extracted. If that is not the case, error out.
6426 EXP is the expression that is a call to the builtin function; if
6427 convenient, the result should be placed in TARGET. */
6428 static rtx
6429 expand_builtin_signbit (tree exp, rtx target)
6431 const struct real_format *fmt;
6432 scalar_float_mode fmode;
6433 scalar_int_mode rmode, imode;
6434 tree arg;
6435 int word, bitpos;
6436 enum insn_code icode;
6437 rtx temp;
6438 location_t loc = EXPR_LOCATION (exp);
6440 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6441 return NULL_RTX;
6443 arg = CALL_EXPR_ARG (exp, 0);
6444 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6445 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6446 fmt = REAL_MODE_FORMAT (fmode);
6448 arg = builtin_save_expr (arg);
6450 /* Expand the argument yielding a RTX expression. */
6451 temp = expand_normal (arg);
6453 /* Check if the back end provides an insn that handles signbit for the
6454 argument's mode. */
6455 icode = optab_handler (signbit_optab, fmode);
6456 if (icode != CODE_FOR_nothing)
6458 rtx_insn *last = get_last_insn ();
6459 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6460 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6461 return target;
6462 delete_insns_since (last);
6465 /* For floating point formats without a sign bit, implement signbit
6466 as "ARG < 0.0". */
6467 bitpos = fmt->signbit_ro;
6468 if (bitpos < 0)
6470 /* But we can't do this if the format supports signed zero. */
6471 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6473 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6474 build_real (TREE_TYPE (arg), dconst0));
6475 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6478 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6480 imode = int_mode_for_mode (fmode).require ();
6481 temp = gen_lowpart (imode, temp);
6483 else
6485 imode = word_mode;
6486 /* Handle targets with different FP word orders. */
6487 if (FLOAT_WORDS_BIG_ENDIAN)
6488 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6489 else
6490 word = bitpos / BITS_PER_WORD;
6491 temp = operand_subword_force (temp, word, fmode);
6492 bitpos = bitpos % BITS_PER_WORD;
6495 /* Force the intermediate word_mode (or narrower) result into a
6496 register. This avoids attempting to create paradoxical SUBREGs
6497 of floating point modes below. */
6498 temp = force_reg (imode, temp);
6500 /* If the bitpos is within the "result mode" lowpart, the operation
6501 can be implemented with a single bitwise AND. Otherwise, we need
6502 a right shift and an AND. */
6504 if (bitpos < GET_MODE_BITSIZE (rmode))
6506 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6508 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6509 temp = gen_lowpart (rmode, temp);
6510 temp = expand_binop (rmode, and_optab, temp,
6511 immed_wide_int_const (mask, rmode),
6512 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6514 else
6516 /* Perform a logical right shift to place the signbit in the least
6517 significant bit, then truncate the result to the desired mode
6518 and mask just this bit. */
6519 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6520 temp = gen_lowpart (rmode, temp);
6521 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6522 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6525 return temp;
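/* Illustrative example (editorial sketch, not GCC source): for IEEE
   binary32 with a 32-bit result mode, BITPOS is 31, so the AND path
   masks with 0x80000000 and returns that (nonzero) bit directly; the
   shift path used for wider formats normalizes instead, roughly

       #include <stdint.h>
       #include <string.h>
       int sign_of (float f)
       {
         uint32_t u;
         memcpy (&u, &f, sizeof u);
         return (u >> 31) & 1;
       }
*/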
6528 /* Expand fork or exec calls. TARGET is the desired target of the
6529 call. EXP is the call. FN is the
6530 identifier of the actual function. IGNORE is nonzero if the
6531 value is to be ignored. */
6533 static rtx
6534 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6536 tree id, decl;
6537 tree call;
6539 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6541 /* Detect unterminated path. */
6542 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6543 return NULL_RTX;
6545 /* Also detect unterminated first argument. */
6546 switch (DECL_FUNCTION_CODE (fn))
6548 case BUILT_IN_EXECL:
6549 case BUILT_IN_EXECLE:
6550 case BUILT_IN_EXECLP:
6551 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6552 return NULL_RTX;
6553 default:
6554 break;
6559 /* If we are not profiling, just call the function. */
6560 if (!profile_arc_flag)
6561 return NULL_RTX;
6563 /* Otherwise call the wrapper. This should be equivalent for the rest of
6564 compiler, so the code does not diverge, and the wrapper may run the
6565 code necessary for keeping the profiling sane. */
6567 switch (DECL_FUNCTION_CODE (fn))
6569 case BUILT_IN_FORK:
6570 id = get_identifier ("__gcov_fork");
6571 break;
6573 case BUILT_IN_EXECL:
6574 id = get_identifier ("__gcov_execl");
6575 break;
6577 case BUILT_IN_EXECV:
6578 id = get_identifier ("__gcov_execv");
6579 break;
6581 case BUILT_IN_EXECLP:
6582 id = get_identifier ("__gcov_execlp");
6583 break;
6585 case BUILT_IN_EXECLE:
6586 id = get_identifier ("__gcov_execle");
6587 break;
6589 case BUILT_IN_EXECVP:
6590 id = get_identifier ("__gcov_execvp");
6591 break;
6593 case BUILT_IN_EXECVE:
6594 id = get_identifier ("__gcov_execve");
6595 break;
6597 default:
6598 gcc_unreachable ();
6601 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6602 FUNCTION_DECL, id, TREE_TYPE (fn));
6603 DECL_EXTERNAL (decl) = 1;
6604 TREE_PUBLIC (decl) = 1;
6605 DECL_ARTIFICIAL (decl) = 1;
6606 TREE_NOTHROW (decl) = 1;
6607 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6608 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6609 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6610 return expand_call (call, target, ignore);
6615 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6616 the pointer in these functions is void*, the tree optimizers may remove
6617 casts. The mode computed in expand_builtin isn't reliable either, due
6618 to __sync_bool_compare_and_swap.
6620 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6621 group of builtins. This gives us log2 of the mode size. */
6623 static inline machine_mode
6624 get_builtin_sync_mode (int fcode_diff)
6626 /* The size is not negotiable, so ask not to get BLKmode in return
6627 if the target indicates that a smaller size would be better. */
6628 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6631 /* Expand the memory expression LOC and return the appropriate memory operand
6632 for the builtin_sync operations. */
6634 static rtx
6635 get_builtin_sync_mem (tree loc, machine_mode mode)
6637 rtx addr, mem;
6638 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6639 ? TREE_TYPE (TREE_TYPE (loc))
6640 : TREE_TYPE (loc));
6641 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6643 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6644 addr = convert_memory_address (addr_mode, addr);
6646 /* Note that we explicitly do not want any alias information for this
6647 memory, so that we kill all other live memories. Otherwise we don't
6648 satisfy the full barrier semantics of the intrinsic. */
6649 mem = gen_rtx_MEM (mode, addr);
6651 set_mem_addr_space (mem, addr_space);
6653 mem = validize_mem (mem);
6655 /* The alignment needs to be at least that of the mode. */
6656 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6657 get_pointer_alignment (loc)));
6658 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6659 MEM_VOLATILE_P (mem) = 1;
6661 return mem;
6664 /* Make sure an argument is in the right mode.
6665 EXP is the tree argument.
6666 MODE is the mode it should be in. */
6668 static rtx
6669 expand_expr_force_mode (tree exp, machine_mode mode)
6671 rtx val;
6672 machine_mode old_mode;
6674 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6675 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6676 of CONST_INTs, where we know the old_mode only from the call argument. */
6678 old_mode = GET_MODE (val);
6679 if (old_mode == VOIDmode)
6680 old_mode = TYPE_MODE (TREE_TYPE (exp));
6681 val = convert_modes (mode, old_mode, val, 1);
6682 return val;
6686 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6687 EXP is the CALL_EXPR. CODE is the rtx code
6688 that corresponds to the arithmetic or logical operation from the name;
6689 an exception here is that NOT actually means NAND. TARGET is an optional
6690 place for us to store the results; AFTER is true if this is the
6691 fetch_and_xxx form. */
6693 static rtx
6694 expand_builtin_sync_operation (machine_mode mode, tree exp,
6695 enum rtx_code code, bool after,
6696 rtx target)
6698 rtx val, mem;
6699 location_t loc = EXPR_LOCATION (exp);
6701 if (code == NOT && warn_sync_nand)
6703 tree fndecl = get_callee_fndecl (exp);
6704 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6706 static bool warned_f_a_n, warned_n_a_f;
6708 switch (fcode)
6710 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6711 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6712 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6713 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6714 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6715 if (warned_f_a_n)
6716 break;
6718 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6719 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6720 warned_f_a_n = true;
6721 break;
6723 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6724 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6725 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6726 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6727 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6728 if (warned_n_a_f)
6729 break;
6731 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6732 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6733 warned_n_a_f = true;
6734 break;
6736 default:
6737 gcc_unreachable ();
6741 /* Expand the operands. */
6742 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6743 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6745 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6746 after);
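/* Illustrative example (editorial sketch, not GCC source): the GCC 4.4
   change warned about above redefined NAND from the pre-4.4

       tmp = ~*ptr & val;     *ptr = tmp;

   to the current

       tmp = ~(*ptr & val);   *ptr = tmp;

   __sync_fetch_and_nand returns the old value; __sync_nand_and_fetch
   returns TMP.  */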
6749 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6750 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6751 true if this is the boolean form. TARGET is a place for us to store the
6752 results; this is NOT optional if IS_BOOL is true. */
6754 static rtx
6755 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6756 bool is_bool, rtx target)
6758 rtx old_val, new_val, mem;
6759 rtx *pbool, *poval;
6761 /* Expand the operands. */
6762 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6763 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6764 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6766 pbool = poval = NULL;
6767 if (target != const0_rtx)
6769 if (is_bool)
6770 pbool = &target;
6771 else
6772 poval = &target;
6774 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6775 false, MEMMODEL_SYNC_SEQ_CST,
6776 MEMMODEL_SYNC_SEQ_CST))
6777 return NULL_RTX;
6779 return target;
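/* Illustrative example (editorial sketch, not GCC source): the two forms
   share one expansion and differ only in which result is kept:

       long  old = __sync_val_compare_and_swap (&x, expected, desired);
       _Bool ok  = __sync_bool_compare_and_swap (&x, expected, desired);

   hence TARGET is routed through either POVAL or PBOOL above.  */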
6782 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6783 general form is actually an atomic exchange, and some targets only
6784 support a reduced form with the second argument being a constant 1.
6785 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6786 the results. */
6788 static rtx
6789 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6790 rtx target)
6792 rtx val, mem;
6794 /* Expand the operands. */
6795 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6796 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6798 return expand_sync_lock_test_and_set (target, mem, val);
6801 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6803 static void
6804 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6806 rtx mem;
6808 /* Expand the operands. */
6809 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6811 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6814 /* Given an integer representing an ``enum memmodel'', verify its
6815 correctness and return the memory model enum. */
6817 static enum memmodel
6818 get_memmodel (tree exp)
6820 rtx op;
6821 unsigned HOST_WIDE_INT val;
6822 location_t loc
6823 = expansion_point_location_if_in_system_header (input_location);
6825 /* If the parameter is not a constant, it's a run time value so we'll just
6826 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6827 if (TREE_CODE (exp) != INTEGER_CST)
6828 return MEMMODEL_SEQ_CST;
6830 op = expand_normal (exp);
6832 val = INTVAL (op);
6833 if (targetm.memmodel_check)
6834 val = targetm.memmodel_check (val);
6835 else if (val & ~MEMMODEL_MASK)
6837 warning_at (loc, OPT_Winvalid_memory_model,
6838 "unknown architecture specifier in memory model to builtin");
6839 return MEMMODEL_SEQ_CST;
6842 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6843 if (memmodel_base (val) >= MEMMODEL_LAST)
6845 warning_at (loc, OPT_Winvalid_memory_model,
6846 "invalid memory model argument to builtin");
6847 return MEMMODEL_SEQ_CST;
6850 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6851 be conservative and promote consume to acquire. */
6852 if (val == MEMMODEL_CONSUME)
6853 val = MEMMODEL_ACQUIRE;
6855 return (enum memmodel) val;
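/* Illustrative example (editorial sketch, not GCC source): the base
   values screened above correspond to the user-visible constants

       __ATOMIC_RELAXED 0    __ATOMIC_CONSUME 1    __ATOMIC_ACQUIRE 2
       __ATOMIC_RELEASE 3    __ATOMIC_ACQ_REL 4    __ATOMIC_SEQ_CST 5

   and a non-constant model argument silently degrades to SEQ_CST.  */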
6858 /* Expand the __atomic_exchange intrinsic:
6859 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6860 EXP is the CALL_EXPR.
6861 TARGET is an optional place for us to store the results. */
6863 static rtx
6864 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6866 rtx val, mem;
6867 enum memmodel model;
6869 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6871 if (!flag_inline_atomics)
6872 return NULL_RTX;
6874 /* Expand the operands. */
6875 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6876 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6878 return expand_atomic_exchange (target, mem, val, model);
6881 /* Expand the __atomic_compare_exchange intrinsic:
6882 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6883 TYPE desired, BOOL weak,
6884 enum memmodel success,
6885 enum memmodel failure)
6886 EXP is the CALL_EXPR.
6887 TARGET is an optional place for us to store the results. */
6889 static rtx
6890 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6891 rtx target)
6893 rtx expect, desired, mem, oldval;
6894 rtx_code_label *label;
6895 enum memmodel success, failure;
6896 tree weak;
6897 bool is_weak;
6898 location_t loc
6899 = expansion_point_location_if_in_system_header (input_location);
6901 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6902 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6904 if (failure > success)
6906 warning_at (loc, OPT_Winvalid_memory_model,
6907 "failure memory model cannot be stronger than success "
6908 "memory model for %<__atomic_compare_exchange%>");
6909 success = MEMMODEL_SEQ_CST;
6912 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6914 warning_at (loc, OPT_Winvalid_memory_model,
6915 "invalid failure memory model for "
6916 "%<__atomic_compare_exchange%>");
6917 failure = MEMMODEL_SEQ_CST;
6918 success = MEMMODEL_SEQ_CST;
6922 if (!flag_inline_atomics)
6923 return NULL_RTX;
6925 /* Expand the operands. */
6926 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6928 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6929 expect = convert_memory_address (Pmode, expect);
6930 expect = gen_rtx_MEM (mode, expect);
6931 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6933 weak = CALL_EXPR_ARG (exp, 3);
6934 is_weak = false;
6935 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6936 is_weak = true;
6938 if (target == const0_rtx)
6939 target = NULL;
6941 /* Lest the rtl backend create a race condition with an improper store
6942 to memory, always create a new pseudo for OLDVAL. */
6943 oldval = NULL;
6945 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6946 is_weak, success, failure))
6947 return NULL_RTX;
6949 /* Conditionally store back to EXPECT, lest we create a race condition
6950 with an improper store to memory. */
6951 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6952 the normal case where EXPECT is totally private, i.e. a register. At
6953 which point the store can be unconditional. */
6954 label = gen_label_rtx ();
6955 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6956 GET_MODE (target), 1, label);
6957 emit_move_insn (expect, oldval);
6958 emit_label (label);
6960 return target;
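/* Illustrative example (editorial sketch, not GCC source):

       int expected = 0;
       _Bool ok = __atomic_compare_exchange_n (&x, &expected, 1, 0,
                                               __ATOMIC_ACQ_REL,
                                               __ATOMIC_ACQUIRE);

   On failure the value read is stored back into EXPECTED, which is
   exactly the conditional store to EXPECT emitted above.  */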
6963 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6964 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6965 call. The weak parameter must be dropped to match the expected parameter
6966 list and the expected argument changed from value to pointer to memory
6967 slot. */
6969 static void
6970 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6972 unsigned int z;
6973 vec<tree, va_gc> *vec;
6975 vec_alloc (vec, 5);
6976 vec->quick_push (gimple_call_arg (call, 0));
6977 tree expected = gimple_call_arg (call, 1);
6978 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6979 TREE_TYPE (expected));
6980 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6981 if (expd != x)
6982 emit_move_insn (x, expd);
6983 tree v = make_tree (TREE_TYPE (expected), x);
6984 vec->quick_push (build1 (ADDR_EXPR,
6985 build_pointer_type (TREE_TYPE (expected)), v));
6986 vec->quick_push (gimple_call_arg (call, 2));
6987 /* Skip the boolean weak parameter. */
6988 for (z = 4; z < 6; z++)
6989 vec->quick_push (gimple_call_arg (call, z));
6990 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6991 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6992 gcc_assert (bytes_log2 < 5);
6993 built_in_function fncode
6994 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6995 + bytes_log2);
6996 tree fndecl = builtin_decl_explicit (fncode);
6997 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6998 fndecl);
6999 tree exp = build_call_vec (boolean_type_node, fn, vec);
7000 tree lhs = gimple_call_lhs (call);
7001 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7002 if (lhs)
7004 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7005 if (GET_MODE (boolret) != mode)
7006 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7007 x = force_reg (mode, x);
7008 write_complex_part (target, boolret, true);
7009 write_complex_part (target, x, false);
7013 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7015 void
7016 expand_ifn_atomic_compare_exchange (gcall *call)
7018 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7019 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
7020 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7021 rtx expect, desired, mem, oldval, boolret;
7022 enum memmodel success, failure;
7023 tree lhs;
7024 bool is_weak;
7025 location_t loc
7026 = expansion_point_location_if_in_system_header (gimple_location (call));
7028 success = get_memmodel (gimple_call_arg (call, 4));
7029 failure = get_memmodel (gimple_call_arg (call, 5));
7031 if (failure > success)
7033 warning_at (loc, OPT_Winvalid_memory_model,
7034 "failure memory model cannot be stronger than success "
7035 "memory model for %<__atomic_compare_exchange%>");
7036 success = MEMMODEL_SEQ_CST;
7039 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7041 warning_at (loc, OPT_Winvalid_memory_model,
7042 "invalid failure memory model for "
7043 "%<__atomic_compare_exchange%>");
7044 failure = MEMMODEL_SEQ_CST;
7045 success = MEMMODEL_SEQ_CST;
7048 if (!flag_inline_atomics)
7050 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7051 return;
7054 /* Expand the operands. */
7055 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7057 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7058 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7060 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7062 boolret = NULL;
7063 oldval = NULL;
7065 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7066 is_weak, success, failure))
7068 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7069 return;
7072 lhs = gimple_call_lhs (call);
7073 if (lhs)
7075 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7076 if (GET_MODE (boolret) != mode)
7077 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7078 write_complex_part (target, boolret, true);
7079 write_complex_part (target, oldval, false);
7083 /* Expand the __atomic_load intrinsic:
7084 TYPE __atomic_load (TYPE *object, enum memmodel)
7085 EXP is the CALL_EXPR.
7086 TARGET is an optional place for us to store the results. */
7088 static rtx
7089 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7091 rtx mem;
7092 enum memmodel model;
7094 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7095 if (is_mm_release (model) || is_mm_acq_rel (model))
7097 location_t loc
7098 = expansion_point_location_if_in_system_header (input_location);
7099 warning_at (loc, OPT_Winvalid_memory_model,
7100 "invalid memory model for %<__atomic_load%>");
7101 model = MEMMODEL_SEQ_CST;
7104 if (!flag_inline_atomics)
7105 return NULL_RTX;
7107 /* Expand the operand. */
7108 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7110 return expand_atomic_load (target, mem, model);
7114 /* Expand the __atomic_store intrinsic:
7115 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7116 EXP is the CALL_EXPR.
7117 TARGET is an optional place for us to store the results. */
7119 static rtx
7120 expand_builtin_atomic_store (machine_mode mode, tree exp)
7122 rtx mem, val;
7123 enum memmodel model;
7125 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7126 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7127 || is_mm_release (model)))
7129 location_t loc
7130 = expansion_point_location_if_in_system_header (input_location);
7131 warning_at (loc, OPT_Winvalid_memory_model,
7132 "invalid memory model for %<__atomic_store%>");
7133 model = MEMMODEL_SEQ_CST;
7136 if (!flag_inline_atomics)
7137 return NULL_RTX;
7139 /* Expand the operands. */
7140 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7141 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7143 return expand_atomic_store (mem, val, model, false);
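/* For illustration only: a typical source-level use of the builtin
   expanded above is

     __atomic_store_n (&v, 42, __ATOMIC_RELEASE);

   Only the relaxed, release and seq_cst models are valid for a store,
   matching the check above.  */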
7146 /* Expand the __atomic_fetch_XXX intrinsic:
7147 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7148 EXP is the CALL_EXPR.
7149 TARGET is an optional place for us to store the results.
7150 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT.
7151 FETCH_AFTER is true if returning the result of the operation.
7152 FETCH_AFTER is false if returning the value before the operation.
7153 IGNORE is true if the result is not used.
7154 EXT_CALL is the correct builtin for an external call if this cannot be
7155 resolved to an instruction sequence. */
7157 static rtx
7158 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7159 enum rtx_code code, bool fetch_after,
7160 bool ignore, enum built_in_function ext_call)
7162 rtx val, mem, ret;
7163 enum memmodel model;
7164 tree fndecl;
7165 tree addr;
7167 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7169 /* Expand the operands. */
7170 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7171 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7173 /* Only try generating instructions if inlining is turned on. */
7174 if (flag_inline_atomics)
7176 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7177 if (ret)
7178 return ret;
7181 /* Return if a different routine isn't needed for the library call. */
7182 if (ext_call == BUILT_IN_NONE)
7183 return NULL_RTX;
7185 /* Change the call to the specified function. */
7186 fndecl = get_callee_fndecl (exp);
7187 addr = CALL_EXPR_FN (exp);
7188 STRIP_NOPS (addr);
7190 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7191 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7193 /* If we will emit code after the call, the call cannot be a tail call.
7194 If it is emitted as a tail call, a barrier is emitted after it, and
7195 then all trailing code is removed. */
7196 if (!ignore)
7197 CALL_EXPR_TAILCALL (exp) = 0;
7199 /* Expand the call here so we can emit trailing code. */
7200 ret = expand_call (exp, target, ignore);
7202 /* Replace the original function just in case it matters. */
7203 TREE_OPERAND (addr, 0) = fndecl;
7205 /* Then issue the arithmetic correction to return the right result. */
7206 if (!ignore)
7208 if (code == NOT)
7210 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7211 OPTAB_LIB_WIDEN);
7212 ret = expand_simple_unop (mode, NOT, ret, target, true);
7214 else
7215 ret = expand_simple_binop (mode, code, ret, val, target, true,
7216 OPTAB_LIB_WIDEN);
7218 return ret;
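/* A worked example of the correction above, for illustration only: for
   __atomic_nand_fetch the library routine returns the old value
   OLD = *MEM, while the caller wants the new value ~(OLD & VAL).  The
   NOT branch therefore computes

     ret = OLD & VAL;
     ret = ~ret;

   whereas for the other operations a single binop such as OLD + VAL
   (for __atomic_add_fetch) suffices.  */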
7221 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7223 void
7224 expand_ifn_atomic_bit_test_and (gcall *call)
7226 tree ptr = gimple_call_arg (call, 0);
7227 tree bit = gimple_call_arg (call, 1);
7228 tree flag = gimple_call_arg (call, 2);
7229 tree lhs = gimple_call_lhs (call);
7230 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7231 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7232 enum rtx_code code;
7233 optab optab;
7234 class expand_operand ops[5];
7236 gcc_assert (flag_inline_atomics);
7238 if (gimple_call_num_args (call) == 4)
7239 model = get_memmodel (gimple_call_arg (call, 3));
7241 rtx mem = get_builtin_sync_mem (ptr, mode);
7242 rtx val = expand_expr_force_mode (bit, mode);
7244 switch (gimple_call_internal_fn (call))
7246 case IFN_ATOMIC_BIT_TEST_AND_SET:
7247 code = IOR;
7248 optab = atomic_bit_test_and_set_optab;
7249 break;
7250 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7251 code = XOR;
7252 optab = atomic_bit_test_and_complement_optab;
7253 break;
7254 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7255 code = AND;
7256 optab = atomic_bit_test_and_reset_optab;
7257 break;
7258 default:
7259 gcc_unreachable ();
7262 if (lhs == NULL_TREE)
7264 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7265 val, NULL_RTX, true, OPTAB_DIRECT);
7266 if (code == AND)
7267 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7268 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7269 return;
7272 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7273 enum insn_code icode = direct_optab_handler (optab, mode);
7274 gcc_assert (icode != CODE_FOR_nothing);
7275 create_output_operand (&ops[0], target, mode);
7276 create_fixed_operand (&ops[1], mem);
7277 create_convert_operand_to (&ops[2], val, mode, true);
7278 create_integer_operand (&ops[3], model);
7279 create_integer_operand (&ops[4], integer_onep (flag));
7280 if (maybe_expand_insn (icode, 5, ops))
7281 return;
7283 rtx bitval = val;
7284 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7285 val, NULL_RTX, true, OPTAB_DIRECT);
7286 rtx maskval = val;
7287 if (code == AND)
7288 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7289 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7290 code, model, false);
7291 if (integer_onep (flag))
7293 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7294 NULL_RTX, true, OPTAB_DIRECT);
7295 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7296 true, OPTAB_DIRECT);
7298 else
7299 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7300 OPTAB_DIRECT);
7301 if (result != target)
7302 emit_move_insn (target, result);
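/* Illustrative source pattern (the matching itself happens elsewhere, in
   tree-ssa-ccp.c at the time of writing): the internal function above is
   created from single-bit tests such as

     if (__atomic_fetch_or (&flags, 1 << bit, __ATOMIC_SEQ_CST)
         & (1 << bit))
       ...

   so that targets providing atomic_bit_test_and_set_optab and friends
   can emit one bit-test instruction instead of a full fetch_or.  */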
7305 /* Expand an atomic clear operation.
7306 void __atomic_clear (BOOL *obj, enum memmodel)
7307 EXP is the call expression. */
7309 static rtx
7310 expand_builtin_atomic_clear (tree exp)
7312 machine_mode mode;
7313 rtx mem, ret;
7314 enum memmodel model;
7316 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7317 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7318 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7320 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7322 location_t loc
7323 = expansion_point_location_if_in_system_header (input_location);
7324 warning_at (loc, OPT_Winvalid_memory_model,
7325 "invalid memory model for %<__atomic_store%>");
7326 model = MEMMODEL_SEQ_CST;
7329 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7330 Failing that, a store is issued by __atomic_store. The only way this can
7331 fail is if the bool type is larger than a word size. Unlikely, but
7332 handle it anyway for completeness. Assume a single threaded model since
7333 there is no atomic support in this case, and no barriers are required. */
7334 ret = expand_atomic_store (mem, const0_rtx, model, true);
7335 if (!ret)
7336 emit_move_insn (mem, const0_rtx);
7337 return const0_rtx;
7340 /* Expand an atomic test_and_set operation.
7341 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7342 EXP is the call expression. */
7344 static rtx
7345 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7347 rtx mem;
7348 enum memmodel model;
7349 machine_mode mode;
7351 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7352 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7353 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7355 return expand_atomic_test_and_set (target, mem, model);
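/* Illustrative only: together the two builtins above implement a
   minimal spinlock sketch in user code:

     static _Bool busy;
     while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&busy, __ATOMIC_RELEASE);  */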
7359 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7360 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7362 static tree
7363 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7365 int size;
7366 machine_mode mode;
7367 unsigned int mode_align, type_align;
7369 if (TREE_CODE (arg0) != INTEGER_CST)
7370 return NULL_TREE;
7372 /* We need a corresponding integer mode for the access to be lock-free. */
7373 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7374 if (!int_mode_for_size (size, 0).exists (&mode))
7375 return boolean_false_node;
7377 mode_align = GET_MODE_ALIGNMENT (mode);
7379 if (TREE_CODE (arg1) == INTEGER_CST)
7381 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7383 /* Either this argument is null, or it's a fake pointer encoding
7384 the alignment of the object. */
7385 val = least_bit_hwi (val);
7386 val *= BITS_PER_UNIT;
7388 if (val == 0 || mode_align < val)
7389 type_align = mode_align;
7390 else
7391 type_align = val;
7393 else
7395 tree ttype = TREE_TYPE (arg1);
7397 /* This function is usually invoked and folded immediately by the front
7398 end before anything else has a chance to look at it. The pointer
7399 parameter at this point is usually cast to a void *, so check for that
7400 and look past the cast. */
7401 if (CONVERT_EXPR_P (arg1)
7402 && POINTER_TYPE_P (ttype)
7403 && VOID_TYPE_P (TREE_TYPE (ttype))
7404 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7405 arg1 = TREE_OPERAND (arg1, 0);
7407 ttype = TREE_TYPE (arg1);
7408 gcc_assert (POINTER_TYPE_P (ttype));
7410 /* Get the underlying type of the object. */
7411 ttype = TREE_TYPE (ttype);
7412 type_align = TYPE_ALIGN (ttype);
7415 /* If the object has smaller alignment, the lock free routines cannot
7416 be used. */
7417 if (type_align < mode_align)
7418 return boolean_false_node;
7420 /* Check if a compare_and_swap pattern exists for the mode which represents
7421 the required size. The pattern is not allowed to fail, so the existence
7422 of the pattern indicates support is present. Also require that an
7423 atomic load exists for the required size. */
7424 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7425 return boolean_true_node;
7426 else
7427 return boolean_false_node;
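/* For example (illustrative; assumes a target where SImode has a
   compare-and-swap pattern and 32-bit mode alignment):

     __atomic_always_lock_free (sizeof (int), 0)

   folds to 1 here, while a fake pointer that only encodes 1-byte
   alignment, e.g. (void *) 1, makes the encoded alignment (8 bits)
   smaller than the mode alignment, so the call folds to 0.  */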
7430 /* Return true if the parameters to call EXP represent an object which will
7431 always generate lock free instructions. The first argument represents the
7432 size of the object, and the second parameter is a pointer to the object
7433 itself. If NULL is passed for the object, then the result is based on
7434 typical alignment for an object of the specified size. Otherwise return
7435 false. */
7437 static rtx
7438 expand_builtin_atomic_always_lock_free (tree exp)
7440 tree size;
7441 tree arg0 = CALL_EXPR_ARG (exp, 0);
7442 tree arg1 = CALL_EXPR_ARG (exp, 1);
7444 if (TREE_CODE (arg0) != INTEGER_CST)
7446 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7447 return const0_rtx;
7450 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7451 if (size == boolean_true_node)
7452 return const1_rtx;
7453 return const0_rtx;
7456 /* Return one or zero if it can be determined that object ARG1 of size ARG0
7457 is lock free on this architecture. */
7459 static tree
7460 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7462 if (!flag_inline_atomics)
7463 return NULL_TREE;
7465 /* If it isn't always lock free, don't generate a result. */
7466 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7467 return boolean_true_node;
7469 return NULL_TREE;
7472 /* Return true if the parameters to call EXP represent an object which will
7473 always generate lock free instructions. The first argument represents the
7474 size of the object, and the second parameter is a pointer to the object
7475 itself. If NULL is passed for the object, then the result is based on
7476 typical alignment for an object of the specified size. Otherwise return
7477 NULL. */
7479 static rtx
7480 expand_builtin_atomic_is_lock_free (tree exp)
7482 tree size;
7483 tree arg0 = CALL_EXPR_ARG (exp, 0);
7484 tree arg1 = CALL_EXPR_ARG (exp, 1);
7486 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7488 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7489 return NULL_RTX;
7492 if (!flag_inline_atomics)
7493 return NULL_RTX;
7495 /* If the value is known at compile time, return the RTX for it. */
7496 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7497 if (size == boolean_true_node)
7498 return const1_rtx;
7500 return NULL_RTX;
7503 /* Expand the __atomic_thread_fence intrinsic:
7504 void __atomic_thread_fence (enum memmodel)
7505 EXP is the CALL_EXPR. */
7507 static void
7508 expand_builtin_atomic_thread_fence (tree exp)
7510 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7511 expand_mem_thread_fence (model);
7514 /* Expand the __atomic_signal_fence intrinsic:
7515 void __atomic_signal_fence (enum memmodel)
7516 EXP is the CALL_EXPR. */
7518 static void
7519 expand_builtin_atomic_signal_fence (tree exp)
7521 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7522 expand_mem_signal_fence (model);
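/* Illustrative uses of the two fences expanded above:

     __atomic_thread_fence (__ATOMIC_ACQUIRE);
     __atomic_signal_fence (__ATOMIC_SEQ_CST);

   The thread fence orders memory accesses between threads; the signal
   fence is only a compiler barrier, ordering accesses with respect to a
   signal handler running in the same thread.  */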
7525 /* Expand the __sync_synchronize intrinsic. */
7527 static void
7528 expand_builtin_sync_synchronize (void)
7530 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7533 static rtx
7534 expand_builtin_thread_pointer (tree exp, rtx target)
7536 enum insn_code icode;
7537 if (!validate_arglist (exp, VOID_TYPE))
7538 return const0_rtx;
7539 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7540 if (icode != CODE_FOR_nothing)
7542 class expand_operand op;
7543 /* If the target is not suitable then create a new target. */
7544 if (target == NULL_RTX
7545 || !REG_P (target)
7546 || GET_MODE (target) != Pmode)
7547 target = gen_reg_rtx (Pmode);
7548 create_output_operand (&op, target, Pmode);
7549 expand_insn (icode, 1, &op);
7550 return target;
7552 error ("%<__builtin_thread_pointer%> is not supported on this target");
7553 return const0_rtx;
7556 static void
7557 expand_builtin_set_thread_pointer (tree exp)
7559 enum insn_code icode;
7560 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7561 return;
7562 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7563 if (icode != CODE_FOR_nothing)
7565 class expand_operand op;
7566 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7567 Pmode, EXPAND_NORMAL);
7568 create_input_operand (&op, val, Pmode);
7569 expand_insn (icode, 1, &op);
7570 return;
7572 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
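/* Illustrative only, on a target that provides both optabs (e.g. one
   with a dedicated TLS register):

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   On other targets the errors above are emitted instead.  */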
7576 /* Emit code to restore a previously saved value of the stack pointer. */
7578 static void
7579 expand_stack_restore (tree var)
7581 rtx_insn *prev;
7582 rtx sa = expand_normal (var);
7584 sa = convert_memory_address (Pmode, sa);
7586 prev = get_last_insn ();
7587 emit_stack_restore (SAVE_BLOCK, sa);
7589 record_new_stack_level ();
7591 fixup_args_size_notes (prev, get_last_insn (), 0);
7594 /* Emit code to save the current value of the stack pointer. */
7596 static rtx
7597 expand_stack_save (void)
7599 rtx ret = NULL_RTX;
7601 emit_stack_save (SAVE_BLOCK, &ret);
7602 return ret;
7605 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7607 static rtx
7608 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7610 const char *name;
7611 rtx fallback_retval;
7612 rtx_insn *(*gen_fn) (rtx, rtx);
7613 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7615 case BUILT_IN_GOACC_PARLEVEL_ID:
7616 name = "__builtin_goacc_parlevel_id";
7617 fallback_retval = const0_rtx;
7618 gen_fn = targetm.gen_oacc_dim_pos;
7619 break;
7620 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7621 name = "__builtin_goacc_parlevel_size";
7622 fallback_retval = const1_rtx;
7623 gen_fn = targetm.gen_oacc_dim_size;
7624 break;
7625 default:
7626 gcc_unreachable ();
7629 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7631 error ("%qs only supported in OpenACC code", name);
7632 return const0_rtx;
7635 tree arg = CALL_EXPR_ARG (exp, 0);
7636 if (TREE_CODE (arg) != INTEGER_CST)
7638 error ("non-constant argument 0 to %qs", name);
7639 return const0_rtx;
7642 int dim = TREE_INT_CST_LOW (arg);
7643 switch (dim)
7645 case GOMP_DIM_GANG:
7646 case GOMP_DIM_WORKER:
7647 case GOMP_DIM_VECTOR:
7648 break;
7649 default:
7650 error ("illegal argument 0 to %qs", name);
7651 return const0_rtx;
7654 if (ignore)
7655 return target;
7657 if (target == NULL_RTX)
7658 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7660 if (!targetm.have_oacc_dim_size ())
7662 emit_move_insn (target, fallback_retval);
7663 return target;
7666 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7667 emit_insn (gen_fn (reg, GEN_INT (dim)));
7668 if (reg != target)
7669 emit_move_insn (target, reg);
7671 return target;
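/* Illustrative only (the GOMP_DIM_* values come from gomp-constants.h):
   inside OpenACC offloaded code the current gang can be queried with

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);

   Outside OpenACC code the error above is emitted instead.  */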
7674 /* Expand a string compare operation using a sequence of char comparisons
7675 to get rid of the calling overhead, with the result going to TARGET if
7676 that's convenient.
7678 VAR_STR is the variable string source;
7679 CONST_STR is the constant string source;
7680 LENGTH is the number of chars to compare;
7681 CONST_STR_N indicates which source string is the constant string;
7682 MODE is the mode of the result.
7684 The expanded sequence is equivalent to (assume const_str_n is 2, i.e., arg2 is a constant string):
7686 target = (int) (unsigned char) var_str[0]
7687 - (int) (unsigned char) const_str[0];
7688 if (target != 0)
7689 goto ne_label;
7691 target = (int) (unsigned char) var_str[length - 2]
7692 - (int) (unsigned char) const_str[length - 2];
7693 if (target != 0)
7694 goto ne_label;
7695 target = (int) (unsigned char) var_str[length - 1]
7696 - (int) (unsigned char) const_str[length - 1];
7697 ne_label:
7700 static rtx
7701 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7702 unsigned HOST_WIDE_INT length,
7703 int const_str_n, machine_mode mode)
7705 HOST_WIDE_INT offset = 0;
7706 rtx var_rtx_array
7707 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7708 rtx var_rtx = NULL_RTX;
7709 rtx const_rtx = NULL_RTX;
7710 rtx result = target ? target : gen_reg_rtx (mode);
7711 rtx_code_label *ne_label = gen_label_rtx ();
7712 tree unit_type_node = unsigned_char_type_node;
7713 scalar_int_mode unit_mode
7714 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7716 start_sequence ();
7718 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7720 var_rtx
7721 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7722 const_rtx = c_readstr (const_str + offset, unit_mode);
7723 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7724 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7726 op0 = convert_modes (mode, unit_mode, op0, 1);
7727 op1 = convert_modes (mode, unit_mode, op1, 1);
7728 result = expand_simple_binop (mode, MINUS, op0, op1,
7729 result, 1, OPTAB_WIDEN);
7730 if (i < length - 1)
7731 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7732 mode, true, ne_label);
7733 offset += GET_MODE_SIZE (unit_mode);
7736 emit_label (ne_label);
7737 rtx_insn *insns = get_insns ();
7738 end_sequence ();
7739 emit_insn (insns);
7741 return result;
7744 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7745 going to TARGET if that's convenient.
7746 If the call cannot be inlined, return NULL_RTX. */
7747 static rtx
7748 inline_expand_builtin_string_cmp (tree exp, rtx target)
7750 tree fndecl = get_callee_fndecl (exp);
7751 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7752 unsigned HOST_WIDE_INT length = 0;
7753 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7755 /* Do NOT apply this inline expansion when optimizing for size or when
7756 the optimization level is below 2. */
7757 if (optimize < 2 || optimize_insn_for_size_p ())
7758 return NULL_RTX;
7760 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7761 || fcode == BUILT_IN_STRNCMP
7762 || fcode == BUILT_IN_MEMCMP);
7764 /* On a target where the type of the call (int) has the same or narrower
7765 precision than unsigned char, give up on the inline expansion. */
7766 if (TYPE_PRECISION (unsigned_char_type_node)
7767 >= TYPE_PRECISION (TREE_TYPE (exp)))
7768 return NULL_RTX;
7770 tree arg1 = CALL_EXPR_ARG (exp, 0);
7771 tree arg2 = CALL_EXPR_ARG (exp, 1);
7772 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7774 unsigned HOST_WIDE_INT len1 = 0;
7775 unsigned HOST_WIDE_INT len2 = 0;
7776 unsigned HOST_WIDE_INT len3 = 0;
7778 const char *src_str1 = c_getstr (arg1, &len1);
7779 const char *src_str2 = c_getstr (arg2, &len2);
7781 /* If neither string is a constant, the call does not qualify. */
7782 if (!src_str1 && !src_str2)
7783 return NULL_RTX;
7785 /* For strncmp, if the length is not a constant, the call does not qualify. */
7786 if (is_ncmp)
7788 if (!tree_fits_uhwi_p (len3_tree))
7789 return NULL_RTX;
7790 else
7791 len3 = tree_to_uhwi (len3_tree);
7794 if (src_str1 != NULL)
7795 len1 = strnlen (src_str1, len1) + 1;
7797 if (src_str2 != NULL)
7798 len2 = strnlen (src_str2, len2) + 1;
7800 int const_str_n = 0;
7801 if (!len1)
7802 const_str_n = 2;
7803 else if (!len2)
7804 const_str_n = 1;
7805 else if (len2 > len1)
7806 const_str_n = 1;
7807 else
7808 const_str_n = 2;
7810 gcc_checking_assert (const_str_n > 0);
7811 length = (const_str_n == 1) ? len1 : len2;
7813 if (is_ncmp && len3 < length)
7814 length = len3;
7816 /* If the length of the comparison is larger than the threshold,
7817 do nothing. */
7818 if (length > (unsigned HOST_WIDE_INT)
7819 param_builtin_string_cmp_inline_length)
7820 return NULL_RTX;
7822 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7824 /* Now expand the call inline. */
7825 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7826 (const_str_n == 1) ? src_str1 : src_str2, length,
7827 const_str_n, mode);
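/* For example (illustrative; the exact threshold is the
   --param builtin-string-cmp-inline-length value checked above):

     int cmp (const char *s) { return strcmp (s, "hi"); }

   compiled at -O2 is expanded by inline_string_cmp into a short
   sequence of byte subtractions and compares instead of a call to
   strcmp.  */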
7830 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7831 represents the size of the first argument to that call, or VOIDmode
7832 if the argument is a pointer. IGNORE will be true if the result
7833 isn't used. */
7834 static rtx
7835 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7836 bool ignore)
7838 rtx val, failsafe;
7839 unsigned nargs = call_expr_nargs (exp);
7841 tree arg0 = CALL_EXPR_ARG (exp, 0);
7843 if (mode == VOIDmode)
7845 mode = TYPE_MODE (TREE_TYPE (arg0));
7846 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7849 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7851 /* An optional second argument can be used as a failsafe value on
7852 some machines. If it isn't present, then the failsafe value is
7853 assumed to be 0. */
7854 if (nargs > 1)
7856 tree arg1 = CALL_EXPR_ARG (exp, 1);
7857 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7859 else
7860 failsafe = const0_rtx;
7862 /* If the result isn't used, the behavior is undefined. It would be
7863 nice to emit a warning here, but path splitting means this might
7864 happen with legitimate code. So simply drop the builtin
7865 expansion in that case; we've handled any side-effects above. */
7866 if (ignore)
7867 return const0_rtx;
7869 /* If we don't have a suitable target, create one to hold the result. */
7870 if (target == NULL || GET_MODE (target) != mode)
7871 target = gen_reg_rtx (mode);
7873 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7874 val = convert_modes (mode, VOIDmode, val, false);
7876 return targetm.speculation_safe_value (mode, target, val, failsafe);
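/* Illustrative only: the canonical use is to sanitize an index that may
   be consumed under misspeculation, e.g.

     if (i < bound)
       x = array[__builtin_speculation_safe_value (i)];

   How (and whether) a speculation barrier is emitted is entirely up to
   the targetm.speculation_safe_value hook called above.  */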
7879 /* Expand an expression EXP that calls a built-in function,
7880 with result going to TARGET if that's convenient
7881 (and in mode MODE if that's convenient).
7882 SUBTARGET may be used as the target for computing one of EXP's operands.
7883 IGNORE is nonzero if the value is to be ignored. */
7885 rtx
7886 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7887 int ignore)
7889 tree fndecl = get_callee_fndecl (exp);
7890 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7891 int flags;
7893 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7894 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7896 /* When ASan is enabled, we don't want to expand some memory/string
7897 builtins and rely on libsanitizer's hooks. This allows us to avoid
7898 redundant checks and be sure that a possible overflow will be detected
7899 by ASan. */
7901 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7902 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7903 return expand_call (exp, target, ignore);
7905 /* When not optimizing, generate calls to library functions for a certain
7906 set of builtins. */
7907 if (!optimize
7908 && !called_as_built_in (fndecl)
7909 && fcode != BUILT_IN_FORK
7910 && fcode != BUILT_IN_EXECL
7911 && fcode != BUILT_IN_EXECV
7912 && fcode != BUILT_IN_EXECLP
7913 && fcode != BUILT_IN_EXECLE
7914 && fcode != BUILT_IN_EXECVP
7915 && fcode != BUILT_IN_EXECVE
7916 && !ALLOCA_FUNCTION_CODE_P (fcode)
7917 && fcode != BUILT_IN_FREE)
7918 return expand_call (exp, target, ignore);
7920 /* The built-in function expanders test for target == const0_rtx
7921 to determine whether the function's result will be ignored. */
7922 if (ignore)
7923 target = const0_rtx;
7925 /* If the result of a pure or const built-in function is ignored, and
7926 none of its arguments are volatile, we can avoid expanding the
7927 built-in call and just evaluate the arguments for side-effects. */
7928 if (target == const0_rtx
7929 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7930 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7932 bool volatilep = false;
7933 tree arg;
7934 call_expr_arg_iterator iter;
7936 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7937 if (TREE_THIS_VOLATILE (arg))
7939 volatilep = true;
7940 break;
7943 if (! volatilep)
7945 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7946 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7947 return const0_rtx;
7951 switch (fcode)
7953 CASE_FLT_FN (BUILT_IN_FABS):
7954 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7955 case BUILT_IN_FABSD32:
7956 case BUILT_IN_FABSD64:
7957 case BUILT_IN_FABSD128:
7958 target = expand_builtin_fabs (exp, target, subtarget);
7959 if (target)
7960 return target;
7961 break;
7963 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7964 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7965 target = expand_builtin_copysign (exp, target, subtarget);
7966 if (target)
7967 return target;
7968 break;
7970 /* Just do a normal library call if we were unable to fold
7971 the values. */
7972 CASE_FLT_FN (BUILT_IN_CABS):
7973 break;
7975 CASE_FLT_FN (BUILT_IN_FMA):
7976 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7977 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7978 if (target)
7979 return target;
7980 break;
7982 CASE_FLT_FN (BUILT_IN_ILOGB):
7983 if (! flag_unsafe_math_optimizations)
7984 break;
7985 gcc_fallthrough ();
7986 CASE_FLT_FN (BUILT_IN_ISINF):
7987 CASE_FLT_FN (BUILT_IN_FINITE):
7988 case BUILT_IN_ISFINITE:
7989 case BUILT_IN_ISNORMAL:
7990 target = expand_builtin_interclass_mathfn (exp, target);
7991 if (target)
7992 return target;
7993 break;
7995 CASE_FLT_FN (BUILT_IN_ICEIL):
7996 CASE_FLT_FN (BUILT_IN_LCEIL):
7997 CASE_FLT_FN (BUILT_IN_LLCEIL):
7998 CASE_FLT_FN (BUILT_IN_LFLOOR):
7999 CASE_FLT_FN (BUILT_IN_IFLOOR):
8000 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8001 target = expand_builtin_int_roundingfn (exp, target);
8002 if (target)
8003 return target;
8004 break;
8006 CASE_FLT_FN (BUILT_IN_IRINT):
8007 CASE_FLT_FN (BUILT_IN_LRINT):
8008 CASE_FLT_FN (BUILT_IN_LLRINT):
8009 CASE_FLT_FN (BUILT_IN_IROUND):
8010 CASE_FLT_FN (BUILT_IN_LROUND):
8011 CASE_FLT_FN (BUILT_IN_LLROUND):
8012 target = expand_builtin_int_roundingfn_2 (exp, target);
8013 if (target)
8014 return target;
8015 break;
8017 CASE_FLT_FN (BUILT_IN_POWI):
8018 target = expand_builtin_powi (exp, target);
8019 if (target)
8020 return target;
8021 break;
8023 CASE_FLT_FN (BUILT_IN_CEXPI):
8024 target = expand_builtin_cexpi (exp, target);
8025 gcc_assert (target);
8026 return target;
8028 CASE_FLT_FN (BUILT_IN_SIN):
8029 CASE_FLT_FN (BUILT_IN_COS):
8030 if (! flag_unsafe_math_optimizations)
8031 break;
8032 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8033 if (target)
8034 return target;
8035 break;
8037 CASE_FLT_FN (BUILT_IN_SINCOS):
8038 if (! flag_unsafe_math_optimizations)
8039 break;
8040 target = expand_builtin_sincos (exp);
8041 if (target)
8042 return target;
8043 break;
8045 case BUILT_IN_APPLY_ARGS:
8046 return expand_builtin_apply_args ();
8048 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8049 FUNCTION with a copy of the parameters described by
8050 ARGUMENTS, and ARGSIZE. It returns a block of memory
8051 allocated on the stack into which is stored all the registers
8052 that might possibly be used for returning the result of a
8053 function. ARGUMENTS is the value returned by
8054 __builtin_apply_args. ARGSIZE is the number of bytes of
8055 arguments that must be copied. ??? How should this value be
8056 computed? We'll also need a safe worst case value for varargs
8057 functions. */
8058 case BUILT_IN_APPLY:
8059 if (!validate_arglist (exp, POINTER_TYPE,
8060 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8061 && !validate_arglist (exp, REFERENCE_TYPE,
8062 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8063 return const0_rtx;
8064 else
8066 rtx ops[3];
8068 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8069 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8070 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8072 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8075 /* __builtin_return (RESULT) causes the function to return the
8076 value described by RESULT. RESULT is the address of the block of
8077 memory returned by __builtin_apply. */
8078 case BUILT_IN_RETURN:
8079 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8080 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8081 return const0_rtx;
8083 case BUILT_IN_SAVEREGS:
8084 return expand_builtin_saveregs ();
8086 case BUILT_IN_VA_ARG_PACK:
8087 /* All valid uses of __builtin_va_arg_pack () are removed during
8088 inlining. */
8089 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8090 return const0_rtx;
8092 case BUILT_IN_VA_ARG_PACK_LEN:
8093 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8094 inlining. */
8095 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8096 return const0_rtx;
8098 /* Return the address of the first anonymous stack arg. */
8099 case BUILT_IN_NEXT_ARG:
8100 if (fold_builtin_next_arg (exp, false))
8101 return const0_rtx;
8102 return expand_builtin_next_arg ();
8104 case BUILT_IN_CLEAR_CACHE:
8105 target = expand_builtin___clear_cache (exp);
8106 if (target)
8107 return target;
8108 break;
8110 case BUILT_IN_CLASSIFY_TYPE:
8111 return expand_builtin_classify_type (exp);
8113 case BUILT_IN_CONSTANT_P:
8114 return const0_rtx;
8116 case BUILT_IN_FRAME_ADDRESS:
8117 case BUILT_IN_RETURN_ADDRESS:
8118 return expand_builtin_frame_address (fndecl, exp);
8120 /* Returns the address of the area where the structure is returned.
8121 0 otherwise. */
8122 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8123 if (call_expr_nargs (exp) != 0
8124 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8125 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8126 return const0_rtx;
8127 else
8128 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8130 CASE_BUILT_IN_ALLOCA:
8131 target = expand_builtin_alloca (exp);
8132 if (target)
8133 return target;
8134 break;
8136 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8137 return expand_asan_emit_allocas_unpoison (exp);
8139 case BUILT_IN_STACK_SAVE:
8140 return expand_stack_save ();
8142 case BUILT_IN_STACK_RESTORE:
8143 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8144 return const0_rtx;
8146 case BUILT_IN_BSWAP16:
8147 case BUILT_IN_BSWAP32:
8148 case BUILT_IN_BSWAP64:
8149 case BUILT_IN_BSWAP128:
8150 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8151 if (target)
8152 return target;
8153 break;
8155 CASE_INT_FN (BUILT_IN_FFS):
8156 target = expand_builtin_unop (target_mode, exp, target,
8157 subtarget, ffs_optab);
8158 if (target)
8159 return target;
8160 break;
8162 CASE_INT_FN (BUILT_IN_CLZ):
8163 target = expand_builtin_unop (target_mode, exp, target,
8164 subtarget, clz_optab);
8165 if (target)
8166 return target;
8167 break;
8169 CASE_INT_FN (BUILT_IN_CTZ):
8170 target = expand_builtin_unop (target_mode, exp, target,
8171 subtarget, ctz_optab);
8172 if (target)
8173 return target;
8174 break;
8176 CASE_INT_FN (BUILT_IN_CLRSB):
8177 target = expand_builtin_unop (target_mode, exp, target,
8178 subtarget, clrsb_optab);
8179 if (target)
8180 return target;
8181 break;
8183 CASE_INT_FN (BUILT_IN_POPCOUNT):
8184 target = expand_builtin_unop (target_mode, exp, target,
8185 subtarget, popcount_optab);
8186 if (target)
8187 return target;
8188 break;
8190 CASE_INT_FN (BUILT_IN_PARITY):
8191 target = expand_builtin_unop (target_mode, exp, target,
8192 subtarget, parity_optab);
8193 if (target)
8194 return target;
8195 break;
8197 case BUILT_IN_STRLEN:
8198 target = expand_builtin_strlen (exp, target, target_mode);
8199 if (target)
8200 return target;
8201 break;
8203 case BUILT_IN_STRNLEN:
8204 target = expand_builtin_strnlen (exp, target, target_mode);
8205 if (target)
8206 return target;
8207 break;
8209 case BUILT_IN_STRCAT:
8210 target = expand_builtin_strcat (exp);
8211 if (target)
8212 return target;
8213 break;
8215 case BUILT_IN_GETTEXT:
8216 case BUILT_IN_PUTS:
8217 case BUILT_IN_PUTS_UNLOCKED:
8218 case BUILT_IN_STRDUP:
8219 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8220 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8221 break;
8223 case BUILT_IN_INDEX:
8224 case BUILT_IN_RINDEX:
8225 case BUILT_IN_STRCHR:
8226 case BUILT_IN_STRRCHR:
8227 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8228 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8229 break;
8231 case BUILT_IN_FPUTS:
8232 case BUILT_IN_FPUTS_UNLOCKED:
8233 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8234 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8235 break;
8237 case BUILT_IN_STRNDUP:
8238 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8239 check_nul_terminated_array (exp,
8240 CALL_EXPR_ARG (exp, 0),
8241 CALL_EXPR_ARG (exp, 1));
8242 break;
8244 case BUILT_IN_STRCASECMP:
8245 case BUILT_IN_STRSTR:
8246 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8248 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8249 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8251 break;
8253 case BUILT_IN_STRCPY:
8254 target = expand_builtin_strcpy (exp, target);
8255 if (target)
8256 return target;
8257 break;
8259 case BUILT_IN_STRNCAT:
8260 target = expand_builtin_strncat (exp, target);
8261 if (target)
8262 return target;
8263 break;
8265 case BUILT_IN_STRNCPY:
8266 target = expand_builtin_strncpy (exp, target);
8267 if (target)
8268 return target;
8269 break;
8271 case BUILT_IN_STPCPY:
8272 target = expand_builtin_stpcpy (exp, target, mode);
8273 if (target)
8274 return target;
8275 break;
8277 case BUILT_IN_STPNCPY:
8278 target = expand_builtin_stpncpy (exp, target);
8279 if (target)
8280 return target;
8281 break;
8283 case BUILT_IN_MEMCHR:
8284 target = expand_builtin_memchr (exp, target);
8285 if (target)
8286 return target;
8287 break;
8289 case BUILT_IN_MEMCPY:
8290 target = expand_builtin_memcpy (exp, target);
8291 if (target)
8292 return target;
8293 break;
8295 case BUILT_IN_MEMMOVE:
8296 target = expand_builtin_memmove (exp, target);
8297 if (target)
8298 return target;
8299 break;
8301 case BUILT_IN_MEMPCPY:
8302 target = expand_builtin_mempcpy (exp, target);
8303 if (target)
8304 return target;
8305 break;
8307 case BUILT_IN_MEMSET:
8308 target = expand_builtin_memset (exp, target, mode);
8309 if (target)
8310 return target;
8311 break;
8313 case BUILT_IN_BZERO:
8314 target = expand_builtin_bzero (exp);
8315 if (target)
8316 return target;
8317 break;
8319 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8320 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8321 when changing it to a strcmp call. */
8322 case BUILT_IN_STRCMP_EQ:
8323 target = expand_builtin_memcmp (exp, target, true);
8324 if (target)
8325 return target;
8327 /* Change this call back to a BUILT_IN_STRCMP. */
8328 TREE_OPERAND (exp, 1)
8329 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8331 /* Delete the last parameter. */
8332 unsigned int i;
8333 vec<tree, va_gc> *arg_vec;
8334 vec_alloc (arg_vec, 2);
8335 for (i = 0; i < 2; i++)
8336 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8337 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8338 /* FALLTHROUGH */
8340 case BUILT_IN_STRCMP:
8341 target = expand_builtin_strcmp (exp, target);
8342 if (target)
8343 return target;
8344 break;
8346 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8347 back to a BUILT_IN_STRNCMP. */
8348 case BUILT_IN_STRNCMP_EQ:
8349 target = expand_builtin_memcmp (exp, target, true);
8350 if (target)
8351 return target;
8353 /* Change it back to a BUILT_IN_STRNCMP. */
8354 TREE_OPERAND (exp, 1)
8355 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8356 /* FALLTHROUGH */
8358 case BUILT_IN_STRNCMP:
8359 target = expand_builtin_strncmp (exp, target, mode);
8360 if (target)
8361 return target;
8362 break;
8364 case BUILT_IN_BCMP:
8365 case BUILT_IN_MEMCMP:
8366 case BUILT_IN_MEMCMP_EQ:
8367 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8368 if (target)
8369 return target;
8370 if (fcode == BUILT_IN_MEMCMP_EQ)
8372 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8373 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8375 break;
8377 case BUILT_IN_SETJMP:
8378 /* This should have been lowered to the builtins below. */
8379 gcc_unreachable ();
8381 case BUILT_IN_SETJMP_SETUP:
8382 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8383 and the receiver label. */
8384 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8386 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8387 VOIDmode, EXPAND_NORMAL);
8388 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8389 rtx_insn *label_r = label_rtx (label);
8391 /* This is copied from the handling of non-local gotos. */
8392 expand_builtin_setjmp_setup (buf_addr, label_r);
8393 nonlocal_goto_handler_labels
8394 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8395 nonlocal_goto_handler_labels);
8396 /* ??? Do not let expand_label treat us as such since we would
8397 not want to be both on the list of non-local labels and on
8398 the list of forced labels. */
8399 FORCED_LABEL (label) = 0;
8400 return const0_rtx;
8402 break;
8404 case BUILT_IN_SETJMP_RECEIVER:
8405 /* __builtin_setjmp_receiver is passed the receiver label. */
8406 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8408 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8409 rtx_insn *label_r = label_rtx (label);
8411 expand_builtin_setjmp_receiver (label_r);
8412 return const0_rtx;
8414 break;
8416 /* __builtin_longjmp is passed a pointer to an array of five words.
8417 It's similar to the C library longjmp function but works with
8418 __builtin_setjmp above. */
8419 case BUILT_IN_LONGJMP:
8420 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8422 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8423 VOIDmode, EXPAND_NORMAL);
8424 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8426 if (value != const1_rtx)
8428 error ("%<__builtin_longjmp%> second argument must be 1");
8429 return const0_rtx;
8432 expand_builtin_longjmp (buf_addr, value);
8433 return const0_rtx;
8435 break;
8437 case BUILT_IN_NONLOCAL_GOTO:
8438 target = expand_builtin_nonlocal_goto (exp);
8439 if (target)
8440 return target;
8441 break;
8443 /* This updates the setjmp buffer that is its argument with the value
8444 of the current stack pointer. */
8445 case BUILT_IN_UPDATE_SETJMP_BUF:
8446 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8448 rtx buf_addr
8449 = expand_normal (CALL_EXPR_ARG (exp, 0));
8451 expand_builtin_update_setjmp_buf (buf_addr);
8452 return const0_rtx;
8454 break;
8456 case BUILT_IN_TRAP:
8457 expand_builtin_trap ();
8458 return const0_rtx;
8460 case BUILT_IN_UNREACHABLE:
8461 expand_builtin_unreachable ();
8462 return const0_rtx;
8464 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8465 case BUILT_IN_SIGNBITD32:
8466 case BUILT_IN_SIGNBITD64:
8467 case BUILT_IN_SIGNBITD128:
8468 target = expand_builtin_signbit (exp, target);
8469 if (target)
8470 return target;
8471 break;
8473 /* Various hooks for the DWARF 2 __throw routine. */
8474 case BUILT_IN_UNWIND_INIT:
8475 expand_builtin_unwind_init ();
8476 return const0_rtx;
8477 case BUILT_IN_DWARF_CFA:
8478 return virtual_cfa_rtx;
8479 #ifdef DWARF2_UNWIND_INFO
8480 case BUILT_IN_DWARF_SP_COLUMN:
8481 return expand_builtin_dwarf_sp_column ();
8482 case BUILT_IN_INIT_DWARF_REG_SIZES:
8483 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8484 return const0_rtx;
8485 #endif
8486 case BUILT_IN_FROB_RETURN_ADDR:
8487 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8488 case BUILT_IN_EXTRACT_RETURN_ADDR:
8489 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8490 case BUILT_IN_EH_RETURN:
8491 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8492 CALL_EXPR_ARG (exp, 1));
8493 return const0_rtx;
8494 case BUILT_IN_EH_RETURN_DATA_REGNO:
8495 return expand_builtin_eh_return_data_regno (exp);
8496 case BUILT_IN_EXTEND_POINTER:
8497 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8498 case BUILT_IN_EH_POINTER:
8499 return expand_builtin_eh_pointer (exp);
8500 case BUILT_IN_EH_FILTER:
8501 return expand_builtin_eh_filter (exp);
8502 case BUILT_IN_EH_COPY_VALUES:
8503 return expand_builtin_eh_copy_values (exp);
8505 case BUILT_IN_VA_START:
8506 return expand_builtin_va_start (exp);
8507 case BUILT_IN_VA_END:
8508 return expand_builtin_va_end (exp);
8509 case BUILT_IN_VA_COPY:
8510 return expand_builtin_va_copy (exp);
8511 case BUILT_IN_EXPECT:
8512 return expand_builtin_expect (exp, target);
8513 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8514 return expand_builtin_expect_with_probability (exp, target);
8515 case BUILT_IN_ASSUME_ALIGNED:
8516 return expand_builtin_assume_aligned (exp, target);
8517 case BUILT_IN_PREFETCH:
8518 expand_builtin_prefetch (exp);
8519 return const0_rtx;
8521 case BUILT_IN_INIT_TRAMPOLINE:
8522 return expand_builtin_init_trampoline (exp, true);
8523 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8524 return expand_builtin_init_trampoline (exp, false);
8525 case BUILT_IN_ADJUST_TRAMPOLINE:
8526 return expand_builtin_adjust_trampoline (exp);
8528 case BUILT_IN_INIT_DESCRIPTOR:
8529 return expand_builtin_init_descriptor (exp);
8530 case BUILT_IN_ADJUST_DESCRIPTOR:
8531 return expand_builtin_adjust_descriptor (exp);
8533 case BUILT_IN_FORK:
8534 case BUILT_IN_EXECL:
8535 case BUILT_IN_EXECV:
8536 case BUILT_IN_EXECLP:
8537 case BUILT_IN_EXECLE:
8538 case BUILT_IN_EXECVP:
8539 case BUILT_IN_EXECVE:
8540 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8541 if (target)
8542 return target;
8543 break;
8545 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8546 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8547 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8548 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8549 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8550 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8551 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8552 if (target)
8553 return target;
8554 break;
8556 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8557 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8558 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8559 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8560 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8561 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8562 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8563 if (target)
8564 return target;
8565 break;
8567 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8568 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8569 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8570 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8571 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8572 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8573 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8574 if (target)
8575 return target;
8576 break;
8578 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8579 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8580 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8581 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8582 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8583 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8584 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8585 if (target)
8586 return target;
8587 break;
8589 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8590 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8591 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8592 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8593 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8594 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8595 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8596 if (target)
8597 return target;
8598 break;
8600 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8601 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8602 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8603 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8604 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8606 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8607 if (target)
8608 return target;
8609 break;
8611 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8612 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8613 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8614 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8615 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8617 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8618 if (target)
8619 return target;
8620 break;
8622 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8623 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8624 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8625 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8626 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8628 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8629 if (target)
8630 return target;
8631 break;
8633 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8634 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8635 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8636 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8637 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8638 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8639 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8640 if (target)
8641 return target;
8642 break;
8644 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8645 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8646 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8647 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8648 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8649 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8650 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8651 if (target)
8652 return target;
8653 break;
8655 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8656 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8657 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8658 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8659 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8660 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8661 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8662 if (target)
8663 return target;
8664 break;
8666 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8667 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8668 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8669 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8670 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8671 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8672 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8673 if (target)
8674 return target;
8675 break;
8677 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8678 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8679 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8680 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8681 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8682 if (mode == VOIDmode)
8683 mode = TYPE_MODE (boolean_type_node);
8684 if (!target || !register_operand (target, mode))
8685 target = gen_reg_rtx (mode);
8687 mode = get_builtin_sync_mode
8688 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8689 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8690 if (target)
8691 return target;
8692 break;
8694 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8695 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8697 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8699 mode = get_builtin_sync_mode
8700 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8701 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8702 if (target)
8703 return target;
8704 break;
8706 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8707 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8708 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8711 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8712 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8713 if (target)
8714 return target;
8715 break;
8717 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8718 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8719 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8720 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8721 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8722 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8723 expand_builtin_sync_lock_release (mode, exp);
8724 return const0_rtx;
8726 case BUILT_IN_SYNC_SYNCHRONIZE:
8727 expand_builtin_sync_synchronize ();
8728 return const0_rtx;
8730 case BUILT_IN_ATOMIC_EXCHANGE_1:
8731 case BUILT_IN_ATOMIC_EXCHANGE_2:
8732 case BUILT_IN_ATOMIC_EXCHANGE_4:
8733 case BUILT_IN_ATOMIC_EXCHANGE_8:
8734 case BUILT_IN_ATOMIC_EXCHANGE_16:
8735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8736 target = expand_builtin_atomic_exchange (mode, exp, target);
8737 if (target)
8738 return target;
8739 break;
8741 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8742 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8743 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8744 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8745 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8747 unsigned int nargs, z;
8748 vec<tree, va_gc> *vec;
8750 mode =
8751 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8752 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8753 if (target)
8754 return target;
8756 /* If this is turned into an external library call, the weak parameter
8757 must be dropped to match the expected parameter list. */
8758 nargs = call_expr_nargs (exp);
8759 vec_alloc (vec, nargs - 1);
8760 for (z = 0; z < 3; z++)
8761 vec->quick_push (CALL_EXPR_ARG (exp, z));
8762 /* Skip the boolean weak parameter. */
8763 for (z = 4; z < 6; z++)
8764 vec->quick_push (CALL_EXPR_ARG (exp, z));
8765 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8766 break;
8769 case BUILT_IN_ATOMIC_LOAD_1:
8770 case BUILT_IN_ATOMIC_LOAD_2:
8771 case BUILT_IN_ATOMIC_LOAD_4:
8772 case BUILT_IN_ATOMIC_LOAD_8:
8773 case BUILT_IN_ATOMIC_LOAD_16:
8774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8775 target = expand_builtin_atomic_load (mode, exp, target);
8776 if (target)
8777 return target;
8778 break;
8780 case BUILT_IN_ATOMIC_STORE_1:
8781 case BUILT_IN_ATOMIC_STORE_2:
8782 case BUILT_IN_ATOMIC_STORE_4:
8783 case BUILT_IN_ATOMIC_STORE_8:
8784 case BUILT_IN_ATOMIC_STORE_16:
8785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8786 target = expand_builtin_atomic_store (mode, exp);
8787 if (target)
8788 return const0_rtx;
8789 break;
8791 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8792 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8793 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8794 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8795 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8797 enum built_in_function lib;
8798 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8799 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8800 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8801 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8802 ignore, lib);
8803 if (target)
8804 return target;
8805 break;
8807 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8808 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8809 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8810 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8811 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8813 enum built_in_function lib;
8814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8815 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8816 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8817 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8818 ignore, lib);
8819 if (target)
8820 return target;
8821 break;
8823 case BUILT_IN_ATOMIC_AND_FETCH_1:
8824 case BUILT_IN_ATOMIC_AND_FETCH_2:
8825 case BUILT_IN_ATOMIC_AND_FETCH_4:
8826 case BUILT_IN_ATOMIC_AND_FETCH_8:
8827 case BUILT_IN_ATOMIC_AND_FETCH_16:
8829 enum built_in_function lib;
8830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8831 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8832 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8833 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8834 ignore, lib);
8835 if (target)
8836 return target;
8837 break;
8839 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8840 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8841 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8842 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8843 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8845 enum built_in_function lib;
8846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8847 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8848 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8849 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8850 ignore, lib);
8851 if (target)
8852 return target;
8853 break;
8855 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8856 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8857 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8858 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8859 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8861 enum built_in_function lib;
8862 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8863 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8864 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8865 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8866 ignore, lib);
8867 if (target)
8868 return target;
8869 break;
8871 case BUILT_IN_ATOMIC_OR_FETCH_1:
8872 case BUILT_IN_ATOMIC_OR_FETCH_2:
8873 case BUILT_IN_ATOMIC_OR_FETCH_4:
8874 case BUILT_IN_ATOMIC_OR_FETCH_8:
8875 case BUILT_IN_ATOMIC_OR_FETCH_16:
8877 enum built_in_function lib;
8878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8879 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8880 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8881 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8882 ignore, lib);
8883 if (target)
8884 return target;
8885 break;
8887 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8888 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8889 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8890 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8891 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8892 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8893 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8894 ignore, BUILT_IN_NONE);
8895 if (target)
8896 return target;
8897 break;
8899 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8900 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8901 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8902 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8903 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8905 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8906 ignore, BUILT_IN_NONE);
8907 if (target)
8908 return target;
8909 break;
8911 case BUILT_IN_ATOMIC_FETCH_AND_1:
8912 case BUILT_IN_ATOMIC_FETCH_AND_2:
8913 case BUILT_IN_ATOMIC_FETCH_AND_4:
8914 case BUILT_IN_ATOMIC_FETCH_AND_8:
8915 case BUILT_IN_ATOMIC_FETCH_AND_16:
8916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8917 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8918 ignore, BUILT_IN_NONE);
8919 if (target)
8920 return target;
8921 break;
8923 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8924 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8925 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8926 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8927 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8928 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8929 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8930 ignore, BUILT_IN_NONE);
8931 if (target)
8932 return target;
8933 break;
8935 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8936 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8937 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8938 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8939 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8940 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8941 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8942 ignore, BUILT_IN_NONE);
8943 if (target)
8944 return target;
8945 break;
8947 case BUILT_IN_ATOMIC_FETCH_OR_1:
8948 case BUILT_IN_ATOMIC_FETCH_OR_2:
8949 case BUILT_IN_ATOMIC_FETCH_OR_4:
8950 case BUILT_IN_ATOMIC_FETCH_OR_8:
8951 case BUILT_IN_ATOMIC_FETCH_OR_16:
8952 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8953 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8954 ignore, BUILT_IN_NONE);
8955 if (target)
8956 return target;
8957 break;
8959 case BUILT_IN_ATOMIC_TEST_AND_SET:
8960 return expand_builtin_atomic_test_and_set (exp, target);
8962 case BUILT_IN_ATOMIC_CLEAR:
8963 return expand_builtin_atomic_clear (exp);
8965 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8966 return expand_builtin_atomic_always_lock_free (exp);
8968 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8969 target = expand_builtin_atomic_is_lock_free (exp);
8970 if (target)
8971 return target;
8972 break;
8974 case BUILT_IN_ATOMIC_THREAD_FENCE:
8975 expand_builtin_atomic_thread_fence (exp);
8976 return const0_rtx;
8978 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8979 expand_builtin_atomic_signal_fence (exp);
8980 return const0_rtx;
8982 case BUILT_IN_OBJECT_SIZE:
8983 return expand_builtin_object_size (exp);
8985 case BUILT_IN_MEMCPY_CHK:
8986 case BUILT_IN_MEMPCPY_CHK:
8987 case BUILT_IN_MEMMOVE_CHK:
8988 case BUILT_IN_MEMSET_CHK:
8989 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8990 if (target)
8991 return target;
8992 break;
8994 case BUILT_IN_STRCPY_CHK:
8995 case BUILT_IN_STPCPY_CHK:
8996 case BUILT_IN_STRNCPY_CHK:
8997 case BUILT_IN_STPNCPY_CHK:
8998 case BUILT_IN_STRCAT_CHK:
8999 case BUILT_IN_STRNCAT_CHK:
9000 case BUILT_IN_SNPRINTF_CHK:
9001 case BUILT_IN_VSNPRINTF_CHK:
9002 maybe_emit_chk_warning (exp, fcode);
9003 break;
9005 case BUILT_IN_SPRINTF_CHK:
9006 case BUILT_IN_VSPRINTF_CHK:
9007 maybe_emit_sprintf_chk_warning (exp, fcode);
9008 break;
9010 case BUILT_IN_FREE:
9011 if (warn_free_nonheap_object)
9012 maybe_emit_free_warning (exp);
9013 break;
9015 case BUILT_IN_THREAD_POINTER:
9016 return expand_builtin_thread_pointer (exp, target);
9018 case BUILT_IN_SET_THREAD_POINTER:
9019 expand_builtin_set_thread_pointer (exp);
9020 return const0_rtx;
9022 case BUILT_IN_ACC_ON_DEVICE:
9023 /* Do a library call if we failed to expand the builtin when
9024 folding. */
9025 break;
9027 case BUILT_IN_GOACC_PARLEVEL_ID:
9028 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9029 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9031 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9032 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9034 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9035 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9036 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9037 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9038 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9039 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9040 return expand_speculation_safe_value (mode, exp, target, ignore);
9042 default: /* Just do a library call for unknown builtins. */
9043 break;
9046 /* The switch statement above can drop through to cause the function
9047 to be called normally. */
9048 return expand_call (exp, target, ignore);
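/* [Editor's note: an illustrative sketch, not part of builtins.c.  The
   atomic fallback cases above rely on the identity that an _OP_fetch
   builtin can be emulated with the matching fetch_OP form by reapplying
   the operation to the fetched value.  Shown for AND using GCC's
   documented __atomic builtins; the helper name is hypothetical.]  */

static int
demo_and_fetch_via_fetch_and (int *p, int v)
{
  /* __atomic_fetch_and returns the value before the update...  */
  int old = __atomic_fetch_and (p, v, __ATOMIC_SEQ_CST);
  /* ...so applying AND once more yields __atomic_and_fetch's result.  */
  return old & v;
}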
9051 /* Determine whether a tree node represents a call to a built-in
9052 function. If the tree T is a call to a built-in function with
9053 the right number of arguments of the appropriate types, return
9054 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9055 Otherwise the return value is END_BUILTINS. */
9057 enum built_in_function
9058 builtin_mathfn_code (const_tree t)
9060 const_tree fndecl, arg, parmlist;
9061 const_tree argtype, parmtype;
9062 const_call_expr_arg_iterator iter;
9064 if (TREE_CODE (t) != CALL_EXPR)
9065 return END_BUILTINS;
9067 fndecl = get_callee_fndecl (t);
9068 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9069 return END_BUILTINS;
9071 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9072 init_const_call_expr_arg_iterator (t, &iter);
9073 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9075 /* If a function doesn't take a variable number of arguments,
9076 the last element in the list will have type `void'. */
9077 parmtype = TREE_VALUE (parmlist);
9078 if (VOID_TYPE_P (parmtype))
9080 if (more_const_call_expr_args_p (&iter))
9081 return END_BUILTINS;
9082 return DECL_FUNCTION_CODE (fndecl);
9085 if (! more_const_call_expr_args_p (&iter))
9086 return END_BUILTINS;
9088 arg = next_const_call_expr_arg (&iter);
9089 argtype = TREE_TYPE (arg);
9091 if (SCALAR_FLOAT_TYPE_P (parmtype))
9093 if (! SCALAR_FLOAT_TYPE_P (argtype))
9094 return END_BUILTINS;
9096 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9098 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9099 return END_BUILTINS;
9101 else if (POINTER_TYPE_P (parmtype))
9103 if (! POINTER_TYPE_P (argtype))
9104 return END_BUILTINS;
9106 else if (INTEGRAL_TYPE_P (parmtype))
9108 if (! INTEGRAL_TYPE_P (argtype))
9109 return END_BUILTINS;
9111 else
9112 return END_BUILTINS;
9115 /* Variable-length argument list. */
9116 return DECL_FUNCTION_CODE (fndecl);
9119 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9120 evaluate to a constant. */
9122 static tree
9123 fold_builtin_constant_p (tree arg)
9125 /* We return 1 for a numeric type that's known to be a constant
9126 value at compile-time or for an aggregate type that's a
9127 literal constant. */
9128 STRIP_NOPS (arg);
9130 /* If we know this is a constant, return the constant one. */
9131 if (CONSTANT_CLASS_P (arg)
9132 || (TREE_CODE (arg) == CONSTRUCTOR
9133 && TREE_CONSTANT (arg)))
9134 return integer_one_node;
9135 if (TREE_CODE (arg) == ADDR_EXPR)
9137 tree op = TREE_OPERAND (arg, 0);
9138 if (TREE_CODE (op) == STRING_CST
9139 || (TREE_CODE (op) == ARRAY_REF
9140 && integer_zerop (TREE_OPERAND (op, 1))
9141 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9142 return integer_one_node;
9145 /* If this expression has side effects, show we don't know it to be a
9146 constant. Likewise if it's a pointer or aggregate type, since in
9147 those cases we only want literals; those are only optimized
9148 when generating RTL, not later.
9149 And finally, if we are compiling an initializer, not code, we
9150 need to return a definite result now; there's not going to be any
9151 more optimization done. */
9152 if (TREE_SIDE_EFFECTS (arg)
9153 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9154 || POINTER_TYPE_P (TREE_TYPE (arg))
9155 || cfun == 0
9156 || folding_initializer
9157 || force_folding_builtin_constant_p)
9158 return integer_zero_node;
9160 return NULL_TREE;
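/* [Editor's note: a sketch of what the folding above means at the
   source level; not part of the original file, and the helper name is
   illustrative.]  */

static int
demo_constant_p (int x)
{
  int a = __builtin_constant_p (42);	/* folds to 1: literal constant */
  int b = __builtin_constant_p (x);	/* 0 here, unless the optimizer
					   later proves x constant */
  return a + b;
}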
9163 /* Create builtin_expect or builtin_expect_with_probability
9164 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9165 The Fortran FE can also produce builtin_expect with PREDICTOR as the
9166 third argument; builtin_expect_with_probability instead uses the third
9167 argument as the PROBABILITY value. */
9169 static tree
9170 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9171 tree predictor, tree probability)
9173 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9175 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9176 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9177 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9178 ret_type = TREE_TYPE (TREE_TYPE (fn));
9179 pred_type = TREE_VALUE (arg_types);
9180 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9182 pred = fold_convert_loc (loc, pred_type, pred);
9183 expected = fold_convert_loc (loc, expected_type, expected);
9185 if (probability)
9186 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9187 else
9188 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9189 predictor);
9191 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9192 build_int_cst (ret_type, 0));
9195 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9196 NULL_TREE if no simplification is possible. */
9198 tree
9199 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9200 tree arg3)
9202 tree inner, fndecl, inner_arg0;
9203 enum tree_code code;
9205 /* Distribute the expected value over short-circuiting operators.
9206 See through the cast from truthvalue_type_node to long. */
9207 inner_arg0 = arg0;
9208 while (CONVERT_EXPR_P (inner_arg0)
9209 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9210 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9211 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9213 /* If this is a builtin_expect within a builtin_expect, keep the
9214 inner one. See through a comparison against a constant. It
9215 might have been added to create a truthvalue. */
9216 inner = inner_arg0;
9218 if (COMPARISON_CLASS_P (inner)
9219 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9220 inner = TREE_OPERAND (inner, 0);
9222 if (TREE_CODE (inner) == CALL_EXPR
9223 && (fndecl = get_callee_fndecl (inner))
9224 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9225 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9226 return arg0;
9228 inner = inner_arg0;
9229 code = TREE_CODE (inner);
9230 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9232 tree op0 = TREE_OPERAND (inner, 0);
9233 tree op1 = TREE_OPERAND (inner, 1);
9234 arg1 = save_expr (arg1);
9236 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9237 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9238 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9240 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9243 /* If the argument isn't invariant then there's nothing else we can do. */
9244 if (!TREE_CONSTANT (inner_arg0))
9245 return NULL_TREE;
9247 /* If we expect that a comparison against the argument will fold to
9248 a constant, return the constant. In practice, this means a true
9249 constant or the address of a non-weak symbol. */
9250 inner = inner_arg0;
9251 STRIP_NOPS (inner);
9252 if (TREE_CODE (inner) == ADDR_EXPR)
9254 do
9256 inner = TREE_OPERAND (inner, 0);
9258 while (TREE_CODE (inner) == COMPONENT_REF
9259 || TREE_CODE (inner) == ARRAY_REF);
9260 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9261 return NULL_TREE;
9264 /* Otherwise, ARG0 already has the proper type for the return value. */
9265 return arg0;
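/* [Editor's note: an illustrative sketch, not part of the original
   file, of the distribution performed above; the function name is
   hypothetical.]  */

static int
demo_expect_distribute (int a, int b)
{
  /* __builtin_expect (a && b, 1) is folded roughly as if written:  */
  return __builtin_expect (a, 1) && __builtin_expect (b, 1);
}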
9268 /* Fold a call to __builtin_classify_type with argument ARG. */
9270 static tree
9271 fold_builtin_classify_type (tree arg)
9273 if (arg == 0)
9274 return build_int_cst (integer_type_node, no_type_class);
9276 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9279 /* Fold a call to __builtin_strlen with argument ARG. */
9281 static tree
9282 fold_builtin_strlen (location_t loc, tree type, tree arg)
9284 if (!validate_arg (arg, POINTER_TYPE))
9285 return NULL_TREE;
9286 else
9288 c_strlen_data lendata = { };
9289 tree len = c_strlen (arg, 0, &lendata);
9291 if (len)
9292 return fold_convert_loc (loc, type, len);
9294 if (!lendata.decl)
9295 c_strlen (arg, 1, &lendata);
9297 if (lendata.decl)
9299 if (EXPR_HAS_LOCATION (arg))
9300 loc = EXPR_LOCATION (arg);
9301 else if (loc == UNKNOWN_LOCATION)
9302 loc = input_location;
9303 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9306 return NULL_TREE;
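/* [Editor's note: a sketch, not from this file, of the effect of the
   c_strlen-based folding above on a constant argument -- the call
   disappears at compile time.  __SIZE_TYPE__ is GCC's predefined
   size_t spelling, so no header is needed.]  */

static __SIZE_TYPE__
demo_strlen_fold (void)
{
  return __builtin_strlen ("hello");	/* folds to the constant 5 */
}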
9310 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9312 static tree
9313 fold_builtin_inf (location_t loc, tree type, int warn)
9315 REAL_VALUE_TYPE real;
9317 /* __builtin_inff is intended to be usable to define INFINITY on all
9318 targets. If an infinity is not available, INFINITY expands "to a
9319 positive constant of type float that overflows at translation
9320 time", footnote "In this case, using INFINITY will violate the
9321 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9322 Thus we pedwarn to ensure this constraint violation is
9323 diagnosed. */
9324 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9325 pedwarn (loc, 0, "target format does not support infinity");
9327 real_inf (&real);
9328 return build_real (type, real);
9331 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9332 NULL_TREE if no simplification can be made. */
9334 static tree
9335 fold_builtin_sincos (location_t loc,
9336 tree arg0, tree arg1, tree arg2)
9338 tree type;
9339 tree fndecl, call = NULL_TREE;
9341 if (!validate_arg (arg0, REAL_TYPE)
9342 || !validate_arg (arg1, POINTER_TYPE)
9343 || !validate_arg (arg2, POINTER_TYPE))
9344 return NULL_TREE;
9346 type = TREE_TYPE (arg0);
9348 /* Calculate the result when the argument is a constant. */
9349 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9350 if (fn == END_BUILTINS)
9351 return NULL_TREE;
9353 /* Canonicalize sincos to cexpi. */
9354 if (TREE_CODE (arg0) == REAL_CST)
9356 tree complex_type = build_complex_type (type);
9357 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9359 if (!call)
9361 if (!targetm.libc_has_function (function_c99_math_complex)
9362 || !builtin_decl_implicit_p (fn))
9363 return NULL_TREE;
9364 fndecl = builtin_decl_explicit (fn);
9365 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9366 call = builtin_save_expr (call);
9369 tree ptype = build_pointer_type (type);
9370 arg1 = fold_convert (ptype, arg1);
9371 arg2 = fold_convert (ptype, arg2);
9372 return build2 (COMPOUND_EXPR, void_type_node,
9373 build2 (MODIFY_EXPR, void_type_node,
9374 build_fold_indirect_ref_loc (loc, arg1),
9375 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9376 build2 (MODIFY_EXPR, void_type_node,
9377 build_fold_indirect_ref_loc (loc, arg2),
9378 fold_build1_loc (loc, REALPART_EXPR, type, call)));
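/* [Editor's note: an illustrative sketch, not part of the original
   file, of the canonicalization above.  It rests on
   cexpi (x) == cos (x) + i*sin (x); standard cexp stands in for the
   internal cexpi, and __builtin_complex/__complex__ (GNU extensions)
   avoid needing <complex.h>.]  */

static void
demo_sincos_via_cexpi (double x, double *s, double *c)
{
  __complex__ double e = __builtin_cexp (__builtin_complex (0.0, x));
  *s = __builtin_cimag (e);	/* sin (x) */
  *c = __builtin_creal (e);	/* cos (x) */
}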
9381 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9382 Return NULL_TREE if no simplification can be made. */
9384 static tree
9385 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9387 if (!validate_arg (arg1, POINTER_TYPE)
9388 || !validate_arg (arg2, POINTER_TYPE)
9389 || !validate_arg (len, INTEGER_TYPE))
9390 return NULL_TREE;
9392 /* If the LEN parameter is zero, return zero. */
9393 if (integer_zerop (len))
9394 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9395 arg1, arg2);
9397 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9398 if (operand_equal_p (arg1, arg2, 0))
9399 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9401 /* If len parameter is one, return an expression corresponding to
9402 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9403 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9405 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9406 tree cst_uchar_ptr_node
9407 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9409 tree ind1
9410 = fold_convert_loc (loc, integer_type_node,
9411 build1 (INDIRECT_REF, cst_uchar_node,
9412 fold_convert_loc (loc,
9413 cst_uchar_ptr_node,
9414 arg1)));
9415 tree ind2
9416 = fold_convert_loc (loc, integer_type_node,
9417 build1 (INDIRECT_REF, cst_uchar_node,
9418 fold_convert_loc (loc,
9419 cst_uchar_ptr_node,
9420 arg2)));
9421 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9424 return NULL_TREE;
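/* [Editor's note: the length-1 transformation above at the source
   level; a sketch, not part of the original file.]  */

static int
demo_memcmp_len1 (const void *a, const void *b)
{
  /* memcmp (a, b, 1) folds to a difference of the first bytes.  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}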
9427 /* Fold a call to builtin isascii with argument ARG. */
9429 static tree
9430 fold_builtin_isascii (location_t loc, tree arg)
9432 if (!validate_arg (arg, INTEGER_TYPE))
9433 return NULL_TREE;
9434 else
9436 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9437 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9438 build_int_cst (integer_type_node,
9439 ~ (unsigned HOST_WIDE_INT) 0x7f));
9440 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9441 arg, integer_zero_node);
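/* [Editor's note: a sketch of the branch-free form produced above; not
   part of the original file.]  */

static int
demo_isascii_fold (int c)
{
  return (c & ~0x7f) == 0;	/* isascii (c) */
}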
9445 /* Fold a call to builtin toascii with argument ARG. */
9447 static tree
9448 fold_builtin_toascii (location_t loc, tree arg)
9450 if (!validate_arg (arg, INTEGER_TYPE))
9451 return NULL_TREE;
9453 /* Transform toascii(c) -> (c & 0x7f). */
9454 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9455 build_int_cst (integer_type_node, 0x7f));
9458 /* Fold a call to builtin isdigit with argument ARG. */
9460 static tree
9461 fold_builtin_isdigit (location_t loc, tree arg)
9463 if (!validate_arg (arg, INTEGER_TYPE))
9464 return NULL_TREE;
9465 else
9467 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9468 /* According to the C standard, isdigit is unaffected by locale.
9469 However, it definitely is affected by the target character set. */
9470 unsigned HOST_WIDE_INT target_digit0
9471 = lang_hooks.to_target_charset ('0');
9473 if (target_digit0 == 0)
9474 return NULL_TREE;
9476 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9477 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9478 build_int_cst (unsigned_type_node, target_digit0));
9479 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9480 build_int_cst (unsigned_type_node, 9));
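/* [Editor's note: a sketch of the folded form above, not part of the
   original file.  The single unsigned compare covers both c < '0' and
   c > '9', because a value below '0' wraps to a huge unsigned one.]  */

static int
demo_isdigit_fold (int c)
{
  return (unsigned) c - '0' <= 9;	/* isdigit (c) */
}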
9484 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9486 static tree
9487 fold_builtin_fabs (location_t loc, tree arg, tree type)
9489 if (!validate_arg (arg, REAL_TYPE))
9490 return NULL_TREE;
9492 arg = fold_convert_loc (loc, type, arg);
9493 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9496 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9498 static tree
9499 fold_builtin_abs (location_t loc, tree arg, tree type)
9501 if (!validate_arg (arg, INTEGER_TYPE))
9502 return NULL_TREE;
9504 arg = fold_convert_loc (loc, type, arg);
9505 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9508 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9510 static tree
9511 fold_builtin_carg (location_t loc, tree arg, tree type)
9513 if (validate_arg (arg, COMPLEX_TYPE)
9514 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9516 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9518 if (atan2_fn)
9520 tree new_arg = builtin_save_expr (arg);
9521 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9522 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9523 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9527 return NULL_TREE;
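/* [Editor's note: the carg transformation above at the source level; a
   sketch, not part of the original file.]  */

static double
demo_carg_fold (double re, double im)
{
  return __builtin_atan2 (im, re);	/* carg (re + im*i) */
}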
9530 /* Fold a call to builtin frexp; we can assume the base is 2. */
9532 static tree
9533 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9535 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9536 return NULL_TREE;
9538 STRIP_NOPS (arg0);
9540 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9541 return NULL_TREE;
9543 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9545 /* Proceed if a valid pointer type was passed in. */
9546 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9548 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9549 tree frac, exp;
9551 switch (value->cl)
9553 case rvc_zero:
9554 /* For +-0, return (*exp = 0, +-0). */
9555 exp = integer_zero_node;
9556 frac = arg0;
9557 break;
9558 case rvc_nan:
9559 case rvc_inf:
9560 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9561 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9562 case rvc_normal:
9564 /* Since the frexp function always expects base 2, and in
9565 GCC normalized significands are already in the range
9566 [0.5, 1.0), we have exactly what frexp wants. */
9567 REAL_VALUE_TYPE frac_rvt = *value;
9568 SET_REAL_EXP (&frac_rvt, 0);
9569 frac = build_real (rettype, frac_rvt);
9570 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9572 break;
9573 default:
9574 gcc_unreachable ();
9577 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9578 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9579 TREE_SIDE_EFFECTS (arg1) = 1;
9580 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9583 return NULL_TREE;
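/* [Editor's note: a worked instance of the constant folding above, as
   a sketch (not part of the original file).  8.0 == 0.5 * 2^4, so the
   significand is 0.5 and the stored exponent is 4.]  */

static double
demo_frexp_fold (int *e)
{
  return __builtin_frexp (8.0, e);	/* folds to (*e = 4, 0.5) */
}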
9586 /* Fold a call to builtin modf. */
9588 static tree
9589 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9591 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9592 return NULL_TREE;
9594 STRIP_NOPS (arg0);
9596 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9597 return NULL_TREE;
9599 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9601 /* Proceed if a valid pointer type was passed in. */
9602 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9604 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9605 REAL_VALUE_TYPE trunc, frac;
9607 switch (value->cl)
9609 case rvc_nan:
9610 case rvc_zero:
9611 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9612 trunc = frac = *value;
9613 break;
9614 case rvc_inf:
9615 /* For +-Inf, return (*arg1 = arg0, +-0). */
9616 frac = dconst0;
9617 frac.sign = value->sign;
9618 trunc = *value;
9619 break;
9620 case rvc_normal:
9621 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9622 real_trunc (&trunc, VOIDmode, value);
9623 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9624 /* If the original number was negative and already
9625 integral, then the fractional part is -0.0. */
9626 if (value->sign && frac.cl == rvc_zero)
9627 frac.sign = value->sign;
9628 break;
9631 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9632 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9633 build_real (rettype, trunc));
9634 TREE_SIDE_EFFECTS (arg1) = 1;
9635 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9636 build_real (rettype, frac));
9639 return NULL_TREE;
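/* [Editor's note: a worked instance of the modf folding above; a
   sketch, not part of the original file.]  */

static double
demo_modf_fold (double *ip)
{
  return __builtin_modf (2.5, ip);	/* folds to (*ip = 2.0, 0.5) */
}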
9642 /* Given a location LOC, an interclass builtin function decl FNDECL
9643 and its single argument ARG, return a folded expression computing
9644 the same, or NULL_TREE if we either couldn't or didn't want to fold
9645 (the latter happens if there's an RTL instruction available). */
9647 static tree
9648 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9650 machine_mode mode;
9652 if (!validate_arg (arg, REAL_TYPE))
9653 return NULL_TREE;
9655 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9656 return NULL_TREE;
9658 mode = TYPE_MODE (TREE_TYPE (arg));
9660 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9662 /* If there is no optab, try generic code. */
9663 switch (DECL_FUNCTION_CODE (fndecl))
9665 tree result;
9667 CASE_FLT_FN (BUILT_IN_ISINF):
9669 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9670 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9671 tree type = TREE_TYPE (arg);
9672 REAL_VALUE_TYPE r;
9673 char buf[128];
9675 if (is_ibm_extended)
9677 /* NaN and Inf are encoded in the high-order double value
9678 only. The low-order value is not significant. */
9679 type = double_type_node;
9680 mode = DFmode;
9681 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9683 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9684 real_from_string (&r, buf);
9685 result = build_call_expr (isgr_fn, 2,
9686 fold_build1_loc (loc, ABS_EXPR, type, arg),
9687 build_real (type, r));
9688 return result;
9690 CASE_FLT_FN (BUILT_IN_FINITE):
9691 case BUILT_IN_ISFINITE:
9693 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9694 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9695 tree type = TREE_TYPE (arg);
9696 REAL_VALUE_TYPE r;
9697 char buf[128];
9699 if (is_ibm_extended)
9701 /* NaN and Inf are encoded in the high-order double value
9702 only. The low-order value is not significant. */
9703 type = double_type_node;
9704 mode = DFmode;
9705 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9707 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9708 real_from_string (&r, buf);
9709 result = build_call_expr (isle_fn, 2,
9710 fold_build1_loc (loc, ABS_EXPR, type, arg),
9711 build_real (type, r));
9712 /*result = fold_build2_loc (loc, UNGT_EXPR,
9713 TREE_TYPE (TREE_TYPE (fndecl)),
9714 fold_build1_loc (loc, ABS_EXPR, type, arg),
9715 build_real (type, r));
9716 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9717 TREE_TYPE (TREE_TYPE (fndecl)),
9718 result);*/
9719 return result;
9721 case BUILT_IN_ISNORMAL:
9723 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9724 islessequal(fabs(x),DBL_MAX). */
9725 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9726 tree type = TREE_TYPE (arg);
9727 tree orig_arg, max_exp, min_exp;
9728 machine_mode orig_mode = mode;
9729 REAL_VALUE_TYPE rmax, rmin;
9730 char buf[128];
9732 orig_arg = arg = builtin_save_expr (arg);
9733 if (is_ibm_extended)
9735 /* Use double to test the normal range of IBM extended
9736 precision. Emin for IBM extended precision is
9737 different to emin for IEEE double, being 53 higher
9738 since the low double exponent is at least 53 lower
9739 than the high double exponent. */
9740 type = double_type_node;
9741 mode = DFmode;
9742 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9744 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9746 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9747 real_from_string (&rmax, buf);
9748 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9749 real_from_string (&rmin, buf);
9750 max_exp = build_real (type, rmax);
9751 min_exp = build_real (type, rmin);
9753 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9754 if (is_ibm_extended)
9756 /* Testing the high end of the range is done just using
9757 the high double, using the same test as isfinite().
9758 For the subnormal end of the range we first test the
9759 high double, then if its magnitude is equal to the
9760 limit of 0x1p-969, we test whether the low double is
9761 non-zero and opposite sign to the high double. */
9762 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9763 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9764 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9765 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9766 arg, min_exp);
9767 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9768 complex_double_type_node, orig_arg);
9769 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9770 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9771 tree zero = build_real (type, dconst0);
9772 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9773 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9774 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9775 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9776 fold_build3 (COND_EXPR,
9777 integer_type_node,
9778 hilt, logt, lolt));
9779 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9780 eq_min, ok_lo);
9781 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9782 gt_min, eq_min);
9784 else
9786 tree const isge_fn
9787 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9788 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9790 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9791 max_exp, min_exp);
9792 return result;
9794 default:
9795 break;
9798 return NULL_TREE;
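/* [Editor's note: a sketch of the generic isinf lowering above,
   written directly in C; not part of the original file.  __DBL_MAX__
   is GCC's predefined largest finite double, so no <float.h> is
   needed.]  */

static int
demo_isinf_fold (double x)
{
  /* isinf (x) -> isgreater (fabs (x), DBL_MAX).  */
  return __builtin_isgreater (__builtin_fabs (x), __DBL_MAX__);
}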
9801 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9802 ARG is the argument for the call. */
9804 static tree
9805 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9807 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9809 if (!validate_arg (arg, REAL_TYPE))
9810 return NULL_TREE;
9812 switch (builtin_index)
9814 case BUILT_IN_ISINF:
9815 if (!HONOR_INFINITIES (arg))
9816 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9818 return NULL_TREE;
9820 case BUILT_IN_ISINF_SIGN:
9822 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9823 /* In a boolean context, GCC will fold the inner COND_EXPR to
9824 1. So e.g. "if (isinf_sign(x))" would be folded to just
9825 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9826 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9827 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9828 tree tmp = NULL_TREE;
9830 arg = builtin_save_expr (arg);
9832 if (signbit_fn && isinf_fn)
9834 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9835 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9837 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9838 signbit_call, integer_zero_node);
9839 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9840 isinf_call, integer_zero_node);
9842 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9843 integer_minus_one_node, integer_one_node);
9844 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9845 isinf_call, tmp,
9846 integer_zero_node);
9849 return tmp;
9852 case BUILT_IN_ISFINITE:
9853 if (!HONOR_NANS (arg)
9854 && !HONOR_INFINITIES (arg))
9855 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9857 return NULL_TREE;
9859 case BUILT_IN_ISNAN:
9860 if (!HONOR_NANS (arg))
9861 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9864 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9865 if (is_ibm_extended)
9867 /* NaN and Inf are encoded in the high-order double value
9868 only. The low-order value is not significant. */
9869 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9872 arg = builtin_save_expr (arg);
9873 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9875 default:
9876 gcc_unreachable ();
9880 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9881 This builtin will generate code to return the appropriate floating
9882 point classification depending on the value of the floating point
9883 number passed in. The possible return values must be supplied as
9884 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9885 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9886 one floating-point argument, which is "type generic". */
9888 static tree
9889 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9891 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9892 arg, type, res, tmp;
9893 machine_mode mode;
9894 REAL_VALUE_TYPE r;
9895 char buf[128];
9897 /* Verify the required arguments in the original call. */
9898 if (nargs != 6
9899 || !validate_arg (args[0], INTEGER_TYPE)
9900 || !validate_arg (args[1], INTEGER_TYPE)
9901 || !validate_arg (args[2], INTEGER_TYPE)
9902 || !validate_arg (args[3], INTEGER_TYPE)
9903 || !validate_arg (args[4], INTEGER_TYPE)
9904 || !validate_arg (args[5], REAL_TYPE))
9905 return NULL_TREE;
9907 fp_nan = args[0];
9908 fp_infinite = args[1];
9909 fp_normal = args[2];
9910 fp_subnormal = args[3];
9911 fp_zero = args[4];
9912 arg = args[5];
9913 type = TREE_TYPE (arg);
9914 mode = TYPE_MODE (type);
9915 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9917 /* fpclassify(x) ->
9918 isnan(x) ? FP_NAN :
9919 (fabs(x) == Inf ? FP_INFINITE :
9920 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9921 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9923 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9924 build_real (type, dconst0));
9925 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9926 tmp, fp_zero, fp_subnormal);
9928 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9929 real_from_string (&r, buf);
9930 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9931 arg, build_real (type, r));
9932 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9934 if (HONOR_INFINITIES (mode))
9936 real_inf (&r);
9937 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9938 build_real (type, r));
9939 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9940 fp_infinite, res);
9943 if (HONOR_NANS (mode))
9945 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9946 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9949 return res;
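/* [Editor's note: the decision chain built above, written directly in
   C as a sketch; not part of the original file.  The FP_* class values
   are taken as parameters, and __DBL_MIN__ is GCC's predefined
   smallest normal double.]  */

static int
demo_fpclassify (double x, int fp_nan, int fp_inf, int fp_normal,
		 int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  return __builtin_isnan (x) ? fp_nan
	 : __builtin_isinf (x) ? fp_inf
	 : ax >= __DBL_MIN__ ? fp_normal
	 : x == 0 ? fp_zero : fp_subnormal;
}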
9952 /* Fold a call to an unordered comparison function such as
9953 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9954 being called and ARG0 and ARG1 are the arguments for the call.
9955 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9956 the opposite of the desired result. UNORDERED_CODE is used
9957 for modes that can hold NaNs and ORDERED_CODE is used for
9958 the rest. */
9960 static tree
9961 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9962 enum tree_code unordered_code,
9963 enum tree_code ordered_code)
9965 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9966 enum tree_code code;
9967 tree type0, type1;
9968 enum tree_code code0, code1;
9969 tree cmp_type = NULL_TREE;
9971 type0 = TREE_TYPE (arg0);
9972 type1 = TREE_TYPE (arg1);
9974 code0 = TREE_CODE (type0);
9975 code1 = TREE_CODE (type1);
9977 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9978 /* Choose the wider of two real types. */
9979 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9980 ? type0 : type1;
9981 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9982 cmp_type = type0;
9983 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9984 cmp_type = type1;
9986 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9987 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9989 if (unordered_code == UNORDERED_EXPR)
9991 if (!HONOR_NANS (arg0))
9992 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9993 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9996 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9997 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9998 fold_build2_loc (loc, code, type, arg0, arg1));
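/* [Editor's note: a sketch of the inversion used above for isgreater;
   not part of the original file.  The isunordered guard short-circuits,
   so no floating-point exception is raised for NaN operands.]  */

static int
demo_isgreater_fold (double x, double y)
{
  /* isgreater (x, y) is the negation of "unordered or x <= y".  */
  return !(__builtin_isunordered (x, y) || x <= y);
}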
10001 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10002 arithmetic if it can never overflow, or into internal functions that
10003 return both the result of the arithmetic and an overflow flag in
10004 a complex integer result, or some other check for overflow.
10005 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10006 checking part of that. */
10008 static tree
10009 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10010 tree arg0, tree arg1, tree arg2)
10012 enum internal_fn ifn = IFN_LAST;
10013 /* The code of the expression corresponding to the built-in. */
10014 enum tree_code opcode = ERROR_MARK;
10015 bool ovf_only = false;
10017 switch (fcode)
10019 case BUILT_IN_ADD_OVERFLOW_P:
10020 ovf_only = true;
10021 /* FALLTHRU */
10022 case BUILT_IN_ADD_OVERFLOW:
10023 case BUILT_IN_SADD_OVERFLOW:
10024 case BUILT_IN_SADDL_OVERFLOW:
10025 case BUILT_IN_SADDLL_OVERFLOW:
10026 case BUILT_IN_UADD_OVERFLOW:
10027 case BUILT_IN_UADDL_OVERFLOW:
10028 case BUILT_IN_UADDLL_OVERFLOW:
10029 opcode = PLUS_EXPR;
10030 ifn = IFN_ADD_OVERFLOW;
10031 break;
10032 case BUILT_IN_SUB_OVERFLOW_P:
10033 ovf_only = true;
10034 /* FALLTHRU */
10035 case BUILT_IN_SUB_OVERFLOW:
10036 case BUILT_IN_SSUB_OVERFLOW:
10037 case BUILT_IN_SSUBL_OVERFLOW:
10038 case BUILT_IN_SSUBLL_OVERFLOW:
10039 case BUILT_IN_USUB_OVERFLOW:
10040 case BUILT_IN_USUBL_OVERFLOW:
10041 case BUILT_IN_USUBLL_OVERFLOW:
10042 opcode = MINUS_EXPR;
10043 ifn = IFN_SUB_OVERFLOW;
10044 break;
10045 case BUILT_IN_MUL_OVERFLOW_P:
10046 ovf_only = true;
10047 /* FALLTHRU */
10048 case BUILT_IN_MUL_OVERFLOW:
10049 case BUILT_IN_SMUL_OVERFLOW:
10050 case BUILT_IN_SMULL_OVERFLOW:
10051 case BUILT_IN_SMULLL_OVERFLOW:
10052 case BUILT_IN_UMUL_OVERFLOW:
10053 case BUILT_IN_UMULL_OVERFLOW:
10054 case BUILT_IN_UMULLL_OVERFLOW:
10055 opcode = MULT_EXPR;
10056 ifn = IFN_MUL_OVERFLOW;
10057 break;
10058 default:
10059 gcc_unreachable ();
10062 /* For the "generic" overloads, the first two arguments can have different
10063 types and the last argument determines the target type to use to check
10064 for overflow. The arguments of the other overloads all have the same
10065 type. */
10066 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10068 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10069 arguments are constant, attempt to fold the built-in call into a constant
10070 expression indicating whether or not it detected an overflow. */
10071 if (ovf_only
10072 && TREE_CODE (arg0) == INTEGER_CST
10073 && TREE_CODE (arg1) == INTEGER_CST)
10074 /* Perform the computation in the target type and check for overflow. */
10075 return omit_one_operand_loc (loc, boolean_type_node,
10076 arith_overflowed_p (opcode, type, arg0, arg1)
10077 ? boolean_true_node : boolean_false_node,
10078 arg2);
10080 tree intres, ovfres;
10081 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10083 intres = fold_binary_loc (loc, opcode, type,
10084 fold_convert_loc (loc, type, arg0),
10085 fold_convert_loc (loc, type, arg1));
10086 if (TREE_OVERFLOW (intres))
10087 intres = drop_tree_overflow (intres);
10088 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10089 ? boolean_true_node : boolean_false_node);
10091 else
10093 tree ctype = build_complex_type (type);
10094 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10095 arg0, arg1);
10096 tree tgt = save_expr (call);
10097 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10098 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10099 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10102 if (ovf_only)
10103 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10105 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10106 tree store
10107 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10108 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
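/* [Editor's note: the user-level behaviour of the builtins folded
   above, as a sketch; not part of the original file.]  */

static int
demo_add_overflow (int a, int b, int *res)
{
  /* Returns nonzero iff the signed addition overflows; *res receives
     the wrapped result either way.  With constant operands the whole
     call collapses to a constant, as done in the folder above.  */
  return __builtin_add_overflow (a, b, res);
}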
10111 /* Fold a call to __builtin_FILE to a constant string. */
10113 static inline tree
10114 fold_builtin_FILE (location_t loc)
10116 if (const char *fname = LOCATION_FILE (loc))
10118 /* The documentation says this builtin is equivalent to the preprocessor
10119 __FILE__ macro so it appears appropriate to use the same file prefix
10120 mappings. */
10121 fname = remap_macro_filename (fname);
10122 return build_string_literal (strlen (fname) + 1, fname);
10125 return build_string_literal (1, "");
10128 /* Fold a call to __builtin_FUNCTION to a constant string. */
10130 static inline tree
10131 fold_builtin_FUNCTION ()
10133 const char *name = "";
10135 if (current_function_decl)
10136 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10138 return build_string_literal (strlen (name) + 1, name);
10141 /* Fold a call to __builtin_LINE to an integer constant. */
10143 static inline tree
10144 fold_builtin_LINE (location_t loc, tree type)
10146 return build_int_cst (type, LOCATION_LINE (loc));
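/* [Editor's note: a sketch of the three location builtins folded
   above; each collapses to a compile-time constant at the call site.
   Not part of the original file.]  */

static const char *
demo_location (int *line)
{
  *line = __builtin_LINE ();	/* integer constant, like __LINE__ */
  return __builtin_FILE ();	/* constant string, like __FILE__ */
}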
10149 /* Fold a call to built-in function FNDECL with 0 arguments.
10150 This function returns NULL_TREE if no simplification was possible. */
10152 static tree
10153 fold_builtin_0 (location_t loc, tree fndecl)
10155 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10156 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10157 switch (fcode)
10159 case BUILT_IN_FILE:
10160 return fold_builtin_FILE (loc);
10162 case BUILT_IN_FUNCTION:
10163 return fold_builtin_FUNCTION ();
10165 case BUILT_IN_LINE:
10166 return fold_builtin_LINE (loc, type);
10168 CASE_FLT_FN (BUILT_IN_INF):
10169 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10170 case BUILT_IN_INFD32:
10171 case BUILT_IN_INFD64:
10172 case BUILT_IN_INFD128:
10173 return fold_builtin_inf (loc, type, true);
10175 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10176 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10177 return fold_builtin_inf (loc, type, false);
10179 case BUILT_IN_CLASSIFY_TYPE:
10180 return fold_builtin_classify_type (NULL_TREE);
10182 default:
10183 break;
10185 return NULL_TREE;
10188 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10189 This function returns NULL_TREE if no simplification was possible. */
10191 static tree
10192 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10194 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10195 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10197 if (TREE_CODE (arg0) == ERROR_MARK)
10198 return NULL_TREE;
10200 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10201 return ret;
10203 switch (fcode)
10205 case BUILT_IN_CONSTANT_P:
10207 tree val = fold_builtin_constant_p (arg0);
10209 /* Gimplification will pull the CALL_EXPR for the builtin out of
10210 an if condition. When not optimizing, we'll not CSE it back.
10211 To avoid regressions in the form of link errors, return false now. */
10212 if (!val && !optimize)
10213 val = integer_zero_node;
10215 return val;
10218 case BUILT_IN_CLASSIFY_TYPE:
10219 return fold_builtin_classify_type (arg0);
10221 case BUILT_IN_STRLEN:
10222 return fold_builtin_strlen (loc, type, arg0);
10224 CASE_FLT_FN (BUILT_IN_FABS):
10225 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10226 case BUILT_IN_FABSD32:
10227 case BUILT_IN_FABSD64:
10228 case BUILT_IN_FABSD128:
10229 return fold_builtin_fabs (loc, arg0, type);
10231 case BUILT_IN_ABS:
10232 case BUILT_IN_LABS:
10233 case BUILT_IN_LLABS:
10234 case BUILT_IN_IMAXABS:
10235 return fold_builtin_abs (loc, arg0, type);
10237 CASE_FLT_FN (BUILT_IN_CONJ):
10238 if (validate_arg (arg0, COMPLEX_TYPE)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10240 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10241 break;
10243 CASE_FLT_FN (BUILT_IN_CREAL):
10244 if (validate_arg (arg0, COMPLEX_TYPE)
10245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10246 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10247 break;
10249 CASE_FLT_FN (BUILT_IN_CIMAG):
10250 if (validate_arg (arg0, COMPLEX_TYPE)
10251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10252 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10253 break;
10255 CASE_FLT_FN (BUILT_IN_CARG):
10256 return fold_builtin_carg (loc, arg0, type);
10258 case BUILT_IN_ISASCII:
10259 return fold_builtin_isascii (loc, arg0);
10261 case BUILT_IN_TOASCII:
10262 return fold_builtin_toascii (loc, arg0);
10264 case BUILT_IN_ISDIGIT:
10265 return fold_builtin_isdigit (loc, arg0);
10267 CASE_FLT_FN (BUILT_IN_FINITE):
10268 case BUILT_IN_FINITED32:
10269 case BUILT_IN_FINITED64:
10270 case BUILT_IN_FINITED128:
10271 case BUILT_IN_ISFINITE:
10273 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10274 if (ret)
10275 return ret;
10276 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10279 CASE_FLT_FN (BUILT_IN_ISINF):
10280 case BUILT_IN_ISINFD32:
10281 case BUILT_IN_ISINFD64:
10282 case BUILT_IN_ISINFD128:
10284 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10285 if (ret)
10286 return ret;
10287 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10290 case BUILT_IN_ISNORMAL:
10291 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10293 case BUILT_IN_ISINF_SIGN:
10294 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10296 CASE_FLT_FN (BUILT_IN_ISNAN):
10297 case BUILT_IN_ISNAND32:
10298 case BUILT_IN_ISNAND64:
10299 case BUILT_IN_ISNAND128:
10300 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10302 case BUILT_IN_FREE:
10303 if (integer_zerop (arg0))
10304 return build_empty_stmt (loc);
10305 break;
10307 default:
10308 break;
10311 return NULL_TREE;
10315 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10316 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10317 if no simplification was possible. */
10319 static tree
10320 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10322 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10323 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10325 if (TREE_CODE (arg0) == ERROR_MARK
10326 || TREE_CODE (arg1) == ERROR_MARK)
10327 return NULL_TREE;
10329 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10330 return ret;
10332 switch (fcode)
10334 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10335 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10336 if (validate_arg (arg0, REAL_TYPE)
10337 && validate_arg (arg1, POINTER_TYPE))
10338 return do_mpfr_lgamma_r (arg0, arg1, type);
10339 break;
10341 CASE_FLT_FN (BUILT_IN_FREXP):
10342 return fold_builtin_frexp (loc, arg0, arg1, type);
10344 CASE_FLT_FN (BUILT_IN_MODF):
10345 return fold_builtin_modf (loc, arg0, arg1, type);
10347 case BUILT_IN_STRSPN:
10348 return fold_builtin_strspn (loc, expr, arg0, arg1);
10350 case BUILT_IN_STRCSPN:
10351 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10353 case BUILT_IN_STRPBRK:
10354 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10356 case BUILT_IN_EXPECT:
10357 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10359 case BUILT_IN_ISGREATER:
10360 return fold_builtin_unordered_cmp (loc, fndecl,
10361 arg0, arg1, UNLE_EXPR, LE_EXPR);
10362 case BUILT_IN_ISGREATEREQUAL:
10363 return fold_builtin_unordered_cmp (loc, fndecl,
10364 arg0, arg1, UNLT_EXPR, LT_EXPR);
10365 case BUILT_IN_ISLESS:
10366 return fold_builtin_unordered_cmp (loc, fndecl,
10367 arg0, arg1, UNGE_EXPR, GE_EXPR);
10368 case BUILT_IN_ISLESSEQUAL:
10369 return fold_builtin_unordered_cmp (loc, fndecl,
10370 arg0, arg1, UNGT_EXPR, GT_EXPR);
10371 case BUILT_IN_ISLESSGREATER:
10372 return fold_builtin_unordered_cmp (loc, fndecl,
10373 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10374 case BUILT_IN_ISUNORDERED:
10375 return fold_builtin_unordered_cmp (loc, fndecl,
10376 arg0, arg1, UNORDERED_EXPR,
10377 NOP_EXPR);
10379 /* We do the folding for va_start in the expander. */
10380 case BUILT_IN_VA_START:
10381 break;
10383 case BUILT_IN_OBJECT_SIZE:
10384 return fold_builtin_object_size (arg0, arg1);
10386 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10387 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10389 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10390 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10392 default:
10393 break;
10395 return NULL_TREE;
10398 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10399 and ARG2.
10400 This function returns NULL_TREE if no simplification was possible. */
10402 static tree
10403 fold_builtin_3 (location_t loc, tree fndecl,
10404 tree arg0, tree arg1, tree arg2)
10406 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10407 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10409 if (TREE_CODE (arg0) == ERROR_MARK
10410 || TREE_CODE (arg1) == ERROR_MARK
10411 || TREE_CODE (arg2) == ERROR_MARK)
10412 return NULL_TREE;
10414 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10415 arg0, arg1, arg2))
10416 return ret;
10418 switch (fcode)
10421 CASE_FLT_FN (BUILT_IN_SINCOS):
10422 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10424 CASE_FLT_FN (BUILT_IN_REMQUO):
10425 if (validate_arg (arg0, REAL_TYPE)
10426 && validate_arg (arg1, REAL_TYPE)
10427 && validate_arg (arg2, POINTER_TYPE))
10428 return do_mpfr_remquo (arg0, arg1, arg2);
10429 break;
10431 case BUILT_IN_MEMCMP:
10432 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10434 case BUILT_IN_EXPECT:
10435 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10437 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10438 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10440 case BUILT_IN_ADD_OVERFLOW:
10441 case BUILT_IN_SUB_OVERFLOW:
10442 case BUILT_IN_MUL_OVERFLOW:
10443 case BUILT_IN_ADD_OVERFLOW_P:
10444 case BUILT_IN_SUB_OVERFLOW_P:
10445 case BUILT_IN_MUL_OVERFLOW_P:
10446 case BUILT_IN_SADD_OVERFLOW:
10447 case BUILT_IN_SADDL_OVERFLOW:
10448 case BUILT_IN_SADDLL_OVERFLOW:
10449 case BUILT_IN_SSUB_OVERFLOW:
10450 case BUILT_IN_SSUBL_OVERFLOW:
10451 case BUILT_IN_SSUBLL_OVERFLOW:
10452 case BUILT_IN_SMUL_OVERFLOW:
10453 case BUILT_IN_SMULL_OVERFLOW:
10454 case BUILT_IN_SMULLL_OVERFLOW:
10455 case BUILT_IN_UADD_OVERFLOW:
10456 case BUILT_IN_UADDL_OVERFLOW:
10457 case BUILT_IN_UADDLL_OVERFLOW:
10458 case BUILT_IN_USUB_OVERFLOW:
10459 case BUILT_IN_USUBL_OVERFLOW:
10460 case BUILT_IN_USUBLL_OVERFLOW:
10461 case BUILT_IN_UMUL_OVERFLOW:
10462 case BUILT_IN_UMULL_OVERFLOW:
10463 case BUILT_IN_UMULLL_OVERFLOW:
10464 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10466 default:
10467 break;
10469 return NULL_TREE;
10472 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10473 ARGS is an array of NARGS arguments. IGNORE is true if the result
10474 of the function call is ignored. This function returns NULL_TREE
10475 if no simplification was possible. */
10477 static tree
10478 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10479 int nargs, bool)
10481 tree ret = NULL_TREE;
10483 switch (nargs)
10485 case 0:
10486 ret = fold_builtin_0 (loc, fndecl);
10487 break;
10488 case 1:
10489 ret = fold_builtin_1 (loc, fndecl, args[0]);
10490 break;
10491 case 2:
10492 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10493 break;
10494 case 3:
10495 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10496 break;
10497 default:
10498 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10499 break;
10501 if (ret)
10503 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10504 SET_EXPR_LOCATION (ret, loc);
10505 return ret;
10507 return NULL_TREE;
10510 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10511 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10512 of arguments in ARGS to be omitted. OLDNARGS is the number of
10513 elements in ARGS. */
10515 static tree
10516 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10517 int skip, tree fndecl, int n, va_list newargs)
10519 int nargs = oldnargs - skip + n;
10520 tree *buffer;
10522 if (n > 0)
10524 int i, j;
10526 buffer = XALLOCAVEC (tree, nargs);
10527 for (i = 0; i < n; i++)
10528 buffer[i] = va_arg (newargs, tree);
10529 for (j = skip; j < oldnargs; j++, i++)
10530 buffer[i] = args[j];
10532 else
10533 buffer = args + skip;
10535 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10538 /* Return true if FNDECL shouldn't be folded right now.
10539 If a built-in function has an inline attribute always_inline
10540 wrapper, defer folding it until after always_inline functions have
10541 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10542 might not be performed. */
10544 bool
10545 avoid_folding_inline_builtin (tree fndecl)
10547 return (DECL_DECLARED_INLINE_P (fndecl)
10548 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10549 && cfun
10550 && !cfun->always_inline_functions_inlined
10551 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10554 /* A wrapper function for builtin folding that prevents warnings for
10555 "statement without effect" and the like, caused by removing the
10556 call node earlier than the warning is generated. */
10558 tree
10559 fold_call_expr (location_t loc, tree exp, bool ignore)
10561 tree ret = NULL_TREE;
10562 tree fndecl = get_callee_fndecl (exp);
10563 if (fndecl && fndecl_built_in_p (fndecl)
10564 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10565 yet. Defer folding until we see all the arguments
10566 (after inlining). */
10567 && !CALL_EXPR_VA_ARG_PACK (exp))
10569 int nargs = call_expr_nargs (exp);
10571 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10572 instead last argument is __builtin_va_arg_pack (). Defer folding
10573 even in that case, until arguments are finalized. */
10574 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10576 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10577 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10578 return NULL_TREE;
10581 if (avoid_folding_inline_builtin (fndecl))
10582 return NULL_TREE;
10584 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10585 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10586 CALL_EXPR_ARGP (exp), ignore);
10587 else
10589 tree *args = CALL_EXPR_ARGP (exp);
10590 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10591 if (ret)
10592 return ret;
10595 return NULL_TREE;
10598 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10599 N arguments are passed in the array ARGARRAY. Return a folded
10600 expression or NULL_TREE if no simplification was possible. */
10602 tree
10603 fold_builtin_call_array (location_t loc, tree,
10604 tree fn,
10605 int n,
10606 tree *argarray)
10608 if (TREE_CODE (fn) != ADDR_EXPR)
10609 return NULL_TREE;
10611 tree fndecl = TREE_OPERAND (fn, 0);
10612 if (TREE_CODE (fndecl) == FUNCTION_DECL
10613 && fndecl_built_in_p (fndecl))
10615 /* If last argument is __builtin_va_arg_pack (), arguments to this
10616 function are not finalized yet. Defer folding until they are. */
10617 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10619 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10620 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10621 return NULL_TREE;
10623 if (avoid_folding_inline_builtin (fndecl))
10624 return NULL_TREE;
10625 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10626 return targetm.fold_builtin (fndecl, n, argarray, false);
10627 else
10628 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10631 return NULL_TREE;
10634 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10635 along with N new arguments specified as the "..." parameters. SKIP
10636 is the number of arguments in EXP to be omitted. This function is used
10637 to do varargs-to-varargs transformations. */
10639 static tree
10640 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10642 va_list ap;
10643 tree t;
10645 va_start (ap, n);
10646 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10647 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10648 va_end (ap);
10650 return t;
10653 /* Validate a single argument ARG against a tree code CODE representing
10654 a type. Return true when argument is valid. */
10656 static bool
10657 validate_arg (const_tree arg, enum tree_code code)
10659 if (!arg)
10660 return false;
10661 else if (code == POINTER_TYPE)
10662 return POINTER_TYPE_P (TREE_TYPE (arg));
10663 else if (code == INTEGER_TYPE)
10664 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10665 return code == TREE_CODE (TREE_TYPE (arg));
10668 /* This function validates the types of a function call argument list
10669 against a specified list of tree_codes. If the last specifier is a 0,
10670 that represents an ellipsis; otherwise the last specifier must be a
10671 VOID_TYPE.
10673 This is the GIMPLE version of validate_arglist. Eventually we want to
10674 completely convert builtins.c to work from GIMPLEs and the tree based
10675 validate_arglist will then be removed. */
10677 bool
10678 validate_gimple_arglist (const gcall *call, ...)
10680 enum tree_code code;
10681 bool res = false;
10682 va_list ap;
10683 const_tree arg;
10684 size_t i;
10686 va_start (ap, call);
10687 i = 0;
10689 do
10691 code = (enum tree_code) va_arg (ap, int);
10692 switch (code)
10694 case 0:
10695 /* This signifies an ellipsis; any further arguments are all OK. */
10696 res = true;
10697 goto end;
10698 case VOID_TYPE:
10699 /* This signifies an endlink, if no arguments remain, return
10700 true, otherwise return false. */
10701 res = (i == gimple_call_num_args (call));
10702 goto end;
10703 default:
10704 /* If no parameters remain or the parameter's code does not
10705 match the specified code, return false. Otherwise continue
10706 checking any remaining arguments. */
10707 arg = gimple_call_arg (call, i++);
10708 if (!validate_arg (arg, code))
10709 goto end;
10710 break;
10713 while (1);
10715 /* We need gotos here since we can only have one VA_CLOSE in a
10716 function. */
10717 end: ;
10718 va_end (ap);
10720 return res;
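/* [Editor's note: an illustrative, hypothetical call site for the
   checker above; the specifier list ends with VOID_TYPE, or with 0 to
   accept arbitrary trailing arguments:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
				   VOID_TYPE))
       return false;
   ]  */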
10723 /* Default target-specific builtin expander that does nothing. */
10725 rtx
10726 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10727 rtx target ATTRIBUTE_UNUSED,
10728 rtx subtarget ATTRIBUTE_UNUSED,
10729 machine_mode mode ATTRIBUTE_UNUSED,
10730 int ignore ATTRIBUTE_UNUSED)
10732 return NULL_RTX;
10735 /* Returns true if EXP represents data that would potentially reside
10736 in a readonly section. */
10738 bool
10739 readonly_data_expr (tree exp)
10741 STRIP_NOPS (exp);
10743 if (TREE_CODE (exp) != ADDR_EXPR)
10744 return false;
10746 exp = get_base_address (TREE_OPERAND (exp, 0));
10747 if (!exp)
10748 return false;
10750 /* Make sure we call decl_readonly_section only for trees it
10751 can handle (since it returns true for everything it doesn't
10752 understand). */
10753 if (TREE_CODE (exp) == STRING_CST
10754 || TREE_CODE (exp) == CONSTRUCTOR
10755 || (VAR_P (exp) && TREE_STATIC (exp)))
10756 return decl_readonly_section (exp, 0);
10757 else
10758 return false;
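/* For example, an ADDR_EXPR of a STRING_CST, as produced for

     const char *p = "abc";

   typically satisfies this predicate, while the address of an
   ordinary automatic variable does not.  */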
10761 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10762 to the call, and TYPE is its return type.
10764 Return NULL_TREE if no simplification was possible, otherwise return the
10765 simplified form of the call as a tree.
10767 The simplified form may be a constant or other expression which
10768 computes the same value, but in a more efficient manner (including
10769 calls to other builtin functions).
10771 The call may contain arguments which need to be evaluated, but
10772 which are not useful to determine the result of the call. In
10773 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10774 COMPOUND_EXPR will be an argument which must be evaluated.
10775 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10776 COMPOUND_EXPR in the chain will contain the tree for the simplified
10777 form of the builtin function call. */
10779 static tree
10780 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10782 if (!validate_arg (s1, POINTER_TYPE)
10783 || !validate_arg (s2, POINTER_TYPE))
10784 return NULL_TREE;
10786 if (!check_nul_terminated_array (expr, s1)
10787 || !check_nul_terminated_array (expr, s2))
10788 return NULL_TREE;
10790 tree fn;
10791 const char *p1, *p2;
10793 p2 = c_getstr (s2);
10794 if (p2 == NULL)
10795 return NULL_TREE;
10797 p1 = c_getstr (s1);
10798 if (p1 != NULL)
10800 const char *r = strpbrk (p1, p2);
10801 tree tem;
10803 if (r == NULL)
10804 return build_int_cst (TREE_TYPE (s1), 0);
10806 /* Return an offset into the constant string argument. */
10807 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10808 return fold_convert_loc (loc, type, tem);
10811 if (p2[0] == '\0')
10812 /* strpbrk(x, "") == NULL.
10813 Evaluate and ignore s1 in case it had side-effects. */
10814 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10816 if (p2[1] != '\0')
10817 return NULL_TREE; /* Really call strpbrk. */
10819 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10820 if (!fn)
10821 return NULL_TREE;
10823 /* New argument list transforming strpbrk(s1, s2) to
10824 strchr(s1, s2[0]). */
10825 return build_call_expr_loc (loc, fn, 2, s1,
10826 build_int_cst (integer_type_node, p2[0]));
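/* In source terms, the transformations above are, as a sketch:

     strpbrk (s, "")          =>  (void) s, (char *) 0
     strpbrk (s, "b")         =>  strchr (s, 'b')
     strpbrk ("hello", "lo")  =>  "hello" + 2

   where the last case is the full constant fold.  */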
10829 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10830 to the call.
10832 Return NULL_TREE if no simplification was possible, otherwise return the
10833 simplified form of the call as a tree.
10835 The simplified form may be a constant or other expression which
10836 computes the same value, but in a more efficient manner (including
10837 calls to other builtin functions).
10839 The call may contain arguments which need to be evaluated, but
10840 which are not useful to determine the result of the call. In
10841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10842 COMPOUND_EXPR will be an argument which must be evaluated.
10843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10844 COMPOUND_EXPR in the chain will contain the tree for the simplified
10845 form of the builtin function call. */
10847 static tree
10848 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10850 if (!validate_arg (s1, POINTER_TYPE)
10851 || !validate_arg (s2, POINTER_TYPE))
10852 return NULL_TREE;
10854 if (!check_nul_terminated_array (expr, s1)
10855 || !check_nul_terminated_array (expr, s2))
10856 return NULL_TREE;
10858 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10860 /* If either argument is "", the result is 0. */
10861 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10862 /* Evaluate and ignore both arguments in case either one has
10863 side-effects. */
10864 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10865 s1, s2);
10866 return NULL_TREE;
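/* In source terms, as a sketch:

     strspn (s, "")  =>  (void) s, (size_t) 0
     strspn ("", s)  =>  (void) s, (size_t) 0

   with both arguments still evaluated for side effects.  */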
10869 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10870 to the call.
10872 Return NULL_TREE if no simplification was possible, otherwise return the
10873 simplified form of the call as a tree.
10875 The simplified form may be a constant or other expression which
10876 computes the same value, but in a more efficient manner (including
10877 calls to other builtin functions).
10879 The call may contain arguments which need to be evaluated, but
10880 which are not useful to determine the result of the call. In
10881 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10882 COMPOUND_EXPR will be an argument which must be evaluated.
10883 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10884 COMPOUND_EXPR in the chain will contain the tree for the simplified
10885 form of the builtin function call. */
10887 static tree
10888 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10890 if (!validate_arg (s1, POINTER_TYPE)
10891 || !validate_arg (s2, POINTER_TYPE))
10892 return NULL_TREE;
10894 if (!check_nul_terminated_array (expr, s1)
10895 || !check_nul_terminated_array (expr, s2))
10896 return NULL_TREE;
10898 /* If the first argument is "", the result is 0. */
10899 const char *p1 = c_getstr (s1);
10900 if (p1 && *p1 == '\0')
10902 /* Evaluate and ignore argument s2 in case it has
10903 side-effects. */
10904 return omit_one_operand_loc (loc, size_type_node,
10905 size_zero_node, s2);
10908 /* If the second argument is "", return __builtin_strlen(s1). */
10909 const char *p2 = c_getstr (s2);
10910 if (p2 && *p2 == '\0')
10912 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10914 /* If the replacement _DECL isn't initialized, don't do the
10915 transformation. */
10916 if (!fn)
10917 return NULL_TREE;
10919 return build_call_expr_loc (loc, fn, 1, s1);
10921 return NULL_TREE;
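/* In source terms, as a sketch:

     strcspn ("", s)  =>  (void) s, (size_t) 0
     strcspn (s, "")  =>  strlen (s)
*/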
10924 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10925 produced, false otherwise. This is done so that we don't output the
10926 error or warning more than once. */
10928 bool
10929 fold_builtin_next_arg (tree exp, bool va_start_p)
10931 tree fntype = TREE_TYPE (current_function_decl);
10932 int nargs = call_expr_nargs (exp);
10933 tree arg;
10934 /* There is a good chance the current input_location points inside the
10935 definition of the va_start macro (perhaps on the token for
10936 builtin) in a system header, so warnings will not be emitted.
10937 Use the location in real source code. */
10938 location_t current_location =
10939 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10940 NULL);
10942 if (!stdarg_p (fntype))
10944 error ("%<va_start%> used in function with fixed arguments");
10945 return true;
10948 if (va_start_p)
10950 if (nargs != 2)
10952 error ("wrong number of arguments to function %<va_start%>");
10953 return true;
10955 arg = CALL_EXPR_ARG (exp, 1);
10957 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10958 when we checked the arguments and if needed issued a warning. */
10959 else
10961 if (nargs == 0)
10963 /* Evidently an out of date version of <stdarg.h>; can't validate
10964 va_start's second argument, but can still work as intended. */
10965 warning_at (current_location,
10966 OPT_Wvarargs,
10967 "%<__builtin_next_arg%> called without an argument");
10968 return true;
10970 else if (nargs > 1)
10972 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10973 return true;
10975 arg = CALL_EXPR_ARG (exp, 0);
10978 if (TREE_CODE (arg) == SSA_NAME)
10979 arg = SSA_NAME_VAR (arg);
10981 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10982 or __builtin_next_arg (0) the first time we see it, after checking
10983 the arguments and if needed issuing a warning. */
10984 if (!integer_zerop (arg))
10986 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10988 /* Strip off all nops for the sake of the comparison. This
10989 is not quite the same as STRIP_NOPS. It does more.
10990 We must also strip off INDIRECT_EXPR for C++ reference
10991 parameters. */
10992 while (CONVERT_EXPR_P (arg)
10993 || TREE_CODE (arg) == INDIRECT_REF)
10994 arg = TREE_OPERAND (arg, 0);
10995 if (arg != last_parm)
10997 /* FIXME: Sometimes the tree optimizers hand us something other
10998 than the last argument even though the user wrote the last
10999 argument. We just warn here and treat the arg as if it were
11000 the last one, so wrong code may be generated because of
11001 it. */
11002 warning_at (current_location,
11003 OPT_Wvarargs,
11004 "second parameter of %<va_start%> not last named argument");
11007 /* Undefined by C99 7.15.1.4p4 (va_start):
11008 "If the parameter parmN is declared with the register storage
11009 class, with a function or array type, or with a type that is
11010 not compatible with the type that results after application of
11011 the default argument promotions, the behavior is undefined."
11012 */
11013 else if (DECL_REGISTER (arg))
11015 warning_at (current_location,
11016 OPT_Wvarargs,
11017 "undefined behavior when second parameter of "
11018 "%<va_start%> is declared with %<register%> storage");
11021 /* We want to verify the second parameter just once before the tree
11022 optimizers are run and then avoid keeping it in the tree,
11023 as otherwise we could warn even for correct code like:
11024 void foo (int i, ...)
11025 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11026 if (va_start_p)
11027 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11028 else
11029 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11031 return false;
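/* For example, the following triggers the -Wvarargs warning above,
   because I is not the last named parameter:

     void f (int i, int j, ...)
     {
       va_list ap;
       va_start (ap, i);
       va_end (ap);
     }
*/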
11035 /* Expand a call EXP to __builtin_object_size. */
11037 static rtx
11038 expand_builtin_object_size (tree exp)
11040 tree ost;
11041 int object_size_type;
11042 tree fndecl = get_callee_fndecl (exp);
11044 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11046 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11047 exp, fndecl);
11048 expand_builtin_trap ();
11049 return const0_rtx;
11052 ost = CALL_EXPR_ARG (exp, 1);
11053 STRIP_NOPS (ost);
11055 if (TREE_CODE (ost) != INTEGER_CST
11056 || tree_int_cst_sgn (ost) < 0
11057 || compare_tree_int (ost, 3) > 0)
11059 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11060 exp, fndecl);
11061 expand_builtin_trap ();
11062 return const0_rtx;
11065 object_size_type = tree_to_shwi (ost);
11067 return object_size_type < 2 ? constm1_rtx : const0_rtx;
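/* When the size was not determined by earlier folding, this expands
   to the "unknown" constants, e.g. as a sketch:

     __builtin_object_size (p, 0)  =>  (size_t) -1
     __builtin_object_size (p, 2)  =>  (size_t) 0
*/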
11070 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11071 FCODE is the BUILT_IN_* to use.
11072 Return NULL_RTX if we failed; the caller should emit a normal call,
11073 otherwise try to get the result in TARGET, if convenient (and in
11074 mode MODE if that's convenient). */
11076 static rtx
11077 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11078 enum built_in_function fcode)
11080 if (!validate_arglist (exp,
11081 POINTER_TYPE,
11082 fcode == BUILT_IN_MEMSET_CHK
11083 ? INTEGER_TYPE : POINTER_TYPE,
11084 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11085 return NULL_RTX;
11087 tree dest = CALL_EXPR_ARG (exp, 0);
11088 tree src = CALL_EXPR_ARG (exp, 1);
11089 tree len = CALL_EXPR_ARG (exp, 2);
11090 tree size = CALL_EXPR_ARG (exp, 3);
11092 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
11093 /*str=*/NULL_TREE, size);
11095 if (!tree_fits_uhwi_p (size))
11096 return NULL_RTX;
11098 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11100 /* Avoid transforming the checking call to an ordinary one when
11101 an overflow has been detected or when the call couldn't be
11102 validated because the size is not constant. */
11103 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11104 return NULL_RTX;
11106 tree fn = NULL_TREE;
11107 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11108 mem{cpy,pcpy,move,set} is available. */
11109 switch (fcode)
11111 case BUILT_IN_MEMCPY_CHK:
11112 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11113 break;
11114 case BUILT_IN_MEMPCPY_CHK:
11115 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11116 break;
11117 case BUILT_IN_MEMMOVE_CHK:
11118 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11119 break;
11120 case BUILT_IN_MEMSET_CHK:
11121 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11122 break;
11123 default:
11124 break;
11127 if (! fn)
11128 return NULL_RTX;
11130 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11131 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11132 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11133 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11135 else if (fcode == BUILT_IN_MEMSET_CHK)
11136 return NULL_RTX;
11137 else
11139 unsigned int dest_align = get_pointer_alignment (dest);
11141 /* If DEST is not a pointer type, call the normal function. */
11142 if (dest_align == 0)
11143 return NULL_RTX;
11145 /* If SRC and DEST are the same (and not volatile), do nothing. */
11146 if (operand_equal_p (src, dest, 0))
11148 tree expr;
11150 if (fcode != BUILT_IN_MEMPCPY_CHK)
11152 /* Evaluate and ignore LEN in case it has side-effects. */
11153 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11154 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11157 expr = fold_build_pointer_plus (dest, len);
11158 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11161 /* __memmove_chk special case. */
11162 if (fcode == BUILT_IN_MEMMOVE_CHK)
11164 unsigned int src_align = get_pointer_alignment (src);
11166 if (src_align == 0)
11167 return NULL_RTX;
11169 /* If src is categorized for a readonly section we can use
11170 normal __memcpy_chk. */
11171 if (readonly_data_expr (src))
11173 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11174 if (!fn)
11175 return NULL_RTX;
11176 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11177 dest, src, len, size);
11178 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11179 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11180 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11183 return NULL_RTX;
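/* For example (a sketch; BUF and SRC are hypothetical), with BUF of
   known size 16 the call

     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0))

   has a constant length that fits the destination, so it is expanded
   as a plain

     memcpy (buf, src, 8)
*/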
11187 /* Emit warning if a buffer overflow is detected at compile time. */
11189 static void
11190 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11192 /* The source string. */
11193 tree srcstr = NULL_TREE;
11194 /* The size of the destination object. */
11195 tree objsize = NULL_TREE;
11196 /* The string being appended to (as in __strcat_chk), or null if the
11197 operation is not a concatenation. */
11198 tree catstr = NULL_TREE;
11199 /* The maximum length of the source sequence in a bounded operation
11200 (such as __strncat_chk) or null if the operation isn't bounded
11201 (such as __strcat_chk). */
11202 tree maxread = NULL_TREE;
11203 /* The exact size of the access (such as in __strncpy_chk). */
11204 tree size = NULL_TREE;
11206 switch (fcode)
11208 case BUILT_IN_STRCPY_CHK:
11209 case BUILT_IN_STPCPY_CHK:
11210 srcstr = CALL_EXPR_ARG (exp, 1);
11211 objsize = CALL_EXPR_ARG (exp, 2);
11212 break;
11214 case BUILT_IN_STRCAT_CHK:
11215 /* For __strcat_chk the warning will be emitted only if overflowing
11216 by at least strlen (dest) + 1 bytes. */
11217 catstr = CALL_EXPR_ARG (exp, 0);
11218 srcstr = CALL_EXPR_ARG (exp, 1);
11219 objsize = CALL_EXPR_ARG (exp, 2);
11220 break;
11222 case BUILT_IN_STRNCAT_CHK:
11223 catstr = CALL_EXPR_ARG (exp, 0);
11224 srcstr = CALL_EXPR_ARG (exp, 1);
11225 maxread = CALL_EXPR_ARG (exp, 2);
11226 objsize = CALL_EXPR_ARG (exp, 3);
11227 break;
11229 case BUILT_IN_STRNCPY_CHK:
11230 case BUILT_IN_STPNCPY_CHK:
11231 srcstr = CALL_EXPR_ARG (exp, 1);
11232 size = CALL_EXPR_ARG (exp, 2);
11233 objsize = CALL_EXPR_ARG (exp, 3);
11234 break;
11236 case BUILT_IN_SNPRINTF_CHK:
11237 case BUILT_IN_VSNPRINTF_CHK:
11238 maxread = CALL_EXPR_ARG (exp, 1);
11239 objsize = CALL_EXPR_ARG (exp, 3);
11240 break;
11241 default:
11242 gcc_unreachable ();
11245 if (catstr && maxread)
11247 /* Check __strncat_chk. There is no way to determine the length
11248 of the string to which the source string is being appended so
11249 just warn when the length of the source string is not known. */
11250 check_strncat_sizes (exp, objsize);
11251 return;
11254 /* The destination argument is the first one for all built-ins above. */
11255 tree dst = CALL_EXPR_ARG (exp, 0);
11257 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
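/* For example (a sketch), the overflow here is diagnosed at compile
   time, since the source needs 6 bytes but the destination is 4:

     char buf[4];
     __builtin___strcpy_chk (buf, "hello",
			     __builtin_object_size (buf, 0));
*/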
11260 /* Emit warning if a buffer overflow is detected at compile time
11261 in __sprintf_chk/__vsprintf_chk calls. */
11263 static void
11264 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11266 tree size, len, fmt;
11267 const char *fmt_str;
11268 int nargs = call_expr_nargs (exp);
11270 /* Verify the required arguments in the original call. */
11272 if (nargs < 4)
11273 return;
11274 size = CALL_EXPR_ARG (exp, 2);
11275 fmt = CALL_EXPR_ARG (exp, 3);
11277 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11278 return;
11280 /* Check whether the format is a literal string constant. */
11281 fmt_str = c_getstr (fmt);
11282 if (fmt_str == NULL)
11283 return;
11285 if (!init_target_chars ())
11286 return;
11288 /* If the format doesn't contain % args or %%, we know its size. */
11289 if (strchr (fmt_str, target_percent) == 0)
11290 len = build_int_cstu (size_type_node, strlen (fmt_str));
11291 /* If the format is "%s" and first ... argument is a string literal,
11292 we know it too. */
11293 else if (fcode == BUILT_IN_SPRINTF_CHK
11294 && strcmp (fmt_str, target_percent_s) == 0)
11296 tree arg;
11298 if (nargs < 5)
11299 return;
11300 arg = CALL_EXPR_ARG (exp, 4);
11301 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11302 return;
11304 len = c_strlen (arg, 1);
11305 if (!len || ! tree_fits_uhwi_p (len))
11306 return;
11308 else
11309 return;
11311 /* Add one for the terminating nul. */
11312 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11314 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11315 /*maxread=*/NULL_TREE, len, size);
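/* For example (a sketch): in

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			      "%s", "abcde");

   the "%s" argument is a literal of length 5, plus one for the
   terminating nul, which exceeds the destination size of 4.  */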
11318 /* Emit a warning if free is called with the address of a variable. */
11320 static void
11321 maybe_emit_free_warning (tree exp)
11323 if (call_expr_nargs (exp) != 1)
11324 return;
11326 tree arg = CALL_EXPR_ARG (exp, 0);
11328 STRIP_NOPS (arg);
11329 if (TREE_CODE (arg) != ADDR_EXPR)
11330 return;
11332 arg = get_base_address (TREE_OPERAND (arg, 0));
11333 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11334 return;
11336 if (SSA_VAR_P (arg))
11337 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11338 "%Kattempt to free a non-heap object %qD", exp, arg);
11339 else
11340 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11341 "%Kattempt to free a non-heap object", exp);
11344 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11345 if possible. */
11347 static tree
11348 fold_builtin_object_size (tree ptr, tree ost)
11350 unsigned HOST_WIDE_INT bytes;
11351 int object_size_type;
11353 if (!validate_arg (ptr, POINTER_TYPE)
11354 || !validate_arg (ost, INTEGER_TYPE))
11355 return NULL_TREE;
11357 STRIP_NOPS (ost);
11359 if (TREE_CODE (ost) != INTEGER_CST
11360 || tree_int_cst_sgn (ost) < 0
11361 || compare_tree_int (ost, 3) > 0)
11362 return NULL_TREE;
11364 object_size_type = tree_to_shwi (ost);
11366 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11367 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11368 and (size_t) 0 for types 2 and 3. */
11369 if (TREE_SIDE_EFFECTS (ptr))
11370 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11372 if (TREE_CODE (ptr) == ADDR_EXPR)
11374 compute_builtin_object_size (ptr, object_size_type, &bytes);
11375 if (wi::fits_to_tree_p (bytes, size_type_node))
11376 return build_int_cstu (size_type_node, bytes);
11378 else if (TREE_CODE (ptr) == SSA_NAME)
11380 /* If object size is not known yet, delay folding until
11381 later. Maybe subsequent passes will help determine
11382 it. */
11383 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11384 && wi::fits_to_tree_p (bytes, size_type_node))
11385 return build_int_cstu (size_type_node, bytes);
11388 return NULL_TREE;
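/* For example (a sketch; F is a hypothetical function with side
   effects):

     char a[8];
     __builtin_object_size (&a[2], 0)  =>  (size_t) 6
     __builtin_object_size (f (), 0)   =>  (size_t) -1
     __builtin_object_size (f (), 2)   =>  (size_t) 0
*/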
11391 /* Builtins with folding operations that operate on "..." arguments
11392 need special handling; we need to store the arguments in a convenient
11393 data structure before attempting any folding. Fortunately there are
11394 only a few builtins that fall into this category. FNDECL is the
11395 function, EXP is the CALL_EXPR for the call. */
11397 static tree
11398 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11400 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11401 tree ret = NULL_TREE;
11403 switch (fcode)
11405 case BUILT_IN_FPCLASSIFY:
11406 ret = fold_builtin_fpclassify (loc, args, nargs);
11407 break;
11409 default:
11410 break;
11412 if (ret)
11414 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11415 SET_EXPR_LOCATION (ret, loc);
11416 TREE_NO_WARNING (ret) = 1;
11417 return ret;
11419 return NULL_TREE;
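/* __builtin_fpclassify is the motivating case; its call shape is

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, x)

   so the variable-length argument vector must be collected before
   folding can look at it.  */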
11422 /* Initialize format string characters in the target charset. */
11424 bool
11425 init_target_chars (void)
11427 static bool init;
11428 if (!init)
11430 target_newline = lang_hooks.to_target_charset ('\n');
11431 target_percent = lang_hooks.to_target_charset ('%');
11432 target_c = lang_hooks.to_target_charset ('c');
11433 target_s = lang_hooks.to_target_charset ('s');
11434 if (target_newline == 0 || target_percent == 0 || target_c == 0
11435 || target_s == 0)
11436 return false;
11438 target_percent_c[0] = target_percent;
11439 target_percent_c[1] = target_c;
11440 target_percent_c[2] = '\0';
11442 target_percent_s[0] = target_percent;
11443 target_percent_s[1] = target_s;
11444 target_percent_s[2] = '\0';
11446 target_percent_s_newline[0] = target_percent;
11447 target_percent_s_newline[1] = target_s;
11448 target_percent_s_newline[2] = target_newline;
11449 target_percent_s_newline[3] = '\0';
11451 init = true;
11453 return true;
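/* Callers use the pattern (a sketch, mirroring
   maybe_emit_sprintf_chk_warning above):

     if (!init_target_chars ())
       return;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ...   the format is exactly "%s" in the target charset
*/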
11456 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11457 and no overflow/underflow occurred. INEXACT is true if M was not
11458 exactly calculated. TYPE is the tree type for the result. This
11459 function assumes that the caller cleared the MPFR flags before
11460 calculating M, so that the flags show whether the calculation set
11461 any of them. Return NULL_TREE if any checks fail. */
11463 static tree
11464 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11466 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11467 overflow/underflow occurred. If -frounding-math, proceed iff the
11468 result of calling FUNC was exact. */
11469 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11470 && (!flag_rounding_math || !inexact))
11472 REAL_VALUE_TYPE rr;
11474 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11475 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11476 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11477 but the mpfr_t is not, then we underflowed in the
11478 conversion. */
11479 if (real_isfinite (&rr)
11480 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11482 REAL_VALUE_TYPE rmode;
11484 real_convert (&rmode, TYPE_MODE (type), &rr);
11485 /* Proceed iff the specified mode can hold the value. */
11486 if (real_identical (&rmode, &rr))
11487 return build_real (type, rmode);
11490 return NULL_TREE;
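/* The expected calling sequence, as used by the helpers below
   (a sketch, with mpfr_sin standing in for any mpfr function):

     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, MPFR_RNDN);
     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);
*/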
11493 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11494 number and no overflow/underflow occurred. INEXACT is true if M
11495 was not exactly calculated. TYPE is the tree type for the result.
11496 This function assumes that the caller cleared the MPFR flags
11497 before calculating M, so that the flags show whether the
11498 calculation set any of them. Return NULL_TREE if any checks fail;
11499 if FORCE_CONVERT is true, bypass the checks. */
11501 static tree
11502 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11504 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11505 overflow/underflow occurred. If -frounding-math, proceed iff the
11506 result of calling FUNC was exact. */
11507 if (force_convert
11508 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11509 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11510 && (!flag_rounding_math || !inexact)))
11512 REAL_VALUE_TYPE re, im;
11514 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11515 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11516 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11517 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11518 but the mpfr_t is not, then we underflowed in the
11519 conversion. */
11520 if (force_convert
11521 || (real_isfinite (&re) && real_isfinite (&im)
11522 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11523 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11525 REAL_VALUE_TYPE re_mode, im_mode;
11527 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11528 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11529 /* Proceed iff the specified mode can hold the value. */
11530 if (force_convert
11531 || (real_identical (&re_mode, &re)
11532 && real_identical (&im_mode, &im)))
11533 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11534 build_real (TREE_TYPE (type), im_mode));
11537 return NULL_TREE;
11540 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11541 the value pointed to by ARG_QUO and return the remainder. The type is taken
11542 from the type of ARG0 and is used for setting the precision of the
11543 calculation and results. */
11545 static tree
11546 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11548 tree const type = TREE_TYPE (arg0);
11549 tree result = NULL_TREE;
11551 STRIP_NOPS (arg0);
11552 STRIP_NOPS (arg1);
11554 /* To proceed, MPFR must exactly represent the target floating point
11555 format, which only happens when the target base equals two. */
11556 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11557 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11558 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11560 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11561 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11563 if (real_isfinite (ra0) && real_isfinite (ra1))
11565 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11566 const int prec = fmt->p;
11567 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11568 tree result_rem;
11569 long integer_quo;
11570 mpfr_t m0, m1;
11572 mpfr_inits2 (prec, m0, m1, NULL);
11573 mpfr_from_real (m0, ra0, MPFR_RNDN);
11574 mpfr_from_real (m1, ra1, MPFR_RNDN);
11575 mpfr_clear_flags ();
11576 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11577 /* Remquo is independent of the rounding mode, so pass
11578 inexact=0 to do_mpfr_ckconv(). */
11579 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11580 mpfr_clears (m0, m1, NULL);
11581 if (result_rem)
11583 /* MPFR calculates quo in the host's long so it may
11584 return more bits in quo than the target int can hold
11585 if sizeof(host long) > sizeof(target int). This can
11586 happen even for native compilers in LP64 mode. In
11587 these cases, reduce the quo value modulo the largest
11588 number that the target int can hold while leaving one
11589 bit for the sign. */
11590 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11591 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11593 /* Dereference the quo pointer argument. */
11594 arg_quo = build_fold_indirect_ref (arg_quo);
11595 /* Proceed iff a valid pointer type was passed in. */
11596 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11598 /* Set the value. */
11599 tree result_quo
11600 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11601 build_int_cst (TREE_TYPE (arg_quo),
11602 integer_quo));
11603 TREE_SIDE_EFFECTS (result_quo) = 1;
11604 /* Combine the quo assignment with the rem. */
11605 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11606 result_quo, result_rem));
11611 return result;
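/* For example (a sketch), with constant arguments

     rem = remquo (5.0, 3.0, &quo);

   folds to rem == -1.0 with quo set to 2, since 5/3 rounded to the
   nearest integer is 2 and 5.0 - 2*3.0 == -1.0.  */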
11614 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11615 resulting value as a tree with type TYPE. The mpfr precision is
11616 set to the precision of TYPE. We assume that this mpfr function
11617 returns zero if the result could be calculated exactly within the
11618 requested precision. In addition, the integer pointer represented
11619 by ARG_SG will be dereferenced and set to the appropriate signgam
11620 (-1,1) value. */
11622 static tree
11623 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11625 tree result = NULL_TREE;
11627 STRIP_NOPS (arg);
11629 /* To proceed, MPFR must exactly represent the target floating point
11630 format, which only happens when the target base equals two. Also
11631 verify ARG is a constant and that ARG_SG is an int pointer. */
11632 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11633 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11634 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11635 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11637 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11639 /* In addition to NaN and Inf, the argument cannot be zero or a
11640 negative integer. */
11641 if (real_isfinite (ra)
11642 && ra->cl != rvc_zero
11643 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11645 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11646 const int prec = fmt->p;
11647 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11648 int inexact, sg;
11649 mpfr_t m;
11650 tree result_lg;
11652 mpfr_init2 (m, prec);
11653 mpfr_from_real (m, ra, MPFR_RNDN);
11654 mpfr_clear_flags ();
11655 inexact = mpfr_lgamma (m, &sg, m, rnd);
11656 result_lg = do_mpfr_ckconv (m, type, inexact);
11657 mpfr_clear (m);
11658 if (result_lg)
11660 tree result_sg;
11662 /* Dereference the arg_sg pointer argument. */
11663 arg_sg = build_fold_indirect_ref (arg_sg);
11664 /* Assign the signgam value into *arg_sg. */
11665 result_sg = fold_build2 (MODIFY_EXPR,
11666 TREE_TYPE (arg_sg), arg_sg,
11667 build_int_cst (TREE_TYPE (arg_sg), sg));
11668 TREE_SIDE_EFFECTS (result_sg) = 1;
11669 /* Combine the signgam assignment with the lgamma result. */
11670 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11671 result_sg, result_lg));
11676 return result;
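/* For example (a sketch), folding

     r = lgamma_r (2.0, &sg);

   yields r == 0.0 with sg set to 1, since gamma(2) == 1 and its
   logarithm is 0.  */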
11679 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11680 mpc function FUNC on it and return the resulting value as a tree
11681 with type TYPE. The mpfr precision is set to the precision of
11682 TYPE. We assume that function FUNC returns zero if the result
11683 could be calculated exactly within the requested precision. If
11684 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11685 in the arguments and/or results. */
11687 tree
11688 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11689 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11691 tree result = NULL_TREE;
11693 STRIP_NOPS (arg0);
11694 STRIP_NOPS (arg1);
11696 /* To proceed, MPFR must exactly represent the target floating point
11697 format, which only happens when the target base equals two. */
11698 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11700 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11701 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11702 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11704 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11705 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11706 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11707 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11709 if (do_nonfinite
11710 || (real_isfinite (re0) && real_isfinite (im0)
11711 && real_isfinite (re1) && real_isfinite (im1)))
11713 const struct real_format *const fmt =
11714 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11715 const int prec = fmt->p;
11716 const mpfr_rnd_t rnd = fmt->round_towards_zero
11717 ? MPFR_RNDZ : MPFR_RNDN;
11718 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11719 int inexact;
11720 mpc_t m0, m1;
11722 mpc_init2 (m0, prec);
11723 mpc_init2 (m1, prec);
11724 mpfr_from_real (mpc_realref (m0), re0, rnd);
11725 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11726 mpfr_from_real (mpc_realref (m1), re1, rnd);
11727 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11728 mpfr_clear_flags ();
11729 inexact = func (m0, m0, m1, crnd);
11730 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11731 mpc_clear (m0);
11732 mpc_clear (m1);
11736 return result;
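/* A typical use is constant folding of cpow, as a sketch:

     result = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/0, mpc_pow);

   where mpc_pow matches the FUNC signature above.  */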
11739 /* A wrapper function for builtin folding that prevents warnings for
11740 "statement without effect" and the like, caused by removing the
11741 call node before the warning is generated. */
11743 tree
11744 fold_call_stmt (gcall *stmt, bool ignore)
11746 tree ret = NULL_TREE;
11747 tree fndecl = gimple_call_fndecl (stmt);
11748 location_t loc = gimple_location (stmt);
11749 if (fndecl && fndecl_built_in_p (fndecl)
11750 && !gimple_call_va_arg_pack_p (stmt))
11752 int nargs = gimple_call_num_args (stmt);
11753 tree *args = (nargs > 0
11754 ? gimple_call_arg_ptr (stmt, 0)
11755 : &error_mark_node);
11757 if (avoid_folding_inline_builtin (fndecl))
11758 return NULL_TREE;
11759 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11761 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11763 else
11765 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11766 if (ret)
11768 /* Propagate location information from original call to
11769 expansion of builtin. Otherwise things like
11770 maybe_emit_chk_warning, that operate on the expansion
11771 of a builtin, will use the wrong location information. */
11772 if (gimple_has_location (stmt))
11774 tree realret = ret;
11775 if (TREE_CODE (ret) == NOP_EXPR)
11776 realret = TREE_OPERAND (ret, 0);
11777 if (CAN_HAVE_LOCATION_P (realret)
11778 && !EXPR_HAS_LOCATION (realret))
11779 SET_EXPR_LOCATION (realret, loc);
11780 return realret;
11782 return ret;
11786 return NULL_TREE;
11789 /* Look up the function in builtin_decl that corresponds to DECL
11790 and set ASMSPEC as its user assembler name. DECL must be a
11791 function decl that declares a builtin. */
11793 void
11794 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11796 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11797 && asmspec != 0);
11799 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11800 set_user_assembler_name (builtin, asmspec);
11802 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11803 && INT_TYPE_SIZE < BITS_PER_WORD)
11805 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11806 set_user_assembler_libfunc ("ffs", asmspec);
11807 set_optab_libfunc (ffs_optab, mode, "ffs");
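/* This is triggered by a user redeclaration of a builtin with an asm
   name, e.g. (a sketch; "my_ffs" is hypothetical):

     extern int ffs (int) __asm__ ("my_ffs");

   after which calls emitted through the builtin machinery use the
   user-specified symbol.  */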
11811 /* Return true if DECL is a builtin that expands to a constant or similarly
11812 simple code. */
11813 bool
11814 is_simple_builtin (tree decl)
11816 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11817 switch (DECL_FUNCTION_CODE (decl))
11819 /* Builtins that expand to constants. */
11820 case BUILT_IN_CONSTANT_P:
11821 case BUILT_IN_EXPECT:
11822 case BUILT_IN_OBJECT_SIZE:
11823 case BUILT_IN_UNREACHABLE:
11824 /* Simple register moves or loads from stack. */
11825 case BUILT_IN_ASSUME_ALIGNED:
11826 case BUILT_IN_RETURN_ADDRESS:
11827 case BUILT_IN_EXTRACT_RETURN_ADDR:
11828 case BUILT_IN_FROB_RETURN_ADDR:
11829 case BUILT_IN_RETURN:
11830 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11831 case BUILT_IN_FRAME_ADDRESS:
11832 case BUILT_IN_VA_END:
11833 case BUILT_IN_STACK_SAVE:
11834 case BUILT_IN_STACK_RESTORE:
11835 /* Exception state returns or moves registers around. */
11836 case BUILT_IN_EH_FILTER:
11837 case BUILT_IN_EH_POINTER:
11838 case BUILT_IN_EH_COPY_VALUES:
11839 return true;
11841 default:
11842 return false;
11845 return false;
11848 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11849 is most probably expanded inline into reasonably simple code. This is a
11850 superset of is_simple_builtin. */
11851 bool
11852 is_inexpensive_builtin (tree decl)
11854 if (!decl)
11855 return false;
11856 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11857 return true;
11858 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11859 switch (DECL_FUNCTION_CODE (decl))
11861 case BUILT_IN_ABS:
11862 CASE_BUILT_IN_ALLOCA:
11863 case BUILT_IN_BSWAP16:
11864 case BUILT_IN_BSWAP32:
11865 case BUILT_IN_BSWAP64:
11866 case BUILT_IN_BSWAP128:
11867 case BUILT_IN_CLZ:
11868 case BUILT_IN_CLZIMAX:
11869 case BUILT_IN_CLZL:
11870 case BUILT_IN_CLZLL:
11871 case BUILT_IN_CTZ:
11872 case BUILT_IN_CTZIMAX:
11873 case BUILT_IN_CTZL:
11874 case BUILT_IN_CTZLL:
11875 case BUILT_IN_FFS:
11876 case BUILT_IN_FFSIMAX:
11877 case BUILT_IN_FFSL:
11878 case BUILT_IN_FFSLL:
11879 case BUILT_IN_IMAXABS:
11880 case BUILT_IN_FINITE:
11881 case BUILT_IN_FINITEF:
11882 case BUILT_IN_FINITEL:
11883 case BUILT_IN_FINITED32:
11884 case BUILT_IN_FINITED64:
11885 case BUILT_IN_FINITED128:
11886 case BUILT_IN_FPCLASSIFY:
11887 case BUILT_IN_ISFINITE:
11888 case BUILT_IN_ISINF_SIGN:
11889 case BUILT_IN_ISINF:
11890 case BUILT_IN_ISINFF:
11891 case BUILT_IN_ISINFL:
11892 case BUILT_IN_ISINFD32:
11893 case BUILT_IN_ISINFD64:
11894 case BUILT_IN_ISINFD128:
11895 case BUILT_IN_ISNAN:
11896 case BUILT_IN_ISNANF:
11897 case BUILT_IN_ISNANL:
11898 case BUILT_IN_ISNAND32:
11899 case BUILT_IN_ISNAND64:
11900 case BUILT_IN_ISNAND128:
11901 case BUILT_IN_ISNORMAL:
11902 case BUILT_IN_ISGREATER:
11903 case BUILT_IN_ISGREATEREQUAL:
11904 case BUILT_IN_ISLESS:
11905 case BUILT_IN_ISLESSEQUAL:
11906 case BUILT_IN_ISLESSGREATER:
11907 case BUILT_IN_ISUNORDERED:
11908 case BUILT_IN_VA_ARG_PACK:
11909 case BUILT_IN_VA_ARG_PACK_LEN:
11910 case BUILT_IN_VA_COPY:
11911 case BUILT_IN_TRAP:
11912 case BUILT_IN_SAVEREGS:
11913 case BUILT_IN_POPCOUNTL:
11914 case BUILT_IN_POPCOUNTLL:
11915 case BUILT_IN_POPCOUNTIMAX:
11916 case BUILT_IN_POPCOUNT:
11917 case BUILT_IN_PARITYL:
11918 case BUILT_IN_PARITYLL:
11919 case BUILT_IN_PARITYIMAX:
11920 case BUILT_IN_PARITY:
11921 case BUILT_IN_LABS:
11922 case BUILT_IN_LLABS:
11923 case BUILT_IN_PREFETCH:
11924 case BUILT_IN_ACC_ON_DEVICE:
11925 return true;
11927 default:
11928 return is_simple_builtin (decl);
11931 return false;
11934 /* Return true if T is a constant and the value cast to a target char
11935 can be represented by a host char.
11936 Store the converted char constant in *P if so. */
11938 bool
11939 target_char_cst_p (tree t, char *p)
11941 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11942 return false;
11944 *p = (char)tree_to_uhwi (t);
11945 return true;
11948 /* Return true if the builtin DECL is implemented in a standard library.
11949 Otherwise returns false, which does not guarantee that it is not (the list of
11950 handled builtins below may be incomplete). */
11952 bool
11953 builtin_with_linkage_p (tree decl)
11955 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11956 switch (DECL_FUNCTION_CODE (decl))
11958 CASE_FLT_FN (BUILT_IN_ACOS):
11959 CASE_FLT_FN (BUILT_IN_ACOSH):
11960 CASE_FLT_FN (BUILT_IN_ASIN):
11961 CASE_FLT_FN (BUILT_IN_ASINH):
11962 CASE_FLT_FN (BUILT_IN_ATAN):
11963 CASE_FLT_FN (BUILT_IN_ATANH):
11964 CASE_FLT_FN (BUILT_IN_ATAN2):
11965 CASE_FLT_FN (BUILT_IN_CBRT):
11966 CASE_FLT_FN (BUILT_IN_CEIL):
11967 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11968 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11969 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11970 CASE_FLT_FN (BUILT_IN_COS):
11971 CASE_FLT_FN (BUILT_IN_COSH):
11972 CASE_FLT_FN (BUILT_IN_ERF):
11973 CASE_FLT_FN (BUILT_IN_ERFC):
11974 CASE_FLT_FN (BUILT_IN_EXP):
11975 CASE_FLT_FN (BUILT_IN_EXP2):
11976 CASE_FLT_FN (BUILT_IN_EXPM1):
11977 CASE_FLT_FN (BUILT_IN_FABS):
11978 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11979 CASE_FLT_FN (BUILT_IN_FDIM):
11980 CASE_FLT_FN (BUILT_IN_FLOOR):
11981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11982 CASE_FLT_FN (BUILT_IN_FMA):
11983 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11984 CASE_FLT_FN (BUILT_IN_FMAX):
11985 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11986 CASE_FLT_FN (BUILT_IN_FMIN):
11987 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11988 CASE_FLT_FN (BUILT_IN_FMOD):
11989 CASE_FLT_FN (BUILT_IN_FREXP):
11990 CASE_FLT_FN (BUILT_IN_HYPOT):
11991 CASE_FLT_FN (BUILT_IN_ILOGB):
11992 CASE_FLT_FN (BUILT_IN_LDEXP):
11993 CASE_FLT_FN (BUILT_IN_LGAMMA):
11994 CASE_FLT_FN (BUILT_IN_LLRINT):
11995 CASE_FLT_FN (BUILT_IN_LLROUND):
11996 CASE_FLT_FN (BUILT_IN_LOG):
11997 CASE_FLT_FN (BUILT_IN_LOG10):
11998 CASE_FLT_FN (BUILT_IN_LOG1P):
11999 CASE_FLT_FN (BUILT_IN_LOG2):
12000 CASE_FLT_FN (BUILT_IN_LOGB):
12001 CASE_FLT_FN (BUILT_IN_LRINT):
12002 CASE_FLT_FN (BUILT_IN_LROUND):
12003 CASE_FLT_FN (BUILT_IN_MODF):
12004 CASE_FLT_FN (BUILT_IN_NAN):
12005 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12006 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12007 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12008 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12009 CASE_FLT_FN (BUILT_IN_POW):
12010 CASE_FLT_FN (BUILT_IN_REMAINDER):
12011 CASE_FLT_FN (BUILT_IN_REMQUO):
12012 CASE_FLT_FN (BUILT_IN_RINT):
12013 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12014 CASE_FLT_FN (BUILT_IN_ROUND):
12015 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12016 CASE_FLT_FN (BUILT_IN_SCALBLN):
12017 CASE_FLT_FN (BUILT_IN_SCALBN):
12018 CASE_FLT_FN (BUILT_IN_SIN):
12019 CASE_FLT_FN (BUILT_IN_SINH):
12020 CASE_FLT_FN (BUILT_IN_SINCOS):
12021 CASE_FLT_FN (BUILT_IN_SQRT):
12022 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12023 CASE_FLT_FN (BUILT_IN_TAN):
12024 CASE_FLT_FN (BUILT_IN_TANH):
12025 CASE_FLT_FN (BUILT_IN_TGAMMA):
12026 CASE_FLT_FN (BUILT_IN_TRUNC):
12027 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12028 return true;
12029 default:
12030 break;
12032 return false;