/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
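
/* Illustrative sketch (not part of this file): under the DEF_BUILTIN
   definition above, each builtins.def entry is stringified via its first
   parameter X, so an entry whose enumerator is BUILT_IN_MEMCPY contributes
   the literal "BUILT_IN_MEMCPY" to the table.  A debugging printf could
   then use, e.g.:

     const char *name = built_in_names[(int) BUILT_IN_MEMCPY];

   Only the first DEF_BUILTIN parameter matters for this table; the rest
   are consumed by other expansions of the macro elsewhere.  */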

/* Set up an array of builtin_info_type; make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or is
   one of the Cilk Plus runtime entry points when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
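
/* A worked example of the M/N contract above (illustrative, not part of
   GCC): for an access known to sit 4 bytes past a 16-byte aligned address,
   this function stores M = 128 bits in *ALIGNP and N = 32 bits in *BITPOSP,
   i.e. the address satisfies ((addr_in_bits - 32) % 128) == 0.  A caller
   that needs a single conservative alignment can then reduce this pair
   with bitpos != 0 ? (bitpos & -bitpos) : align, exactly as
   get_object_alignment does below.  */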

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
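
/* Illustrative numbers for the bitpos -> align reduction above (not part
   of GCC): if get_pointer_alignment_1 reports align = 64 and bitpos = 48,
   then ptr % 64 == 48, so the largest power of two known to divide ptr is
   48 & -48 = 16, and get_pointer_alignment returns 16 bits.  */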

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
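
/* Illustrative calls (hypothetical trees, not part of GCC): for SRC
   representing "hello", c_strlen returns ssize_int (5); for "hello" + 2
   it returns ssize_int (3); for "foo\0bar" with a non-constant offset it
   returns NULL_TREE, because the internal NUL makes the length depend on
   where the offset lands.  */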

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
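
/* For example (hypothetical tree, not part of GCC), c_getstr on the tree
   for "hello" + 1 yields a host pointer to "ello", while an offset past
   the terminating NUL yields 0.  */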

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
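
/* A concrete reading (assuming a little-endian target; illustration, not
   part of GCC): c_readstr ("ab", SImode) consumes 4 bytes.  'a' (0x61)
   lands in bits 0-7 and 'b' (0x62) in bits 8-15; once the NUL is reached
   CH stays 0, so the remaining byte positions contribute nothing and the
   result is the SImode constant 0x00006261.  The `if (ch)' guard also
   keeps the loop from reading past the end of the literal.  */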

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
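
/* Usage sketch (source-level view; illustration, not part of this file):
   with the default fallback macros above, __builtin_return_address (0)
   loads from the word one Pmode size past the frame address, while
   __builtin_frame_address (1) first walks one link of the dynamic chain
   before returning the resulting frame address.  Target macros such as
   RETURN_ADDR_RTX can replace either step entirely.  */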

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
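
/* Layout of the five-word setjmp buffer as written above (Pmode words;
   sketch for reference, derived from the stores in this function):

     buf[0]   frame value from targetm.builtin_setjmp_frame_value ()
     buf[1]   address of RECEIVER_LABEL
     buf[2..] stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and rewrite the same slots.  */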

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
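
/* Example (taken from the callers below): the check

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by anything, as used for
   __builtin_prefetch.  */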

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
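
/* Source-level examples of the argument rules enforced above (illustration,
   not part of this file):

     __builtin_prefetch (&a[i], 0, 1);  // read prefetch, low locality
     __builtin_prefetch (&a[i], 1);     // write prefetch, locality defaults to 3
     __builtin_prefetch (&a[i]);        // read prefetch, locality 3

   The second and third arguments must be compile-time integer constants.  */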

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
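
/* Sketch of the block whose size is computed above (assuming the
   struct-value slot is present; offsets grow with each mode's size and
   alignment):

     offset 0                    incoming arg pointer (Pmode)
     offset GET_MODE_SIZE (Pmode)  structure value address (Pmode)
     ...                         one slot per argument register, each
				 aligned to GET_MODE_ALIGNMENT of
				 apply_args_mode[regno]

   expand_builtin_apply_args_1 and expand_builtin_apply below walk the
   block with exactly the same offset arithmetic.  */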

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1581 /* Perform an untyped call and save the state required to perform an
1582 untyped return of whatever value was returned by the given function. */
1584 static rtx
1585 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1587 int size, align, regno;
1588 machine_mode mode;
1589 rtx incoming_args, result, reg, dest, src;
1590 rtx_call_insn *call_insn;
1591 rtx old_stack_level = 0;
1592 rtx call_fusage = 0;
1593 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1595 arguments = convert_memory_address (Pmode, arguments);
1597 /* Create a block where the return registers can be saved. */
1598 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1600 /* Fetch the arg pointer from the ARGUMENTS block. */
1601 incoming_args = gen_reg_rtx (Pmode);
1602 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1603 if (!STACK_GROWS_DOWNWARD)
1604 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1605 incoming_args, 0, OPTAB_LIB_WIDEN);
1607 /* Push a new argument block and copy the arguments. Do not allow
1608 the (potential) memcpy call below to interfere with our stack
1609 manipulations. */
1610 do_pending_stack_adjust ();
1611 NO_DEFER_POP;
1613 /* Save the stack with nonlocal if available. */
1614 if (targetm.have_save_stack_nonlocal ())
1615 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1616 else
1617 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1619 /* Allocate a block of memory onto the stack and copy the memory
1620 arguments to the outgoing arguments address. We can pass TRUE
1621 as the 4th argument because we just saved the stack pointer
1622 and will restore it right after the call. */
1623 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1625 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1626 may have already set current_function_calls_alloca to true.
1627 current_function_calls_alloca won't be set if argsize is zero,
1628 so we have to guarantee need_drap is true here. */
1629 if (SUPPORTS_STACK_ALIGNMENT)
1630 crtl->need_drap = true;
1632 dest = virtual_outgoing_args_rtx;
1633 if (!STACK_GROWS_DOWNWARD)
1635 if (CONST_INT_P (argsize))
1636 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1637 else
1638 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1640 dest = gen_rtx_MEM (BLKmode, dest);
1641 set_mem_align (dest, PARM_BOUNDARY);
1642 src = gen_rtx_MEM (BLKmode, incoming_args);
1643 set_mem_align (src, PARM_BOUNDARY);
1644 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1646 /* Refer to the argument block. */
1647 apply_args_size ();
1648 arguments = gen_rtx_MEM (BLKmode, arguments);
1649 set_mem_align (arguments, PARM_BOUNDARY);
1651 /* Walk past the arg-pointer and structure value address. */
1652 size = GET_MODE_SIZE (Pmode);
1653 if (struct_value)
1654 size += GET_MODE_SIZE (Pmode);
1656 /* Restore each of the registers previously saved. Make USE insns
1657 for each of these registers for use in making the call. */
1658 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1659 if ((mode = apply_args_mode[regno]) != VOIDmode)
1661 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1662 if (size % align != 0)
1663 size = CEIL (size, align) * align;
1664 reg = gen_rtx_REG (mode, regno);
1665 emit_move_insn (reg, adjust_address (arguments, mode, size));
1666 use_reg (&call_fusage, reg);
1667 size += GET_MODE_SIZE (mode);
1670 /* Restore the structure value address unless this is passed as an
1671 "invisible" first argument. */
1672 size = GET_MODE_SIZE (Pmode);
1673 if (struct_value)
1675 rtx value = gen_reg_rtx (Pmode);
1676 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1677 emit_move_insn (struct_value, value);
1678 if (REG_P (struct_value))
1679 use_reg (&call_fusage, struct_value);
1680 size += GET_MODE_SIZE (Pmode);
1683 /* All arguments and registers used for the call are set up by now! */
1684 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1686 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1687 work is needed; nor do we want to load it into a register as an
1688 optimization, because prepare_call_address already did so if needed. */
1689 if (GET_CODE (function) != SYMBOL_REF)
1690 function = memory_address (FUNCTION_MODE, function);
1692 /* Generate the actual call instruction and save the return value. */
1693 if (targetm.have_untyped_call ())
1695 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1696 emit_call_insn (targetm.gen_untyped_call (mem, result,
1697 result_vector (1, result)));
1699 else
1700 #ifdef HAVE_call_value
1701 if (HAVE_call_value)
1703 rtx valreg = 0;
1705 /* Locate the unique return register. It is not possible to
1706 express a call that sets more than one return register using
1707 call_value; use untyped_call for that. In fact, untyped_call
1708 only needs to save the return registers in the given block. */
1709 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1710 if ((mode = apply_result_mode[regno]) != VOIDmode)
1712 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1714 valreg = gen_rtx_REG (mode, regno);
1717 emit_call_insn (GEN_CALL_VALUE (valreg,
1718 gen_rtx_MEM (FUNCTION_MODE, function),
1719 const0_rtx, NULL_RTX, const0_rtx));
1721 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1723 else
1724 #endif
1725 gcc_unreachable ();
1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
1732 /* Restore the stack. */
1733 if (targetm.have_save_stack_nonlocal ())
1734 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1735 else
1736 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1737 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1739 OK_DEFER_POP;
1741 /* Return the address of the result block. */
1742 result = copy_addr_to_reg (XEXP (result, 0));
1743 return convert_memory_address (ptr_mode, result);
1746 /* Perform an untyped return. */
1748 static void
1749 expand_builtin_return (rtx result)
1751 int size, align, regno;
1752 machine_mode mode;
1753 rtx reg;
1754 rtx_insn *call_fusage = 0;
1756 result = convert_memory_address (Pmode, result);
1758 apply_result_size ();
1759 result = gen_rtx_MEM (BLKmode, result);
1761 if (targetm.have_untyped_return ())
1763 rtx vector = result_vector (0, result);
1764 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1765 emit_barrier ();
1766 return;
1769 /* Restore the return value and note that each value is used. */
1770 size = 0;
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1787 /* Put the USE insns before the return. */
1788 emit_insn (call_fusage);
1790 /* Return whatever value was restored by jumping directly to the end
1791 of the function. */
1792 expand_naked_return ();
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1797 static enum type_class
1798 type_to_class (tree type)
1800 switch (TREE_CODE (type))
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1823 /* Expand a call EXP to __builtin_classify_type. */
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
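/* E.g. __builtin_classify_type (1.0) expands to the constant
   real_type_class, while a call with no arguments yields no_type_class.  */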
1833 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1834 determines which among a set of three builtin math functions is
1835 appropriate for a given type mode. The `F' and `L' cases are
1836 automatically generated from the `double' case. */
1837 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1838 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1839 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1840 fcodel = BUILT_IN_MATHFN##L ; break;
1841 /* Similar to above, but appends _R after any F/L suffix. */
1842 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1843 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1844 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1845 fcodel = BUILT_IN_MATHFN##L_R ; break;
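/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */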
1847 /* Return the mathematical function equivalent to FN but operating
1848 directly on TYPE, if available. If IMPLICIT_P is true use the implicit
1849 builtin declaration, otherwise use the explicit declaration. If we
1850 can't do the conversion, return zero. */
1852 static tree
1853 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1855 enum built_in_function fcode, fcodef, fcodel, fcode2;
1857 switch (fn)
1859 CASE_MATHFN (BUILT_IN_ACOS)
1860 CASE_MATHFN (BUILT_IN_ACOSH)
1861 CASE_MATHFN (BUILT_IN_ASIN)
1862 CASE_MATHFN (BUILT_IN_ASINH)
1863 CASE_MATHFN (BUILT_IN_ATAN)
1864 CASE_MATHFN (BUILT_IN_ATAN2)
1865 CASE_MATHFN (BUILT_IN_ATANH)
1866 CASE_MATHFN (BUILT_IN_CBRT)
1867 CASE_MATHFN (BUILT_IN_CEIL)
1868 CASE_MATHFN (BUILT_IN_CEXPI)
1869 CASE_MATHFN (BUILT_IN_COPYSIGN)
1870 CASE_MATHFN (BUILT_IN_COS)
1871 CASE_MATHFN (BUILT_IN_COSH)
1872 CASE_MATHFN (BUILT_IN_DREM)
1873 CASE_MATHFN (BUILT_IN_ERF)
1874 CASE_MATHFN (BUILT_IN_ERFC)
1875 CASE_MATHFN (BUILT_IN_EXP)
1876 CASE_MATHFN (BUILT_IN_EXP10)
1877 CASE_MATHFN (BUILT_IN_EXP2)
1878 CASE_MATHFN (BUILT_IN_EXPM1)
1879 CASE_MATHFN (BUILT_IN_FABS)
1880 CASE_MATHFN (BUILT_IN_FDIM)
1881 CASE_MATHFN (BUILT_IN_FLOOR)
1882 CASE_MATHFN (BUILT_IN_FMA)
1883 CASE_MATHFN (BUILT_IN_FMAX)
1884 CASE_MATHFN (BUILT_IN_FMIN)
1885 CASE_MATHFN (BUILT_IN_FMOD)
1886 CASE_MATHFN (BUILT_IN_FREXP)
1887 CASE_MATHFN (BUILT_IN_GAMMA)
1888 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1889 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1890 CASE_MATHFN (BUILT_IN_HYPOT)
1891 CASE_MATHFN (BUILT_IN_ILOGB)
1892 CASE_MATHFN (BUILT_IN_ICEIL)
1893 CASE_MATHFN (BUILT_IN_IFLOOR)
1894 CASE_MATHFN (BUILT_IN_INF)
1895 CASE_MATHFN (BUILT_IN_IRINT)
1896 CASE_MATHFN (BUILT_IN_IROUND)
1897 CASE_MATHFN (BUILT_IN_ISINF)
1898 CASE_MATHFN (BUILT_IN_J0)
1899 CASE_MATHFN (BUILT_IN_J1)
1900 CASE_MATHFN (BUILT_IN_JN)
1901 CASE_MATHFN (BUILT_IN_LCEIL)
1902 CASE_MATHFN (BUILT_IN_LDEXP)
1903 CASE_MATHFN (BUILT_IN_LFLOOR)
1904 CASE_MATHFN (BUILT_IN_LGAMMA)
1905 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1906 CASE_MATHFN (BUILT_IN_LLCEIL)
1907 CASE_MATHFN (BUILT_IN_LLFLOOR)
1908 CASE_MATHFN (BUILT_IN_LLRINT)
1909 CASE_MATHFN (BUILT_IN_LLROUND)
1910 CASE_MATHFN (BUILT_IN_LOG)
1911 CASE_MATHFN (BUILT_IN_LOG10)
1912 CASE_MATHFN (BUILT_IN_LOG1P)
1913 CASE_MATHFN (BUILT_IN_LOG2)
1914 CASE_MATHFN (BUILT_IN_LOGB)
1915 CASE_MATHFN (BUILT_IN_LRINT)
1916 CASE_MATHFN (BUILT_IN_LROUND)
1917 CASE_MATHFN (BUILT_IN_MODF)
1918 CASE_MATHFN (BUILT_IN_NAN)
1919 CASE_MATHFN (BUILT_IN_NANS)
1920 CASE_MATHFN (BUILT_IN_NEARBYINT)
1921 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1922 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1923 CASE_MATHFN (BUILT_IN_POW)
1924 CASE_MATHFN (BUILT_IN_POWI)
1925 CASE_MATHFN (BUILT_IN_POW10)
1926 CASE_MATHFN (BUILT_IN_REMAINDER)
1927 CASE_MATHFN (BUILT_IN_REMQUO)
1928 CASE_MATHFN (BUILT_IN_RINT)
1929 CASE_MATHFN (BUILT_IN_ROUND)
1930 CASE_MATHFN (BUILT_IN_SCALB)
1931 CASE_MATHFN (BUILT_IN_SCALBLN)
1932 CASE_MATHFN (BUILT_IN_SCALBN)
1933 CASE_MATHFN (BUILT_IN_SIGNBIT)
1934 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1935 CASE_MATHFN (BUILT_IN_SIN)
1936 CASE_MATHFN (BUILT_IN_SINCOS)
1937 CASE_MATHFN (BUILT_IN_SINH)
1938 CASE_MATHFN (BUILT_IN_SQRT)
1939 CASE_MATHFN (BUILT_IN_TAN)
1940 CASE_MATHFN (BUILT_IN_TANH)
1941 CASE_MATHFN (BUILT_IN_TGAMMA)
1942 CASE_MATHFN (BUILT_IN_TRUNC)
1943 CASE_MATHFN (BUILT_IN_Y0)
1944 CASE_MATHFN (BUILT_IN_Y1)
1945 CASE_MATHFN (BUILT_IN_YN)
1947 default:
1948 return NULL_TREE;
1951 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1952 fcode2 = fcode;
1953 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1954 fcode2 = fcodef;
1955 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1956 fcode2 = fcodel;
1957 else
1958 return NULL_TREE;
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1963 return builtin_decl_explicit (fcode2);
1966 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1968 tree
1969 mathfn_built_in (tree type, enum built_in_function fn)
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
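/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the decl
   for sinf, provided its implicit builtin declaration is available.  */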
1974 /* If errno must be maintained, expand the RTL to check if the result,
1975 TARGET, of a built-in function call, EXP, is NaN, and if so set
1976 errno to EDOM. */
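/* In C terms the check emitted below is roughly (a sketch, not the
   literal RTL):

     if (result != result)
       errno = EDOM;

   since only a NaN compares unequal to itself.  */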
1978 static void
1979 expand_errno_check (tree exp, rtx target)
1981 rtx_code_label *lab = gen_label_rtx ();
1983 /* Test the result; if it is NaN, set errno=EDOM because
1984 the argument was not in the domain. */
1985 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1986 NULL_RTX, NULL, lab,
1987 /* The jump is very likely. */
1988 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1990 #ifdef TARGET_EDOM
1991 /* If this built-in doesn't throw an exception, set errno directly. */
1992 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1994 #ifdef GEN_ERRNO_RTX
1995 rtx errno_rtx = GEN_ERRNO_RTX;
1996 #else
1997 rtx errno_rtx
1998 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1999 #endif
2000 emit_move_insn (errno_rtx,
2001 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2002 emit_label (lab);
2003 return;
2005 #endif
2007 /* Make sure the library call isn't expanded as a tail call. */
2008 CALL_EXPR_TAILCALL (exp) = 0;
2010 /* We can't set errno=EDOM directly; let the library call do it.
2011 Pop the arguments right away in case the call gets deleted. */
2012 NO_DEFER_POP;
2013 expand_call (exp, target, 0);
2014 OK_DEFER_POP;
2015 emit_label (lab);
2018 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2019 Return NULL_RTX if a normal call should be emitted rather than expanding
2020 the function in-line. EXP is the expression that is a call to the builtin
2021 function; if convenient, the result should be placed in TARGET.
2022 SUBTARGET may be used as the target for computing one of EXP's operands. */
2024 static rtx
2025 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2027 optab builtin_optab;
2028 rtx op0;
2029 rtx_insn *insns;
2030 tree fndecl = get_callee_fndecl (exp);
2031 machine_mode mode;
2032 bool errno_set = false;
2033 bool try_widening = false;
2034 tree arg;
2036 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2037 return NULL_RTX;
2039 arg = CALL_EXPR_ARG (exp, 0);
2041 switch (DECL_FUNCTION_CODE (fndecl))
2043 CASE_FLT_FN (BUILT_IN_SQRT):
2044 errno_set = ! tree_expr_nonnegative_p (arg);
2045 try_widening = true;
2046 builtin_optab = sqrt_optab;
2047 break;
2048 CASE_FLT_FN (BUILT_IN_EXP):
2049 errno_set = true; builtin_optab = exp_optab; break;
2050 CASE_FLT_FN (BUILT_IN_EXP10):
2051 CASE_FLT_FN (BUILT_IN_POW10):
2052 errno_set = true; builtin_optab = exp10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_EXP2):
2054 errno_set = true; builtin_optab = exp2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXPM1):
2056 errno_set = true; builtin_optab = expm1_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOGB):
2058 errno_set = true; builtin_optab = logb_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOG):
2060 errno_set = true; builtin_optab = log_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG10):
2062 errno_set = true; builtin_optab = log10_optab; break;
2063 CASE_FLT_FN (BUILT_IN_LOG2):
2064 errno_set = true; builtin_optab = log2_optab; break;
2065 CASE_FLT_FN (BUILT_IN_LOG1P):
2066 errno_set = true; builtin_optab = log1p_optab; break;
2067 CASE_FLT_FN (BUILT_IN_ASIN):
2068 builtin_optab = asin_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ACOS):
2070 builtin_optab = acos_optab; break;
2071 CASE_FLT_FN (BUILT_IN_TAN):
2072 builtin_optab = tan_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN):
2074 builtin_optab = atan_optab; break;
2075 CASE_FLT_FN (BUILT_IN_FLOOR):
2076 builtin_optab = floor_optab; break;
2077 CASE_FLT_FN (BUILT_IN_CEIL):
2078 builtin_optab = ceil_optab; break;
2079 CASE_FLT_FN (BUILT_IN_TRUNC):
2080 builtin_optab = btrunc_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ROUND):
2082 builtin_optab = round_optab; break;
2083 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2084 builtin_optab = nearbyint_optab;
2085 if (flag_trapping_math)
2086 break;
2087 /* Else fall through and expand as rint. */
2088 CASE_FLT_FN (BUILT_IN_RINT):
2089 builtin_optab = rint_optab; break;
2090 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2091 builtin_optab = significand_optab; break;
2092 default:
2093 gcc_unreachable ();
2096 /* Make a suitable register to place result in. */
2097 mode = TYPE_MODE (TREE_TYPE (exp));
2099 if (! flag_errno_math || ! HONOR_NANS (mode))
2100 errno_set = false;
2102 /* Before working hard, check whether the instruction is available, but try
2103 to widen the mode for specific operations. */
2104 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2105 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2106 && (!errno_set || !optimize_insn_for_size_p ()))
2108 rtx result = gen_reg_rtx (mode);
2110 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2111 need to expand the argument again. This way, we will not perform
2112 side effects more than once. */
2113 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2115 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2117 start_sequence ();
2119 /* Compute into RESULT.
2120 Set RESULT to wherever the result comes back. */
2121 result = expand_unop (mode, builtin_optab, op0, result, 0);
2123 if (result != 0)
2125 if (errno_set)
2126 expand_errno_check (exp, result);
2128 /* Output the entire sequence. */
2129 insns = get_insns ();
2130 end_sequence ();
2131 emit_insn (insns);
2132 return result;
2135 /* If we were unable to expand via the builtin, stop the sequence
2136 (without outputting the insns) and call the library function
2137 with the stabilized argument list. */
2138 end_sequence ();
2141 return expand_call (exp, target, target == const0_rtx);
2144 /* Expand a call to the builtin binary math functions (pow and atan2).
2145 Return NULL_RTX if a normal call should be emitted rather than expanding the
2146 function in-line. EXP is the expression that is a call to the builtin
2147 function; if convenient, the result should be placed in TARGET.
2148 SUBTARGET may be used as the target for computing one of EXP's
2149 operands. */
2151 static rtx
2152 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2154 optab builtin_optab;
2155 rtx op0, op1, result;
2156 rtx_insn *insns;
2157 int op1_type = REAL_TYPE;
2158 tree fndecl = get_callee_fndecl (exp);
2159 tree arg0, arg1;
2160 machine_mode mode;
2161 bool errno_set = true;
2163 switch (DECL_FUNCTION_CODE (fndecl))
2165 CASE_FLT_FN (BUILT_IN_SCALBN):
2166 CASE_FLT_FN (BUILT_IN_SCALBLN):
2167 CASE_FLT_FN (BUILT_IN_LDEXP):
2168 op1_type = INTEGER_TYPE;
2169 default:
2170 break;
2173 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2174 return NULL_RTX;
2176 arg0 = CALL_EXPR_ARG (exp, 0);
2177 arg1 = CALL_EXPR_ARG (exp, 1);
2179 switch (DECL_FUNCTION_CODE (fndecl))
2181 CASE_FLT_FN (BUILT_IN_POW):
2182 builtin_optab = pow_optab; break;
2183 CASE_FLT_FN (BUILT_IN_ATAN2):
2184 builtin_optab = atan2_optab; break;
2185 CASE_FLT_FN (BUILT_IN_SCALB):
2186 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2187 return 0;
2188 builtin_optab = scalb_optab; break;
2189 CASE_FLT_FN (BUILT_IN_SCALBN):
2190 CASE_FLT_FN (BUILT_IN_SCALBLN):
2191 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2192 return 0;
2193 /* Fall through... */
2194 CASE_FLT_FN (BUILT_IN_LDEXP):
2195 builtin_optab = ldexp_optab; break;
2196 CASE_FLT_FN (BUILT_IN_FMOD):
2197 builtin_optab = fmod_optab; break;
2198 CASE_FLT_FN (BUILT_IN_REMAINDER):
2199 CASE_FLT_FN (BUILT_IN_DREM):
2200 builtin_optab = remainder_optab; break;
2201 default:
2202 gcc_unreachable ();
2205 /* Make a suitable register to place result in. */
2206 mode = TYPE_MODE (TREE_TYPE (exp));
2208 /* Before working hard, check whether the instruction is available. */
2209 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2210 return NULL_RTX;
2212 result = gen_reg_rtx (mode);
2214 if (! flag_errno_math || ! HONOR_NANS (mode))
2215 errno_set = false;
2217 if (errno_set && optimize_insn_for_size_p ())
2218 return 0;
2220 /* Always stabilize the argument list. */
2221 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2222 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2224 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2225 op1 = expand_normal (arg1);
2227 start_sequence ();
2229 /* Compute into RESULT.
2230 Set RESULT to wherever the result comes back. */
2231 result = expand_binop (mode, builtin_optab, op0, op1,
2232 result, 0, OPTAB_DIRECT);
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call the library function
2236 with the stabilized argument list. */
2237 if (result == 0)
2239 end_sequence ();
2240 return expand_call (exp, target, target == const0_rtx);
2243 if (errno_set)
2244 expand_errno_check (exp, result);
2246 /* Output the entire sequence. */
2247 insns = get_insns ();
2248 end_sequence ();
2249 emit_insn (insns);
2251 return result;
2254 /* Expand a call to the builtin trinary math functions (fma).
2255 Return NULL_RTX if a normal call should be emitted rather than expanding the
2256 function in-line. EXP is the expression that is a call to the builtin
2257 function; if convenient, the result should be placed in TARGET.
2258 SUBTARGET may be used as the target for computing one of EXP's
2259 operands. */
2261 static rtx
2262 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2264 optab builtin_optab;
2265 rtx op0, op1, op2, result;
2266 rtx_insn *insns;
2267 tree fndecl = get_callee_fndecl (exp);
2268 tree arg0, arg1, arg2;
2269 machine_mode mode;
2271 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2272 return NULL_RTX;
2274 arg0 = CALL_EXPR_ARG (exp, 0);
2275 arg1 = CALL_EXPR_ARG (exp, 1);
2276 arg2 = CALL_EXPR_ARG (exp, 2);
2278 switch (DECL_FUNCTION_CODE (fndecl))
2280 CASE_FLT_FN (BUILT_IN_FMA):
2281 builtin_optab = fma_optab; break;
2282 default:
2283 gcc_unreachable ();
2286 /* Make a suitable register to place result in. */
2287 mode = TYPE_MODE (TREE_TYPE (exp));
2289 /* Before working hard, check whether the instruction is available. */
2290 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2291 return NULL_RTX;
2293 result = gen_reg_rtx (mode);
2295 /* Always stabilize the argument list. */
2296 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2297 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2298 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2300 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2301 op1 = expand_normal (arg1);
2302 op2 = expand_normal (arg2);
2304 start_sequence ();
2306 /* Compute into RESULT.
2307 Set RESULT to wherever the result comes back. */
2308 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2309 result, 0);
2311 /* If we were unable to expand via the builtin, stop the sequence
2312 (without outputting the insns) and call the library function
2313 with the stabilized argument list. */
2314 if (result == 0)
2316 end_sequence ();
2317 return expand_call (exp, target, target == const0_rtx);
2320 /* Output the entire sequence. */
2321 insns = get_insns ();
2322 end_sequence ();
2323 emit_insn (insns);
2325 return result;
2328 /* Expand a call to the builtin sin and cos math functions.
2329 Return NULL_RTX if a normal call should be emitted rather than expanding the
2330 function in-line. EXP is the expression that is a call to the builtin
2331 function; if convenient, the result should be placed in TARGET.
2332 SUBTARGET may be used as the target for computing one of EXP's
2333 operands. */
2335 static rtx
2336 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2338 optab builtin_optab;
2339 rtx op0;
2340 rtx_insn *insns;
2341 tree fndecl = get_callee_fndecl (exp);
2342 machine_mode mode;
2343 tree arg;
2345 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2346 return NULL_RTX;
2348 arg = CALL_EXPR_ARG (exp, 0);
2350 switch (DECL_FUNCTION_CODE (fndecl))
2352 CASE_FLT_FN (BUILT_IN_SIN):
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = sincos_optab; break;
2355 default:
2356 gcc_unreachable ();
2359 /* Make a suitable register to place result in. */
2360 mode = TYPE_MODE (TREE_TYPE (exp));
2362 /* Check if the sincos insn is available; otherwise fall back
2363 to the sin or cos insn. */
2364 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2365 switch (DECL_FUNCTION_CODE (fndecl))
2367 CASE_FLT_FN (BUILT_IN_SIN):
2368 builtin_optab = sin_optab; break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = cos_optab; break;
2371 default:
2372 gcc_unreachable ();
2375 /* Before working hard, check whether the instruction is available. */
2376 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2378 rtx result = gen_reg_rtx (mode);
2380 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2381 need to expand the argument again. This way, we will not perform
2382 side effects more than once. */
2383 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2385 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2387 start_sequence ();
2389 /* Compute into RESULT.
2390 Set RESULT to wherever the result comes back. */
2391 if (builtin_optab == sincos_optab)
2393 int ok;
2395 switch (DECL_FUNCTION_CODE (fndecl))
2397 CASE_FLT_FN (BUILT_IN_SIN):
2398 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2399 break;
2400 CASE_FLT_FN (BUILT_IN_COS):
2401 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2402 break;
2403 default:
2404 gcc_unreachable ();
2406 gcc_assert (ok);
2408 else
2409 result = expand_unop (mode, builtin_optab, op0, result, 0);
2411 if (result != 0)
2413 /* Output the entire sequence. */
2414 insns = get_insns ();
2415 end_sequence ();
2416 emit_insn (insns);
2417 return result;
2420 /* If we were unable to expand via the builtin, stop the sequence
2421 (without outputting the insns) and call the library function
2422 with the stabilized argument list. */
2423 end_sequence ();
2426 return expand_call (exp, target, target == const0_rtx);
2429 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2430 return an RTL instruction code that implements the functionality.
2431 If that isn't possible or available, return CODE_FOR_nothing. */
2433 static enum insn_code
2434 interclass_mathfn_icode (tree arg, tree fndecl)
2436 bool errno_set = false;
2437 optab builtin_optab = unknown_optab;
2438 machine_mode mode;
2440 switch (DECL_FUNCTION_CODE (fndecl))
2442 CASE_FLT_FN (BUILT_IN_ILOGB):
2443 errno_set = true; builtin_optab = ilogb_optab; break;
2444 CASE_FLT_FN (BUILT_IN_ISINF):
2445 builtin_optab = isinf_optab; break;
2446 case BUILT_IN_ISNORMAL:
2447 case BUILT_IN_ISFINITE:
2448 CASE_FLT_FN (BUILT_IN_FINITE):
2449 case BUILT_IN_FINITED32:
2450 case BUILT_IN_FINITED64:
2451 case BUILT_IN_FINITED128:
2452 case BUILT_IN_ISINFD32:
2453 case BUILT_IN_ISINFD64:
2454 case BUILT_IN_ISINFD128:
2455 /* These builtins have no optabs (yet). */
2456 break;
2457 default:
2458 gcc_unreachable ();
2461 /* There's no easy way to detect the case we need to set EDOM. */
2462 if (flag_errno_math && errno_set)
2463 return CODE_FOR_nothing;
2465 /* Optab mode depends on the mode of the input argument. */
2466 mode = TYPE_MODE (TREE_TYPE (arg));
2468 if (builtin_optab)
2469 return optab_handler (builtin_optab, mode);
2470 return CODE_FOR_nothing;
2473 /* Expand a call to one of the builtin math functions that operate on
2474 floating point argument and output an integer result (ilogb, isinf,
2475 isnan, etc).
2476 Return 0 if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2478 function; if convenient, the result should be placed in TARGET. */
2480 static rtx
2481 expand_builtin_interclass_mathfn (tree exp, rtx target)
2483 enum insn_code icode = CODE_FOR_nothing;
2484 rtx op0;
2485 tree fndecl = get_callee_fndecl (exp);
2486 machine_mode mode;
2487 tree arg;
2489 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2490 return NULL_RTX;
2492 arg = CALL_EXPR_ARG (exp, 0);
2493 icode = interclass_mathfn_icode (arg, fndecl);
2494 mode = TYPE_MODE (TREE_TYPE (arg));
2496 if (icode != CODE_FOR_nothing)
2498 struct expand_operand ops[1];
2499 rtx_insn *last = get_last_insn ();
2500 tree orig_arg = arg;
2502 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2503 need to expand the argument again. This way, we will not perform
2504 side effects more than once. */
2505 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2507 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2509 if (mode != GET_MODE (op0))
2510 op0 = convert_to_mode (mode, op0, 0);
2512 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2513 if (maybe_legitimize_operands (icode, 0, 1, ops)
2514 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2515 return ops[0].value;
2517 delete_insns_since (last);
2518 CALL_EXPR_ARG (exp, 0) = orig_arg;
2521 return NULL_RTX;
2524 /* Expand a call to the builtin sincos math function.
2525 Return NULL_RTX if a normal call should be emitted rather than expanding the
2526 function in-line. EXP is the expression that is a call to the builtin
2527 function. */
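/* E.g. sincos (x, &s, &c) stores sin (x) in s and cos (x) in c; when the
   target provides a sincos insn, both values come from one instruction.  */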
2529 static rtx
2530 expand_builtin_sincos (tree exp)
2532 rtx op0, op1, op2, target1, target2;
2533 machine_mode mode;
2534 tree arg, sinp, cosp;
2535 int result;
2536 location_t loc = EXPR_LOCATION (exp);
2537 tree alias_type, alias_off;
2539 if (!validate_arglist (exp, REAL_TYPE,
2540 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2541 return NULL_RTX;
2543 arg = CALL_EXPR_ARG (exp, 0);
2544 sinp = CALL_EXPR_ARG (exp, 1);
2545 cosp = CALL_EXPR_ARG (exp, 2);
2547 /* Make a suitable register to place result in. */
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2550 /* Check if sincos insn is available, otherwise emit the call. */
2551 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2552 return NULL_RTX;
2554 target1 = gen_reg_rtx (mode);
2555 target2 = gen_reg_rtx (mode);
2557 op0 = expand_normal (arg);
2558 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2559 alias_off = build_int_cst (alias_type, 0);
2560 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2561 sinp, alias_off));
2562 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 cosp, alias_off));
2565 /* Compute into target1 and target2.
2566 Set TARGET to wherever the result comes back. */
2567 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2568 gcc_assert (result);
2570 /* Move target1 and target2 to the memory locations indicated
2571 by op1 and op2. */
2572 emit_move_insn (op1, target1);
2573 emit_move_insn (op2, target2);
2575 return const0_rtx;
2578 /* Expand a call to the internal cexpi builtin to the sincos math function.
2579 EXP is the expression that is a call to the builtin function; if convenient,
2580 the result should be placed in TARGET. */
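/* Mathematically __builtin_cexpi (x) is cexp (I * x), i.e.
   cos (x) + I * sin (x), which is why the sincos and cexp fallbacks
   below compute the same value.  */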
2582 static rtx
2583 expand_builtin_cexpi (tree exp, rtx target)
2585 tree fndecl = get_callee_fndecl (exp);
2586 tree arg, type;
2587 machine_mode mode;
2588 rtx op0, op1, op2;
2589 location_t loc = EXPR_LOCATION (exp);
2591 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2592 return NULL_RTX;
2594 arg = CALL_EXPR_ARG (exp, 0);
2595 type = TREE_TYPE (arg);
2596 mode = TYPE_MODE (TREE_TYPE (arg));
2598 /* Try expanding via a sincos optab; fall back to emitting a libcall
2599 to sincos or cexp. We are sure we have sincos or cexp, because cexpi
2600 is only generated from sincos or cexp, or when either is available. */
2601 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2603 op1 = gen_reg_rtx (mode);
2604 op2 = gen_reg_rtx (mode);
2606 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2608 /* Compute into op1 and op2. */
2609 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2611 else if (targetm.libc_has_function (function_sincos))
2613 tree call, fn = NULL_TREE;
2614 tree top1, top2;
2615 rtx op1a, op2a;
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2623 else
2624 gcc_unreachable ();
2626 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op1a = copy_addr_to_reg (XEXP (op1, 0));
2629 op2a = copy_addr_to_reg (XEXP (op2, 0));
2630 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2631 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2633 /* Make sure not to fold the sincos call again. */
2634 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2635 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2636 call, 3, arg, top1, top2));
2638 else
2640 tree call, fn = NULL_TREE, narg;
2641 tree ctype = build_complex_type (type);
2643 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2649 else
2650 gcc_unreachable ();
2652 /* If we don't have a decl for cexp, create one. This is the
2653 friendliest fallback if the user calls __builtin_cexpi on a target
2654 without full C99 function support. */
2655 if (fn == NULL_TREE)
2657 tree fntype;
2658 const char *name = NULL;
2660 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2661 name = "cexpf";
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2663 name = "cexp";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2665 name = "cexpl";
2667 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2668 fn = build_fn_decl (name, fntype);
2671 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2672 build_real (type, dconst0), arg);
2674 /* Make sure not to fold the cexp call again. */
2675 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2676 return expand_expr (build_call_nary (ctype, call, 1, narg),
2677 target, VOIDmode, EXPAND_NORMAL);
2680 /* Now build the proper return type. */
2681 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2682 make_tree (TREE_TYPE (arg), op2),
2683 make_tree (TREE_TYPE (arg), op1)),
2684 target, VOIDmode, EXPAND_NORMAL);
2687 /* Conveniently construct a function call expression. FNDECL names the
2688 function to be called, N is the number of arguments, and the "..."
2689 parameters are the argument expressions. Unlike build_call_expr,
2690 this doesn't fold the call, so it always returns a CALL_EXPR. */
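/* A typical use, as seen later in this file:

     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
     expand_expr (result, target, mode, EXPAND_NORMAL);  */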
2692 static tree
2693 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2695 va_list ap;
2696 tree fntype = TREE_TYPE (fndecl);
2697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2699 va_start (ap, n);
2700 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2701 va_end (ap);
2702 SET_EXPR_LOCATION (fn, loc);
2703 return fn;
2706 /* Expand a call to one of the builtin rounding functions gcc defines
2707 as an extension (lfloor and lceil). As these are gcc extensions we
2708 do not need to worry about setting errno to EDOM.
2709 If expanding via the optab fails, lower the expression to (int) floor (x).
2710 EXP is the expression that is a call to the builtin function;
2711 if convenient, the result should be placed in TARGET. */
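/* E.g. when lceil_optab has no handler for the mode, __builtin_lceil (x)
   is expanded as roughly (long int) ceil (x): the ceil call is emitted
   and its result truncated with expand_fix below.  */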
2713 static rtx
2714 expand_builtin_int_roundingfn (tree exp, rtx target)
2716 convert_optab builtin_optab;
2717 rtx op0, tmp;
2718 rtx_insn *insns;
2719 tree fndecl = get_callee_fndecl (exp);
2720 enum built_in_function fallback_fn;
2721 tree fallback_fndecl;
2722 machine_mode mode;
2723 tree arg;
2725 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2726 gcc_unreachable ();
2728 arg = CALL_EXPR_ARG (exp, 0);
2730 switch (DECL_FUNCTION_CODE (fndecl))
2732 CASE_FLT_FN (BUILT_IN_ICEIL):
2733 CASE_FLT_FN (BUILT_IN_LCEIL):
2734 CASE_FLT_FN (BUILT_IN_LLCEIL):
2735 builtin_optab = lceil_optab;
2736 fallback_fn = BUILT_IN_CEIL;
2737 break;
2739 CASE_FLT_FN (BUILT_IN_IFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LFLOOR):
2741 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2742 builtin_optab = lfloor_optab;
2743 fallback_fn = BUILT_IN_FLOOR;
2744 break;
2746 default:
2747 gcc_unreachable ();
2750 /* Make a suitable register to place result in. */
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2753 target = gen_reg_rtx (mode);
2755 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2756 need to expand the argument again. This way, we will not perform
2757 side effects more than once. */
2758 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2760 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2762 start_sequence ();
2764 /* Compute into TARGET. */
2765 if (expand_sfix_optab (target, op0, builtin_optab))
2767 /* Output the entire sequence. */
2768 insns = get_insns ();
2769 end_sequence ();
2770 emit_insn (insns);
2771 return target;
2774 /* If we were unable to expand via the builtin, stop the sequence
2775 (without outputting the insns). */
2776 end_sequence ();
2778 /* Fall back to floating point rounding optab. */
2779 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2781 /* For non-C99 targets we may end up without a fallback fndecl here
2782 if the user called __builtin_lfloor directly. In this case emit
2783 a call to the floor/ceil variants nevertheless. This should result
2784 in the best user experience for targets without full C99 support. */
2785 if (fallback_fndecl == NULL_TREE)
2787 tree fntype;
2788 const char *name = NULL;
2790 switch (DECL_FUNCTION_CODE (fndecl))
2792 case BUILT_IN_ICEIL:
2793 case BUILT_IN_LCEIL:
2794 case BUILT_IN_LLCEIL:
2795 name = "ceil";
2796 break;
2797 case BUILT_IN_ICEILF:
2798 case BUILT_IN_LCEILF:
2799 case BUILT_IN_LLCEILF:
2800 name = "ceilf";
2801 break;
2802 case BUILT_IN_ICEILL:
2803 case BUILT_IN_LCEILL:
2804 case BUILT_IN_LLCEILL:
2805 name = "ceill";
2806 break;
2807 case BUILT_IN_IFLOOR:
2808 case BUILT_IN_LFLOOR:
2809 case BUILT_IN_LLFLOOR:
2810 name = "floor";
2811 break;
2812 case BUILT_IN_IFLOORF:
2813 case BUILT_IN_LFLOORF:
2814 case BUILT_IN_LLFLOORF:
2815 name = "floorf";
2816 break;
2817 case BUILT_IN_IFLOORL:
2818 case BUILT_IN_LFLOORL:
2819 case BUILT_IN_LLFLOORL:
2820 name = "floorl";
2821 break;
2822 default:
2823 gcc_unreachable ();
2826 fntype = build_function_type_list (TREE_TYPE (arg),
2827 TREE_TYPE (arg), NULL_TREE);
2828 fallback_fndecl = build_fn_decl (name, fntype);
2831 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2833 tmp = expand_normal (exp);
2834 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2836 /* Truncate the result of the floating-point optab to an integer
2837 via expand_fix (). */
2838 target = gen_reg_rtx (mode);
2839 expand_fix (target, tmp, 0);
2841 return target;
2844 /* Expand a call to one of the builtin math functions doing integer
2845 conversion (lrint).
2846 Return 0 if a normal call should be emitted rather than expanding the
2847 function in-line. EXP is the expression that is a call to the builtin
2848 function; if convenient, the result should be placed in TARGET. */
2850 static rtx
2851 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2853 convert_optab builtin_optab;
2854 rtx op0;
2855 rtx_insn *insns;
2856 tree fndecl = get_callee_fndecl (exp);
2857 tree arg;
2858 machine_mode mode;
2859 enum built_in_function fallback_fn = BUILT_IN_NONE;
2861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2862 gcc_unreachable ();
2864 arg = CALL_EXPR_ARG (exp, 0);
2866 switch (DECL_FUNCTION_CODE (fndecl))
2868 CASE_FLT_FN (BUILT_IN_IRINT):
2869 fallback_fn = BUILT_IN_LRINT;
2870 /* FALLTHRU */
2871 CASE_FLT_FN (BUILT_IN_LRINT):
2872 CASE_FLT_FN (BUILT_IN_LLRINT):
2873 builtin_optab = lrint_optab;
2874 break;
2876 CASE_FLT_FN (BUILT_IN_IROUND):
2877 fallback_fn = BUILT_IN_LROUND;
2878 /* FALLTHRU */
2879 CASE_FLT_FN (BUILT_IN_LROUND):
2880 CASE_FLT_FN (BUILT_IN_LLROUND):
2881 builtin_optab = lround_optab;
2882 break;
2884 default:
2885 gcc_unreachable ();
2888 /* There's no easy way to detect the case we need to set EDOM. */
2889 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2890 return NULL_RTX;
2892 /* Make a suitable register to place result in. */
2893 mode = TYPE_MODE (TREE_TYPE (exp));
2895 /* If errno need not be maintained, we can expand through the optab directly. */
2896 if (!flag_errno_math)
2898 rtx result = gen_reg_rtx (mode);
2900 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2901 need to expand the argument again. This way, we will not perform
2902 side effects more than once. */
2903 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2905 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2907 start_sequence ();
2909 if (expand_sfix_optab (result, op0, builtin_optab))
2911 /* Output the entire sequence. */
2912 insns = get_insns ();
2913 end_sequence ();
2914 emit_insn (insns);
2915 return result;
2918 /* If we were unable to expand via the builtin, stop the sequence
2919 (without outputting the insns) and call the library function
2920 with the stabilized argument list. */
2921 end_sequence ();
2924 if (fallback_fn != BUILT_IN_NONE)
2926 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2927 targets, (int) round (x) should never be transformed into
2928 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2929 a call to lround in the hope that the target provides at least some
2930 C99 functions. This should result in the best user experience on
2931 targets without full C99 support. */
2932 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2933 fallback_fn, 0);
2935 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2936 fallback_fndecl, 1, arg);
2938 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2939 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2940 return convert_to_mode (mode, target, 0);
2943 return expand_call (exp, target, target == const0_rtx);
2946 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2947 a normal call should be emitted rather than expanding the function
2948 in-line. EXP is the expression that is a call to the builtin
2949 function; if convenient, the result should be placed in TARGET. */
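/* As the code below shows, this always becomes a library call; for
   double the libfunc is conventionally named __powidf2 (stated here as
   an assumption about the usual libgcc naming).  */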
2951 static rtx
2952 expand_builtin_powi (tree exp, rtx target)
2954 tree arg0, arg1;
2955 rtx op0, op1;
2956 machine_mode mode;
2957 machine_mode mode2;
2959 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2960 return NULL_RTX;
2962 arg0 = CALL_EXPR_ARG (exp, 0);
2963 arg1 = CALL_EXPR_ARG (exp, 1);
2964 mode = TYPE_MODE (TREE_TYPE (exp));
2966 /* Emit a libcall to libgcc. */
2968 /* Mode of the 2nd argument must match that of an int. */
2969 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2971 if (target == NULL_RTX)
2972 target = gen_reg_rtx (mode);
2974 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2975 if (GET_MODE (op0) != mode)
2976 op0 = convert_to_mode (mode, op0, 0);
2977 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2978 if (GET_MODE (op1) != mode2)
2979 op1 = convert_to_mode (mode2, op1, 0);
2981 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2982 target, LCT_CONST, mode, 2,
2983 op0, mode, op1, mode2);
2985 return target;
2988 /* Expand expression EXP, which is a call to the strlen builtin. Return
2989 NULL_RTX if we failed; the caller should then emit a normal call.
2990 Otherwise try to get the result in TARGET, if convenient. */
2992 static rtx
2993 expand_builtin_strlen (tree exp, rtx target,
2994 machine_mode target_mode)
2996 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2997 return NULL_RTX;
2998 else
3000 struct expand_operand ops[4];
3001 rtx pat;
3002 tree len;
3003 tree src = CALL_EXPR_ARG (exp, 0);
3004 rtx src_reg;
3005 rtx_insn *before_strlen;
3006 machine_mode insn_mode = target_mode;
3007 enum insn_code icode = CODE_FOR_nothing;
3008 unsigned int align;
3010 /* If the length can be computed at compile-time, return it. */
3011 len = c_strlen (src, 0);
3012 if (len)
3013 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 /* If the length can be computed at compile-time and is a constant
3016 integer, but there are side effects in src, evaluate
3017 src for side effects, then return len.
3018 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3019 can be optimized into: i++; x = 3; */
3020 len = c_strlen (src, 1);
3021 if (len && TREE_CODE (len) == INTEGER_CST)
3023 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3024 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3027 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3029 /* If SRC is not a pointer type, don't do this operation inline. */
3030 if (align == 0)
3031 return NULL_RTX;
3033 /* Bail out if we can't compute strlen in the right mode. */
3034 while (insn_mode != VOIDmode)
3036 icode = optab_handler (strlen_optab, insn_mode);
3037 if (icode != CODE_FOR_nothing)
3038 break;
3040 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3042 if (insn_mode == VOIDmode)
3043 return NULL_RTX;
3045 /* Make a place to hold the source address. We will not expand
3046 the actual source until we are sure that the expansion will
3047 not fail -- there are trees that cannot be expanded twice. */
3048 src_reg = gen_reg_rtx (Pmode);
3050 /* Mark the beginning of the strlen sequence so we can emit the
3051 source operand later. */
3052 before_strlen = get_last_insn ();
3054 create_output_operand (&ops[0], target, insn_mode);
3055 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3056 create_integer_operand (&ops[2], 0);
3057 create_integer_operand (&ops[3], align);
3058 if (!maybe_expand_insn (icode, 4, ops))
3059 return NULL_RTX;
3061 /* Now that we are assured of success, expand the source. */
3062 start_sequence ();
3063 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3064 if (pat != src_reg)
3066 #ifdef POINTERS_EXTEND_UNSIGNED
3067 if (GET_MODE (pat) != Pmode)
3068 pat = convert_to_mode (Pmode, pat,
3069 POINTERS_EXTEND_UNSIGNED);
3070 #endif
3071 emit_move_insn (src_reg, pat);
3073 pat = get_insns ();
3074 end_sequence ();
3076 if (before_strlen)
3077 emit_insn_after (pat, before_strlen);
3078 else
3079 emit_insn_before (pat, get_insns ());
3081 /* Return the value in the proper mode for this function. */
3082 if (GET_MODE (ops[0].value) == target_mode)
3083 target = ops[0].value;
3084 else if (target != 0)
3085 convert_move (target, ops[0].value, 0);
3086 else
3087 target = convert_to_mode (target_mode, ops[0].value, 0);
3089 return target;
3093 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3094 bytes from constant string DATA + OFFSET and return it as target
3095 constant. */
3097 static rtx
3098 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3099 machine_mode mode)
3101 const char *str = (const char *) data;
3103 gcc_assert (offset >= 0
3104 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3105 <= strlen (str) + 1));
3107 return c_readstr (str + offset, mode);
3110 /* LEN specifies the length of the block of the memcpy/memset operation.
3111 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3112 In some cases we can make a very likely guess at the maximum size,
3113 which we then store into PROBABLE_MAX_SIZE. */
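/* E.g. for memcpy (a, b, 16), LEN_RTX is CONST_INT 16 and all three
   outputs become 16; for a variable length only the type's bounds and
   any value-range information narrow the interval.  */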
3115 static void
3116 determine_block_size (tree len, rtx len_rtx,
3117 unsigned HOST_WIDE_INT *min_size,
3118 unsigned HOST_WIDE_INT *max_size,
3119 unsigned HOST_WIDE_INT *probable_max_size)
3121 if (CONST_INT_P (len_rtx))
3123 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3124 return;
3126 else
3128 wide_int min, max;
3129 enum value_range_type range_type = VR_UNDEFINED;
3131 /* Determine bounds from the type. */
3132 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3133 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3134 else
3135 *min_size = 0;
3136 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3137 *probable_max_size = *max_size
3138 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3139 else
3140 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3142 if (TREE_CODE (len) == SSA_NAME)
3143 range_type = get_range_info (len, &min, &max);
3144 if (range_type == VR_RANGE)
3146 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3147 *min_size = min.to_uhwi ();
3148 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3149 *probable_max_size = *max_size = max.to_uhwi ();
3151 else if (range_type == VR_ANTI_RANGE)
3153 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3154 if (min == 0)
3156 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3157 *min_size = max.to_uhwi () + 1;
3159 /* Code like
3161 int n;
3162 if (n < 100)
3163 memcpy (a, b, n)
3165 produces an anti-range allowing negative values of N. We can
3166 still use that information and guess that N is not negative. */
3168 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3169 *probable_max_size = min.to_uhwi () - 1;
3172 gcc_checking_assert (*max_size <=
3173 (unsigned HOST_WIDE_INT)
3174 GET_MODE_MASK (GET_MODE (len_rtx)));
3177 /* Helper function to do the actual work for expand_builtin_memcpy. */
3179 static rtx
3180 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3182 const char *src_str;
3183 unsigned int src_align = get_pointer_alignment (src);
3184 unsigned int dest_align = get_pointer_alignment (dest);
3185 rtx dest_mem, src_mem, dest_addr, len_rtx;
3186 HOST_WIDE_INT expected_size = -1;
3187 unsigned int expected_align = 0;
3188 unsigned HOST_WIDE_INT min_size;
3189 unsigned HOST_WIDE_INT max_size;
3190 unsigned HOST_WIDE_INT probable_max_size;
3192 /* If DEST is not a pointer type, call the normal function. */
3193 if (dest_align == 0)
3194 return NULL_RTX;
3196 /* If SRC is not a pointer type, don't do this
3197 operation in-line. */
3198 if (src_align == 0)
3199 return NULL_RTX;
3201 if (currently_expanding_gimple_stmt)
3202 stringop_block_profile (currently_expanding_gimple_stmt,
3203 &expected_align, &expected_size);
3205 if (expected_align < dest_align)
3206 expected_align = dest_align;
3207 dest_mem = get_memory_rtx (dest, len);
3208 set_mem_align (dest_mem, dest_align);
3209 len_rtx = expand_normal (len);
3210 determine_block_size (len, len_rtx, &min_size, &max_size,
3211 &probable_max_size);
3212 src_str = c_getstr (src);
3214 /* If SRC is a string constant and the block move would be done
3215 by pieces, we can avoid loading the string from memory
3216 and only store the computed constants. */
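/* For instance, memcpy (buf, "hi", 3) with a constant length can be
   emitted as immediate stores of 'h', 'i' and '\0', never reading the
   string constant's memory at run time.  */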
3217 if (src_str
3218 && CONST_INT_P (len_rtx)
3219 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3220 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3221 CONST_CAST (char *, src_str),
3222 dest_align, false))
3224 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3225 builtin_memcpy_read_str,
3226 CONST_CAST (char *, src_str),
3227 dest_align, false, 0);
3228 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3229 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3230 return dest_mem;
3233 src_mem = get_memory_rtx (src, len);
3234 set_mem_align (src_mem, src_align);
3236 /* Copy word part most expediently. */
3237 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3238 CALL_EXPR_TAILCALL (exp)
3239 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3240 expected_align, expected_size,
3241 min_size, max_size, probable_max_size);
3243 if (dest_addr == 0)
3245 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3246 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3249 return dest_addr;
3252 /* Expand a call EXP to the memcpy builtin.
3253 Return NULL_RTX if we failed; the caller should then emit a normal
3254 call. Otherwise try to get the result in TARGET, if convenient
3255 (and in mode MODE if that's convenient). */
3257 static rtx
3258 expand_builtin_memcpy (tree exp, rtx target)
3260 if (!validate_arglist (exp,
3261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3262 return NULL_RTX;
3263 else
3265 tree dest = CALL_EXPR_ARG (exp, 0);
3266 tree src = CALL_EXPR_ARG (exp, 1);
3267 tree len = CALL_EXPR_ARG (exp, 2);
3268 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3272 /* Expand an instrumented call EXP to the memcpy builtin.
3273 Return NULL_RTX if we failed; the caller should then emit a normal
3274 call. Otherwise try to get the result in TARGET, if convenient
3275 (and in mode MODE if that's convenient). */
3277 static rtx
3278 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3280 if (!validate_arglist (exp,
3281 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3282 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3283 INTEGER_TYPE, VOID_TYPE))
3284 return NULL_RTX;
3285 else
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 2);
3289 tree len = CALL_EXPR_ARG (exp, 4);
3290 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3292 /* Return src bounds with the result. */
3293 if (res)
3295 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3296 expand_normal (CALL_EXPR_ARG (exp, 1)));
3297 res = chkp_join_splitted_slot (res, bnd);
3299 return res;
3303 /* Expand a call EXP to the mempcpy builtin.
3304 Return NULL_RTX if we failed; the caller should then emit a normal
3305 call. Otherwise try to get the result in TARGET, if convenient
3306 (and in mode MODE if that's convenient). If ENDP is 0 return the
3307 destination pointer, if ENDP is 1 return the end pointer a la
3308 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3309 stpcpy. */
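/* E.g. after copying the three bytes "ab\0" to DST: ENDP == 0 yields
   DST, ENDP == 1 yields DST + 3 (a la mempcpy), and ENDP == 2 yields
   DST + 2, the address of the NUL (a la stpcpy).  */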
3311 static rtx
3312 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3314 if (!validate_arglist (exp,
3315 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3316 return NULL_RTX;
3317 else
3319 tree dest = CALL_EXPR_ARG (exp, 0);
3320 tree src = CALL_EXPR_ARG (exp, 1);
3321 tree len = CALL_EXPR_ARG (exp, 2);
3322 return expand_builtin_mempcpy_args (dest, src, len,
3323 target, mode, /*endp=*/ 1,
3324 exp);
3328 /* Expand an instrumented call EXP to the mempcpy builtin.
3329 Return NULL_RTX if we failed; the caller should then emit a normal
3330 call. Otherwise try to get the result in TARGET, if convenient
3331 (and in mode MODE if that's convenient). */
3333 static rtx
3334 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3338 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3339 INTEGER_TYPE, VOID_TYPE))
3340 return NULL_RTX;
3341 else
3343 tree dest = CALL_EXPR_ARG (exp, 0);
3344 tree src = CALL_EXPR_ARG (exp, 2);
3345 tree len = CALL_EXPR_ARG (exp, 4);
3346 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3347 mode, 1, exp);
3349 /* Return src bounds with the result. */
3350 if (res)
3352 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3353 expand_normal (CALL_EXPR_ARG (exp, 1)));
3354 res = chkp_join_splitted_slot (res, bnd);
3356 return res;
3360 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3361 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3362 so that this can also be called without constructing an actual CALL_EXPR.
3363 The other arguments and return value are the same as for
3364 expand_builtin_mempcpy. */
3366 static rtx
3367 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3368 rtx target, machine_mode mode, int endp,
3369 tree orig_exp)
3371 tree fndecl = get_callee_fndecl (orig_exp);
3373 /* If the return value is ignored, transform mempcpy into memcpy. */
3374 if (target == const0_rtx
3375 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3376 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3378 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3379 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3380 dest, src, len);
3381 return expand_expr (result, target, mode, EXPAND_NORMAL);
3383 else if (target == const0_rtx
3384 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3386 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3387 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3388 dest, src, len);
3389 return expand_expr (result, target, mode, EXPAND_NORMAL);
3391 else
3393 const char *src_str;
3394 unsigned int src_align = get_pointer_alignment (src);
3395 unsigned int dest_align = get_pointer_alignment (dest);
3396 rtx dest_mem, src_mem, len_rtx;
3398 /* If either SRC or DEST is not a pointer type, don't do this
3399 operation in-line. */
3400 if (dest_align == 0 || src_align == 0)
3401 return NULL_RTX;
3403 /* If LEN is not constant, call the normal function. */
3404 if (! tree_fits_uhwi_p (len))
3405 return NULL_RTX;
3407 len_rtx = expand_normal (len);
3408 src_str = c_getstr (src);
3410 /* If SRC is a string constant and the block move would be done
3411 by pieces, we can avoid loading the string from memory
3412 and only store the computed constants. */
3413 if (src_str
3414 && CONST_INT_P (len_rtx)
3415 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3416 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3417 CONST_CAST (char *, src_str),
3418 dest_align, false))
3420 dest_mem = get_memory_rtx (dest, len);
3421 set_mem_align (dest_mem, dest_align);
3422 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3423 builtin_memcpy_read_str,
3424 CONST_CAST (char *, src_str),
3425 dest_align, false, endp);
3426 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3427 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3428 return dest_mem;
3431 if (CONST_INT_P (len_rtx)
3432 && can_move_by_pieces (INTVAL (len_rtx),
3433 MIN (dest_align, src_align)))
3435 dest_mem = get_memory_rtx (dest, len);
3436 set_mem_align (dest_mem, dest_align);
3437 src_mem = get_memory_rtx (src, len);
3438 set_mem_align (src_mem, src_align);
3439 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3440 MIN (dest_align, src_align), endp);
3441 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3442 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3443 return dest_mem;
3446 return NULL_RTX;
3450 #ifndef HAVE_movstr
3451 # define HAVE_movstr 0
3452 # define CODE_FOR_movstr CODE_FOR_nothing
3453 #endif
3455 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3456 if we failed; the caller should then emit a normal call. Otherwise try
3457 to get the result in TARGET, if convenient. If ENDP is 0 return the
3458 destination pointer, if ENDP is 1 return the end pointer a la
3459 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3460 stpcpy. */
3462 static rtx
3463 expand_movstr (tree dest, tree src, rtx target, int endp)
3465 struct expand_operand ops[3];
3466 rtx dest_mem;
3467 rtx src_mem;
3469 if (!HAVE_movstr)
3470 return NULL_RTX;
3472 dest_mem = get_memory_rtx (dest, NULL);
3473 src_mem = get_memory_rtx (src, NULL);
3474 if (!endp)
3476 target = force_reg (Pmode, XEXP (dest_mem, 0));
3477 dest_mem = replace_equiv_address (dest_mem, target);
3480 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3481 create_fixed_operand (&ops[1], dest_mem);
3482 create_fixed_operand (&ops[2], src_mem);
3483 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3484 return NULL_RTX;
3486 if (endp && target != const0_rtx)
3488 target = ops[0].value;
3489 /* movstr is supposed to set end to the address of the NUL
3490 terminator. If the caller requested a mempcpy-like return value,
3491 adjust it. */
3492 if (endp == 1)
3494 rtx tem = plus_constant (GET_MODE (target),
3495 gen_lowpart (GET_MODE (target), target), 1);
3496 emit_move_insn (target, force_operand (tem, NULL_RTX));
3499 return target;
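/* Illustrative sketch, not part of the compiler: for a copy of "hi"
   (three bytes including the NUL) into DEST, the ENDP convention
   selects the value handed back to the caller:

	endp == 0	DEST		strcpy-style
	endp == 1	DEST + 3	mempcpy-style, one past the NUL
	endp == 2	DEST + 2	stpcpy-style, the NUL itself

   movstr itself yields the endp == 2 value, hence the plus_constant
   adjustment above for endp == 1.  */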
3502 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3503 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3504 try to get the result in TARGET, if convenient (and in mode MODE if that's
3505 convenient). */
3507 static rtx
3508 expand_builtin_strcpy (tree exp, rtx target)
3510 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3512 tree dest = CALL_EXPR_ARG (exp, 0);
3513 tree src = CALL_EXPR_ARG (exp, 1);
3514 return expand_builtin_strcpy_args (dest, src, target);
3516 return NULL_RTX;
3519 /* Helper function to do the actual work for expand_builtin_strcpy. The
3520 arguments to the builtin_strcpy call DEST and SRC are broken out
3521 so that this can also be called without constructing an actual CALL_EXPR.
3522 The other arguments and return value are the same as for
3523 expand_builtin_strcpy. */
3525 static rtx
3526 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3528 return expand_movstr (dest, src, target, /*endp=*/0);
3531 /* Expand a call EXP to the stpcpy builtin.
3532 Return NULL_RTX if we failed; the caller should emit a normal call.
3533 Otherwise try to get the result in TARGET, if convenient (and in
3534 mode MODE if that's convenient). */
3536 static rtx
3537 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3539 tree dst, src;
3540 location_t loc = EXPR_LOCATION (exp);
3542 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3543 return NULL_RTX;
3545 dst = CALL_EXPR_ARG (exp, 0);
3546 src = CALL_EXPR_ARG (exp, 1);
3548 /* If return value is ignored, transform stpcpy into strcpy. */
3549 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3551 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3552 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3553 return expand_expr (result, target, mode, EXPAND_NORMAL);
3555 else
3557 tree len, lenp1;
3558 rtx ret;
3560 /* Ensure we get an actual string whose length can be evaluated at
3561 compile-time, not an expression containing a string. This is
3562 because the latter will potentially produce pessimized code
3563 when used to produce the return value. */
3564 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3565 return expand_movstr (dst, src, target, /*endp=*/2);
3567 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3568 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3569 target, mode, /*endp=*/2,
3570 exp);
3572 if (ret)
3573 return ret;
3575 if (TREE_CODE (len) == INTEGER_CST)
3577 rtx len_rtx = expand_normal (len);
3579 if (CONST_INT_P (len_rtx))
3581 ret = expand_builtin_strcpy_args (dst, src, target);
3583 if (ret)
3585 if (! target)
3587 if (mode != VOIDmode)
3588 target = gen_reg_rtx (mode);
3589 else
3590 target = gen_reg_rtx (GET_MODE (ret));
3592 if (GET_MODE (target) != GET_MODE (ret))
3593 ret = gen_lowpart (GET_MODE (target), ret);
3595 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3596 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3597 gcc_assert (ret);
3599 return target;
3604 return expand_movstr (dst, src, target, /*endp=*/2);
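/* Illustrative sketch, not part of the compiler: when SRC is the
   literal "abc", c_strlen gives LEN == 3 and the code above expands

	stpcpy (dst, "abc")

   as mempcpy (dst, "abc", 4) with ENDP == 2, i.e. it copies LEN + 1
   bytes and returns dst + 3, the address of the copied NUL, matching
   the library definition stpcpy (d, s) == d + strlen (s).  */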
3608 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3609 bytes from constant string DATA + OFFSET and return it as target
3610 constant. */
3612 static rtx
3613 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3614 machine_mode mode)
3616 const char *str = (const char *) data;
3618 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3619 return const0_rtx;
3621 return c_readstr (str + offset, mode);
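/* Illustrative sketch, not part of the compiler: with DATA == "ab"
   and QImode reads, the callback above yields

	offset 0 -> 'a',  offset 1 -> 'b',  offset 2 -> '\0',
	offset 3 and beyond -> const0_rtx

   so reads past the terminating NUL come back as zero bytes, which is
   exactly the padding strncpy requires.  */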
3624 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3625 NULL_RTX if we failed; the caller should emit a normal call. */
3627 static rtx
3628 expand_builtin_strncpy (tree exp, rtx target)
3630 location_t loc = EXPR_LOCATION (exp);
3632 if (validate_arglist (exp,
3633 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3635 tree dest = CALL_EXPR_ARG (exp, 0);
3636 tree src = CALL_EXPR_ARG (exp, 1);
3637 tree len = CALL_EXPR_ARG (exp, 2);
3638 tree slen = c_strlen (src, 1);
3640 /* We must be passed constant LEN and SRC parameters. */
3641 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3642 return NULL_RTX;
3644 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3646 /* We're required to pad with trailing zeros if the requested
3647 len is greater than strlen(s2)+1. In that case try to
3648 use store_by_pieces; if that fails, punt. */
3649 if (tree_int_cst_lt (slen, len))
3651 unsigned int dest_align = get_pointer_alignment (dest);
3652 const char *p = c_getstr (src);
3653 rtx dest_mem;
3655 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3656 || !can_store_by_pieces (tree_to_uhwi (len),
3657 builtin_strncpy_read_str,
3658 CONST_CAST (char *, p),
3659 dest_align, false))
3660 return NULL_RTX;
3662 dest_mem = get_memory_rtx (dest, len);
3663 store_by_pieces (dest_mem, tree_to_uhwi (len),
3664 builtin_strncpy_read_str,
3665 CONST_CAST (char *, p), dest_align, false, 0);
3666 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3667 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3668 return dest_mem;
3671 return NULL_RTX;
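/* Illustrative sketch, not part of the compiler: the padding case
   handled above corresponds to

	char buf[5];
	strncpy (buf, "ab", 5);		buf == { 'a', 'b', 0, 0, 0 }

   where LEN (5) exceeds strlen (SRC) + 1 (3), so store_by_pieces has
   to emit the trailing zero bytes as well as the copied string.  */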
3674 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3675 bytes from constant string DATA + OFFSET and return it as target
3676 constant. */
3678 static rtx
3679 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3680 machine_mode mode)
3682 const char *c = (const char *) data;
3683 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3685 memset (p, *c, GET_MODE_SIZE (mode));
3687 return c_readstr (p, mode);
3690 /* Callback routine for store_by_pieces. Return the RTL of a register
3691 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3692 char value given in the RTL register data. For example, if mode is
3693 4 bytes wide, return the RTL for 0x01010101*data. */
3695 static rtx
3696 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3697 machine_mode mode)
3699 rtx target, coeff;
3700 size_t size;
3701 char *p;
3703 size = GET_MODE_SIZE (mode);
3704 if (size == 1)
3705 return (rtx) data;
3707 p = XALLOCAVEC (char, size);
3708 memset (p, 1, size);
3709 coeff = c_readstr (p, mode);
3711 target = convert_to_mode (mode, (rtx) data, 1);
3712 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3713 return force_reg (mode, target);
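/* Illustrative sketch, not part of the compiler: the multiplication
   above replicates a byte across a word.  For a 4-byte mode and an
   unsigned char value C,

	C * 0x01010101 == (C << 24) | (C << 16) | (C << 8) | C

   e.g. 0xab * 0x01010101 == 0xabababab, which is the block value
   store_by_pieces needs for memset.  */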
3716 /* Expand expression EXP, which is a call to the memset builtin. Return
3717 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3718 try to get the result in TARGET, if convenient (and in mode MODE if that's
3719 convenient). */
3721 static rtx
3722 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3724 if (!validate_arglist (exp,
3725 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3726 return NULL_RTX;
3727 else
3729 tree dest = CALL_EXPR_ARG (exp, 0);
3730 tree val = CALL_EXPR_ARG (exp, 1);
3731 tree len = CALL_EXPR_ARG (exp, 2);
3732 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3736 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3737 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3738 try to get the result in TARGET, if convenient (and in mode MODE if that's
3739 convenient). */
3741 static rtx
3742 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3744 if (!validate_arglist (exp,
3745 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3746 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3747 return NULL_RTX;
3748 else
3750 tree dest = CALL_EXPR_ARG (exp, 0);
3751 tree val = CALL_EXPR_ARG (exp, 2);
3752 tree len = CALL_EXPR_ARG (exp, 3);
3753 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3755 /* Return src bounds with the result. */
3756 if (res)
3758 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3759 expand_normal (CALL_EXPR_ARG (exp, 1)));
3760 res = chkp_join_splitted_slot (res, bnd);
3762 return res;
3766 /* Helper function to do the actual work for expand_builtin_memset. The
3767 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3768 so that this can also be called without constructing an actual CALL_EXPR.
3769 The other arguments and return value are the same as for
3770 expand_builtin_memset. */
3772 static rtx
3773 expand_builtin_memset_args (tree dest, tree val, tree len,
3774 rtx target, machine_mode mode, tree orig_exp)
3776 tree fndecl, fn;
3777 enum built_in_function fcode;
3778 machine_mode val_mode;
3779 char c;
3780 unsigned int dest_align;
3781 rtx dest_mem, dest_addr, len_rtx;
3782 HOST_WIDE_INT expected_size = -1;
3783 unsigned int expected_align = 0;
3784 unsigned HOST_WIDE_INT min_size;
3785 unsigned HOST_WIDE_INT max_size;
3786 unsigned HOST_WIDE_INT probable_max_size;
3788 dest_align = get_pointer_alignment (dest);
3790 /* If DEST is not a pointer type, don't do this operation in-line. */
3791 if (dest_align == 0)
3792 return NULL_RTX;
3794 if (currently_expanding_gimple_stmt)
3795 stringop_block_profile (currently_expanding_gimple_stmt,
3796 &expected_align, &expected_size);
3798 if (expected_align < dest_align)
3799 expected_align = dest_align;
3801 /* If the LEN parameter is zero, return DEST. */
3802 if (integer_zerop (len))
3804 /* Evaluate and ignore VAL in case it has side-effects. */
3805 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3806 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3809 /* Stabilize the arguments in case we fail. */
3810 dest = builtin_save_expr (dest);
3811 val = builtin_save_expr (val);
3812 len = builtin_save_expr (len);
3814 len_rtx = expand_normal (len);
3815 determine_block_size (len, len_rtx, &min_size, &max_size,
3816 &probable_max_size);
3817 dest_mem = get_memory_rtx (dest, len);
3818 val_mode = TYPE_MODE (unsigned_char_type_node);
3820 if (TREE_CODE (val) != INTEGER_CST)
3822 rtx val_rtx;
3824 val_rtx = expand_normal (val);
3825 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3827 /* Assume that we can memset by pieces if we can store
3828 the coefficients by pieces (in the required modes).
3829 We can't pass builtin_memset_gen_str as that emits RTL. */
3830 c = 1;
3831 if (tree_fits_uhwi_p (len)
3832 && can_store_by_pieces (tree_to_uhwi (len),
3833 builtin_memset_read_str, &c, dest_align,
3834 true))
3836 val_rtx = force_reg (val_mode, val_rtx);
3837 store_by_pieces (dest_mem, tree_to_uhwi (len),
3838 builtin_memset_gen_str, val_rtx, dest_align,
3839 true, 0);
3841 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3842 dest_align, expected_align,
3843 expected_size, min_size, max_size,
3844 probable_max_size))
3845 goto do_libcall;
3847 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3848 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3849 return dest_mem;
3852 if (target_char_cast (val, &c))
3853 goto do_libcall;
3855 if (c)
3857 if (tree_fits_uhwi_p (len)
3858 && can_store_by_pieces (tree_to_uhwi (len),
3859 builtin_memset_read_str, &c, dest_align,
3860 true))
3861 store_by_pieces (dest_mem, tree_to_uhwi (len),
3862 builtin_memset_read_str, &c, dest_align, true, 0);
3863 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3864 gen_int_mode (c, val_mode),
3865 dest_align, expected_align,
3866 expected_size, min_size, max_size,
3867 probable_max_size))
3868 goto do_libcall;
3870 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3872 return dest_mem;
3875 set_mem_align (dest_mem, dest_align);
3876 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3877 CALL_EXPR_TAILCALL (orig_exp)
3878 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3879 expected_align, expected_size,
3880 min_size, max_size,
3881 probable_max_size);
3883 if (dest_addr == 0)
3885 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3886 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3889 return dest_addr;
3891 do_libcall:
3892 fndecl = get_callee_fndecl (orig_exp);
3893 fcode = DECL_FUNCTION_CODE (fndecl);
3894 if (fcode == BUILT_IN_MEMSET
3895 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3896 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3897 dest, val, len);
3898 else if (fcode == BUILT_IN_BZERO)
3899 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3900 dest, len);
3901 else
3902 gcc_unreachable ();
3903 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3904 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3905 return expand_call (fn, target, target == const0_rtx);
3908 /* Expand expression EXP, which is a call to the bzero builtin. Return
3909 NULL_RTX if we failed; the caller should emit a normal call. */
3911 static rtx
3912 expand_builtin_bzero (tree exp)
3914 tree dest, size;
3915 location_t loc = EXPR_LOCATION (exp);
3917 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3918 return NULL_RTX;
3920 dest = CALL_EXPR_ARG (exp, 0);
3921 size = CALL_EXPR_ARG (exp, 1);
3923 /* New argument list transforming bzero(ptr x, int y) to
3924 memset(ptr x, int 0, size_t y). This is done this way
3925 so that if it isn't expanded inline, we fall back to
3926 calling bzero instead of memset. */
3928 return expand_builtin_memset_args (dest, integer_zero_node,
3929 fold_convert_loc (loc,
3930 size_type_node, size),
3931 const0_rtx, VOIDmode, exp);
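/* Illustrative sketch, not part of the compiler: the rewrite above
   treats

	bzero (p, n);

   as the equivalent

	memset (p, 0, (size_t) n);

   with the result discarded (const0_rtx), since bzero returns void.  */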
3934 /* Expand expression EXP, which is a call to the memcmp built-in function.
3935 Return NULL_RTX if we failed and the caller should emit a normal call,
3936 otherwise try to get the result in TARGET, if convenient (and in mode
3937 MODE, if that's convenient). */
3939 static rtx
3940 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3941 ATTRIBUTE_UNUSED machine_mode mode)
3943 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3945 if (!validate_arglist (exp,
3946 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3947 return NULL_RTX;
3949 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3950 implementing memcmp because it will stop if it encounters two
3951 zero bytes. */
3952 #if defined HAVE_cmpmemsi
3954 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3955 rtx result;
3956 rtx insn;
3957 tree arg1 = CALL_EXPR_ARG (exp, 0);
3958 tree arg2 = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
3961 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3962 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3963 machine_mode insn_mode;
3965 if (HAVE_cmpmemsi)
3966 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3967 else
3968 return NULL_RTX;
3970 /* If we don't have POINTER_TYPE, call the function. */
3971 if (arg1_align == 0 || arg2_align == 0)
3972 return NULL_RTX;
3974 /* Make a place to write the result of the instruction. */
3975 result = target;
3976 if (! (result != 0
3977 && REG_P (result) && GET_MODE (result) == insn_mode
3978 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3979 result = gen_reg_rtx (insn_mode);
3981 arg1_rtx = get_memory_rtx (arg1, len);
3982 arg2_rtx = get_memory_rtx (arg2, len);
3983 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3985 /* Set MEM_SIZE as appropriate. */
3986 if (CONST_INT_P (arg3_rtx))
3988 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3989 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3992 if (HAVE_cmpmemsi)
3993 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3994 GEN_INT (MIN (arg1_align, arg2_align)));
3995 else
3996 gcc_unreachable ();
3998 if (insn)
3999 emit_insn (insn);
4000 else
4001 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4002 TYPE_MODE (integer_type_node), 3,
4003 XEXP (arg1_rtx, 0), Pmode,
4004 XEXP (arg2_rtx, 0), Pmode,
4005 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4006 TYPE_UNSIGNED (sizetype)),
4007 TYPE_MODE (sizetype));
4009 /* Return the value in the proper mode for this function. */
4010 mode = TYPE_MODE (TREE_TYPE (exp));
4011 if (GET_MODE (result) == mode)
4012 return result;
4013 else if (target != 0)
4015 convert_move (target, result, 0);
4016 return target;
4018 else
4019 return convert_to_mode (mode, result, 0);
4021 #endif /* HAVE_cmpmemsi. */
4023 return NULL_RTX;
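/* Illustrative sketch, not part of the compiler: the reason cmpstrnsi
   is unsuitable here is visible in

	memcmp ("a\0x", "a\0y", 3)	must be nonzero
	strncmp ("a\0x", "a\0y", 3)	is zero

   A string compare stops at the first NUL present in both operands,
   while memcmp must examine all three bytes.  */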
4026 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4027 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4028 try to get the result in TARGET, if convenient. */
4030 static rtx
4031 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4033 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4034 return NULL_RTX;
4036 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4037 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4038 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4040 rtx arg1_rtx, arg2_rtx;
4041 rtx result, insn = NULL_RTX;
4042 tree fndecl, fn;
4043 tree arg1 = CALL_EXPR_ARG (exp, 0);
4044 tree arg2 = CALL_EXPR_ARG (exp, 1);
4046 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4047 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4049 /* If we don't have POINTER_TYPE, call the function. */
4050 if (arg1_align == 0 || arg2_align == 0)
4051 return NULL_RTX;
4053 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4054 arg1 = builtin_save_expr (arg1);
4055 arg2 = builtin_save_expr (arg2);
4057 arg1_rtx = get_memory_rtx (arg1, NULL);
4058 arg2_rtx = get_memory_rtx (arg2, NULL);
4060 #ifdef HAVE_cmpstrsi
4061 /* Try to call cmpstrsi. */
4062 if (HAVE_cmpstrsi)
4064 machine_mode insn_mode
4065 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4067 /* Make a place to write the result of the instruction. */
4068 result = target;
4069 if (! (result != 0
4070 && REG_P (result) && GET_MODE (result) == insn_mode
4071 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4072 result = gen_reg_rtx (insn_mode);
4074 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4075 GEN_INT (MIN (arg1_align, arg2_align)));
4077 #endif
4078 #ifdef HAVE_cmpstrnsi
4079 /* Try to determine at least one length and call cmpstrnsi. */
4080 if (!insn && HAVE_cmpstrnsi)
4082 tree len;
4083 rtx arg3_rtx;
4085 machine_mode insn_mode
4086 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4087 tree len1 = c_strlen (arg1, 1);
4088 tree len2 = c_strlen (arg2, 1);
4090 if (len1)
4091 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4092 if (len2)
4093 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4095 /* If we don't have a constant length for the first, use the length
4096 of the second, if we know it. We don't require a constant for
4097 this case; some cost analysis could be done if both are available
4098 but neither is constant. For now, assume they're equally cheap,
4099 unless one has side effects. If both strings have constant lengths,
4100 use the smaller. */
4102 if (!len1)
4103 len = len2;
4104 else if (!len2)
4105 len = len1;
4106 else if (TREE_SIDE_EFFECTS (len1))
4107 len = len2;
4108 else if (TREE_SIDE_EFFECTS (len2))
4109 len = len1;
4110 else if (TREE_CODE (len1) != INTEGER_CST)
4111 len = len2;
4112 else if (TREE_CODE (len2) != INTEGER_CST)
4113 len = len1;
4114 else if (tree_int_cst_lt (len1, len2))
4115 len = len1;
4116 else
4117 len = len2;
4119 /* If both arguments have side effects, we cannot optimize. */
4120 if (!len || TREE_SIDE_EFFECTS (len))
4121 goto do_libcall;
4123 arg3_rtx = expand_normal (len);
4125 /* Make a place to write the result of the instruction. */
4126 result = target;
4127 if (! (result != 0
4128 && REG_P (result) && GET_MODE (result) == insn_mode
4129 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4130 result = gen_reg_rtx (insn_mode);
4132 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4135 #endif
4137 if (insn)
4139 machine_mode mode;
4140 emit_insn (insn);
4142 /* Return the value in the proper mode for this function. */
4143 mode = TYPE_MODE (TREE_TYPE (exp));
4144 if (GET_MODE (result) == mode)
4145 return result;
4146 if (target == 0)
4147 return convert_to_mode (mode, result, 0);
4148 convert_move (target, result, 0);
4149 return target;
4152 /* Expand the library call ourselves using a stabilized argument
4153 list to avoid re-evaluating the function's arguments twice. */
4154 #ifdef HAVE_cmpstrnsi
4155 do_libcall:
4156 #endif
4157 fndecl = get_callee_fndecl (exp);
4158 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4159 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4160 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4161 return expand_call (fn, target, target == const0_rtx);
4163 #endif
4164 return NULL_RTX;
4167 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4168 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4169 try to get the result in TARGET, if convenient. */
4171 static rtx
4172 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4173 ATTRIBUTE_UNUSED machine_mode mode)
4175 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4177 if (!validate_arglist (exp,
4178 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4179 return NULL_RTX;
4181 /* If c_strlen can determine an expression for one of the string
4182 lengths, and it doesn't have side effects, then emit cmpstrnsi
4183 using length MIN(strlen(string)+1, arg3). */
4184 #ifdef HAVE_cmpstrnsi
4185 if (HAVE_cmpstrnsi)
4187 tree len, len1, len2;
4188 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4189 rtx result, insn;
4190 tree fndecl, fn;
4191 tree arg1 = CALL_EXPR_ARG (exp, 0);
4192 tree arg2 = CALL_EXPR_ARG (exp, 1);
4193 tree arg3 = CALL_EXPR_ARG (exp, 2);
4195 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4196 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4197 machine_mode insn_mode
4198 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4200 len1 = c_strlen (arg1, 1);
4201 len2 = c_strlen (arg2, 1);
4203 if (len1)
4204 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4205 if (len2)
4206 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4208 /* If we don't have a constant length for the first, use the length
4209 of the second, if we know it. We don't require a constant for
4210 this case; some cost analysis could be done if both are available
4211 but neither is constant. For now, assume they're equally cheap,
4212 unless one has side effects. If both strings have constant lengths,
4213 use the smaller. */
4215 if (!len1)
4216 len = len2;
4217 else if (!len2)
4218 len = len1;
4219 else if (TREE_SIDE_EFFECTS (len1))
4220 len = len2;
4221 else if (TREE_SIDE_EFFECTS (len2))
4222 len = len1;
4223 else if (TREE_CODE (len1) != INTEGER_CST)
4224 len = len2;
4225 else if (TREE_CODE (len2) != INTEGER_CST)
4226 len = len1;
4227 else if (tree_int_cst_lt (len1, len2))
4228 len = len1;
4229 else
4230 len = len2;
4232 /* If both arguments have side effects, we cannot optimize. */
4233 if (!len || TREE_SIDE_EFFECTS (len))
4234 return NULL_RTX;
4236 /* The actual new length parameter is MIN(len,arg3). */
4237 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4238 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4240 /* If we don't have POINTER_TYPE, call the function. */
4241 if (arg1_align == 0 || arg2_align == 0)
4242 return NULL_RTX;
4244 /* Make a place to write the result of the instruction. */
4245 result = target;
4246 if (! (result != 0
4247 && REG_P (result) && GET_MODE (result) == insn_mode
4248 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4249 result = gen_reg_rtx (insn_mode);
4251 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4252 arg1 = builtin_save_expr (arg1);
4253 arg2 = builtin_save_expr (arg2);
4254 len = builtin_save_expr (len);
4256 arg1_rtx = get_memory_rtx (arg1, len);
4257 arg2_rtx = get_memory_rtx (arg2, len);
4258 arg3_rtx = expand_normal (len);
4259 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4260 GEN_INT (MIN (arg1_align, arg2_align)));
4261 if (insn)
4263 emit_insn (insn);
4265 /* Return the value in the proper mode for this function. */
4266 mode = TYPE_MODE (TREE_TYPE (exp));
4267 if (GET_MODE (result) == mode)
4268 return result;
4269 if (target == 0)
4270 return convert_to_mode (mode, result, 0);
4271 convert_move (target, result, 0);
4272 return target;
4275 /* Expand the library call ourselves using a stabilized argument
4276 list to avoid re-evaluating the function's arguments twice. */
4277 fndecl = get_callee_fndecl (exp);
4278 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4279 arg1, arg2, len);
4280 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4281 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4282 return expand_call (fn, target, target == const0_rtx);
4284 #endif
4285 return NULL_RTX;
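/* Illustrative sketch, not part of the compiler: the MIN computed
   above is safe because a strncmp result is decided no later than the
   first NUL.  With ARG1 == "ab",

	strncmp ("ab", s2, 100)

   can be expanded with length MIN (strlen ("ab") + 1, 100) == 3;
   bytes beyond the NUL of "ab" can never affect the result.  */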
4288 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4289 if that's convenient. */
4291 rtx
4292 expand_builtin_saveregs (void)
4294 rtx val;
4295 rtx_insn *seq;
4297 /* Don't do __builtin_saveregs more than once in a function.
4298 Save the result of the first call and reuse it. */
4299 if (saveregs_value != 0)
4300 return saveregs_value;
4302 /* When this function is called, it means that registers must be
4303 saved on entry to this function. So we migrate the call to the
4304 first insn of this function. */
4306 start_sequence ();
4308 /* Do whatever the machine needs done in this case. */
4309 val = targetm.calls.expand_builtin_saveregs ();
4311 seq = get_insns ();
4312 end_sequence ();
4314 saveregs_value = val;
4316 /* Put the insns after the NOTE that starts the function. If this
4317 is inside a start_sequence, make the outer-level insn chain current, so
4318 the code is placed at the start of the function. */
4319 push_topmost_sequence ();
4320 emit_insn_after (seq, entry_of_function ());
4321 pop_topmost_sequence ();
4323 return val;
4326 /* Expand a call to __builtin_next_arg. */
4328 static rtx
4329 expand_builtin_next_arg (void)
4331 /* Checking arguments is already done in fold_builtin_next_arg,
4332 which must be called before this function. */
4333 return expand_binop (ptr_mode, add_optab,
4334 crtl->args.internal_arg_pointer,
4335 crtl->args.arg_offset_rtx,
4336 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4339 /* Make it easier for the backends by protecting the valist argument
4340 from multiple evaluations. */
4342 static tree
4343 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4345 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4347 /* The current way of determining the type of valist is completely
4348 bogus. We should have the information on the va builtin instead. */
4349 if (!vatype)
4350 vatype = targetm.fn_abi_va_list (cfun->decl);
4352 if (TREE_CODE (vatype) == ARRAY_TYPE)
4354 if (TREE_SIDE_EFFECTS (valist))
4355 valist = save_expr (valist);
4357 /* For this case, the backends will be expecting a pointer to
4358 vatype, but it's possible we've actually been given an array
4359 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4360 So fix it. */
4361 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4363 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4364 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4367 else
4369 tree pt = build_pointer_type (vatype);
4371 if (! needs_lvalue)
4373 if (! TREE_SIDE_EFFECTS (valist))
4374 return valist;
4376 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4377 TREE_SIDE_EFFECTS (valist) = 1;
4380 if (TREE_SIDE_EFFECTS (valist))
4381 valist = save_expr (valist);
4382 valist = fold_build2_loc (loc, MEM_REF,
4383 vatype, valist, build_int_cst (pt, 0));
4386 return valist;
4389 /* The "standard" definition of va_list is void*. */
4391 tree
4392 std_build_builtin_va_list (void)
4394 return ptr_type_node;
4397 /* The "standard" abi va_list is va_list_type_node. */
4399 tree
4400 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4402 return va_list_type_node;
4405 /* The "standard" type of va_list is va_list_type_node. */
4407 tree
4408 std_canonical_va_list_type (tree type)
4410 tree wtype, htype;
4412 if (INDIRECT_REF_P (type))
4413 type = TREE_TYPE (type);
4414 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4415 type = TREE_TYPE (type);
4416 wtype = va_list_type_node;
4417 htype = type;
4418 /* Treat structure va_list types. */
4419 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4420 htype = TREE_TYPE (htype);
4421 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4423 /* If va_list is an array type, the argument may have decayed
4424 to a pointer type, e.g. by being passed to another function.
4425 In that case, unwrap both types so that we can compare the
4426 underlying records. */
4427 if (TREE_CODE (htype) == ARRAY_TYPE
4428 || POINTER_TYPE_P (htype))
4430 wtype = TREE_TYPE (wtype);
4431 htype = TREE_TYPE (htype);
4434 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4435 return va_list_type_node;
4437 return NULL_TREE;
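/* Illustrative sketch, not part of the compiler: on targets where
   va_list is an array type, e.g.

	typedef struct __va_list_tag va_list[1];

   a va_list object passed to another function decays to a pointer to
   its element type, which is why the code above unwraps one level of
   both WTYPE and HTYPE before comparing main variants.  */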
4440 /* The "standard" implementation of va_start: just assign `nextarg' to
4441 the variable. */
4443 void
4444 std_expand_builtin_va_start (tree valist, rtx nextarg)
4446 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4447 convert_move (va_r, nextarg, 0);
4449 /* We do not have any valid bounds for the pointer, so
4450 just store zero bounds for it. */
4451 if (chkp_function_instrumented_p (current_function_decl))
4452 chkp_expand_bounds_reset_for_mem (valist,
4453 make_tree (TREE_TYPE (valist),
4454 nextarg));
4457 /* Expand EXP, a call to __builtin_va_start. */
4459 static rtx
4460 expand_builtin_va_start (tree exp)
4462 rtx nextarg;
4463 tree valist;
4464 location_t loc = EXPR_LOCATION (exp);
4466 if (call_expr_nargs (exp) < 2)
4468 error_at (loc, "too few arguments to function %<va_start%>");
4469 return const0_rtx;
4472 if (fold_builtin_next_arg (exp, true))
4473 return const0_rtx;
4475 nextarg = expand_builtin_next_arg ();
4476 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4478 if (targetm.expand_builtin_va_start)
4479 targetm.expand_builtin_va_start (valist, nextarg);
4480 else
4481 std_expand_builtin_va_start (valist, nextarg);
4483 return const0_rtx;
4486 /* Expand EXP, a call to __builtin_va_end. */
4488 static rtx
4489 expand_builtin_va_end (tree exp)
4491 tree valist = CALL_EXPR_ARG (exp, 0);
4493 /* Evaluate for side effects, if needed. I hate macros that don't
4494 do that. */
4495 if (TREE_SIDE_EFFECTS (valist))
4496 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4498 return const0_rtx;
4501 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4502 builtin rather than just as an assignment in stdarg.h because of the
4503 nastiness of array-type va_list types. */
4505 static rtx
4506 expand_builtin_va_copy (tree exp)
4508 tree dst, src, t;
4509 location_t loc = EXPR_LOCATION (exp);
4511 dst = CALL_EXPR_ARG (exp, 0);
4512 src = CALL_EXPR_ARG (exp, 1);
4514 dst = stabilize_va_list_loc (loc, dst, 1);
4515 src = stabilize_va_list_loc (loc, src, 0);
4517 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4519 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4521 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4522 TREE_SIDE_EFFECTS (t) = 1;
4523 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4525 else
4527 rtx dstb, srcb, size;
4529 /* Evaluate to pointers. */
4530 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4531 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4532 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4533 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4535 dstb = convert_memory_address (Pmode, dstb);
4536 srcb = convert_memory_address (Pmode, srcb);
4538 /* "Dereference" to BLKmode memories. */
4539 dstb = gen_rtx_MEM (BLKmode, dstb);
4540 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4541 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4542 srcb = gen_rtx_MEM (BLKmode, srcb);
4543 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4544 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4546 /* Copy. */
4547 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4550 return const0_rtx;
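/* Illustrative sketch, not part of the compiler: the two branches
   above correspond to the two shapes of va_list.  Where va_list is a
   scalar such as a plain pointer, va_copy (d, s) degenerates to the
   assignment d = s; where it is an array of one structure, the same
   source-level call has to copy the whole structure, hence the
   emit_block_move.  */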
4553 /* Expand a call to one of the builtin functions __builtin_frame_address or
4554 __builtin_return_address. */
4556 static rtx
4557 expand_builtin_frame_address (tree fndecl, tree exp)
4559 /* The argument must be a nonnegative integer constant.
4560 It counts the number of frames to scan up the stack.
4561 The value is the frame address or the return address saved in that frame. */
4562 if (call_expr_nargs (exp) == 0)
4563 /* Warning about missing arg was already issued. */
4564 return const0_rtx;
4565 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4567 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4568 error ("invalid argument to %<__builtin_frame_address%>");
4569 else
4570 error ("invalid argument to %<__builtin_return_address%>");
4571 return const0_rtx;
4573 else
4575 rtx tem
4576 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4577 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4579 /* Some ports cannot access arbitrary stack frames. */
4580 if (tem == NULL)
4582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4583 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4584 else
4585 warning (0, "unsupported argument to %<__builtin_return_address%>");
4586 return const0_rtx;
4589 /* For __builtin_frame_address, return what we've got. */
4590 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4591 return tem;
4593 if (!REG_P (tem)
4594 && ! CONSTANT_P (tem))
4595 tem = copy_addr_to_reg (tem);
4596 return tem;
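/* Illustrative sketch, not part of the compiler: a typical use of the
   expansion above is

	void *ra = __builtin_return_address (0);

   where the constant 0 (checked with tree_fits_uhwi_p above) selects
   the current frame; larger constants walk further up the stack and
   may be rejected by ports that cannot do so.  */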
4600 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4601 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4602 is the same as for allocate_dynamic_stack_space. */
4604 static rtx
4605 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4607 rtx op0;
4608 rtx result;
4609 bool valid_arglist;
4610 unsigned int align;
4611 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4612 == BUILT_IN_ALLOCA_WITH_ALIGN);
4614 valid_arglist
4615 = (alloca_with_align
4616 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4617 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4619 if (!valid_arglist)
4620 return NULL_RTX;
4622 /* Compute the argument. */
4623 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4625 /* Compute the alignment. */
4626 align = (alloca_with_align
4627 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4628 : BIGGEST_ALIGNMENT);
4630 /* Allocate the desired space. */
4631 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4632 result = convert_memory_address (ptr_mode, result);
4634 return result;
4637 /* Expand a call to bswap builtin in EXP.
4638 Return NULL_RTX if a normal call should be emitted rather than expanding the
4639 function in-line. If convenient, the result should be placed in TARGET.
4640 SUBTARGET may be used as the target for computing one of EXP's operands. */
4642 static rtx
4643 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4644 rtx subtarget)
4646 tree arg;
4647 rtx op0;
4649 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4650 return NULL_RTX;
4652 arg = CALL_EXPR_ARG (exp, 0);
4653 op0 = expand_expr (arg,
4654 subtarget && GET_MODE (subtarget) == target_mode
4655 ? subtarget : NULL_RTX,
4656 target_mode, EXPAND_NORMAL);
4657 if (GET_MODE (op0) != target_mode)
4658 op0 = convert_to_mode (target_mode, op0, 1);
4660 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4662 gcc_assert (target);
4664 return convert_to_mode (target_mode, target, 1);
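/* Illustrative sketch, not part of the compiler: the bswap expansion
   above reverses the byte order within the mode, e.g.

	__builtin_bswap32 (0x12345678) == 0x78563412

   which is the usual endianness conversion.  */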
4667 /* Expand a call to a unary builtin in EXP.
4668 Return NULL_RTX if a normal call should be emitted rather than expanding the
4669 function in-line. If convenient, the result should be placed in TARGET.
4670 SUBTARGET may be used as the target for computing one of EXP's operands. */
4672 static rtx
4673 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4674 rtx subtarget, optab op_optab)
4676 rtx op0;
4678 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4679 return NULL_RTX;
4681 /* Compute the argument. */
4682 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4683 (subtarget
4684 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4685 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4686 VOIDmode, EXPAND_NORMAL);
4687 /* Compute op, into TARGET if possible.
4688 Set TARGET to wherever the result comes back. */
4689 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4690 op_optab, op0, target, op_optab != clrsb_optab);
4691 gcc_assert (target);
4693 return convert_to_mode (target_mode, target, 0);
4696 /* Expand a call to __builtin_expect. We just return our argument
4697 as the builtin_expect semantics should have already been applied
4698 by the tree branch prediction pass. */
4700 static rtx
4701 expand_builtin_expect (tree exp, rtx target)
4703 tree arg;
4705 if (call_expr_nargs (exp) < 2)
4706 return const0_rtx;
4707 arg = CALL_EXPR_ARG (exp, 0);
4709 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4710 /* When guessing was done, the hints should be already stripped away. */
4711 gcc_assert (!flag_guess_branch_prob
4712 || optimize == 0 || seen_error ());
4713 return target;
4716 /* Expand a call to __builtin_assume_aligned. We just return our first
4717 argument as the builtin_assume_aligned semantics should have already
4718 been applied by CCP. */
4720 static rtx
4721 expand_builtin_assume_aligned (tree exp, rtx target)
4723 if (call_expr_nargs (exp) < 2)
4724 return const0_rtx;
4725 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4726 EXPAND_NORMAL);
4727 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4728 && (call_expr_nargs (exp) < 3
4729 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4730 return target;
4733 void
4734 expand_builtin_trap (void)
4736 if (targetm.have_trap ())
4738 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4739 /* For trap insns when not accumulating outgoing args force
4740 REG_ARGS_SIZE note to prevent crossjumping of calls with
4741 different args sizes. */
4742 if (!ACCUMULATE_OUTGOING_ARGS)
4743 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4745 else
4746 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4747 emit_barrier ();
4750 /* Expand a call to __builtin_unreachable. We do nothing except emit
4751 a barrier saying that control flow will not pass here.
4753 It is the responsibility of the program being compiled to ensure
4754 that control flow never reaches __builtin_unreachable. */
4755 static void
4756 expand_builtin_unreachable (void)
4758 emit_barrier ();
4761 /* Expand EXP, a call to fabs, fabsf or fabsl.
4762 Return NULL_RTX if a normal call should be emitted rather than expanding
4763 the function inline. If convenient, the result should be placed
4764 in TARGET. SUBTARGET may be used as the target for computing
4765 the operand. */
4767 static rtx
4768 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4770 machine_mode mode;
4771 tree arg;
4772 rtx op0;
4774 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
4777 arg = CALL_EXPR_ARG (exp, 0);
4778 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4779 mode = TYPE_MODE (TREE_TYPE (arg));
4780 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4781 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4784 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4785 Return NULL if a normal call should be emitted rather than expanding the
4786 function inline. If convenient, the result should be placed in TARGET.
4787 SUBTARGET may be used as the target for computing the operand. */
4789 static rtx
4790 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4792 rtx op0, op1;
4793 tree arg;
4795 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4796 return NULL_RTX;
4798 arg = CALL_EXPR_ARG (exp, 0);
4799 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4801 arg = CALL_EXPR_ARG (exp, 1);
4802 op1 = expand_normal (arg);
4804 return expand_copysign (op0, op1, target);
4807 /* Expand a call to __builtin___clear_cache. */
4809 static rtx
4810 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4812 #ifndef HAVE_clear_cache
4813 #ifdef CLEAR_INSN_CACHE
4814 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4815 does something. Just do the default expansion to a call to
4816 __clear_cache(). */
4817 return NULL_RTX;
4818 #else
4819 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4820 does nothing. There is no need to call it. Do nothing. */
4821 return const0_rtx;
4822 #endif /* CLEAR_INSN_CACHE */
4823 #else
4824 /* We have a "clear_cache" insn, and it will handle everything. */
4825 tree begin, end;
4826 rtx begin_rtx, end_rtx;
4828 /* We must not expand to a library call. If we did, any
4829 fallback library function in libgcc that might contain a call to
4830 __builtin___clear_cache() would recurse infinitely. */
4831 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4833 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4834 return const0_rtx;
4837 if (HAVE_clear_cache)
4839 struct expand_operand ops[2];
4841 begin = CALL_EXPR_ARG (exp, 0);
4842 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4844 end = CALL_EXPR_ARG (exp, 1);
4845 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4847 create_address_operand (&ops[0], begin_rtx);
4848 create_address_operand (&ops[1], end_rtx);
4849 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4850 return const0_rtx;
4852 return const0_rtx;
4853 #endif /* HAVE_clear_cache */
4856 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4858 static rtx
4859 round_trampoline_addr (rtx tramp)
4861 rtx temp, addend, mask;
4863 /* If we don't need too much alignment, we'll have been guaranteed
4864 proper alignment by get_trampoline_type. */
4865 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4866 return tramp;
4868 /* Round address up to desired boundary. */
4869 temp = gen_reg_rtx (Pmode);
4870 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4871 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4873 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4874 temp, 0, OPTAB_LIB_WIDEN);
4875 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4876 temp, 0, OPTAB_LIB_WIDEN);
4878 return tramp;
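/* Illustrative sketch, not part of the compiler: the PLUS/AND pair
   above is the usual round-up-to-alignment idiom.  For an alignment
   of A bytes (a power of two),

	rounded = (addr + (A - 1)) & -A;

   e.g. with A == 16, an address of 0x1003 becomes 0x1010.  */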
4881 static rtx
4882 expand_builtin_init_trampoline (tree exp, bool onstack)
4884 tree t_tramp, t_func, t_chain;
4885 rtx m_tramp, r_tramp, r_chain, tmp;
4887 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4888 POINTER_TYPE, VOID_TYPE))
4889 return NULL_RTX;
4891 t_tramp = CALL_EXPR_ARG (exp, 0);
4892 t_func = CALL_EXPR_ARG (exp, 1);
4893 t_chain = CALL_EXPR_ARG (exp, 2);
4895 r_tramp = expand_normal (t_tramp);
4896 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4897 MEM_NOTRAP_P (m_tramp) = 1;
4899 /* If ONSTACK, the TRAMP argument should be the address of a field
4900 within the local function's FRAME decl. Either way, let's see if
4901 we can fill in the MEM_ATTRs for this memory. */
4902 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4903 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4905 /* Creator of a heap trampoline is responsible for making sure the
4906 address is aligned to at least STACK_BOUNDARY. Normally malloc
4907 will ensure this anyhow. */
4908 tmp = round_trampoline_addr (r_tramp);
4909 if (tmp != r_tramp)
4911 m_tramp = change_address (m_tramp, BLKmode, tmp);
4912 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4913 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4916 /* The FUNC argument should be the address of the nested function.
4917 Extract the actual function decl to pass to the hook. */
4918 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4919 t_func = TREE_OPERAND (t_func, 0);
4920 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4922 r_chain = expand_normal (t_chain);
4924 /* Generate insns to initialize the trampoline. */
4925 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4927 if (onstack)
4929 trampolines_created = 1;
4931 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4932 "trampoline generated for nested function %qD", t_func);
4935 return const0_rtx;
4938 static rtx
4939 expand_builtin_adjust_trampoline (tree exp)
4941 rtx tramp;
4943 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4944 return NULL_RTX;
4946 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4947 tramp = round_trampoline_addr (tramp);
4948 if (targetm.calls.trampoline_adjust_address)
4949 tramp = targetm.calls.trampoline_adjust_address (tramp);
4951 return tramp;
4954 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4955 function. The function first checks whether the back end provides
4956 an insn to implement signbit for the respective mode. If not, it
4957 checks whether the floating point format of the value is such that
4958 the sign bit can be extracted. If that is not the case, the
4959 function returns NULL_RTX to indicate that a normal call should be
4960 emitted rather than expanding the function in-line. EXP is the
4961 expression that is a call to the builtin function; if convenient,
4962 the result should be placed in TARGET. */
4963 static rtx
4964 expand_builtin_signbit (tree exp, rtx target)
4966 const struct real_format *fmt;
4967 machine_mode fmode, imode, rmode;
4968 tree arg;
4969 int word, bitpos;
4970 enum insn_code icode;
4971 rtx temp;
4972 location_t loc = EXPR_LOCATION (exp);
4974 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4975 return NULL_RTX;
4977 arg = CALL_EXPR_ARG (exp, 0);
4978 fmode = TYPE_MODE (TREE_TYPE (arg));
4979 rmode = TYPE_MODE (TREE_TYPE (exp));
4980 fmt = REAL_MODE_FORMAT (fmode);
4982 arg = builtin_save_expr (arg);
4984 /* Expand the argument yielding a RTX expression. */
4985 temp = expand_normal (arg);
4987 /* Check if the back end provides an insn that handles signbit for the
4988 argument's mode. */
4989 icode = optab_handler (signbit_optab, fmode);
4990 if (icode != CODE_FOR_nothing)
4992 rtx_insn *last = get_last_insn ();
4993 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4994 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4995 return target;
4996 delete_insns_since (last);
4999 /* For floating point formats without a sign bit, implement signbit
5000 as "ARG < 0.0". */
5001 bitpos = fmt->signbit_ro;
5002 if (bitpos < 0)
5004 /* But we can't do this if the format supports signed zero. */
5005 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5006 return NULL_RTX;
5008 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5009 build_real (TREE_TYPE (arg), dconst0));
5010 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5013 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5015 imode = int_mode_for_mode (fmode);
5016 if (imode == BLKmode)
5017 return NULL_RTX;
5018 temp = gen_lowpart (imode, temp);
5020 else
5022 imode = word_mode;
5023 /* Handle targets with different FP word orders. */
5024 if (FLOAT_WORDS_BIG_ENDIAN)
5025 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5026 else
5027 word = bitpos / BITS_PER_WORD;
5028 temp = operand_subword_force (temp, word, fmode);
5029 bitpos = bitpos % BITS_PER_WORD;
5032 /* Force the intermediate word_mode (or narrower) result into a
5033 register. This avoids attempting to create paradoxical SUBREGs
5034 of floating point modes below. */
5035 temp = force_reg (imode, temp);
5037 /* If the bitpos is within the "result mode" lowpart, the operation
5038 can be implemented with a single bitwise AND. Otherwise, we need
5039 a right shift and an AND. */
5041 if (bitpos < GET_MODE_BITSIZE (rmode))
5043 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5045 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5046 temp = gen_lowpart (rmode, temp);
5047 temp = expand_binop (rmode, and_optab, temp,
5048 immed_wide_int_const (mask, rmode),
5049 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5051 else
5053 /* Perform a logical right shift to place the signbit in the least
5054 significant bit, then truncate the result to the desired mode
5055 and mask just this bit. */
5056 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5057 temp = gen_lowpart (rmode, temp);
5058 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5059 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5062 return temp;
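/* Illustrative sketch, not part of the compiler: for IEEE single
   precision fmt->signbit_ro is 31, so with a 32-bit result mode the
   code above reduces signbit (x) to the integer operation

	bits & 0x80000000

   where BITS is the float reinterpreted as a 32-bit integer; the
   shift-then-AND path is only needed when the sign bit lies outside
   the result mode's lowpart.  */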
5065 /* Expand fork or exec calls. TARGET is the desired target of the
5066 call. EXP is the call. FN is the
5067 identifier of the actual function. IGNORE is nonzero if the
5068 value is to be ignored. */
5070 static rtx
5071 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5073 tree id, decl;
5074 tree call;
5076 /* If we are not profiling, just call the function. */
5077 if (!profile_arc_flag)
5078 return NULL_RTX;
5080 /* Otherwise call the wrapper. This should be equivalent for the rest of
5081 the compiler, so the code does not diverge, and the wrapper may run the
5082 code necessary for keeping the profiling sane. */
5084 switch (DECL_FUNCTION_CODE (fn))
5086 case BUILT_IN_FORK:
5087 id = get_identifier ("__gcov_fork");
5088 break;
5090 case BUILT_IN_EXECL:
5091 id = get_identifier ("__gcov_execl");
5092 break;
5094 case BUILT_IN_EXECV:
5095 id = get_identifier ("__gcov_execv");
5096 break;
5098 case BUILT_IN_EXECLP:
5099 id = get_identifier ("__gcov_execlp");
5100 break;
5102 case BUILT_IN_EXECLE:
5103 id = get_identifier ("__gcov_execle");
5104 break;
5106 case BUILT_IN_EXECVP:
5107 id = get_identifier ("__gcov_execvp");
5108 break;
5110 case BUILT_IN_EXECVE:
5111 id = get_identifier ("__gcov_execve");
5112 break;
5114 default:
5115 gcc_unreachable ();
5118 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5119 FUNCTION_DECL, id, TREE_TYPE (fn));
5120 DECL_EXTERNAL (decl) = 1;
5121 TREE_PUBLIC (decl) = 1;
5122 DECL_ARTIFICIAL (decl) = 1;
5123 TREE_NOTHROW (decl) = 1;
5124 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5125 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5126 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5127 return expand_call (call, target, ignore);
5132 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5133 the pointer in these functions is void*, the tree optimizers may remove
5134 casts. The mode computed in expand_builtin isn't reliable either, due
5135 to __sync_bool_compare_and_swap.
5137 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5138 group of builtins. This gives us log2 of the mode size. */
5140 static inline machine_mode
5141 get_builtin_sync_mode (int fcode_diff)
5143 /* The size is not negotiable, so ask not to get BLKmode in return
5144 if the target indicates that a smaller size would be better. */
5145 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5148 /* Expand the memory expression LOC and return the appropriate memory operand
5149 for the builtin_sync operations. */
5151 static rtx
5152 get_builtin_sync_mem (tree loc, machine_mode mode)
5154 rtx addr, mem;
5156 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5157 addr = convert_memory_address (Pmode, addr);
5159 /* Note that we explicitly do not want any alias information for this
5160 memory, so that we kill all other live memories. Otherwise we don't
5161 satisfy the full barrier semantics of the intrinsic. */
5162 mem = validize_mem (gen_rtx_MEM (mode, addr));
5164 /* The alignment needs to be at least that required by the mode. */
5165 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5166 get_pointer_alignment (loc)));
5167 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5168 MEM_VOLATILE_P (mem) = 1;
5170 return mem;
5173 /* Make sure an argument is in the right mode.
5174 EXP is the tree argument.
5175 MODE is the mode it should be in. */
5177 static rtx
5178 expand_expr_force_mode (tree exp, machine_mode mode)
5180 rtx val;
5181 machine_mode old_mode;
5183 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5184 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5185 of CONST_INTs, where we know the old_mode only from the call argument. */
5187 old_mode = GET_MODE (val);
5188 if (old_mode == VOIDmode)
5189 old_mode = TYPE_MODE (TREE_TYPE (exp));
5190 val = convert_modes (mode, old_mode, val, 1);
5191 return val;
5195 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5196 EXP is the CALL_EXPR. CODE is the rtx code
5197 that corresponds to the arithmetic or logical operation from the name;
5198 an exception here is that NOT actually means NAND. TARGET is an optional
5199 place for us to store the results; AFTER is true if this is the
5200 xxx_and_fetch form, i.e. the result is the value after the operation. */
5202 static rtx
5203 expand_builtin_sync_operation (machine_mode mode, tree exp,
5204 enum rtx_code code, bool after,
5205 rtx target)
5207 rtx val, mem;
5208 location_t loc = EXPR_LOCATION (exp);
5210 if (code == NOT && warn_sync_nand)
5212 tree fndecl = get_callee_fndecl (exp);
5213 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5215 static bool warned_f_a_n, warned_n_a_f;
5217 switch (fcode)
5219 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5220 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5221 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5222 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5223 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5224 if (warned_f_a_n)
5225 break;
5227 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5228 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5229 warned_f_a_n = true;
5230 break;
5232 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5233 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5234 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5235 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5236 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5237 if (warned_n_a_f)
5238 break;
5240 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5241 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5242 warned_n_a_f = true;
5243 break;
5245 default:
5246 gcc_unreachable ();
5250 /* Expand the operands. */
5251 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5252 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5254 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5255 after);
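/* Illustrative sketch, not part of the compiler: AFTER selects which
   of the two values the builtin hands back.  If *P is 5,

	__sync_fetch_and_add (p, 2)	returns 5	(AFTER == false)
	__sync_add_and_fetch (p, 2)	returns 7	(AFTER == true)

   and in both cases *P ends up as 7.  */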
5258 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5259 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5260 true if this is the boolean form. TARGET is a place for us to store the
5261 results; this is NOT optional if IS_BOOL is true. */
5263 static rtx
5264 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5265 bool is_bool, rtx target)
5267 rtx old_val, new_val, mem;
5268 rtx *pbool, *poval;
5270 /* Expand the operands. */
5271 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5272 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5273 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5275 pbool = poval = NULL;
5276 if (target != const0_rtx)
5278 if (is_bool)
5279 pbool = &target;
5280 else
5281 poval = &target;
5283 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5284 false, MEMMODEL_SYNC_SEQ_CST,
5285 MEMMODEL_SYNC_SEQ_CST))
5286 return NULL_RTX;
5288 return target;
5291 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5292 general form is actually an atomic exchange, and some targets only
5293 support a reduced form with the second argument being a constant 1.
5294 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5295 the results. */
5297 static rtx
5298 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5299 rtx target)
5301 rtx val, mem;
5303 /* Expand the operands. */
5304 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5305 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5307 return expand_sync_lock_test_and_set (target, mem, val);
5310 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5312 static void
5313 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5315 rtx mem;
5317 /* Expand the operands. */
5318 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5320 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
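
/* A minimal user-level sketch (hypothetical code) of the usual pairing
   of the two builtins above, a simple spin lock built from the reduced
   test-and-set form with a constant 1:

     static int lock;
     while (__sync_lock_test_and_set (&lock, 1))  // acquire barrier
       ;                                          // spin while held
     // ... critical section ...
     __sync_lock_release (&lock);                 // store 0 with release
   */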
5323 /* Given an integer representing an ``enum memmodel'', verify its
5324 correctness and return the memory model enum. */
5326 static enum memmodel
5327 get_memmodel (tree exp)
5329 rtx op;
5330 unsigned HOST_WIDE_INT val;
5332 /* If the parameter is not a constant, it's a run time value so we'll just
5333 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5334 if (TREE_CODE (exp) != INTEGER_CST)
5335 return MEMMODEL_SEQ_CST;
5337 op = expand_normal (exp);
5339 val = INTVAL (op);
5340 if (targetm.memmodel_check)
5341 val = targetm.memmodel_check (val);
5342 else if (val & ~MEMMODEL_MASK)
5344 warning (OPT_Winvalid_memory_model,
5345 "Unknown architecture specifier in memory model to builtin.");
5346 return MEMMODEL_SEQ_CST;
5349 /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
5350 if (memmodel_base (val) >= MEMMODEL_LAST)
5352 warning (OPT_Winvalid_memory_model,
5353 "invalid memory model argument to builtin");
5354 return MEMMODEL_SEQ_CST;
5357 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5358 be conservative and promote consume to acquire. */
5359 if (val == MEMMODEL_CONSUME)
5360 val = MEMMODEL_ACQUIRE;
5362 return (enum memmodel) val;
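
/* Concretely (hypothetical user code): the __ATOMIC_* macros expand to
   the integer constants 0 (RELAXED) through 5 (SEQ_CST) inspected here.

     int x = 0, order = __ATOMIC_ACQUIRE;
     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);      // INTEGER_CST, kept
     int a = __atomic_load_n (&x, order);             // run time: SEQ_CST used
     int b = __atomic_load_n (&x, __ATOMIC_CONSUME);  // promoted to ACQUIRE
   */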
5365 /* Expand the __atomic_exchange intrinsic:
5366 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5370 static rtx
5371 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5373 rtx val, mem;
5374 enum memmodel model;
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5378 if (!flag_inline_atomics)
5379 return NULL_RTX;
5381 /* Expand the operands. */
5382 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5383 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5385 return expand_atomic_exchange (target, mem, val, model);
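
/* A minimal sketch (hypothetical user code) of what reaches this
   expander; the value previously stored is returned:

     int slot = 0;
     int prev = __atomic_exchange_n (&slot, 42, __ATOMIC_SEQ_CST);
   */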
5388 /* Expand the __atomic_compare_exchange intrinsic:
5389 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5390 TYPE desired, BOOL weak,
5391 enum memmodel success,
5392 enum memmodel failure)
5393 EXP is the CALL_EXPR.
5394 TARGET is an optional place for us to store the results. */
5396 static rtx
5397 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5398 rtx target)
5400 rtx expect, desired, mem, oldval;
5401 rtx_code_label *label;
5402 enum memmodel success, failure;
5403 tree weak;
5404 bool is_weak;
5406 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5407 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5409 if (failure > success)
5411 warning (OPT_Winvalid_memory_model,
5412 "failure memory model cannot be stronger than success memory "
5413 "model for %<__atomic_compare_exchange%>");
5414 success = MEMMODEL_SEQ_CST;
5417 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5419 warning (OPT_Winvalid_memory_model,
5420 "invalid failure memory model for "
5421 "%<__atomic_compare_exchange%>");
5422 failure = MEMMODEL_SEQ_CST;
5423 success = MEMMODEL_SEQ_CST;
5427 if (!flag_inline_atomics)
5428 return NULL_RTX;
5430 /* Expand the operands. */
5431 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5433 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5434 expect = convert_memory_address (Pmode, expect);
5435 expect = gen_rtx_MEM (mode, expect);
5436 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5438 weak = CALL_EXPR_ARG (exp, 3);
5439 is_weak = false;
5440 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5441 is_weak = true;
5443 if (target == const0_rtx)
5444 target = NULL;
5446 /* Lest the rtl backend create a race condition with an improper store
5447 to memory, always create a new pseudo for OLDVAL. */
5448 oldval = NULL;
5450 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5451 is_weak, success, failure))
5452 return NULL_RTX;
5454 /* Conditionally store back to EXPECT, lest we create a race condition
5455 with an improper store to memory. */
5456 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5457 the normal case where EXPECT is totally private, i.e. a register. At
5458 which point the store can be unconditional. */
5459 label = gen_label_rtx ();
5460 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5461 GET_MODE (target), 1, label);
5462 emit_move_insn (expect, oldval);
5463 emit_label (label);
5465 return target;
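
/* A minimal sketch (hypothetical user code).  EXPECT is both an input
   and, on failure, an output, which is why the expansion above stores
   OLDVAL back conditionally:

     int v = 0, expected = 0;
     _Bool ok = __atomic_compare_exchange_n (&v, &expected, 1,
                                             0,                  // weak
                                             __ATOMIC_ACQ_REL,   // success
                                             __ATOMIC_ACQUIRE);  // failure
     // if !ok, EXPECTED now holds the value that was observed in V
   */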
5468 /* Expand the __atomic_load intrinsic:
5469 TYPE __atomic_load (TYPE *object, enum memmodel)
5470 EXP is the CALL_EXPR.
5471 TARGET is an optional place for us to store the results. */
5473 static rtx
5474 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5476 rtx mem;
5477 enum memmodel model;
5479 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5480 if (is_mm_release (model) || is_mm_acq_rel (model))
5482 warning (OPT_Winvalid_memory_model,
5483 "invalid memory model for %<__atomic_load%>");
5484 model = MEMMODEL_SEQ_CST;
5487 if (!flag_inline_atomics)
5488 return NULL_RTX;
5490 /* Expand the operand. */
5491 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5493 return expand_atomic_load (target, mem, model);
5497 /* Expand the __atomic_store intrinsic:
5498 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5499 EXP is the CALL_EXPR.
5500 TARGET is an optional place for us to store the results. */
5502 static rtx
5503 expand_builtin_atomic_store (machine_mode mode, tree exp)
5505 rtx mem, val;
5506 enum memmodel model;
5508 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5509 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5510 || is_mm_release (model)))
5512 warning (OPT_Winvalid_memory_model,
5513 "invalid memory model for %<__atomic_store%>");
5514 model = MEMMODEL_SEQ_CST;
5517 if (!flag_inline_atomics)
5518 return NULL_RTX;
5520 /* Expand the operands. */
5521 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5522 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5524 return expand_atomic_store (mem, val, model, false);
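
/* A minimal sketch (hypothetical user code) of the model restrictions
   enforced above: loads may not be RELEASE or ACQ_REL; stores must be
   RELAXED, RELEASE or SEQ_CST:

     int x = 0;
     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);  // accepted
     __atomic_store_n (&x, v + 1, __ATOMIC_RELEASE);  // accepted
     __atomic_store_n (&x, 0, __ATOMIC_ACQUIRE);      // warns; SEQ_CST used
   */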
5527 /* Expand the __atomic_fetch_XXX intrinsic:
5528 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5529 EXP is the CALL_EXPR.
5530 TARGET is an optional place for us to store the results.
5531 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5532 FETCH_AFTER is true if returning the result of the operation,
5533 false if returning the value before the operation.
5534 IGNORE is true if the result is not used.
5535 EXT_CALL is the correct builtin for an external call if this cannot be
5536 resolved to an instruction sequence. */
5538 static rtx
5539 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5540 enum rtx_code code, bool fetch_after,
5541 bool ignore, enum built_in_function ext_call)
5543 rtx val, mem, ret;
5544 enum memmodel model;
5545 tree fndecl;
5546 tree addr;
5548 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5550 /* Expand the operands. */
5551 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5552 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5554 /* Only try generating instructions if inlining is turned on. */
5555 if (flag_inline_atomics)
5557 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5558 if (ret)
5559 return ret;
5562 /* Return NULL_RTX if a different routine isn't needed for the library call.  */
5563 if (ext_call == BUILT_IN_NONE)
5564 return NULL_RTX;
5566 /* Change the call to the specified function. */
5567 fndecl = get_callee_fndecl (exp);
5568 addr = CALL_EXPR_FN (exp);
5569 STRIP_NOPS (addr);
5571 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5572 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5574 /* Expand the call here so we can emit trailing code. */
5575 ret = expand_call (exp, target, ignore);
5577 /* Replace the original function just in case it matters. */
5578 TREE_OPERAND (addr, 0) = fndecl;
5580 /* Then issue the arithmetic correction to return the right result. */
5581 if (!ignore)
5583 if (code == NOT)
5585 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5586 OPTAB_LIB_WIDEN);
5587 ret = expand_simple_unop (mode, NOT, ret, target, true);
5589 else
5590 ret = expand_simple_binop (mode, code, ret, val, target, true,
5591 OPTAB_LIB_WIDEN);
5593 return ret;
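
/* For the NAND case the trailing correction above amounts to the
   identity (a sketch in terms of the documented builtins):

     __atomic_nand_fetch (p, v, m) == ~(__atomic_fetch_nand (p, v, m) & v)

   i.e. when only the fetch-before library routine is available, the
   operation is redone locally on the returned value to produce the
   after-operation result.  */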
5597 #ifndef HAVE_atomic_clear
5598 # define HAVE_atomic_clear 0
5599 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5600 #endif
5602 /* Expand an atomic clear operation.
5603 void __atomic_clear (BOOL *obj, enum memmodel)
5604 EXP is the call expression. */
5606 static rtx
5607 expand_builtin_atomic_clear (tree exp)
5609 machine_mode mode;
5610 rtx mem, ret;
5611 enum memmodel model;
5613 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5614 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5615 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5617 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5619 warning (OPT_Winvalid_memory_model,
5620 "invalid memory model for %<__atomic_store%>");
5621 model = MEMMODEL_SEQ_CST;
5624 if (HAVE_atomic_clear)
5626 emit_insn (gen_atomic_clear (mem, model));
5627 return const0_rtx;
5630 /* Try issuing an atomic store via expand_atomic_store, which may fall
5631 back to a __sync_lock_release libcall.  The only way this can
5632 fail is if the bool type is larger than a word size. Unlikely, but
5633 handle it anyway for completeness. Assume a single threaded model since
5634 there is no atomic support in this case, and no barriers are required. */
5635 ret = expand_atomic_store (mem, const0_rtx, model, true);
5636 if (!ret)
5637 emit_move_insn (mem, const0_rtx);
5638 return const0_rtx;
5641 /* Expand an atomic test_and_set operation.
5642 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5643 EXP is the call expression. */
5645 static rtx
5646 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5648 rtx mem;
5649 enum memmodel model;
5650 machine_mode mode;
5652 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5653 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5654 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5656 return expand_atomic_test_and_set (target, mem, model);
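
/* A minimal sketch (hypothetical user code) pairing the two boolean
   builtins above in an atomic_flag-style lock:

     static _Bool flag;
     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                        // spin while already set
     __atomic_clear (&flag, __ATOMIC_RELEASE);  // unlock
   */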
5660 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5661 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5663 static tree
5664 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5666 int size;
5667 machine_mode mode;
5668 unsigned int mode_align, type_align;
5670 if (TREE_CODE (arg0) != INTEGER_CST)
5671 return NULL_TREE;
5673 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5674 mode = mode_for_size (size, MODE_INT, 0);
5675 mode_align = GET_MODE_ALIGNMENT (mode);
5677 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5678 type_align = mode_align;
5679 else
5681 tree ttype = TREE_TYPE (arg1);
5683 /* This function is usually invoked and folded immediately by the front
5684 end before anything else has a chance to look at it. The pointer
5685 parameter at this point is usually cast to a void *, so check for that
5686 and look past the cast. */
5687 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5688 && VOID_TYPE_P (TREE_TYPE (ttype)))
5689 arg1 = TREE_OPERAND (arg1, 0);
5691 ttype = TREE_TYPE (arg1);
5692 gcc_assert (POINTER_TYPE_P (ttype));
5694 /* Get the underlying type of the object. */
5695 ttype = TREE_TYPE (ttype);
5696 type_align = TYPE_ALIGN (ttype);
5699 /* If the object has smaller alignment, the lock free routines cannot
5700 be used. */
5701 if (type_align < mode_align)
5702 return boolean_false_node;
5704 /* Check if a compare_and_swap pattern exists for the mode which represents
5705 the required size. The pattern is not allowed to fail, so the existence
5706 of the pattern indicates support is present. */
5707 if (can_compare_and_swap_p (mode, true))
5708 return boolean_true_node;
5709 else
5710 return boolean_false_node;
5713 /* Return true if the parameters to call EXP represent an object which will
5714 always generate lock free instructions. The first argument represents the
5715 size of the object, and the second parameter is a pointer to the object
5716 itself. If NULL is passed for the object, then the result is based on
5717 typical alignment for an object of the specified size. Otherwise return
5718 false. */
5720 static rtx
5721 expand_builtin_atomic_always_lock_free (tree exp)
5723 tree size;
5724 tree arg0 = CALL_EXPR_ARG (exp, 0);
5725 tree arg1 = CALL_EXPR_ARG (exp, 1);
5727 if (TREE_CODE (arg0) != INTEGER_CST)
5729 error ("non-constant argument 1 to __atomic_always_lock_free");
5730 return const0_rtx;
5733 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5734 if (size == boolean_true_node)
5735 return const1_rtx;
5736 return const0_rtx;
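
/* A minimal sketch (hypothetical user code): the first query uses the
   typical alignment for the size, the second also considers the
   alignment of the pointed-to object:

     int obj;
     int f1 = __atomic_always_lock_free (sizeof (int), 0);
     int f2 = __atomic_always_lock_free (sizeof (obj), &obj);

   The result is always a compile-time constant; a non-constant size is
   rejected above.  */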
5739 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5740 is lock free on this architecture. */
5742 static tree
5743 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5745 if (!flag_inline_atomics)
5746 return NULL_TREE;
5748 /* If it isn't always lock free, don't generate a result. */
5749 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5750 return boolean_true_node;
5752 return NULL_TREE;
5755 /* Return true if the parameters to call EXP represent an object which will
5756 always generate lock free instructions. The first argument represents the
5757 size of the object, and the second parameter is a pointer to the object
5758 itself. If NULL is passed for the object, then the result is based on
5759 typical alignment for an object of the specified size. Otherwise return
5760 NULL_RTX.  */
5762 static rtx
5763 expand_builtin_atomic_is_lock_free (tree exp)
5765 tree size;
5766 tree arg0 = CALL_EXPR_ARG (exp, 0);
5767 tree arg1 = CALL_EXPR_ARG (exp, 1);
5769 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5771 error ("non-integer argument 1 to __atomic_is_lock_free");
5772 return NULL_RTX;
5775 if (!flag_inline_atomics)
5776 return NULL_RTX;
5778 /* If the value is known at compile time, return the RTX for it. */
5779 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5780 if (size == boolean_true_node)
5781 return const1_rtx;
5783 return NULL_RTX;
5786 /* Expand the __atomic_thread_fence intrinsic:
5787 void __atomic_thread_fence (enum memmodel)
5788 EXP is the CALL_EXPR. */
5790 static void
5791 expand_builtin_atomic_thread_fence (tree exp)
5793 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5794 expand_mem_thread_fence (model);
5797 /* Expand the __atomic_signal_fence intrinsic:
5798 void __atomic_signal_fence (enum memmodel)
5799 EXP is the CALL_EXPR. */
5801 static void
5802 expand_builtin_atomic_signal_fence (tree exp)
5804 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5805 expand_mem_signal_fence (model);
5808 /* Expand the __sync_synchronize intrinsic. */
5810 static void
5811 expand_builtin_sync_synchronize (void)
5813 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
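
/* A minimal sketch (hypothetical user code) of the three fence builtins
   expanded above:

     __atomic_thread_fence (__ATOMIC_RELEASE);  // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler-only barrier,
                                                // vs. a signal handler
     __sync_synchronize ();                     // full SEQ_CST barrier
   */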
5816 static rtx
5817 expand_builtin_thread_pointer (tree exp, rtx target)
5819 enum insn_code icode;
5820 if (!validate_arglist (exp, VOID_TYPE))
5821 return const0_rtx;
5822 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5823 if (icode != CODE_FOR_nothing)
5825 struct expand_operand op;
5826 /* If the target is not suitable then create a new target.  */
5827 if (target == NULL_RTX
5828 || !REG_P (target)
5829 || GET_MODE (target) != Pmode)
5830 target = gen_reg_rtx (Pmode);
5831 create_output_operand (&op, target, Pmode);
5832 expand_insn (icode, 1, &op);
5833 return target;
5835 error ("__builtin_thread_pointer is not supported on this target");
5836 return const0_rtx;
5839 static void
5840 expand_builtin_set_thread_pointer (tree exp)
5842 enum insn_code icode;
5843 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5844 return;
5845 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5846 if (icode != CODE_FOR_nothing)
5848 struct expand_operand op;
5849 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5850 Pmode, EXPAND_NORMAL);
5851 create_input_operand (&op, val, Pmode);
5852 expand_insn (icode, 1, &op);
5853 return;
5855 error ("__builtin_set_thread_pointer is not supported on this target");
5859 /* Emit code to restore the stack pointer from the value saved in VAR.  */
5861 static void
5862 expand_stack_restore (tree var)
5864 rtx_insn *prev;
5865 rtx sa = expand_normal (var);
5867 sa = convert_memory_address (Pmode, sa);
5869 prev = get_last_insn ();
5870 emit_stack_restore (SAVE_BLOCK, sa);
5872 record_new_stack_level ();
5874 fixup_args_size_notes (prev, get_last_insn (), 0);
5877 /* Emit code to save the current value of the stack pointer.  */
5879 static rtx
5880 expand_stack_save (void)
5882 rtx ret = NULL_RTX;
5884 emit_stack_save (SAVE_BLOCK, &ret);
5885 return ret;
5889 /* Expand OpenACC acc_on_device.
5891 This has to happen late (that is, not in early folding; expand_builtin_*,
5892 rather than fold_builtin_*), as we have to act differently for host and
5893 acceleration device (ACCEL_COMPILER conditional). */
5895 static rtx
5896 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5897 rtx target ATTRIBUTE_UNUSED)
5899 #ifdef ACCEL_COMPILER
5900 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5901 return NULL_RTX;
5903 tree arg = CALL_EXPR_ARG (exp, 0);
5905 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5906 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5907 rtx v = expand_normal (arg), v1, v2;
5908 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5909 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5910 machine_mode target_mode = TYPE_MODE (integer_type_node);
5911 if (!target || !register_operand (target, target_mode))
5912 target = gen_reg_rtx (target_mode);
5913 emit_move_insn (target, const1_rtx);
5914 rtx_code_label *done_label = gen_label_rtx ();
5915 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5916 NULL, done_label, PROB_EVEN);
5917 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5918 NULL, done_label, PROB_EVEN);
5919 emit_move_insn (target, const0_rtx);
5920 emit_label (done_label);
5922 return target;
5923 #else
5924 return NULL;
5925 #endif
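
/* A minimal sketch (hypothetical user code, using OpenACC's documented
   acc_on_device) of a call that reaches this expander when compiling
   for an accelerator:

     if (acc_on_device (acc_device_not_host))
       ;  // device-specialized path
   */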
5929 /* Expand an expression EXP that calls a built-in function,
5930 with result going to TARGET if that's convenient
5931 (and in mode MODE if that's convenient).
5932 SUBTARGET may be used as the target for computing one of EXP's operands.
5933 IGNORE is nonzero if the value is to be ignored. */
5936 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5937 int ignore)
5939 tree fndecl = get_callee_fndecl (exp);
5940 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5941 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5942 int flags;
5944 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5945 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5947 /* When ASan is enabled, we don't want to expand some memory/string
5948 builtins and rely on libsanitizer's hooks. This allows us to avoid
5949 redundant checks and be sure that possible overflows will be detected
5950 by ASan. */
5952 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5953 return expand_call (exp, target, ignore);
5955 /* When not optimizing, generate calls to library functions for a certain
5956 set of builtins. */
5957 if (!optimize
5958 && !called_as_built_in (fndecl)
5959 && fcode != BUILT_IN_FORK
5960 && fcode != BUILT_IN_EXECL
5961 && fcode != BUILT_IN_EXECV
5962 && fcode != BUILT_IN_EXECLP
5963 && fcode != BUILT_IN_EXECLE
5964 && fcode != BUILT_IN_EXECVP
5965 && fcode != BUILT_IN_EXECVE
5966 && fcode != BUILT_IN_ALLOCA
5967 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5968 && fcode != BUILT_IN_FREE
5969 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5970 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5971 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5972 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5973 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5974 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5975 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5976 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5977 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5978 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5979 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5980 && fcode != BUILT_IN_CHKP_BNDRET)
5981 return expand_call (exp, target, ignore);
5983 /* The built-in function expanders test for target == const0_rtx
5984 to determine whether the function's result will be ignored. */
5985 if (ignore)
5986 target = const0_rtx;
5988 /* If the result of a pure or const built-in function is ignored, and
5989 none of its arguments are volatile, we can avoid expanding the
5990 built-in call and just evaluate the arguments for side-effects. */
5991 if (target == const0_rtx
5992 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5993 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5995 bool volatilep = false;
5996 tree arg;
5997 call_expr_arg_iterator iter;
5999 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6000 if (TREE_THIS_VOLATILE (arg))
6002 volatilep = true;
6003 break;
6006 if (! volatilep)
6008 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6009 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6010 return const0_rtx;
6014 /* expand_builtin_with_bounds is supposed to be used for
6015 instrumented builtin calls. */
6016 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6018 switch (fcode)
6020 CASE_FLT_FN (BUILT_IN_FABS):
6021 case BUILT_IN_FABSD32:
6022 case BUILT_IN_FABSD64:
6023 case BUILT_IN_FABSD128:
6024 target = expand_builtin_fabs (exp, target, subtarget);
6025 if (target)
6026 return target;
6027 break;
6029 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6030 target = expand_builtin_copysign (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6035 /* Just do a normal library call if we were unable to fold
6036 the values. */
6037 CASE_FLT_FN (BUILT_IN_CABS):
6038 break;
6040 CASE_FLT_FN (BUILT_IN_EXP):
6041 CASE_FLT_FN (BUILT_IN_EXP10):
6042 CASE_FLT_FN (BUILT_IN_POW10):
6043 CASE_FLT_FN (BUILT_IN_EXP2):
6044 CASE_FLT_FN (BUILT_IN_EXPM1):
6045 CASE_FLT_FN (BUILT_IN_LOGB):
6046 CASE_FLT_FN (BUILT_IN_LOG):
6047 CASE_FLT_FN (BUILT_IN_LOG10):
6048 CASE_FLT_FN (BUILT_IN_LOG2):
6049 CASE_FLT_FN (BUILT_IN_LOG1P):
6050 CASE_FLT_FN (BUILT_IN_TAN):
6051 CASE_FLT_FN (BUILT_IN_ASIN):
6052 CASE_FLT_FN (BUILT_IN_ACOS):
6053 CASE_FLT_FN (BUILT_IN_ATAN):
6054 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6055 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6056 because of possible accuracy problems. */
6057 if (! flag_unsafe_math_optimizations)
6058 break;
6059 CASE_FLT_FN (BUILT_IN_SQRT):
6060 CASE_FLT_FN (BUILT_IN_FLOOR):
6061 CASE_FLT_FN (BUILT_IN_CEIL):
6062 CASE_FLT_FN (BUILT_IN_TRUNC):
6063 CASE_FLT_FN (BUILT_IN_ROUND):
6064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6065 CASE_FLT_FN (BUILT_IN_RINT):
6066 target = expand_builtin_mathfn (exp, target, subtarget);
6067 if (target)
6068 return target;
6069 break;
6071 CASE_FLT_FN (BUILT_IN_FMA):
6072 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6073 if (target)
6074 return target;
6075 break;
6077 CASE_FLT_FN (BUILT_IN_ILOGB):
6078 if (! flag_unsafe_math_optimizations)
6079 break;
6080 CASE_FLT_FN (BUILT_IN_ISINF):
6081 CASE_FLT_FN (BUILT_IN_FINITE):
6082 case BUILT_IN_ISFINITE:
6083 case BUILT_IN_ISNORMAL:
6084 target = expand_builtin_interclass_mathfn (exp, target);
6085 if (target)
6086 return target;
6087 break;
6089 CASE_FLT_FN (BUILT_IN_ICEIL):
6090 CASE_FLT_FN (BUILT_IN_LCEIL):
6091 CASE_FLT_FN (BUILT_IN_LLCEIL):
6092 CASE_FLT_FN (BUILT_IN_LFLOOR):
6093 CASE_FLT_FN (BUILT_IN_IFLOOR):
6094 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6095 target = expand_builtin_int_roundingfn (exp, target);
6096 if (target)
6097 return target;
6098 break;
6100 CASE_FLT_FN (BUILT_IN_IRINT):
6101 CASE_FLT_FN (BUILT_IN_LRINT):
6102 CASE_FLT_FN (BUILT_IN_LLRINT):
6103 CASE_FLT_FN (BUILT_IN_IROUND):
6104 CASE_FLT_FN (BUILT_IN_LROUND):
6105 CASE_FLT_FN (BUILT_IN_LLROUND):
6106 target = expand_builtin_int_roundingfn_2 (exp, target);
6107 if (target)
6108 return target;
6109 break;
6111 CASE_FLT_FN (BUILT_IN_POWI):
6112 target = expand_builtin_powi (exp, target);
6113 if (target)
6114 return target;
6115 break;
6117 CASE_FLT_FN (BUILT_IN_ATAN2):
6118 CASE_FLT_FN (BUILT_IN_LDEXP):
6119 CASE_FLT_FN (BUILT_IN_SCALB):
6120 CASE_FLT_FN (BUILT_IN_SCALBN):
6121 CASE_FLT_FN (BUILT_IN_SCALBLN):
6122 if (! flag_unsafe_math_optimizations)
6123 break;
6125 CASE_FLT_FN (BUILT_IN_FMOD):
6126 CASE_FLT_FN (BUILT_IN_REMAINDER):
6127 CASE_FLT_FN (BUILT_IN_DREM):
6128 CASE_FLT_FN (BUILT_IN_POW):
6129 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6130 if (target)
6131 return target;
6132 break;
6134 CASE_FLT_FN (BUILT_IN_CEXPI):
6135 target = expand_builtin_cexpi (exp, target);
6136 gcc_assert (target);
6137 return target;
6139 CASE_FLT_FN (BUILT_IN_SIN):
6140 CASE_FLT_FN (BUILT_IN_COS):
6141 if (! flag_unsafe_math_optimizations)
6142 break;
6143 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6144 if (target)
6145 return target;
6146 break;
6148 CASE_FLT_FN (BUILT_IN_SINCOS):
6149 if (! flag_unsafe_math_optimizations)
6150 break;
6151 target = expand_builtin_sincos (exp);
6152 if (target)
6153 return target;
6154 break;
6156 case BUILT_IN_APPLY_ARGS:
6157 return expand_builtin_apply_args ();
6159 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6160 FUNCTION with a copy of the parameters described by
6161 ARGUMENTS, and ARGSIZE. It returns a block of memory
6162 allocated on the stack into which is stored all the registers
6163 that might possibly be used for returning the result of a
6164 function. ARGUMENTS is the value returned by
6165 __builtin_apply_args. ARGSIZE is the number of bytes of
6166 arguments that must be copied. ??? How should this value be
6167 computed? We'll also need a safe worst case value for varargs
6168 functions. */
6169 case BUILT_IN_APPLY:
6170 if (!validate_arglist (exp, POINTER_TYPE,
6171 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6172 && !validate_arglist (exp, REFERENCE_TYPE,
6173 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6174 return const0_rtx;
6175 else
6177 rtx ops[3];
6179 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6180 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6181 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6183 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6186 /* __builtin_return (RESULT) causes the function to return the
6187 value described by RESULT. RESULT is address of the block of
6188 memory returned by __builtin_apply. */
6189 case BUILT_IN_RETURN:
6190 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6191 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6192 return const0_rtx;
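
/* A minimal sketch (hypothetical user code) of the apply/return triple
   handled above, forwarding the current arguments to another function:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) other_fn, args, 64);
     __builtin_return (ret);

   Here OTHER_FN and the 64-byte argument size are placeholders; as the
   ??? note above says, a generally safe ARGSIZE is target-specific.  */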
6194 case BUILT_IN_SAVEREGS:
6195 return expand_builtin_saveregs ();
6197 case BUILT_IN_VA_ARG_PACK:
6198 /* All valid uses of __builtin_va_arg_pack () are removed during
6199 inlining. */
6200 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6201 return const0_rtx;
6203 case BUILT_IN_VA_ARG_PACK_LEN:
6204 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6205 inlining. */
6206 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6207 return const0_rtx;
6209 /* Return the address of the first anonymous stack arg. */
6210 case BUILT_IN_NEXT_ARG:
6211 if (fold_builtin_next_arg (exp, false))
6212 return const0_rtx;
6213 return expand_builtin_next_arg ();
6215 case BUILT_IN_CLEAR_CACHE:
6216 target = expand_builtin___clear_cache (exp);
6217 if (target)
6218 return target;
6219 break;
6221 case BUILT_IN_CLASSIFY_TYPE:
6222 return expand_builtin_classify_type (exp);
6224 case BUILT_IN_CONSTANT_P:
6225 return const0_rtx;
6227 case BUILT_IN_FRAME_ADDRESS:
6228 case BUILT_IN_RETURN_ADDRESS:
6229 return expand_builtin_frame_address (fndecl, exp);
6231 /* Returns the address of the area where the structure is returned.
6232 0 otherwise. */
6233 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6234 if (call_expr_nargs (exp) != 0
6235 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6236 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6237 return const0_rtx;
6238 else
6239 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6241 case BUILT_IN_ALLOCA:
6242 case BUILT_IN_ALLOCA_WITH_ALIGN:
6243 /* If the allocation stems from the declaration of a variable-sized
6244 object, it cannot accumulate. */
6245 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6246 if (target)
6247 return target;
6248 break;
6250 case BUILT_IN_STACK_SAVE:
6251 return expand_stack_save ();
6253 case BUILT_IN_STACK_RESTORE:
6254 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6255 return const0_rtx;
6257 case BUILT_IN_BSWAP16:
6258 case BUILT_IN_BSWAP32:
6259 case BUILT_IN_BSWAP64:
6260 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6261 if (target)
6262 return target;
6263 break;
6265 CASE_INT_FN (BUILT_IN_FFS):
6266 target = expand_builtin_unop (target_mode, exp, target,
6267 subtarget, ffs_optab);
6268 if (target)
6269 return target;
6270 break;
6272 CASE_INT_FN (BUILT_IN_CLZ):
6273 target = expand_builtin_unop (target_mode, exp, target,
6274 subtarget, clz_optab);
6275 if (target)
6276 return target;
6277 break;
6279 CASE_INT_FN (BUILT_IN_CTZ):
6280 target = expand_builtin_unop (target_mode, exp, target,
6281 subtarget, ctz_optab);
6282 if (target)
6283 return target;
6284 break;
6286 CASE_INT_FN (BUILT_IN_CLRSB):
6287 target = expand_builtin_unop (target_mode, exp, target,
6288 subtarget, clrsb_optab);
6289 if (target)
6290 return target;
6291 break;
6293 CASE_INT_FN (BUILT_IN_POPCOUNT):
6294 target = expand_builtin_unop (target_mode, exp, target,
6295 subtarget, popcount_optab);
6296 if (target)
6297 return target;
6298 break;
6300 CASE_INT_FN (BUILT_IN_PARITY):
6301 target = expand_builtin_unop (target_mode, exp, target,
6302 subtarget, parity_optab);
6303 if (target)
6304 return target;
6305 break;
6307 case BUILT_IN_STRLEN:
6308 target = expand_builtin_strlen (exp, target, target_mode);
6309 if (target)
6310 return target;
6311 break;
6313 case BUILT_IN_STRCPY:
6314 target = expand_builtin_strcpy (exp, target);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_STRNCPY:
6320 target = expand_builtin_strncpy (exp, target);
6321 if (target)
6322 return target;
6323 break;
6325 case BUILT_IN_STPCPY:
6326 target = expand_builtin_stpcpy (exp, target, mode);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_MEMCPY:
6332 target = expand_builtin_memcpy (exp, target);
6333 if (target)
6334 return target;
6335 break;
6337 case BUILT_IN_MEMPCPY:
6338 target = expand_builtin_mempcpy (exp, target, mode);
6339 if (target)
6340 return target;
6341 break;
6343 case BUILT_IN_MEMSET:
6344 target = expand_builtin_memset (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6349 case BUILT_IN_BZERO:
6350 target = expand_builtin_bzero (exp);
6351 if (target)
6352 return target;
6353 break;
6355 case BUILT_IN_STRCMP:
6356 target = expand_builtin_strcmp (exp, target);
6357 if (target)
6358 return target;
6359 break;
6361 case BUILT_IN_STRNCMP:
6362 target = expand_builtin_strncmp (exp, target, mode);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_BCMP:
6368 case BUILT_IN_MEMCMP:
6369 target = expand_builtin_memcmp (exp, target, mode);
6370 if (target)
6371 return target;
6372 break;
6374 case BUILT_IN_SETJMP:
6375 /* This should have been lowered to the builtins below. */
6376 gcc_unreachable ();
6378 case BUILT_IN_SETJMP_SETUP:
6379 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6380 and the receiver label. */
6381 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6383 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6384 VOIDmode, EXPAND_NORMAL);
6385 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6386 rtx_insn *label_r = label_rtx (label);
6388 /* This is copied from the handling of non-local gotos. */
6389 expand_builtin_setjmp_setup (buf_addr, label_r);
6390 nonlocal_goto_handler_labels
6391 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6392 nonlocal_goto_handler_labels);
6393 /* ??? Do not let expand_label treat us as such since we would
6394 not want to be both on the list of non-local labels and on
6395 the list of forced labels. */
6396 FORCED_LABEL (label) = 0;
6397 return const0_rtx;
6399 break;
6401 case BUILT_IN_SETJMP_RECEIVER:
6402 /* __builtin_setjmp_receiver is passed the receiver label. */
6403 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6405 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6406 rtx_insn *label_r = label_rtx (label);
6408 expand_builtin_setjmp_receiver (label_r);
6409 return const0_rtx;
6411 break;
6413 /* __builtin_longjmp is passed a pointer to an array of five words.
6414 It's similar to the C library longjmp function but works with
6415 __builtin_setjmp above. */
6416 case BUILT_IN_LONGJMP:
6417 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6419 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6420 VOIDmode, EXPAND_NORMAL);
6421 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6423 if (value != const1_rtx)
6425 error ("%<__builtin_longjmp%> second argument must be 1");
6426 return const0_rtx;
6429 expand_builtin_longjmp (buf_addr, value);
6430 return const0_rtx;
6432 break;
6434 case BUILT_IN_NONLOCAL_GOTO:
6435 target = expand_builtin_nonlocal_goto (exp);
6436 if (target)
6437 return target;
6438 break;
6440 /* This updates the setjmp buffer that is its argument with the value
6441 of the current stack pointer. */
6442 case BUILT_IN_UPDATE_SETJMP_BUF:
6443 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6445 rtx buf_addr
6446 = expand_normal (CALL_EXPR_ARG (exp, 0));
6448 expand_builtin_update_setjmp_buf (buf_addr);
6449 return const0_rtx;
6451 break;
6453 case BUILT_IN_TRAP:
6454 expand_builtin_trap ();
6455 return const0_rtx;
6457 case BUILT_IN_UNREACHABLE:
6458 expand_builtin_unreachable ();
6459 return const0_rtx;
6461 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6462 case BUILT_IN_SIGNBITD32:
6463 case BUILT_IN_SIGNBITD64:
6464 case BUILT_IN_SIGNBITD128:
6465 target = expand_builtin_signbit (exp, target);
6466 if (target)
6467 return target;
6468 break;
6470 /* Various hooks for the DWARF 2 __throw routine. */
6471 case BUILT_IN_UNWIND_INIT:
6472 expand_builtin_unwind_init ();
6473 return const0_rtx;
6474 case BUILT_IN_DWARF_CFA:
6475 return virtual_cfa_rtx;
6476 #ifdef DWARF2_UNWIND_INFO
6477 case BUILT_IN_DWARF_SP_COLUMN:
6478 return expand_builtin_dwarf_sp_column ();
6479 case BUILT_IN_INIT_DWARF_REG_SIZES:
6480 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6481 return const0_rtx;
6482 #endif
6483 case BUILT_IN_FROB_RETURN_ADDR:
6484 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6485 case BUILT_IN_EXTRACT_RETURN_ADDR:
6486 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6487 case BUILT_IN_EH_RETURN:
6488 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6489 CALL_EXPR_ARG (exp, 1));
6490 return const0_rtx;
6491 case BUILT_IN_EH_RETURN_DATA_REGNO:
6492 return expand_builtin_eh_return_data_regno (exp);
6493 case BUILT_IN_EXTEND_POINTER:
6494 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6495 case BUILT_IN_EH_POINTER:
6496 return expand_builtin_eh_pointer (exp);
6497 case BUILT_IN_EH_FILTER:
6498 return expand_builtin_eh_filter (exp);
6499 case BUILT_IN_EH_COPY_VALUES:
6500 return expand_builtin_eh_copy_values (exp);
6502 case BUILT_IN_VA_START:
6503 return expand_builtin_va_start (exp);
6504 case BUILT_IN_VA_END:
6505 return expand_builtin_va_end (exp);
6506 case BUILT_IN_VA_COPY:
6507 return expand_builtin_va_copy (exp);
6508 case BUILT_IN_EXPECT:
6509 return expand_builtin_expect (exp, target);
6510 case BUILT_IN_ASSUME_ALIGNED:
6511 return expand_builtin_assume_aligned (exp, target);
6512 case BUILT_IN_PREFETCH:
6513 expand_builtin_prefetch (exp);
6514 return const0_rtx;
6516 case BUILT_IN_INIT_TRAMPOLINE:
6517 return expand_builtin_init_trampoline (exp, true);
6518 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6519 return expand_builtin_init_trampoline (exp, false);
6520 case BUILT_IN_ADJUST_TRAMPOLINE:
6521 return expand_builtin_adjust_trampoline (exp);
6523 case BUILT_IN_FORK:
6524 case BUILT_IN_EXECL:
6525 case BUILT_IN_EXECV:
6526 case BUILT_IN_EXECLP:
6527 case BUILT_IN_EXECLE:
6528 case BUILT_IN_EXECVP:
6529 case BUILT_IN_EXECVE:
6530 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6531 if (target)
6532 return target;
6533 break;
6535 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6536 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6537 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6538 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6539 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6541 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6542 if (target)
6543 return target;
6544 break;
6546 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6547 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6548 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6549 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6550 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6551 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6552 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6553 if (target)
6554 return target;
6555 break;
6557 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6558 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6559 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6560 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6561 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6563 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6564 if (target)
6565 return target;
6566 break;
6568 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6569 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6570 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6571 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6572 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6573 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6574 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6575 if (target)
6576 return target;
6577 break;
6579 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6580 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6581 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6582 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6583 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6584 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6585 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6586 if (target)
6587 return target;
6588 break;
6590 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6591 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6592 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6593 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6594 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6596 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6597 if (target)
6598 return target;
6599 break;
6601 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6602 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6603 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6604 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6605 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6607 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6613 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6614 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6615 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6616 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6618 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6619 if (target)
6620 return target;
6621 break;
6623 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6624 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6625 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6626 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6627 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6628 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6629 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6630 if (target)
6631 return target;
6632 break;
6634 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6635 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6636 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6637 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6638 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6640 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6646 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6647 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6648 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6649 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6651 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6652 if (target)
6653 return target;
6654 break;
6656 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6657 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6658 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6659 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6660 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6662 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6668 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6669 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6670 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6671 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6672 if (mode == VOIDmode)
6673 mode = TYPE_MODE (boolean_type_node);
6674 if (!target || !register_operand (target, mode))
6675 target = gen_reg_rtx (mode);
6677 mode = get_builtin_sync_mode
6678 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6679 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6680 if (target)
6681 return target;
6682 break;
6684 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6685 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6686 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6687 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6689 mode = get_builtin_sync_mode
6690 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6691 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6692 if (target)
6693 return target;
6694 break;
6696 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6697 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6698 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6699 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6700 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6701 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6702 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6703 if (target)
6704 return target;
6705 break;
6707 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6708 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6709 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6710 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6711 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6713 expand_builtin_sync_lock_release (mode, exp);
6714 return const0_rtx;
6716 case BUILT_IN_SYNC_SYNCHRONIZE:
6717 expand_builtin_sync_synchronize ();
6718 return const0_rtx;
6720 case BUILT_IN_ATOMIC_EXCHANGE_1:
6721 case BUILT_IN_ATOMIC_EXCHANGE_2:
6722 case BUILT_IN_ATOMIC_EXCHANGE_4:
6723 case BUILT_IN_ATOMIC_EXCHANGE_8:
6724 case BUILT_IN_ATOMIC_EXCHANGE_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6726 target = expand_builtin_atomic_exchange (mode, exp, target);
6727 if (target)
6728 return target;
6729 break;
6731 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6732 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6733 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6734 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6735 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6737 unsigned int nargs, z;
6738 vec<tree, va_gc> *vec;
6740 mode =
6741 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6742 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6743 if (target)
6744 return target;
6746 /* If this is turned into an external library call, the weak parameter
6747 must be dropped to match the expected parameter list. */
6748 nargs = call_expr_nargs (exp);
6749 vec_alloc (vec, nargs - 1);
6750 for (z = 0; z < 3; z++)
6751 vec->quick_push (CALL_EXPR_ARG (exp, z));
6752 /* Skip the boolean weak parameter. */
6753 for (z = 4; z < 6; z++)
6754 vec->quick_push (CALL_EXPR_ARG (exp, z));
6755 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6756 break;
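
/* I.e. (a sketch of the rewrite above): a call
     __atomic_compare_exchange_n (p, &e, d, weak, s, f)
   that cannot be expanded inline is re-emitted for the sized library
   routine as
     __atomic_compare_exchange_N (p, &e, d, s, f)
   keeping arguments 0-2 and 4-5 and dropping the boolean WEAK.  */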
6759 case BUILT_IN_ATOMIC_LOAD_1:
6760 case BUILT_IN_ATOMIC_LOAD_2:
6761 case BUILT_IN_ATOMIC_LOAD_4:
6762 case BUILT_IN_ATOMIC_LOAD_8:
6763 case BUILT_IN_ATOMIC_LOAD_16:
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6765 target = expand_builtin_atomic_load (mode, exp, target);
6766 if (target)
6767 return target;
6768 break;
6770 case BUILT_IN_ATOMIC_STORE_1:
6771 case BUILT_IN_ATOMIC_STORE_2:
6772 case BUILT_IN_ATOMIC_STORE_4:
6773 case BUILT_IN_ATOMIC_STORE_8:
6774 case BUILT_IN_ATOMIC_STORE_16:
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6776 target = expand_builtin_atomic_store (mode, exp);
6777 if (target)
6778 return const0_rtx;
6779 break;
6781 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6782 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6783 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6784 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6785 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6787 enum built_in_function lib;
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6789 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6790 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6792 ignore, lib);
6793 if (target)
6794 return target;
6795 break;
6797 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6798 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6799 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6800 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6801 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6803 enum built_in_function lib;
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6805 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6806 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6808 ignore, lib);
6809 if (target)
6810 return target;
6811 break;
6813 case BUILT_IN_ATOMIC_AND_FETCH_1:
6814 case BUILT_IN_ATOMIC_AND_FETCH_2:
6815 case BUILT_IN_ATOMIC_AND_FETCH_4:
6816 case BUILT_IN_ATOMIC_AND_FETCH_8:
6817 case BUILT_IN_ATOMIC_AND_FETCH_16:
6819 enum built_in_function lib;
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6821 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6822 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6823 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6824 ignore, lib);
6825 if (target)
6826 return target;
6827 break;
6829 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6830 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6831 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6832 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6833 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6835 enum built_in_function lib;
6836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6837 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6838 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6840 ignore, lib);
6841 if (target)
6842 return target;
6843 break;
6845 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6846 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6847 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6848 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6849 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6851 enum built_in_function lib;
6852 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6853 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6854 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6856 ignore, lib);
6857 if (target)
6858 return target;
6859 break;
6861 case BUILT_IN_ATOMIC_OR_FETCH_1:
6862 case BUILT_IN_ATOMIC_OR_FETCH_2:
6863 case BUILT_IN_ATOMIC_OR_FETCH_4:
6864 case BUILT_IN_ATOMIC_OR_FETCH_8:
6865 case BUILT_IN_ATOMIC_OR_FETCH_16:
6867 enum built_in_function lib;
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6869 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6870 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6871 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6872 ignore, lib);
6873 if (target)
6874 return target;
6875 break;
6877 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6878 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6879 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6880 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6881 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6882 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6883 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6884 ignore, BUILT_IN_NONE);
6885 if (target)
6886 return target;
6887 break;
6889 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6890 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6891 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6892 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6893 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6894 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6895 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6896 ignore, BUILT_IN_NONE);
6897 if (target)
6898 return target;
6899 break;
6901 case BUILT_IN_ATOMIC_FETCH_AND_1:
6902 case BUILT_IN_ATOMIC_FETCH_AND_2:
6903 case BUILT_IN_ATOMIC_FETCH_AND_4:
6904 case BUILT_IN_ATOMIC_FETCH_AND_8:
6905 case BUILT_IN_ATOMIC_FETCH_AND_16:
6906 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6907 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6908 ignore, BUILT_IN_NONE);
6909 if (target)
6910 return target;
6911 break;
6913 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6914 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6915 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6916 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6917 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6918 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6919 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6920 ignore, BUILT_IN_NONE);
6921 if (target)
6922 return target;
6923 break;
6925 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6926 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6927 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6928 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6929 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6930 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6931 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6932 ignore, BUILT_IN_NONE);
6933 if (target)
6934 return target;
6935 break;
6937 case BUILT_IN_ATOMIC_FETCH_OR_1:
6938 case BUILT_IN_ATOMIC_FETCH_OR_2:
6939 case BUILT_IN_ATOMIC_FETCH_OR_4:
6940 case BUILT_IN_ATOMIC_FETCH_OR_8:
6941 case BUILT_IN_ATOMIC_FETCH_OR_16:
6942 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6943 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6944 ignore, BUILT_IN_NONE);
6945 if (target)
6946 return target;
6947 break;
6949 case BUILT_IN_ATOMIC_TEST_AND_SET:
6950 return expand_builtin_atomic_test_and_set (exp, target);
6952 case BUILT_IN_ATOMIC_CLEAR:
6953 return expand_builtin_atomic_clear (exp);
6955 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6956 return expand_builtin_atomic_always_lock_free (exp);
6958 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6959 target = expand_builtin_atomic_is_lock_free (exp);
6960 if (target)
6961 return target;
6962 break;
6964 case BUILT_IN_ATOMIC_THREAD_FENCE:
6965 expand_builtin_atomic_thread_fence (exp);
6966 return const0_rtx;
6968 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6969 expand_builtin_atomic_signal_fence (exp);
6970 return const0_rtx;
6972 case BUILT_IN_OBJECT_SIZE:
6973 return expand_builtin_object_size (exp);
6975 case BUILT_IN_MEMCPY_CHK:
6976 case BUILT_IN_MEMPCPY_CHK:
6977 case BUILT_IN_MEMMOVE_CHK:
6978 case BUILT_IN_MEMSET_CHK:
6979 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6980 if (target)
6981 return target;
6982 break;
6984 case BUILT_IN_STRCPY_CHK:
6985 case BUILT_IN_STPCPY_CHK:
6986 case BUILT_IN_STRNCPY_CHK:
6987 case BUILT_IN_STPNCPY_CHK:
6988 case BUILT_IN_STRCAT_CHK:
6989 case BUILT_IN_STRNCAT_CHK:
6990 case BUILT_IN_SNPRINTF_CHK:
6991 case BUILT_IN_VSNPRINTF_CHK:
6992 maybe_emit_chk_warning (exp, fcode);
6993 break;
6995 case BUILT_IN_SPRINTF_CHK:
6996 case BUILT_IN_VSPRINTF_CHK:
6997 maybe_emit_sprintf_chk_warning (exp, fcode);
6998 break;
7000 case BUILT_IN_FREE:
7001 if (warn_free_nonheap_object)
7002 maybe_emit_free_warning (exp);
7003 break;
7005 case BUILT_IN_THREAD_POINTER:
7006 return expand_builtin_thread_pointer (exp, target);
7008 case BUILT_IN_SET_THREAD_POINTER:
7009 expand_builtin_set_thread_pointer (exp);
7010 return const0_rtx;
7012 case BUILT_IN_CILK_DETACH:
7013 expand_builtin_cilk_detach (exp);
7014 return const0_rtx;
7016 case BUILT_IN_CILK_POP_FRAME:
7017 expand_builtin_cilk_pop_frame (exp);
7018 return const0_rtx;
7020 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7021 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7022 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7023 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7024 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7025 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7026 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7027 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7028 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7029 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7030 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7031 /* We allow user CHKP builtins if Pointer Bounds
7032 Checker is off. */
7033 if (!chkp_function_instrumented_p (current_function_decl))
7035 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7036 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7037 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7038 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7039 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7040 return expand_normal (CALL_EXPR_ARG (exp, 0));
7041 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7042 return expand_normal (size_zero_node);
7043 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7044 return expand_normal (size_int (-1));
7045 else
7046 return const0_rtx;
7048 /* FALLTHROUGH */
7050 case BUILT_IN_CHKP_BNDMK:
7051 case BUILT_IN_CHKP_BNDSTX:
7052 case BUILT_IN_CHKP_BNDCL:
7053 case BUILT_IN_CHKP_BNDCU:
7054 case BUILT_IN_CHKP_BNDLDX:
7055 case BUILT_IN_CHKP_BNDRET:
7056 case BUILT_IN_CHKP_INTERSECT:
7057 case BUILT_IN_CHKP_NARROW:
7058 case BUILT_IN_CHKP_EXTRACT_LOWER:
7059 case BUILT_IN_CHKP_EXTRACT_UPPER:
7060 /* Software implementation of Pointer Bounds Checker is NYI.
7061 Target support is required. */
7062 error ("Your target platform does not support -fcheck-pointer-bounds");
7063 break;
7065 case BUILT_IN_ACC_ON_DEVICE:
7066 target = expand_builtin_acc_on_device (exp, target);
7067 if (target)
7068 return target;
7069 break;
7071 default: /* just do library call, if unknown builtin */
7072 break;
7075 /* The switch statement above can drop through to cause the function
7076 to be called normally. */
7077 return expand_call (exp, target, ignore);
7080 /* Similar to expand_builtin but is used for instrumented calls. */
7083 expand_builtin_with_bounds (tree exp, rtx target,
7084 rtx subtarget ATTRIBUTE_UNUSED,
7085 machine_mode mode, int ignore)
7087 tree fndecl = get_callee_fndecl (exp);
7088 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7090 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7092 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7093 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7095 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7096 && fcode < END_CHKP_BUILTINS);
7098 switch (fcode)
7100 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7101 target = expand_builtin_memcpy_with_bounds (exp, target);
7102 if (target)
7103 return target;
7104 break;
7106 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7107 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7108 if (target)
7109 return target;
7110 break;
7112 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7113 target = expand_builtin_memset_with_bounds (exp, target, mode);
7114 if (target)
7115 return target;
7116 break;
7118 default:
7119 break;
7122 /* The switch statement above can drop through to cause the function
7123 to be called normally. */
7124 return expand_call (exp, target, ignore);
7127 /* Determine whether a tree node represents a call to a built-in
7128 function. If the tree T is a call to a built-in function with
7129 the right number of arguments of the appropriate types, return
7130 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7131 Otherwise the return value is END_BUILTINS. */
7133 enum built_in_function
7134 builtin_mathfn_code (const_tree t)
7136 const_tree fndecl, arg, parmlist;
7137 const_tree argtype, parmtype;
7138 const_call_expr_arg_iterator iter;
7140 if (TREE_CODE (t) != CALL_EXPR
7141 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7142 return END_BUILTINS;
7144 fndecl = get_callee_fndecl (t);
7145 if (fndecl == NULL_TREE
7146 || TREE_CODE (fndecl) != FUNCTION_DECL
7147 || ! DECL_BUILT_IN (fndecl)
7148 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7149 return END_BUILTINS;
7151 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7152 init_const_call_expr_arg_iterator (t, &iter);
7153 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7155 /* If a function doesn't take a variable number of arguments,
7156 the last element in the list will have type `void'. */
7157 parmtype = TREE_VALUE (parmlist);
7158 if (VOID_TYPE_P (parmtype))
7160 if (more_const_call_expr_args_p (&iter))
7161 return END_BUILTINS;
7162 return DECL_FUNCTION_CODE (fndecl);
7165 if (! more_const_call_expr_args_p (&iter))
7166 return END_BUILTINS;
7168 arg = next_const_call_expr_arg (&iter);
7169 argtype = TREE_TYPE (arg);
7171 if (SCALAR_FLOAT_TYPE_P (parmtype))
7173 if (! SCALAR_FLOAT_TYPE_P (argtype))
7174 return END_BUILTINS;
7176 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7178 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7179 return END_BUILTINS;
7181 else if (POINTER_TYPE_P (parmtype))
7183 if (! POINTER_TYPE_P (argtype))
7184 return END_BUILTINS;
7186 else if (INTEGRAL_TYPE_P (parmtype))
7188 if (! INTEGRAL_TYPE_P (argtype))
7189 return END_BUILTINS;
7191 else
7192 return END_BUILTINS;
7195 /* Variable-length argument list. */
7196 return DECL_FUNCTION_CODE (fndecl);
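/* Illustrative behavior of the checks above: a well-typed call such as
   sqrt (2.0) yields BUILT_IN_SQRT, while a call whose argument types do
   not match the builtin's prototype -- e.g. sqrt applied to a pointer --
   yields END_BUILTINS, as does any indirect or non-builtin call.  */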
7199 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7200 evaluate to a constant. */
7202 static tree
7203 fold_builtin_constant_p (tree arg)
7205 /* We return 1 for a numeric type that's known to be a constant
7206 value at compile-time or for an aggregate type that's a
7207 literal constant. */
7208 STRIP_NOPS (arg);
7210 /* If we know this is a constant, return the constant one. */
7211 if (CONSTANT_CLASS_P (arg)
7212 || (TREE_CODE (arg) == CONSTRUCTOR
7213 && TREE_CONSTANT (arg)))
7214 return integer_one_node;
7215 if (TREE_CODE (arg) == ADDR_EXPR)
7217 tree op = TREE_OPERAND (arg, 0);
7218 if (TREE_CODE (op) == STRING_CST
7219 || (TREE_CODE (op) == ARRAY_REF
7220 && integer_zerop (TREE_OPERAND (op, 1))
7221 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7222 return integer_one_node;
7225 /* If this expression has side effects, show we don't know it to be a
7226 constant. Likewise if it's a pointer or aggregate type, since in
7227 those cases we only want literals: they are only optimized
7228 when generating RTL, not later.
7229 And finally, if we are compiling an initializer, not code, we
7230 need to return a definite result now; there's not going to be any
7231 more optimization done. */
7232 if (TREE_SIDE_EFFECTS (arg)
7233 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7234 || POINTER_TYPE_P (TREE_TYPE (arg))
7235 || cfun == 0
7236 || folding_initializer
7237 || force_folding_builtin_constant_p)
7238 return integer_zero_node;
7240 return NULL_TREE;
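/* Illustrative folds: __builtin_constant_p (42) and
   __builtin_constant_p ("abc") become 1 here, while an argument with
   side effects such as __builtin_constant_p (i++) becomes 0.  A plain
   scalar variable is left unfolded (NULL_TREE) so later optimization
   may still prove it constant.  */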
7243 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7244 return it as a truthvalue. */
7246 static tree
7247 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7248 tree predictor)
7250 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7252 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7253 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7254 ret_type = TREE_TYPE (TREE_TYPE (fn));
7255 pred_type = TREE_VALUE (arg_types);
7256 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7258 pred = fold_convert_loc (loc, pred_type, pred);
7259 expected = fold_convert_loc (loc, expected_type, expected);
7260 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7261 predictor);
7263 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7264 build_int_cst (ret_type, 0));
7267 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7268 NULL_TREE if no simplification is possible. */
7270 tree
7271 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7273 tree inner, fndecl, inner_arg0;
7274 enum tree_code code;
7276 /* Distribute the expected value over short-circuiting operators.
7277 See through the cast from truthvalue_type_node to long. */
7278 inner_arg0 = arg0;
7279 while (CONVERT_EXPR_P (inner_arg0)
7280 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7281 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7282 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7284 /* If this is a builtin_expect within a builtin_expect, keep the
7285 inner one. See through a comparison against a constant. It
7286 might have been added to create a truthvalue. */
7287 inner = inner_arg0;
7289 if (COMPARISON_CLASS_P (inner)
7290 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7291 inner = TREE_OPERAND (inner, 0);
7293 if (TREE_CODE (inner) == CALL_EXPR
7294 && (fndecl = get_callee_fndecl (inner))
7295 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7296 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7297 return arg0;
7299 inner = inner_arg0;
7300 code = TREE_CODE (inner);
7301 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7303 tree op0 = TREE_OPERAND (inner, 0);
7304 tree op1 = TREE_OPERAND (inner, 1);
7306 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7307 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7308 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7310 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7313 /* If the argument isn't invariant then there's nothing else we can do. */
7314 if (!TREE_CONSTANT (inner_arg0))
7315 return NULL_TREE;
7317 /* If we expect that a comparison against the argument will fold to
7318 a constant, return the constant. In practice, this means a true
7319 constant or the address of a non-weak symbol. */
7320 inner = inner_arg0;
7321 STRIP_NOPS (inner);
7322 if (TREE_CODE (inner) == ADDR_EXPR)
7326 inner = TREE_OPERAND (inner, 0);
7328 while (TREE_CODE (inner) == COMPONENT_REF
7329 || TREE_CODE (inner) == ARRAY_REF);
7330 if ((TREE_CODE (inner) == VAR_DECL
7331 || TREE_CODE (inner) == FUNCTION_DECL)
7332 && DECL_WEAK (inner))
7333 return NULL_TREE;
7336 /* Otherwise, ARG0 already has the proper type for the return value. */
7337 return arg0;
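/* As an illustration of the distribution above,
     __builtin_expect (a && b, 1)
   is folded into roughly
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so the expectation reaches both operands of the short-circuit.  */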
7340 /* Fold a call to __builtin_classify_type with argument ARG. */
7342 static tree
7343 fold_builtin_classify_type (tree arg)
7345 if (arg == 0)
7346 return build_int_cst (integer_type_node, no_type_class);
7348 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7351 /* Fold a call to __builtin_strlen with argument ARG. */
7353 static tree
7354 fold_builtin_strlen (location_t loc, tree type, tree arg)
7356 if (!validate_arg (arg, POINTER_TYPE))
7357 return NULL_TREE;
7358 else
7360 tree len = c_strlen (arg, 0);
7362 if (len)
7363 return fold_convert_loc (loc, type, len);
7365 return NULL_TREE;
7369 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7371 static tree
7372 fold_builtin_inf (location_t loc, tree type, int warn)
7374 REAL_VALUE_TYPE real;
7376 /* __builtin_inff is intended to be usable to define INFINITY on all
7377 targets. If an infinity is not available, INFINITY expands "to a
7378 positive constant of type float that overflows at translation
7379 time", footnote "In this case, using INFINITY will violate the
7380 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7381 Thus we pedwarn to ensure this constraint violation is
7382 diagnosed. */
7383 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7384 pedwarn (loc, 0, "target format does not support infinity");
7386 real_inf (&real);
7387 return build_real (type, real);
7390 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7392 static tree
7393 fold_builtin_nan (tree arg, tree type, int quiet)
7395 REAL_VALUE_TYPE real;
7396 const char *str;
7398 if (!validate_arg (arg, POINTER_TYPE))
7399 return NULL_TREE;
7400 str = c_getstr (arg);
7401 if (!str)
7402 return NULL_TREE;
7404 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7405 return NULL_TREE;
7407 return build_real (type, real);
7410 /* Return true if the floating point expression T has an integer value.
7411 We also allow +Inf, -Inf and NaN to be considered integer values. */
7413 static bool
7414 integer_valued_real_p (tree t)
7416 switch (TREE_CODE (t))
7418 case FLOAT_EXPR:
7419 return true;
7421 case ABS_EXPR:
7422 case SAVE_EXPR:
7423 return integer_valued_real_p (TREE_OPERAND (t, 0));
7425 case COMPOUND_EXPR:
7426 case MODIFY_EXPR:
7427 case BIND_EXPR:
7428 return integer_valued_real_p (TREE_OPERAND (t, 1));
7430 case PLUS_EXPR:
7431 case MINUS_EXPR:
7432 case MULT_EXPR:
7433 case MIN_EXPR:
7434 case MAX_EXPR:
7435 return integer_valued_real_p (TREE_OPERAND (t, 0))
7436 && integer_valued_real_p (TREE_OPERAND (t, 1));
7438 case COND_EXPR:
7439 return integer_valued_real_p (TREE_OPERAND (t, 1))
7440 && integer_valued_real_p (TREE_OPERAND (t, 2));
7442 case REAL_CST:
7443 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7445 CASE_CONVERT:
7447 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7448 if (TREE_CODE (type) == INTEGER_TYPE)
7449 return true;
7450 if (TREE_CODE (type) == REAL_TYPE)
7451 return integer_valued_real_p (TREE_OPERAND (t, 0));
7452 break;
7455 case CALL_EXPR:
7456 switch (builtin_mathfn_code (t))
7458 CASE_FLT_FN (BUILT_IN_CEIL):
7459 CASE_FLT_FN (BUILT_IN_FLOOR):
7460 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7461 CASE_FLT_FN (BUILT_IN_RINT):
7462 CASE_FLT_FN (BUILT_IN_ROUND):
7463 CASE_FLT_FN (BUILT_IN_TRUNC):
7464 return true;
7466 CASE_FLT_FN (BUILT_IN_FMIN):
7467 CASE_FLT_FN (BUILT_IN_FMAX):
7468 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7469 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7471 default:
7472 break;
7474 break;
7476 default:
7477 break;
7479 return false;
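/* Examples this predicate accepts: (double) i for integral i, floor (x),
   fabs (trunc (x)), and fmin (ceil (x), round (y)); a REAL_CST qualifies
   only when its value is an exact integer.  */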
7482 /* FNDECL is assumed to be a builtin where truncation can be propagated
7483 across (for instance floor((double)f) == (double)floorf (f)).
7484 Do the transformation for a call with argument ARG. */
7486 static tree
7487 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7489 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7491 if (!validate_arg (arg, REAL_TYPE))
7492 return NULL_TREE;
7494 /* Integer rounding functions are idempotent. */
7495 if (fcode == builtin_mathfn_code (arg))
7496 return arg;
7498 /* If argument is already integer valued, and we don't need to worry
7499 about setting errno, there's no need to perform rounding. */
7500 if (! flag_errno_math && integer_valued_real_p (arg))
7501 return arg;
7503 if (optimize)
7505 tree arg0 = strip_float_extensions (arg);
7506 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7507 tree newtype = TREE_TYPE (arg0);
7508 tree decl;
7510 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7511 && (decl = mathfn_built_in (newtype, fcode)))
7512 return fold_convert_loc (loc, ftype,
7513 build_call_expr_loc (loc, decl, 1,
7514 fold_convert_loc (loc,
7515 newtype,
7516 arg0)));
7518 return NULL_TREE;
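/* For instance, given float f, floor ((double) f) is narrowed here to
   (double) floorf (f) when optimizing, replacing the double-precision
   libcall by its float counterpart.  */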
7521 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7522 the argument, for instance lround((double)f) -> lroundf (f).
7523 Do the transformation for a call with argument ARG. */
7525 static tree
7526 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7528 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7533 /* If argument is already integer valued, and we don't need to worry
7534 about setting errno, there's no need to perform rounding. */
7535 if (! flag_errno_math && integer_valued_real_p (arg))
7536 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7537 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7539 if (optimize)
7541 tree ftype = TREE_TYPE (arg);
7542 tree arg0 = strip_float_extensions (arg);
7543 tree newtype = TREE_TYPE (arg0);
7544 tree decl;
7546 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7547 && (decl = mathfn_built_in (newtype, fcode)))
7548 return build_call_expr_loc (loc, decl, 1,
7549 fold_convert_loc (loc, newtype, arg0));
7552 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7553 sizeof (int) == sizeof (long). */
7554 if (TYPE_PRECISION (integer_type_node)
7555 == TYPE_PRECISION (long_integer_type_node))
7557 tree newfn = NULL_TREE;
7558 switch (fcode)
7560 CASE_FLT_FN (BUILT_IN_ICEIL):
7561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7562 break;
7564 CASE_FLT_FN (BUILT_IN_IFLOOR):
7565 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7566 break;
7568 CASE_FLT_FN (BUILT_IN_IROUND):
7569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7570 break;
7572 CASE_FLT_FN (BUILT_IN_IRINT):
7573 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7574 break;
7576 default:
7577 break;
7580 if (newfn)
7582 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7583 return fold_convert_loc (loc,
7584 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7588 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7589 sizeof (long long) == sizeof (long). */
7590 if (TYPE_PRECISION (long_long_integer_type_node)
7591 == TYPE_PRECISION (long_integer_type_node))
7593 tree newfn = NULL_TREE;
7594 switch (fcode)
7596 CASE_FLT_FN (BUILT_IN_LLCEIL):
7597 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7598 break;
7600 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7601 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7602 break;
7604 CASE_FLT_FN (BUILT_IN_LLROUND):
7605 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7606 break;
7608 CASE_FLT_FN (BUILT_IN_LLRINT):
7609 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7610 break;
7612 default:
7613 break;
7616 if (newfn)
7618 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7619 return fold_convert_loc (loc,
7620 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7624 return NULL_TREE;
7627 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7628 return type. Return NULL_TREE if no simplification can be made. */
7630 static tree
7631 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7633 tree res;
7635 if (!validate_arg (arg, COMPLEX_TYPE)
7636 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7637 return NULL_TREE;
7639 /* Calculate the result when the argument is a constant. */
7640 if (TREE_CODE (arg) == COMPLEX_CST
7641 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7642 type, mpfr_hypot)))
7643 return res;
7645 if (TREE_CODE (arg) == COMPLEX_EXPR)
7647 tree real = TREE_OPERAND (arg, 0);
7648 tree imag = TREE_OPERAND (arg, 1);
7650 /* If either part is zero, cabs is fabs of the other. */
7651 if (real_zerop (real))
7652 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7653 if (real_zerop (imag))
7654 return fold_build1_loc (loc, ABS_EXPR, type, real);
7656 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7657 if (flag_unsafe_math_optimizations
7658 && operand_equal_p (real, imag, OEP_PURE_SAME))
7660 const REAL_VALUE_TYPE sqrt2_trunc
7661 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7662 STRIP_NOPS (real);
7663 return fold_build2_loc (loc, MULT_EXPR, type,
7664 fold_build1_loc (loc, ABS_EXPR, type, real),
7665 build_real (type, sqrt2_trunc));
7669 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7670 if (TREE_CODE (arg) == NEGATE_EXPR
7671 || TREE_CODE (arg) == CONJ_EXPR)
7672 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7674 /* Don't do this when optimizing for size. */
7675 if (flag_unsafe_math_optimizations
7676 && optimize && optimize_function_for_speed_p (cfun))
7678 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7680 if (sqrtfn != NULL_TREE)
7682 tree rpart, ipart, result;
7684 arg = builtin_save_expr (arg);
7686 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7687 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7689 rpart = builtin_save_expr (rpart);
7690 ipart = builtin_save_expr (ipart);
7692 result = fold_build2_loc (loc, PLUS_EXPR, type,
7693 fold_build2_loc (loc, MULT_EXPR, type,
7694 rpart, rpart),
7695 fold_build2_loc (loc, MULT_EXPR, type,
7696 ipart, ipart));
7698 return build_call_expr_loc (loc, sqrtfn, 1, result);
7702 return NULL_TREE;
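/* With -funsafe-math-optimizations and when optimizing for speed, the
   final transformation above expands cabs (z) into roughly
     sqrt (__real z * __real z + __imag z * __imag z)
   with the parts wrapped in save_exprs to avoid double evaluation.  */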
7705 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7706 complex tree type of the result. If NEG is true, the imaginary
7707 zero is negative. */
7709 static tree
7710 build_complex_cproj (tree type, bool neg)
7712 REAL_VALUE_TYPE rinf, rzero = dconst0;
7714 real_inf (&rinf);
7715 rzero.sign = neg;
7716 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7717 build_real (TREE_TYPE (type), rzero));
7720 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7721 return type. Return NULL_TREE if no simplification can be made. */
7723 static tree
7724 fold_builtin_cproj (location_t loc, tree arg, tree type)
7726 if (!validate_arg (arg, COMPLEX_TYPE)
7727 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7728 return NULL_TREE;
7730 /* If there are no infinities, return arg. */
7731 if (! HONOR_INFINITIES (type))
7732 return non_lvalue_loc (loc, arg);
7734 /* Calculate the result when the argument is a constant. */
7735 if (TREE_CODE (arg) == COMPLEX_CST)
7737 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7738 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7740 if (real_isinf (real) || real_isinf (imag))
7741 return build_complex_cproj (type, imag->sign);
7742 else
7743 return arg;
7745 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7747 tree real = TREE_OPERAND (arg, 0);
7748 tree imag = TREE_OPERAND (arg, 1);
7750 STRIP_NOPS (real);
7751 STRIP_NOPS (imag);
7753 /* If the real part is inf and the imag part is known to be
7754 nonnegative, return (inf + 0i). Remember side-effects are
7755 possible in the imag part. */
7756 if (TREE_CODE (real) == REAL_CST
7757 && real_isinf (TREE_REAL_CST_PTR (real))
7758 && tree_expr_nonnegative_p (imag))
7759 return omit_one_operand_loc (loc, type,
7760 build_complex_cproj (type, false),
7761 arg);
7763 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7764 Remember side-effects are possible in the real part. */
7765 if (TREE_CODE (imag) == REAL_CST
7766 && real_isinf (TREE_REAL_CST_PTR (imag)))
7767 return
7768 omit_one_operand_loc (loc, type,
7769 build_complex_cproj (type, TREE_REAL_CST_PTR
7770 (imag)->sign), arg);
7773 return NULL_TREE;
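/* For example, cproj (0.0 + INFINITY * 1.0i) folds to (inf + 0.0i) and
   cproj (x - INFINITY * 1.0i) folds to (inf - 0.0i), matching the C99
   definition of cproj for arguments with an infinite part.  */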
7776 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7777 Return NULL_TREE if no simplification can be made. */
7779 static tree
7780 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7783 enum built_in_function fcode;
7784 tree res;
7786 if (!validate_arg (arg, REAL_TYPE))
7787 return NULL_TREE;
7789 /* Calculate the result when the argument is a constant. */
7790 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7791 return res;
7793 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7794 fcode = builtin_mathfn_code (arg);
7795 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7797 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7798 arg = fold_build2_loc (loc, MULT_EXPR, type,
7799 CALL_EXPR_ARG (arg, 0),
7800 build_real (type, dconsthalf));
7801 return build_call_expr_loc (loc, expfn, 1, arg);
7804 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7805 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7807 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7809 if (powfn)
7811 tree arg0 = CALL_EXPR_ARG (arg, 0);
7812 tree tree_root;
7813 /* The inner root was either sqrt or cbrt. */
7814 /* This was a conditional expression but it triggered a bug
7815 in Sun C 5.5. */
7816 REAL_VALUE_TYPE dconstroot;
7817 if (BUILTIN_SQRT_P (fcode))
7818 dconstroot = dconsthalf;
7819 else
7820 dconstroot = dconst_third ();
7822 /* Adjust for the outer root. */
7823 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7824 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7825 tree_root = build_real (type, dconstroot);
7826 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7830 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7831 if (flag_unsafe_math_optimizations
7832 && (fcode == BUILT_IN_POW
7833 || fcode == BUILT_IN_POWF
7834 || fcode == BUILT_IN_POWL))
7836 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7837 tree arg0 = CALL_EXPR_ARG (arg, 0);
7838 tree arg1 = CALL_EXPR_ARG (arg, 1);
7839 tree narg1;
7840 if (!tree_expr_nonnegative_p (arg0))
7841 arg0 = build1 (ABS_EXPR, type, arg0);
7842 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7843 build_real (type, dconsthalf));
7844 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7847 return NULL_TREE;
7850 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7851 Return NULL_TREE if no simplification can be made. */
7853 static tree
7854 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7856 const enum built_in_function fcode = builtin_mathfn_code (arg);
7857 tree res;
7859 if (!validate_arg (arg, REAL_TYPE))
7860 return NULL_TREE;
7862 /* Calculate the result when the argument is a constant. */
7863 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7864 return res;
7866 if (flag_unsafe_math_optimizations)
7868 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7869 if (BUILTIN_EXPONENT_P (fcode))
7871 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7872 const REAL_VALUE_TYPE third_trunc =
7873 real_value_truncate (TYPE_MODE (type), dconst_third ());
7874 arg = fold_build2_loc (loc, MULT_EXPR, type,
7875 CALL_EXPR_ARG (arg, 0),
7876 build_real (type, third_trunc));
7877 return build_call_expr_loc (loc, expfn, 1, arg);
7880 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7881 if (BUILTIN_SQRT_P (fcode))
7883 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7885 if (powfn)
7887 tree arg0 = CALL_EXPR_ARG (arg, 0);
7888 tree tree_root;
7889 REAL_VALUE_TYPE dconstroot = dconst_third ();
7891 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7892 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7893 tree_root = build_real (type, dconstroot);
7894 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7898 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7899 if (BUILTIN_CBRT_P (fcode))
7901 tree arg0 = CALL_EXPR_ARG (arg, 0);
7902 if (tree_expr_nonnegative_p (arg0))
7904 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7906 if (powfn)
7908 tree tree_root;
7909 REAL_VALUE_TYPE dconstroot;
7911 real_arithmetic (&dconstroot, MULT_EXPR,
7912 dconst_third_ptr (), dconst_third_ptr ());
7913 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7914 tree_root = build_real (type, dconstroot);
7915 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7920 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7921 if (fcode == BUILT_IN_POW
7922 || fcode == BUILT_IN_POWF
7923 || fcode == BUILT_IN_POWL)
7925 tree arg00 = CALL_EXPR_ARG (arg, 0);
7926 tree arg01 = CALL_EXPR_ARG (arg, 1);
7927 if (tree_expr_nonnegative_p (arg00))
7929 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7930 const REAL_VALUE_TYPE dconstroot
7931 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7932 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7933 build_real (type, dconstroot));
7934 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7938 return NULL_TREE;
7941 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7942 TYPE is the type of the return value. Return NULL_TREE if no
7943 simplification can be made. */
7945 static tree
7946 fold_builtin_cos (location_t loc,
7947 tree arg, tree type, tree fndecl)
7949 tree res, narg;
7951 if (!validate_arg (arg, REAL_TYPE))
7952 return NULL_TREE;
7954 /* Calculate the result when the argument is a constant. */
7955 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7956 return res;
7958 /* Optimize cos(-x) into cos (x). */
7959 if ((narg = fold_strip_sign_ops (arg)))
7960 return build_call_expr_loc (loc, fndecl, 1, narg);
7962 return NULL_TREE;
7965 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7966 Return NULL_TREE if no simplification can be made. */
7968 static tree
7969 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7971 if (validate_arg (arg, REAL_TYPE))
7973 tree res, narg;
7975 /* Calculate the result when the argument is a constant. */
7976 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7977 return res;
7979 /* Optimize cosh(-x) into cosh (x). */
7980 if ((narg = fold_strip_sign_ops (arg)))
7981 return build_call_expr_loc (loc, fndecl, 1, narg);
7984 return NULL_TREE;
7987 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7988 argument ARG. TYPE is the type of the return value. Return
7989 NULL_TREE if no simplification can be made. */
7991 static tree
7992 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7993 bool hyper)
7995 if (validate_arg (arg, COMPLEX_TYPE)
7996 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7998 tree tmp;
8000 /* Calculate the result when the argument is a constant. */
8001 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8002 return tmp;
8004 /* Optimize fn(-x) into fn(x). */
8005 if ((tmp = fold_strip_sign_ops (arg)))
8006 return build_call_expr_loc (loc, fndecl, 1, tmp);
8009 return NULL_TREE;
8012 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8013 Return NULL_TREE if no simplification can be made. */
8015 static tree
8016 fold_builtin_tan (tree arg, tree type)
8018 enum built_in_function fcode;
8019 tree res;
8021 if (!validate_arg (arg, REAL_TYPE))
8022 return NULL_TREE;
8024 /* Calculate the result when the argument is a constant. */
8025 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8026 return res;
8028 /* Optimize tan(atan(x)) = x. */
8029 fcode = builtin_mathfn_code (arg);
8030 if (flag_unsafe_math_optimizations
8031 && (fcode == BUILT_IN_ATAN
8032 || fcode == BUILT_IN_ATANF
8033 || fcode == BUILT_IN_ATANL))
8034 return CALL_EXPR_ARG (arg, 0);
8036 return NULL_TREE;
8039 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8040 NULL_TREE if no simplification can be made. */
8042 static tree
8043 fold_builtin_sincos (location_t loc,
8044 tree arg0, tree arg1, tree arg2)
8046 tree type;
8047 tree res, fn, call;
8049 if (!validate_arg (arg0, REAL_TYPE)
8050 || !validate_arg (arg1, POINTER_TYPE)
8051 || !validate_arg (arg2, POINTER_TYPE))
8052 return NULL_TREE;
8054 type = TREE_TYPE (arg0);
8056 /* Calculate the result when the argument is a constant. */
8057 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8058 return res;
8060 /* Canonicalize sincos to cexpi. */
8061 if (!targetm.libc_has_function (function_c99_math_complex))
8062 return NULL_TREE;
8063 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8064 if (!fn)
8065 return NULL_TREE;
8067 call = build_call_expr_loc (loc, fn, 1, arg0);
8068 call = builtin_save_expr (call);
8070 return build2 (COMPOUND_EXPR, void_type_node,
8071 build2 (MODIFY_EXPR, void_type_node,
8072 build_fold_indirect_ref_loc (loc, arg1),
8073 build1 (IMAGPART_EXPR, type, call)),
8074 build2 (MODIFY_EXPR, void_type_node,
8075 build_fold_indirect_ref_loc (loc, arg2),
8076 build1 (REALPART_EXPR, type, call)));
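/* When the C99 complex functions are available, the canonicalization
   above rewrites
     sincos (x, &s, &c);
   into roughly
     tmp = cexpi (x); s = __imag tmp; c = __real tmp;
   so later passes can share the computation with other uses of sin (x)
   and cos (x).  */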
8079 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8080 NULL_TREE if no simplification can be made. */
8082 static tree
8083 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8085 tree rtype;
8086 tree realp, imagp, ifn;
8087 tree res;
8089 if (!validate_arg (arg0, COMPLEX_TYPE)
8090 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8091 return NULL_TREE;
8093 /* Calculate the result when the argument is a constant. */
8094 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8095 return res;
8097 rtype = TREE_TYPE (TREE_TYPE (arg0));
8099 /* If we can figure out the real part of arg0 and it is constant zero,
8100 fold to cexpi. */
8101 if (!targetm.libc_has_function (function_c99_math_complex))
8102 return NULL_TREE;
8103 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8104 if (!ifn)
8105 return NULL_TREE;
8107 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8108 && real_zerop (realp))
8110 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8111 return build_call_expr_loc (loc, ifn, 1, narg);
8114 /* If we can easily decompose the real and imaginary parts, split cexp
8115 into exp (r) * cexpi (i). */
8116 if (flag_unsafe_math_optimizations
8117 && realp)
8119 tree rfn, rcall, icall;
8121 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8122 if (!rfn)
8123 return NULL_TREE;
8125 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8126 if (!imagp)
8127 return NULL_TREE;
8129 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8130 icall = builtin_save_expr (icall);
8131 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8132 rcall = builtin_save_expr (rcall);
8133 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8134 fold_build2_loc (loc, MULT_EXPR, rtype,
8135 rcall,
8136 fold_build1_loc (loc, REALPART_EXPR,
8137 rtype, icall)),
8138 fold_build2_loc (loc, MULT_EXPR, rtype,
8139 rcall,
8140 fold_build1_loc (loc, IMAGPART_EXPR,
8141 rtype, icall)));
8144 return NULL_TREE;
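/* For instance, cexp of a value whose real part is known to be zero
   folds directly to cexpi of the imaginary part, and with
   -funsafe-math-optimizations a decomposable argument is split into
   roughly exp (r) * cexpi (i), i.e. the complex value
   (exp (r) * cos (i), exp (r) * sin (i)).  */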
8147 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8148 Return NULL_TREE if no simplification can be made. */
8150 static tree
8151 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8153 if (!validate_arg (arg, REAL_TYPE))
8154 return NULL_TREE;
8156 /* Optimize trunc of constant value. */
8157 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8159 REAL_VALUE_TYPE r, x;
8160 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8162 x = TREE_REAL_CST (arg);
8163 real_trunc (&r, TYPE_MODE (type), &x);
8164 return build_real (type, r);
8167 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8170 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8171 Return NULL_TREE if no simplification can be made. */
8173 static tree
8174 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8176 if (!validate_arg (arg, REAL_TYPE))
8177 return NULL_TREE;
8179 /* Optimize floor of constant value. */
8180 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8182 REAL_VALUE_TYPE x;
8184 x = TREE_REAL_CST (arg);
8185 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8187 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8188 REAL_VALUE_TYPE r;
8190 real_floor (&r, TYPE_MODE (type), &x);
8191 return build_real (type, r);
8195 /* Fold floor (x) where x is nonnegative to trunc (x). */
8196 if (tree_expr_nonnegative_p (arg))
8198 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8199 if (truncfn)
8200 return build_call_expr_loc (loc, truncfn, 1, arg);
8203 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8206 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8207 Return NULL_TREE if no simplification can be made. */
8209 static tree
8210 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8212 if (!validate_arg (arg, REAL_TYPE))
8213 return NULL_TREE;
8215 /* Optimize ceil of constant value. */
8216 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8218 REAL_VALUE_TYPE x;
8220 x = TREE_REAL_CST (arg);
8221 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8223 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8224 REAL_VALUE_TYPE r;
8226 real_ceil (&r, TYPE_MODE (type), &x);
8227 return build_real (type, r);
8231 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8234 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8235 Return NULL_TREE if no simplification can be made. */
8237 static tree
8238 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8240 if (!validate_arg (arg, REAL_TYPE))
8241 return NULL_TREE;
8243 /* Optimize round of constant value. */
8244 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8246 REAL_VALUE_TYPE x;
8248 x = TREE_REAL_CST (arg);
8249 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8251 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8252 REAL_VALUE_TYPE r;
8254 real_round (&r, TYPE_MODE (type), &x);
8255 return build_real (type, r);
8259 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8262 /* Fold function call to builtin lround, lroundf or lroundl (or the
8263 corresponding long long versions) and other rounding functions. ARG
8264 is the argument to the call. Return NULL_TREE if no simplification
8265 can be made. */
8267 static tree
8268 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8270 if (!validate_arg (arg, REAL_TYPE))
8271 return NULL_TREE;
8273 /* Optimize lround of constant value. */
8274 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8276 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8278 if (real_isfinite (&x))
8280 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8281 tree ftype = TREE_TYPE (arg);
8282 REAL_VALUE_TYPE r;
8283 bool fail = false;
8285 switch (DECL_FUNCTION_CODE (fndecl))
8287 CASE_FLT_FN (BUILT_IN_IFLOOR):
8288 CASE_FLT_FN (BUILT_IN_LFLOOR):
8289 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8290 real_floor (&r, TYPE_MODE (ftype), &x);
8291 break;
8293 CASE_FLT_FN (BUILT_IN_ICEIL):
8294 CASE_FLT_FN (BUILT_IN_LCEIL):
8295 CASE_FLT_FN (BUILT_IN_LLCEIL):
8296 real_ceil (&r, TYPE_MODE (ftype), &x);
8297 break;
8299 CASE_FLT_FN (BUILT_IN_IROUND):
8300 CASE_FLT_FN (BUILT_IN_LROUND):
8301 CASE_FLT_FN (BUILT_IN_LLROUND):
8302 real_round (&r, TYPE_MODE (ftype), &x);
8303 break;
8305 default:
8306 gcc_unreachable ();
8309 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8310 if (!fail)
8311 return wide_int_to_tree (itype, val);
8315 switch (DECL_FUNCTION_CODE (fndecl))
8317 CASE_FLT_FN (BUILT_IN_LFLOOR):
8318 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8319 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8320 if (tree_expr_nonnegative_p (arg))
8321 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8322 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8323 break;
8324 default:;
8327 return fold_fixed_mathfn (loc, fndecl, arg);
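/* Compile-time examples: lround (2.5) folds to 3, lfloor (-1.25) to -2
   and lceil (1.25) to 2, provided the rounded value fits in the integer
   return type; otherwise the call is left for the library.  */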
8330 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8331 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8332 the argument to the call. Return NULL_TREE if no simplification can
8333 be made. */
8335 static tree
8336 fold_builtin_bitop (tree fndecl, tree arg)
8338 if (!validate_arg (arg, INTEGER_TYPE))
8339 return NULL_TREE;
8341 /* Optimize for constant argument. */
8342 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8344 tree type = TREE_TYPE (arg);
8345 int result;
8347 switch (DECL_FUNCTION_CODE (fndecl))
8349 CASE_INT_FN (BUILT_IN_FFS):
8350 result = wi::ffs (arg);
8351 break;
8353 CASE_INT_FN (BUILT_IN_CLZ):
8354 if (wi::ne_p (arg, 0))
8355 result = wi::clz (arg);
8356 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8357 result = TYPE_PRECISION (type);
8358 break;
8360 CASE_INT_FN (BUILT_IN_CTZ):
8361 if (wi::ne_p (arg, 0))
8362 result = wi::ctz (arg);
8363 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8364 result = TYPE_PRECISION (type);
8365 break;
8367 CASE_INT_FN (BUILT_IN_CLRSB):
8368 result = wi::clrsb (arg);
8369 break;
8371 CASE_INT_FN (BUILT_IN_POPCOUNT):
8372 result = wi::popcount (arg);
8373 break;
8375 CASE_INT_FN (BUILT_IN_PARITY):
8376 result = wi::parity (arg);
8377 break;
8379 default:
8380 gcc_unreachable ();
8383 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8386 return NULL_TREE;
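/* For constant arguments these fold directly: __builtin_popcount (0xff)
   becomes 8, __builtin_ffs (0x10) becomes 5 and __builtin_parity (7)
   becomes 1.  __builtin_clz (0) folds to the target's
   CLZ_DEFINED_VALUE_AT_ZERO value if it provides one, and to the type
   precision otherwise.  */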
8389 /* Fold function call to builtin_bswap and the short, long and long long
8390 variants. Return NULL_TREE if no simplification can be made. */
8391 static tree
8392 fold_builtin_bswap (tree fndecl, tree arg)
8394 if (! validate_arg (arg, INTEGER_TYPE))
8395 return NULL_TREE;
8397 /* Optimize constant value. */
8398 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8400 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8402 switch (DECL_FUNCTION_CODE (fndecl))
8404 case BUILT_IN_BSWAP16:
8405 case BUILT_IN_BSWAP32:
8406 case BUILT_IN_BSWAP64:
8408 signop sgn = TYPE_SIGN (type);
8409 tree result =
8410 wide_int_to_tree (type,
8411 wide_int::from (arg, TYPE_PRECISION (type),
8412 sgn).bswap ());
8413 return result;
8415 default:
8416 gcc_unreachable ();
8420 return NULL_TREE;
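/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412 and
   __builtin_bswap16 (0xff00) folds to 0xff at compile time.  */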
8423 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8424 NULL_TREE if no simplification can be made. */
8426 static tree
8427 fold_builtin_hypot (location_t loc, tree fndecl,
8428 tree arg0, tree arg1, tree type)
8430 tree res, narg0, narg1;
8432 if (!validate_arg (arg0, REAL_TYPE)
8433 || !validate_arg (arg1, REAL_TYPE))
8434 return NULL_TREE;
8436 /* Calculate the result when the argument is a constant. */
8437 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8438 return res;
8440 /* If either argument to hypot has a negate or abs, strip that off.
8441 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8442 narg0 = fold_strip_sign_ops (arg0);
8443 narg1 = fold_strip_sign_ops (arg1);
8444 if (narg0 || narg1)
8446 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8447 narg1 ? narg1 : arg1);
8450 /* If either argument is zero, hypot is fabs of the other. */
8451 if (real_zerop (arg0))
8452 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8453 else if (real_zerop (arg1))
8454 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8456 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8457 if (flag_unsafe_math_optimizations
8458 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8460 const REAL_VALUE_TYPE sqrt2_trunc
8461 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8462 return fold_build2_loc (loc, MULT_EXPR, type,
8463 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8464 build_real (type, sqrt2_trunc));
8467 return NULL_TREE;
8471 /* Fold a builtin function call to pow, powf, or powl. Return
8472 NULL_TREE if no simplification can be made. */
8473 static tree
8474 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8476 tree res;
8478 if (!validate_arg (arg0, REAL_TYPE)
8479 || !validate_arg (arg1, REAL_TYPE))
8480 return NULL_TREE;
8482 /* Calculate the result when the argument is a constant. */
8483 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8484 return res;
8486 /* Optimize pow(1.0,y) = 1.0. */
8487 if (real_onep (arg0))
8488 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8490 if (TREE_CODE (arg1) == REAL_CST
8491 && !TREE_OVERFLOW (arg1))
8493 REAL_VALUE_TYPE cint;
8494 REAL_VALUE_TYPE c;
8495 HOST_WIDE_INT n;
8497 c = TREE_REAL_CST (arg1);
8499 /* Optimize pow(x,0.0) = 1.0. */
8500 if (REAL_VALUES_EQUAL (c, dconst0))
8501 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8502 arg0);
8504 /* Optimize pow(x,1.0) = x. */
8505 if (REAL_VALUES_EQUAL (c, dconst1))
8506 return arg0;
8508 /* Optimize pow(x,-1.0) = 1.0/x. */
8509 if (REAL_VALUES_EQUAL (c, dconstm1))
8510 return fold_build2_loc (loc, RDIV_EXPR, type,
8511 build_real (type, dconst1), arg0);
8513 /* Optimize pow(x,0.5) = sqrt(x). */
8514 if (flag_unsafe_math_optimizations
8515 && REAL_VALUES_EQUAL (c, dconsthalf))
8517 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8519 if (sqrtfn != NULL_TREE)
8520 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8523 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8524 if (flag_unsafe_math_optimizations)
8526 const REAL_VALUE_TYPE dconstroot
8527 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8529 if (REAL_VALUES_EQUAL (c, dconstroot))
8531 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8532 if (cbrtfn != NULL_TREE)
8533 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8537 /* Check for an integer exponent. */
8538 n = real_to_integer (&c);
8539 real_from_integer (&cint, VOIDmode, n, SIGNED);
8540 if (real_identical (&c, &cint))
8542 /* Attempt to evaluate pow at compile-time, unless this should
8543 raise an exception. */
8544 if (TREE_CODE (arg0) == REAL_CST
8545 && !TREE_OVERFLOW (arg0)
8546 && (n > 0
8547 || (!flag_trapping_math && !flag_errno_math)
8548 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8550 REAL_VALUE_TYPE x;
8551 bool inexact;
8553 x = TREE_REAL_CST (arg0);
8554 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8555 if (flag_unsafe_math_optimizations || !inexact)
8556 return build_real (type, x);
8559 /* Strip sign ops from even integer powers. */
8560 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8562 tree narg0 = fold_strip_sign_ops (arg0);
8563 if (narg0)
8564 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8569 if (flag_unsafe_math_optimizations)
8571 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8573 /* Optimize pow(expN(x),y) = expN(x*y). */
8574 if (BUILTIN_EXPONENT_P (fcode))
8576 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8577 tree arg = CALL_EXPR_ARG (arg0, 0);
8578 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8579 return build_call_expr_loc (loc, expfn, 1, arg);
8582 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8583 if (BUILTIN_SQRT_P (fcode))
8585 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8586 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8587 build_real (type, dconsthalf));
8588 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8591 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8592 if (BUILTIN_CBRT_P (fcode))
8594 tree arg = CALL_EXPR_ARG (arg0, 0);
8595 if (tree_expr_nonnegative_p (arg))
8597 const REAL_VALUE_TYPE dconstroot
8598 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8599 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8600 build_real (type, dconstroot));
8601 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8605 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8606 if (fcode == BUILT_IN_POW
8607 || fcode == BUILT_IN_POWF
8608 || fcode == BUILT_IN_POWL)
8610 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8611 if (tree_expr_nonnegative_p (arg00))
8613 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8614 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8615 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8620 return NULL_TREE;
8623 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8624 Return NULL_TREE if no simplification can be made. */
8625 static tree
8626 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8627 tree arg0, tree arg1, tree type)
8629 if (!validate_arg (arg0, REAL_TYPE)
8630 || !validate_arg (arg1, INTEGER_TYPE))
8631 return NULL_TREE;
8633 /* Optimize pow(1.0,y) = 1.0. */
8634 if (real_onep (arg0))
8635 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8637 if (tree_fits_shwi_p (arg1))
8639 HOST_WIDE_INT c = tree_to_shwi (arg1);
8641 /* Evaluate powi at compile-time. */
8642 if (TREE_CODE (arg0) == REAL_CST
8643 && !TREE_OVERFLOW (arg0))
8645 REAL_VALUE_TYPE x;
8646 x = TREE_REAL_CST (arg0);
8647 real_powi (&x, TYPE_MODE (type), &x, c);
8648 return build_real (type, x);
8651 /* Optimize pow(x,0) = 1.0. */
8652 if (c == 0)
8653 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8654 arg0);
8656 /* Optimize pow(x,1) = x. */
8657 if (c == 1)
8658 return arg0;
8660 /* Optimize pow(x,-1) = 1.0/x. */
8661 if (c == -1)
8662 return fold_build2_loc (loc, RDIV_EXPR, type,
8663 build_real (type, dconst1), arg0);
8666 return NULL_TREE;
8669 /* A subroutine of fold_builtin to fold the various exponent
8670 functions. Return NULL_TREE if no simplification can be made.
8671 FUNC is the corresponding MPFR exponent function. */
8673 static tree
8674 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8675 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8677 if (validate_arg (arg, REAL_TYPE))
8679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8680 tree res;
8682 /* Calculate the result when the argument is a constant. */
8683 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8684 return res;
8686 /* Optimize expN(logN(x)) = x. */
8687 if (flag_unsafe_math_optimizations)
8689 const enum built_in_function fcode = builtin_mathfn_code (arg);
8691 if ((func == mpfr_exp
8692 && (fcode == BUILT_IN_LOG
8693 || fcode == BUILT_IN_LOGF
8694 || fcode == BUILT_IN_LOGL))
8695 || (func == mpfr_exp2
8696 && (fcode == BUILT_IN_LOG2
8697 || fcode == BUILT_IN_LOG2F
8698 || fcode == BUILT_IN_LOG2L))
8699 || (func == mpfr_exp10
8700 && (fcode == BUILT_IN_LOG10
8701 || fcode == BUILT_IN_LOG10F
8702 || fcode == BUILT_IN_LOG10L)))
8703 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8707 return NULL_TREE;
8710 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8711 arguments to the call, and TYPE is its return type.
8712 Return NULL_TREE if no simplification can be made. */
8714 static tree
8715 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8717 if (!validate_arg (arg1, POINTER_TYPE)
8718 || !validate_arg (arg2, INTEGER_TYPE)
8719 || !validate_arg (len, INTEGER_TYPE))
8720 return NULL_TREE;
8721 else
8723 const char *p1;
8725 if (TREE_CODE (arg2) != INTEGER_CST
8726 || !tree_fits_uhwi_p (len))
8727 return NULL_TREE;
8729 p1 = c_getstr (arg1);
8730 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8732 char c;
8733 const char *r;
8734 tree tem;
8736 if (target_char_cast (arg2, &c))
8737 return NULL_TREE;
8739 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8741 if (r == NULL)
8742 return build_int_cst (TREE_TYPE (arg1), 0);
8744 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8745 return fold_convert_loc (loc, type, tem);
8747 return NULL_TREE;
8751 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8752 Return NULL_TREE if no simplification can be made. */
8754 static tree
8755 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8757 const char *p1, *p2;
8759 if (!validate_arg (arg1, POINTER_TYPE)
8760 || !validate_arg (arg2, POINTER_TYPE)
8761 || !validate_arg (len, INTEGER_TYPE))
8762 return NULL_TREE;
8764 /* If the LEN parameter is zero, return zero. */
8765 if (integer_zerop (len))
8766 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8767 arg1, arg2);
8769 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8770 if (operand_equal_p (arg1, arg2, 0))
8771 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8773 p1 = c_getstr (arg1);
8774 p2 = c_getstr (arg2);
8776 /* If all arguments are constant, and the value of len is not greater
8777 than the lengths of arg1 and arg2, evaluate at compile-time. */
8778 if (tree_fits_uhwi_p (len) && p1 && p2
8779 && compare_tree_int (len, strlen (p1) + 1) <= 0
8780 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8782 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8784 if (r > 0)
8785 return integer_one_node;
8786 else if (r < 0)
8787 return integer_minus_one_node;
8788 else
8789 return integer_zero_node;
8792 /* If len parameter is one, return an expression corresponding to
8793 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8794 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8796 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8797 tree cst_uchar_ptr_node
8798 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8800 tree ind1
8801 = fold_convert_loc (loc, integer_type_node,
8802 build1 (INDIRECT_REF, cst_uchar_node,
8803 fold_convert_loc (loc,
8804 cst_uchar_ptr_node,
8805 arg1)));
8806 tree ind2
8807 = fold_convert_loc (loc, integer_type_node,
8808 build1 (INDIRECT_REF, cst_uchar_node,
8809 fold_convert_loc (loc,
8810 cst_uchar_ptr_node,
8811 arg2)));
8812 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8815 return NULL_TREE;
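/* Examples of the folds above: memcmp (p, q, 0) becomes 0 (still
   evaluating p and q for side effects), memcmp ("ab", "ab", 2) becomes
   0 at compile time, and memcmp (p, q, 1) becomes the byte difference
     *(const unsigned char *) p - *(const unsigned char *) q.  */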
8818 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8819 Return NULL_TREE if no simplification can be made. */
8821 static tree
8822 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8824 const char *p1, *p2;
8826 if (!validate_arg (arg1, POINTER_TYPE)
8827 || !validate_arg (arg2, POINTER_TYPE))
8828 return NULL_TREE;
8830 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8831 if (operand_equal_p (arg1, arg2, 0))
8832 return integer_zero_node;
8834 p1 = c_getstr (arg1);
8835 p2 = c_getstr (arg2);
8837 if (p1 && p2)
8839 const int i = strcmp (p1, p2);
8840 if (i < 0)
8841 return integer_minus_one_node;
8842 else if (i > 0)
8843 return integer_one_node;
8844 else
8845 return integer_zero_node;
8848 /* If the second arg is "", return *(const unsigned char*)arg1. */
8849 if (p2 && *p2 == '\0')
8851 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8852 tree cst_uchar_ptr_node
8853 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8855 return fold_convert_loc (loc, integer_type_node,
8856 build1 (INDIRECT_REF, cst_uchar_node,
8857 fold_convert_loc (loc,
8858 cst_uchar_ptr_node,
8859 arg1)));
8862 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8863 if (p1 && *p1 == '\0')
8865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8866 tree cst_uchar_ptr_node
8867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8869 tree temp
8870 = fold_convert_loc (loc, integer_type_node,
8871 build1 (INDIRECT_REF, cst_uchar_node,
8872 fold_convert_loc (loc,
8873 cst_uchar_ptr_node,
8874 arg2)));
8875 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8878 return NULL_TREE;
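/* E.g. strcmp ("abc", "abd") folds to -1, strcmp (s, "") becomes
   *(const unsigned char *) s, and strcmp ("", s) becomes
   -*(const unsigned char *) s.  */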
8881 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8882 Return NULL_TREE if no simplification can be made. */
8884 static tree
8885 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8887 const char *p1, *p2;
8889 if (!validate_arg (arg1, POINTER_TYPE)
8890 || !validate_arg (arg2, POINTER_TYPE)
8891 || !validate_arg (len, INTEGER_TYPE))
8892 return NULL_TREE;
8894 /* If the LEN parameter is zero, return zero. */
8895 if (integer_zerop (len))
8896 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8897 arg1, arg2);
8899 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8900 if (operand_equal_p (arg1, arg2, 0))
8901 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8903 p1 = c_getstr (arg1);
8904 p2 = c_getstr (arg2);
8906 if (tree_fits_uhwi_p (len) && p1 && p2)
8908 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8909 if (i > 0)
8910 return integer_one_node;
8911 else if (i < 0)
8912 return integer_minus_one_node;
8913 else
8914 return integer_zero_node;
8917 /* If the second arg is "", and the length is greater than zero,
8918 return *(const unsigned char*)arg1. */
8919 if (p2 && *p2 == '\0'
8920 && TREE_CODE (len) == INTEGER_CST
8921 && tree_int_cst_sgn (len) == 1)
8923 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8924 tree cst_uchar_ptr_node
8925 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8927 return fold_convert_loc (loc, integer_type_node,
8928 build1 (INDIRECT_REF, cst_uchar_node,
8929 fold_convert_loc (loc,
8930 cst_uchar_ptr_node,
8931 arg1)));
8934 /* If the first arg is "", and the length is greater than zero,
8935 return -*(const unsigned char*)arg2. */
8936 if (p1 && *p1 == '\0'
8937 && TREE_CODE (len) == INTEGER_CST
8938 && tree_int_cst_sgn (len) == 1)
8940 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8941 tree cst_uchar_ptr_node
8942 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8944 tree temp = fold_convert_loc (loc, integer_type_node,
8945 build1 (INDIRECT_REF, cst_uchar_node,
8946 fold_convert_loc (loc,
8947 cst_uchar_ptr_node,
8948 arg2)));
8949 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8952 /* If len parameter is one, return an expression corresponding to
8953 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8954 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8956 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8957 tree cst_uchar_ptr_node
8958 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8960 tree ind1 = fold_convert_loc (loc, integer_type_node,
8961 build1 (INDIRECT_REF, cst_uchar_node,
8962 fold_convert_loc (loc,
8963 cst_uchar_ptr_node,
8964 arg1)));
8965 tree ind2 = fold_convert_loc (loc, integer_type_node,
8966 build1 (INDIRECT_REF, cst_uchar_node,
8967 fold_convert_loc (loc,
8968 cst_uchar_ptr_node,
8969 arg2)));
8970 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8973 return NULL_TREE;
8976 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8977 ARG. Return NULL_TREE if no simplification can be made. */
8979 static tree
8980 fold_builtin_signbit (location_t loc, tree arg, tree type)
8982 if (!validate_arg (arg, REAL_TYPE))
8983 return NULL_TREE;
8985 /* If ARG is a compile-time constant, determine the result. */
8986 if (TREE_CODE (arg) == REAL_CST
8987 && !TREE_OVERFLOW (arg))
8989 REAL_VALUE_TYPE c;
8991 c = TREE_REAL_CST (arg);
8992 return (REAL_VALUE_NEGATIVE (c)
8993 ? build_one_cst (type)
8994 : build_zero_cst (type));
8997 /* If ARG is non-negative, the result is always zero. */
8998 if (tree_expr_nonnegative_p (arg))
8999 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9001 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9002 if (!HONOR_SIGNED_ZEROS (arg))
9003 return fold_convert (type,
9004 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9005 build_real (TREE_TYPE (arg), dconst0)));
9007 return NULL_TREE;
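/* For instance, signbit (-3.0) folds to 1 and signbit (2.5) to 0; for a
   non-constant x whose format has no signed zeros, signbit (x) becomes
   the expression x < 0.0.  */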
9010 /* Fold function call to builtin copysign, copysignf or copysignl with
9011 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9012 be made. */
9014 static tree
9015 fold_builtin_copysign (location_t loc, tree fndecl,
9016 tree arg1, tree arg2, tree type)
9018 tree tem;
9020 if (!validate_arg (arg1, REAL_TYPE)
9021 || !validate_arg (arg2, REAL_TYPE))
9022 return NULL_TREE;
9024 /* copysign(X,X) is X. */
9025 if (operand_equal_p (arg1, arg2, 0))
9026 return fold_convert_loc (loc, type, arg1);
9028 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9029 if (TREE_CODE (arg1) == REAL_CST
9030 && TREE_CODE (arg2) == REAL_CST
9031 && !TREE_OVERFLOW (arg1)
9032 && !TREE_OVERFLOW (arg2))
9034 REAL_VALUE_TYPE c1, c2;
9036 c1 = TREE_REAL_CST (arg1);
9037 c2 = TREE_REAL_CST (arg2);
9038 /* c1.sign := c2.sign. */
9039 real_copysign (&c1, &c2);
9040 return build_real (type, c1);
9043 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9044 Remember to evaluate Y for side-effects. */
9045 if (tree_expr_nonnegative_p (arg2))
9046 return omit_one_operand_loc (loc, type,
9047 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9048 arg2);
9050 /* Strip sign changing operations for the first argument. */
9051 tem = fold_strip_sign_ops (arg1);
9052 if (tem)
9053 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9055 return NULL_TREE;
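/* E.g. copysign (x, x) folds to x, copysign (3.0, -1.0) folds to -3.0,
   and copysign (x, 2.0) becomes fabs (x) because the second argument is
   known nonnegative.  */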
9058 /* Fold a call to builtin isascii with argument ARG. */
9060 static tree
9061 fold_builtin_isascii (location_t loc, tree arg)
9063 if (!validate_arg (arg, INTEGER_TYPE))
9064 return NULL_TREE;
9065 else
9067 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9068 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9069 build_int_cst (integer_type_node,
9070 ~ (unsigned HOST_WIDE_INT) 0x7f));
9071 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9072 arg, integer_zero_node);
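/* Example of the transformation above (illustrative): isascii (0x41)
   folds to ((0x41 & ~0x7f) == 0), i.e. 1, while isascii (0xc8) folds
   to 0, with no library call in either case.  */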
9076 /* Fold a call to builtin toascii with argument ARG. */
9078 static tree
9079 fold_builtin_toascii (location_t loc, tree arg)
9081 if (!validate_arg (arg, INTEGER_TYPE))
9082 return NULL_TREE;
9084 /* Transform toascii(c) -> (c & 0x7f). */
9085 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9086 build_int_cst (integer_type_node, 0x7f));
9089 /* Fold a call to builtin isdigit with argument ARG. */
9091 static tree
9092 fold_builtin_isdigit (location_t loc, tree arg)
9094 if (!validate_arg (arg, INTEGER_TYPE))
9095 return NULL_TREE;
9096 else
9098 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9099 /* According to the C standard, isdigit is unaffected by locale.
9100 However, it definitely is affected by the target character set. */
9101 unsigned HOST_WIDE_INT target_digit0
9102 = lang_hooks.to_target_charset ('0');
9104 if (target_digit0 == 0)
9105 return NULL_TREE;
9107 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9108 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9109 build_int_cst (unsigned_type_node, target_digit0));
9110 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9111 build_int_cst (unsigned_type_node, 9));
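/* Worked example (illustrative; assumes an ASCII execution character
   set, where '0' is 48): isdigit (c) becomes the single unsigned
   comparison (unsigned) c - 48 <= 9, so isdigit ('7') folds to
   55 - 48 = 7 <= 9, i.e. 1.  */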
9115 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9117 static tree
9118 fold_builtin_fabs (location_t loc, tree arg, tree type)
9120 if (!validate_arg (arg, REAL_TYPE))
9121 return NULL_TREE;
9123 arg = fold_convert_loc (loc, type, arg);
9124 if (TREE_CODE (arg) == REAL_CST)
9125 return fold_abs_const (arg, type);
9126 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9129 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9131 static tree
9132 fold_builtin_abs (location_t loc, tree arg, tree type)
9134 if (!validate_arg (arg, INTEGER_TYPE))
9135 return NULL_TREE;
9137 arg = fold_convert_loc (loc, type, arg);
9138 if (TREE_CODE (arg) == INTEGER_CST)
9139 return fold_abs_const (arg, type);
9140 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9143 /* Fold a fma operation with arguments ARG[012]. */
9145 tree
9146 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9147 tree type, tree arg0, tree arg1, tree arg2)
9149 if (TREE_CODE (arg0) == REAL_CST
9150 && TREE_CODE (arg1) == REAL_CST
9151 && TREE_CODE (arg2) == REAL_CST)
9152 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9154 return NULL_TREE;
9157 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9159 static tree
9160 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9162 if (validate_arg (arg0, REAL_TYPE)
9163 && validate_arg (arg1, REAL_TYPE)
9164 && validate_arg (arg2, REAL_TYPE))
9166 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9167 if (tem)
9168 return tem;
9170 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9171 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9172 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9174 return NULL_TREE;
9177 /* Fold a call to builtin fmin or fmax. */
9179 static tree
9180 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9181 tree type, bool max)
9183 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9185 /* Calculate the result when the argument is a constant. */
9186 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9188 if (res)
9189 return res;
9191 /* If either argument is NaN, return the other one. Avoid the
9192 transformation if we get (and honor) a signalling NaN. Using
9193 omit_one_operand() ensures we create a non-lvalue. */
9194 if (TREE_CODE (arg0) == REAL_CST
9195 && real_isnan (&TREE_REAL_CST (arg0))
9196 && (! HONOR_SNANS (arg0)
9197 || ! TREE_REAL_CST (arg0).signalling))
9198 return omit_one_operand_loc (loc, type, arg1, arg0);
9199 if (TREE_CODE (arg1) == REAL_CST
9200 && real_isnan (&TREE_REAL_CST (arg1))
9201 && (! HONOR_SNANS (arg1)
9202 || ! TREE_REAL_CST (arg1).signalling))
9203 return omit_one_operand_loc (loc, type, arg0, arg1);
9205 /* Transform fmin/fmax(x,x) -> x. */
9206 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9207 return omit_one_operand_loc (loc, type, arg0, arg1);
9209 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9210 functions to return the numeric arg if the other one is NaN.
9211 These tree codes don't honor that, so only transform if
9212 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9213 handled, so we don't have to worry about it either. */
9214 if (flag_finite_math_only)
9215 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9216 fold_convert_loc (loc, type, arg0),
9217 fold_convert_loc (loc, type, arg1));
9219 return NULL_TREE;
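/* Illustrative examples for the folds above (not from the original
   source):

     fmin (x, __builtin_nan ("")) -> x     (quiet NaN dropped)
     fmax (x, x)                  -> x
     fmax (a, b)                  -> MAX_EXPR <a, b>
                                     (only with -ffinite-math-only)  */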
9222 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9224 static tree
9225 fold_builtin_carg (location_t loc, tree arg, tree type)
9227 if (validate_arg (arg, COMPLEX_TYPE)
9228 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9230 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9232 if (atan2_fn)
9234 tree new_arg = builtin_save_expr (arg);
9235 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9236 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9237 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9241 return NULL_TREE;
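/* Example (illustrative): for a complex double Z, the fold above
   rewrites carg (z) as atan2 (__imag__ z, __real__ z), with Z wrapped
   in builtin_save_expr so it is evaluated only once.  */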
9244 /* Fold a call to builtin logb/ilogb. */
9246 static tree
9247 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9249 if (! validate_arg (arg, REAL_TYPE))
9250 return NULL_TREE;
9252 STRIP_NOPS (arg);
9254 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9256 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9258 switch (value->cl)
9260 case rvc_nan:
9261 case rvc_inf:
9262 /* If arg is Inf or NaN and we're logb, return it. */
9263 if (TREE_CODE (rettype) == REAL_TYPE)
9265 /* For logb(-Inf) we have to return +Inf. */
9266 if (real_isinf (value) && real_isneg (value))
9268 REAL_VALUE_TYPE tem;
9269 real_inf (&tem);
9270 return build_real (rettype, tem);
9272 return fold_convert_loc (loc, rettype, arg);
9274 /* Fall through... */
9275 case rvc_zero:
9276 /* Zero may set errno and/or raise an exception for logb; also,
9277 for ilogb we don't know FP_ILOGB0. */
9278 return NULL_TREE;
9279 case rvc_normal:
9280 /* For normal numbers, proceed iff radix == 2. In GCC,
9281 normalized significands are in the range [0.5, 1.0). We
9282 want the exponent as if they were [1.0, 2.0) so get the
9283 exponent and subtract 1. */
9284 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9285 return fold_convert_loc (loc, rettype,
9286 build_int_cst (integer_type_node,
9287 REAL_EXP (value)-1));
9288 break;
9292 return NULL_TREE;
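/* Worked example for the rvc_normal case above (illustrative): 8.0 is
   stored as 0.5 * 2**4, so REAL_EXP is 4 and logb (8.0) folds to
   (double) (4 - 1) = 3.0, matching the C library's [1.0, 2.0)
   significand convention.  */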
9295 /* Fold a call to builtin significand, if radix == 2. */
9297 static tree
9298 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9300 if (! validate_arg (arg, REAL_TYPE))
9301 return NULL_TREE;
9303 STRIP_NOPS (arg);
9305 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9307 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9309 switch (value->cl)
9311 case rvc_zero:
9312 case rvc_nan:
9313 case rvc_inf:
9314 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9315 return fold_convert_loc (loc, rettype, arg);
9316 case rvc_normal:
9317 /* For normal numbers, proceed iff radix == 2. */
9318 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9320 REAL_VALUE_TYPE result = *value;
9321 /* In GCC, normalized significands are in the range [0.5,
9322 1.0). We want them to be [1.0, 2.0) so set the
9323 exponent to 1. */
9324 SET_REAL_EXP (&result, 1);
9325 return build_real (rettype, result);
9327 break;
9331 return NULL_TREE;
9334 /* Fold a call to builtin frexp; we can assume the base is 2. */
9336 static tree
9337 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9339 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9340 return NULL_TREE;
9342 STRIP_NOPS (arg0);
9344 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9345 return NULL_TREE;
9347 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9349 /* Proceed if a valid pointer type was passed in. */
9350 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9352 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9353 tree frac, exp;
9355 switch (value->cl)
9357 case rvc_zero:
9358 /* For +-0, return (*exp = 0, +-0). */
9359 exp = integer_zero_node;
9360 frac = arg0;
9361 break;
9362 case rvc_nan:
9363 case rvc_inf:
9364 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9365 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9366 case rvc_normal:
9368 /* Since the frexp function always expects base 2, and in
9369 GCC normalized significands are already in the range
9370 [0.5, 1.0), we have exactly what frexp wants. */
9371 REAL_VALUE_TYPE frac_rvt = *value;
9372 SET_REAL_EXP (&frac_rvt, 0);
9373 frac = build_real (rettype, frac_rvt);
9374 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9376 break;
9377 default:
9378 gcc_unreachable ();
9381 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9382 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9383 TREE_SIDE_EFFECTS (arg1) = 1;
9384 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9387 return NULL_TREE;
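/* Worked example (illustrative): for frexp (8.0, ep) the rvc_normal
   case yields the COMPOUND_EXPR (*ep = 4, 0.5), because 8.0 is stored
   as 0.5 * 2**4 and the significand is already in [0.5, 1.0).  */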
9390 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9391 then we can assume the base is two. If it's false, then we have to
9392 check the mode of the TYPE parameter in certain cases. */
9394 static tree
9395 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9396 tree type, bool ldexp)
9398 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9400 STRIP_NOPS (arg0);
9401 STRIP_NOPS (arg1);
9403 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9404 if (real_zerop (arg0) || integer_zerop (arg1)
9405 || (TREE_CODE (arg0) == REAL_CST
9406 && !real_isfinite (&TREE_REAL_CST (arg0))))
9407 return omit_one_operand_loc (loc, type, arg0, arg1);
9409 /* If both arguments are constant, then try to evaluate it. */
9410 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9411 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9412 && tree_fits_shwi_p (arg1))
9414 /* Bound the maximum adjustment to twice the range of the
9415 mode's valid exponents. Use abs to ensure the range is
9416 positive as a sanity check. */
9417 const long max_exp_adj = 2 *
9418 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9419 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9421 /* Get the user-requested adjustment. */
9422 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9424 /* The requested adjustment must be inside this range. This
9425 is a preliminary cap to avoid things like overflow; we
9426 may still fail to compute the result for other reasons. */
9427 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9429 REAL_VALUE_TYPE initial_result;
9431 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9433 /* Ensure we didn't overflow. */
9434 if (! real_isinf (&initial_result))
9436 const REAL_VALUE_TYPE trunc_result
9437 = real_value_truncate (TYPE_MODE (type), initial_result);
9439 /* Only proceed if the target mode can hold the
9440 resulting value. */
9441 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9442 return build_real (type, trunc_result);
9448 return NULL_TREE;
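/* Worked examples (illustrative): ldexp (1.5, 4) folds to the
   constant 24.0 via real_ldexp, and ldexp (x, 0) folds to x (the zero
   exponent argument is still evaluated).  Requests outside
   +-max_exp_adj are left to the library so overflow and errno
   behavior are preserved.  */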
9451 /* Fold a call to builtin modf. */
9453 static tree
9454 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9456 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9457 return NULL_TREE;
9459 STRIP_NOPS (arg0);
9461 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9462 return NULL_TREE;
9464 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9466 /* Proceed if a valid pointer type was passed in. */
9467 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9469 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9470 REAL_VALUE_TYPE trunc, frac;
9472 switch (value->cl)
9474 case rvc_nan:
9475 case rvc_zero:
9476 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9477 trunc = frac = *value;
9478 break;
9479 case rvc_inf:
9480 /* For +-Inf, return (*arg1 = arg0, +-0). */
9481 frac = dconst0;
9482 frac.sign = value->sign;
9483 trunc = *value;
9484 break;
9485 case rvc_normal:
9486 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9487 real_trunc (&trunc, VOIDmode, value);
9488 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9489 /* If the original number was negative and already
9490 integral, then the fractional part is -0.0. */
9491 if (value->sign && frac.cl == rvc_zero)
9492 frac.sign = value->sign;
9493 break;
9496 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9497 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9498 build_real (rettype, trunc));
9499 TREE_SIDE_EFFECTS (arg1) = 1;
9500 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9501 build_real (rettype, frac));
9504 return NULL_TREE;
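/* Worked examples (illustrative): modf (-2.5, p) folds to the
   COMPOUND_EXPR (*p = -2.0, -0.5), and for an already-integral
   negative value such as modf (-2.0, p) the fraction keeps the sign,
   giving (*p = -2.0, -0.0).  */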
9507 /* Given a location LOC, an interclass builtin function decl FNDECL
9508 and its single argument ARG, return a folded expression computing
9509 the same, or NULL_TREE if we either couldn't or didn't want to fold
9510 (the latter happens if there's an RTL instruction available). */
9512 static tree
9513 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9515 machine_mode mode;
9517 if (!validate_arg (arg, REAL_TYPE))
9518 return NULL_TREE;
9520 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9521 return NULL_TREE;
9523 mode = TYPE_MODE (TREE_TYPE (arg));
9525 /* If there is no optab, try generic code. */
9526 switch (DECL_FUNCTION_CODE (fndecl))
9528 tree result;
9530 CASE_FLT_FN (BUILT_IN_ISINF):
9532 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9533 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9534 tree const type = TREE_TYPE (arg);
9535 REAL_VALUE_TYPE r;
9536 char buf[128];
9538 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9539 real_from_string (&r, buf);
9540 result = build_call_expr (isgr_fn, 2,
9541 fold_build1_loc (loc, ABS_EXPR, type, arg),
9542 build_real (type, r));
9543 return result;
9545 CASE_FLT_FN (BUILT_IN_FINITE):
9546 case BUILT_IN_ISFINITE:
9548 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9549 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9550 tree const type = TREE_TYPE (arg);
9551 REAL_VALUE_TYPE r;
9552 char buf[128];
9554 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9555 real_from_string (&r, buf);
9556 result = build_call_expr (isle_fn, 2,
9557 fold_build1_loc (loc, ABS_EXPR, type, arg),
9558 build_real (type, r));
9559 /*result = fold_build2_loc (loc, UNGT_EXPR,
9560 TREE_TYPE (TREE_TYPE (fndecl)),
9561 fold_build1_loc (loc, ABS_EXPR, type, arg),
9562 build_real (type, r));
9563 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9564 TREE_TYPE (TREE_TYPE (fndecl)),
9565 result);*/
9566 return result;
9568 case BUILT_IN_ISNORMAL:
9570 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9571 islessequal(fabs(x),DBL_MAX). */
9572 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9573 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9574 tree const type = TREE_TYPE (arg);
9575 REAL_VALUE_TYPE rmax, rmin;
9576 char buf[128];
9578 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9579 real_from_string (&rmax, buf);
9580 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9581 real_from_string (&rmin, buf);
9582 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9583 result = build_call_expr (isle_fn, 2, arg,
9584 build_real (type, rmax));
9585 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9586 build_call_expr (isge_fn, 2, arg,
9587 build_real (type, rmin)));
9588 return result;
9590 default:
9591 break;
9594 return NULL_TREE;
9597 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9598 ARG is the argument for the call. */
9600 static tree
9601 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9604 REAL_VALUE_TYPE r;
9606 if (!validate_arg (arg, REAL_TYPE))
9607 return NULL_TREE;
9609 switch (builtin_index)
9611 case BUILT_IN_ISINF:
9612 if (!HONOR_INFINITIES (arg))
9613 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9615 if (TREE_CODE (arg) == REAL_CST)
9617 r = TREE_REAL_CST (arg);
9618 if (real_isinf (&r))
9619 return real_compare (GT_EXPR, &r, &dconst0)
9620 ? integer_one_node : integer_minus_one_node;
9621 else
9622 return integer_zero_node;
9625 return NULL_TREE;
9627 case BUILT_IN_ISINF_SIGN:
9629 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9630 /* In a boolean context, GCC will fold the inner COND_EXPR to
9631 1. So e.g. "if (isinf_sign(x))" would be folded to just
9632 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9633 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9634 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9635 tree tmp = NULL_TREE;
9637 arg = builtin_save_expr (arg);
9639 if (signbit_fn && isinf_fn)
9641 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9642 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9644 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9645 signbit_call, integer_zero_node);
9646 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9647 isinf_call, integer_zero_node);
9649 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9650 integer_minus_one_node, integer_one_node);
9651 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9652 isinf_call, tmp,
9653 integer_zero_node);
9656 return tmp;
9659 case BUILT_IN_ISFINITE:
9660 if (!HONOR_NANS (arg)
9661 && !HONOR_INFINITIES (arg))
9662 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9664 if (TREE_CODE (arg) == REAL_CST)
9666 r = TREE_REAL_CST (arg);
9667 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9670 return NULL_TREE;
9672 case BUILT_IN_ISNAN:
9673 if (!HONOR_NANS (arg))
9674 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9676 if (TREE_CODE (arg) == REAL_CST)
9678 r = TREE_REAL_CST (arg);
9679 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9682 arg = builtin_save_expr (arg);
9683 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9685 default:
9686 gcc_unreachable ();
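/* Illustrative examples for the classification folds above (not from
   the original source):

     isinf (__builtin_inf ())  -> 1
     isinf (-__builtin_inf ()) -> -1
     isnan (x)                 -> x UNORDERED x  (when NaNs are honored)
     isfinite (x)              -> 1, still evaluating X, when neither
                                  NaNs nor infinities are honored.  */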
9690 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9691 This builtin will generate code to return the appropriate floating
9692 point classification depending on the value of the floating point
9693 number passed in. The possible return values must be supplied as
9694 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9695 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9696 one floating point argument which is "type generic". */
9698 static tree
9699 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9701 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9702 arg, type, res, tmp;
9703 machine_mode mode;
9704 REAL_VALUE_TYPE r;
9705 char buf[128];
9707 /* Verify the required arguments in the original call. */
9708 if (nargs != 6
9709 || !validate_arg (args[0], INTEGER_TYPE)
9710 || !validate_arg (args[1], INTEGER_TYPE)
9711 || !validate_arg (args[2], INTEGER_TYPE)
9712 || !validate_arg (args[3], INTEGER_TYPE)
9713 || !validate_arg (args[4], INTEGER_TYPE)
9714 || !validate_arg (args[5], REAL_TYPE))
9715 return NULL_TREE;
9717 fp_nan = args[0];
9718 fp_infinite = args[1];
9719 fp_normal = args[2];
9720 fp_subnormal = args[3];
9721 fp_zero = args[4];
9722 arg = args[5];
9723 type = TREE_TYPE (arg);
9724 mode = TYPE_MODE (type);
9725 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9727 /* fpclassify(x) ->
9728 isnan(x) ? FP_NAN :
9729 (fabs(x) == Inf ? FP_INFINITE :
9730 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9731 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9733 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9734 build_real (type, dconst0));
9735 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9736 tmp, fp_zero, fp_subnormal);
9738 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9739 real_from_string (&r, buf);
9740 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9741 arg, build_real (type, r));
9742 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9744 if (HONOR_INFINITIES (mode))
9746 real_inf (&r);
9747 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9748 build_real (type, r));
9749 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9750 fp_infinite, res);
9753 if (HONOR_NANS (mode))
9755 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9756 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9759 return res;
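/* The tree built above corresponds to this C expression (illustrative
   sketch; T stands for the saved fabs (x) and MIN_NORM for the
   "0x1p%d" constant built from the mode's smallest normal exponent):

     !(t ord t)     ? FP_NAN
   : t == Inf       ? FP_INFINITE
   : t >= MIN_NORM  ? FP_NORMAL
   : t == 0         ? FP_ZERO
   :                  FP_SUBNORMAL

   with the NaN and Inf arms emitted only when HONOR_NANS resp.
   HONOR_INFINITIES hold.  */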
9762 /* Fold a call to an unordered comparison function such as
9763 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9764 being called and ARG0 and ARG1 are the arguments for the call.
9765 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9766 the opposite of the desired result. UNORDERED_CODE is used
9767 for modes that can hold NaNs and ORDERED_CODE is used for
9768 the rest. */
9770 static tree
9771 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9772 enum tree_code unordered_code,
9773 enum tree_code ordered_code)
9775 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9776 enum tree_code code;
9777 tree type0, type1;
9778 enum tree_code code0, code1;
9779 tree cmp_type = NULL_TREE;
9781 type0 = TREE_TYPE (arg0);
9782 type1 = TREE_TYPE (arg1);
9784 code0 = TREE_CODE (type0);
9785 code1 = TREE_CODE (type1);
9787 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9788 /* Choose the wider of two real types. */
9789 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9790 ? type0 : type1;
9791 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9792 cmp_type = type0;
9793 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9794 cmp_type = type1;
9796 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9797 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9799 if (unordered_code == UNORDERED_EXPR)
9801 if (!HONOR_NANS (arg0))
9802 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9803 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9806 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9807 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9808 fold_build2_loc (loc, code, type, arg0, arg1));
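/* Example of the inversion above (illustrative): isgreater (x, y) is
   folded to !(x unle y) when NaNs are honored, and to plain
   !(x <= y) otherwise; both deliver "x > y" without raising an
   invalid-operand exception on quiet NaNs.  */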
9811 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9812 arithmetic if it can never overflow, or into an internal function that
9813 returns both the result of the arithmetic and an overflow flag in
9814 a complex integer result, or into some other check for overflow. */
9816 static tree
9817 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9818 tree arg0, tree arg1, tree arg2)
9820 enum internal_fn ifn = IFN_LAST;
9821 tree type = TREE_TYPE (TREE_TYPE (arg2));
9822 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9823 switch (fcode)
9825 case BUILT_IN_ADD_OVERFLOW:
9826 case BUILT_IN_SADD_OVERFLOW:
9827 case BUILT_IN_SADDL_OVERFLOW:
9828 case BUILT_IN_SADDLL_OVERFLOW:
9829 case BUILT_IN_UADD_OVERFLOW:
9830 case BUILT_IN_UADDL_OVERFLOW:
9831 case BUILT_IN_UADDLL_OVERFLOW:
9832 ifn = IFN_ADD_OVERFLOW;
9833 break;
9834 case BUILT_IN_SUB_OVERFLOW:
9835 case BUILT_IN_SSUB_OVERFLOW:
9836 case BUILT_IN_SSUBL_OVERFLOW:
9837 case BUILT_IN_SSUBLL_OVERFLOW:
9838 case BUILT_IN_USUB_OVERFLOW:
9839 case BUILT_IN_USUBL_OVERFLOW:
9840 case BUILT_IN_USUBLL_OVERFLOW:
9841 ifn = IFN_SUB_OVERFLOW;
9842 break;
9843 case BUILT_IN_MUL_OVERFLOW:
9844 case BUILT_IN_SMUL_OVERFLOW:
9845 case BUILT_IN_SMULL_OVERFLOW:
9846 case BUILT_IN_SMULLL_OVERFLOW:
9847 case BUILT_IN_UMUL_OVERFLOW:
9848 case BUILT_IN_UMULL_OVERFLOW:
9849 case BUILT_IN_UMULLL_OVERFLOW:
9850 ifn = IFN_MUL_OVERFLOW;
9851 break;
9852 default:
9853 gcc_unreachable ();
9855 tree ctype = build_complex_type (type);
9856 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9857 2, arg0, arg1);
9858 tree tgt = save_expr (call);
9859 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9860 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9861 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9862 tree store
9863 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9864 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
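/* Sketch of the resulting tree (illustrative): a call such as
   __builtin_add_overflow (a, b, &r) becomes

     c = IFN_ADD_OVERFLOW (a, b);   (complex int: REALPART is the sum,
                                     IMAGPART the overflow bit)
     (r = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>)

   so the stored result and the returned flag come from a single
   internal-function call.  */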
9867 /* Fold a call to built-in function FNDECL with 0 arguments.
9868 This function returns NULL_TREE if no simplification was possible. */
9870 static tree
9871 fold_builtin_0 (location_t loc, tree fndecl)
9873 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9874 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9875 switch (fcode)
9877 CASE_FLT_FN (BUILT_IN_INF):
9878 case BUILT_IN_INFD32:
9879 case BUILT_IN_INFD64:
9880 case BUILT_IN_INFD128:
9881 return fold_builtin_inf (loc, type, true);
9883 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9884 return fold_builtin_inf (loc, type, false);
9886 case BUILT_IN_CLASSIFY_TYPE:
9887 return fold_builtin_classify_type (NULL_TREE);
9889 default:
9890 break;
9892 return NULL_TREE;
9895 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9896 This function returns NULL_TREE if no simplification was possible. */
9898 static tree
9899 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9903 switch (fcode)
9905 case BUILT_IN_CONSTANT_P:
9907 tree val = fold_builtin_constant_p (arg0);
9909 /* Gimplification will pull the CALL_EXPR for the builtin out of
9910 an if condition. When not optimizing, we'll not CSE it back.
9911 To avoid regressions such as link errors, return false now. */
9912 if (!val && !optimize)
9913 val = integer_zero_node;
9915 return val;
9918 case BUILT_IN_CLASSIFY_TYPE:
9919 return fold_builtin_classify_type (arg0);
9921 case BUILT_IN_STRLEN:
9922 return fold_builtin_strlen (loc, type, arg0);
9924 CASE_FLT_FN (BUILT_IN_FABS):
9925 case BUILT_IN_FABSD32:
9926 case BUILT_IN_FABSD64:
9927 case BUILT_IN_FABSD128:
9928 return fold_builtin_fabs (loc, arg0, type);
9930 case BUILT_IN_ABS:
9931 case BUILT_IN_LABS:
9932 case BUILT_IN_LLABS:
9933 case BUILT_IN_IMAXABS:
9934 return fold_builtin_abs (loc, arg0, type);
9936 CASE_FLT_FN (BUILT_IN_CONJ):
9937 if (validate_arg (arg0, COMPLEX_TYPE)
9938 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9939 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9940 break;
9942 CASE_FLT_FN (BUILT_IN_CREAL):
9943 if (validate_arg (arg0, COMPLEX_TYPE)
9944 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9945 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9946 break;
9948 CASE_FLT_FN (BUILT_IN_CIMAG):
9949 if (validate_arg (arg0, COMPLEX_TYPE)
9950 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9951 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9952 break;
9954 CASE_FLT_FN (BUILT_IN_CCOS):
9955 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9957 CASE_FLT_FN (BUILT_IN_CCOSH):
9958 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9960 CASE_FLT_FN (BUILT_IN_CPROJ):
9961 return fold_builtin_cproj (loc, arg0, type);
9963 CASE_FLT_FN (BUILT_IN_CSIN):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9966 return do_mpc_arg1 (arg0, type, mpc_sin);
9967 break;
9969 CASE_FLT_FN (BUILT_IN_CSINH):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return do_mpc_arg1 (arg0, type, mpc_sinh);
9973 break;
9975 CASE_FLT_FN (BUILT_IN_CTAN):
9976 if (validate_arg (arg0, COMPLEX_TYPE)
9977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9978 return do_mpc_arg1 (arg0, type, mpc_tan);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_CTANH):
9982 if (validate_arg (arg0, COMPLEX_TYPE)
9983 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9984 return do_mpc_arg1 (arg0, type, mpc_tanh);
9985 break;
9987 CASE_FLT_FN (BUILT_IN_CLOG):
9988 if (validate_arg (arg0, COMPLEX_TYPE)
9989 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9990 return do_mpc_arg1 (arg0, type, mpc_log);
9991 break;
9993 CASE_FLT_FN (BUILT_IN_CSQRT):
9994 if (validate_arg (arg0, COMPLEX_TYPE)
9995 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9996 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9997 break;
9999 CASE_FLT_FN (BUILT_IN_CASIN):
10000 if (validate_arg (arg0, COMPLEX_TYPE)
10001 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10002 return do_mpc_arg1 (arg0, type, mpc_asin);
10003 break;
10005 CASE_FLT_FN (BUILT_IN_CACOS):
10006 if (validate_arg (arg0, COMPLEX_TYPE)
10007 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10008 return do_mpc_arg1 (arg0, type, mpc_acos);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_CATAN):
10012 if (validate_arg (arg0, COMPLEX_TYPE)
10013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10014 return do_mpc_arg1 (arg0, type, mpc_atan);
10015 break;
10017 CASE_FLT_FN (BUILT_IN_CASINH):
10018 if (validate_arg (arg0, COMPLEX_TYPE)
10019 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10020 return do_mpc_arg1 (arg0, type, mpc_asinh);
10021 break;
10023 CASE_FLT_FN (BUILT_IN_CACOSH):
10024 if (validate_arg (arg0, COMPLEX_TYPE)
10025 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10026 return do_mpc_arg1 (arg0, type, mpc_acosh);
10027 break;
10029 CASE_FLT_FN (BUILT_IN_CATANH):
10030 if (validate_arg (arg0, COMPLEX_TYPE)
10031 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10032 return do_mpc_arg1 (arg0, type, mpc_atanh);
10033 break;
10035 CASE_FLT_FN (BUILT_IN_CABS):
10036 return fold_builtin_cabs (loc, arg0, type, fndecl);
10038 CASE_FLT_FN (BUILT_IN_CARG):
10039 return fold_builtin_carg (loc, arg0, type);
10041 CASE_FLT_FN (BUILT_IN_SQRT):
10042 return fold_builtin_sqrt (loc, arg0, type);
10044 CASE_FLT_FN (BUILT_IN_CBRT):
10045 return fold_builtin_cbrt (loc, arg0, type);
10047 CASE_FLT_FN (BUILT_IN_ASIN):
10048 if (validate_arg (arg0, REAL_TYPE))
10049 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10050 &dconstm1, &dconst1, true);
10051 break;
10053 CASE_FLT_FN (BUILT_IN_ACOS):
10054 if (validate_arg (arg0, REAL_TYPE))
10055 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10056 &dconstm1, &dconst1, true);
10057 break;
10059 CASE_FLT_FN (BUILT_IN_ATAN):
10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10062 break;
10064 CASE_FLT_FN (BUILT_IN_ASINH):
10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10067 break;
10069 CASE_FLT_FN (BUILT_IN_ACOSH):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10072 &dconst1, NULL, true);
10073 break;
10075 CASE_FLT_FN (BUILT_IN_ATANH):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10078 &dconstm1, &dconst1, false);
10079 break;
10081 CASE_FLT_FN (BUILT_IN_SIN):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10084 break;
10086 CASE_FLT_FN (BUILT_IN_COS):
10087 return fold_builtin_cos (loc, arg0, type, fndecl);
10089 CASE_FLT_FN (BUILT_IN_TAN):
10090 return fold_builtin_tan (arg0, type);
10092 CASE_FLT_FN (BUILT_IN_CEXP):
10093 return fold_builtin_cexp (loc, arg0, type);
10095 CASE_FLT_FN (BUILT_IN_CEXPI):
10096 if (validate_arg (arg0, REAL_TYPE))
10097 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10098 break;
10100 CASE_FLT_FN (BUILT_IN_SINH):
10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10103 break;
10105 CASE_FLT_FN (BUILT_IN_COSH):
10106 return fold_builtin_cosh (loc, arg0, type, fndecl);
10108 CASE_FLT_FN (BUILT_IN_TANH):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10111 break;
10113 CASE_FLT_FN (BUILT_IN_ERF):
10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10116 break;
10118 CASE_FLT_FN (BUILT_IN_ERFC):
10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10121 break;
10123 CASE_FLT_FN (BUILT_IN_TGAMMA):
10124 if (validate_arg (arg0, REAL_TYPE))
10125 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10126 break;
10128 CASE_FLT_FN (BUILT_IN_EXP):
10129 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10131 CASE_FLT_FN (BUILT_IN_EXP2):
10132 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10134 CASE_FLT_FN (BUILT_IN_EXP10):
10135 CASE_FLT_FN (BUILT_IN_POW10):
10136 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10138 CASE_FLT_FN (BUILT_IN_EXPM1):
10139 if (validate_arg (arg0, REAL_TYPE))
10140 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10141 break;
10143 CASE_FLT_FN (BUILT_IN_LOG):
10144 if (validate_arg (arg0, REAL_TYPE))
10145 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10146 break;
10148 CASE_FLT_FN (BUILT_IN_LOG2):
10149 if (validate_arg (arg0, REAL_TYPE))
10150 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10151 break;
10153 CASE_FLT_FN (BUILT_IN_LOG10):
10154 if (validate_arg (arg0, REAL_TYPE))
10155 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10156 break;
10158 CASE_FLT_FN (BUILT_IN_LOG1P):
10159 if (validate_arg (arg0, REAL_TYPE))
10160 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10161 &dconstm1, NULL, false);
10162 break;
10164 CASE_FLT_FN (BUILT_IN_J0):
10165 if (validate_arg (arg0, REAL_TYPE))
10166 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10167 NULL, NULL, 0);
10168 break;
10170 CASE_FLT_FN (BUILT_IN_J1):
10171 if (validate_arg (arg0, REAL_TYPE))
10172 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10173 NULL, NULL, 0);
10174 break;
10176 CASE_FLT_FN (BUILT_IN_Y0):
10177 if (validate_arg (arg0, REAL_TYPE))
10178 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10179 &dconst0, NULL, false);
10180 break;
10182 CASE_FLT_FN (BUILT_IN_Y1):
10183 if (validate_arg (arg0, REAL_TYPE))
10184 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10185 &dconst0, NULL, false);
10186 break;
10188 CASE_FLT_FN (BUILT_IN_NAN):
10189 case BUILT_IN_NAND32:
10190 case BUILT_IN_NAND64:
10191 case BUILT_IN_NAND128:
10192 return fold_builtin_nan (arg0, type, true);
10194 CASE_FLT_FN (BUILT_IN_NANS):
10195 return fold_builtin_nan (arg0, type, false);
10197 CASE_FLT_FN (BUILT_IN_FLOOR):
10198 return fold_builtin_floor (loc, fndecl, arg0);
10200 CASE_FLT_FN (BUILT_IN_CEIL):
10201 return fold_builtin_ceil (loc, fndecl, arg0);
10203 CASE_FLT_FN (BUILT_IN_TRUNC):
10204 return fold_builtin_trunc (loc, fndecl, arg0);
10206 CASE_FLT_FN (BUILT_IN_ROUND):
10207 return fold_builtin_round (loc, fndecl, arg0);
10209 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10210 CASE_FLT_FN (BUILT_IN_RINT):
10211 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10213 CASE_FLT_FN (BUILT_IN_ICEIL):
10214 CASE_FLT_FN (BUILT_IN_LCEIL):
10215 CASE_FLT_FN (BUILT_IN_LLCEIL):
10216 CASE_FLT_FN (BUILT_IN_LFLOOR):
10217 CASE_FLT_FN (BUILT_IN_IFLOOR):
10218 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10219 CASE_FLT_FN (BUILT_IN_IROUND):
10220 CASE_FLT_FN (BUILT_IN_LROUND):
10221 CASE_FLT_FN (BUILT_IN_LLROUND):
10222 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10224 CASE_FLT_FN (BUILT_IN_IRINT):
10225 CASE_FLT_FN (BUILT_IN_LRINT):
10226 CASE_FLT_FN (BUILT_IN_LLRINT):
10227 return fold_fixed_mathfn (loc, fndecl, arg0);
10229 case BUILT_IN_BSWAP16:
10230 case BUILT_IN_BSWAP32:
10231 case BUILT_IN_BSWAP64:
10232 return fold_builtin_bswap (fndecl, arg0);
10234 CASE_INT_FN (BUILT_IN_FFS):
10235 CASE_INT_FN (BUILT_IN_CLZ):
10236 CASE_INT_FN (BUILT_IN_CTZ):
10237 CASE_INT_FN (BUILT_IN_CLRSB):
10238 CASE_INT_FN (BUILT_IN_POPCOUNT):
10239 CASE_INT_FN (BUILT_IN_PARITY):
10240 return fold_builtin_bitop (fndecl, arg0);
10242 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10243 return fold_builtin_signbit (loc, arg0, type);
10245 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10246 return fold_builtin_significand (loc, arg0, type);
10248 CASE_FLT_FN (BUILT_IN_ILOGB):
10249 CASE_FLT_FN (BUILT_IN_LOGB):
10250 return fold_builtin_logb (loc, arg0, type);
10252 case BUILT_IN_ISASCII:
10253 return fold_builtin_isascii (loc, arg0);
10255 case BUILT_IN_TOASCII:
10256 return fold_builtin_toascii (loc, arg0);
10258 case BUILT_IN_ISDIGIT:
10259 return fold_builtin_isdigit (loc, arg0);
10261 CASE_FLT_FN (BUILT_IN_FINITE):
10262 case BUILT_IN_FINITED32:
10263 case BUILT_IN_FINITED64:
10264 case BUILT_IN_FINITED128:
10265 case BUILT_IN_ISFINITE:
10267 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10268 if (ret)
10269 return ret;
10270 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10273 CASE_FLT_FN (BUILT_IN_ISINF):
10274 case BUILT_IN_ISINFD32:
10275 case BUILT_IN_ISINFD64:
10276 case BUILT_IN_ISINFD128:
10278 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10279 if (ret)
10280 return ret;
10281 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10284 case BUILT_IN_ISNORMAL:
10285 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10287 case BUILT_IN_ISINF_SIGN:
10288 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10290 CASE_FLT_FN (BUILT_IN_ISNAN):
10291 case BUILT_IN_ISNAND32:
10292 case BUILT_IN_ISNAND64:
10293 case BUILT_IN_ISNAND128:
10294 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10296 case BUILT_IN_FREE:
10297 if (integer_zerop (arg0))
10298 return build_empty_stmt (loc);
10299 break;
10301 default:
10302 break;
10305 return NULL_TREE;
10309 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10310 This function returns NULL_TREE if no simplification was possible. */
10312 static tree
10313 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10315 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10316 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10318 switch (fcode)
10320 CASE_FLT_FN (BUILT_IN_JN):
10321 if (validate_arg (arg0, INTEGER_TYPE)
10322 && validate_arg (arg1, REAL_TYPE))
10323 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10324 break;
10326 CASE_FLT_FN (BUILT_IN_YN):
10327 if (validate_arg (arg0, INTEGER_TYPE)
10328 && validate_arg (arg1, REAL_TYPE))
10329 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10330 &dconst0, false);
10331 break;
10333 CASE_FLT_FN (BUILT_IN_DREM):
10334 CASE_FLT_FN (BUILT_IN_REMAINDER):
10335 if (validate_arg (arg0, REAL_TYPE)
10336 && validate_arg (arg1, REAL_TYPE))
10337 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10338 break;
10340 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10341 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10342 if (validate_arg (arg0, REAL_TYPE)
10343 && validate_arg (arg1, POINTER_TYPE))
10344 return do_mpfr_lgamma_r (arg0, arg1, type);
10345 break;
10347 CASE_FLT_FN (BUILT_IN_ATAN2):
10348 if (validate_arg (arg0, REAL_TYPE)
10349 && validate_arg (arg1, REAL_TYPE))
10350 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10351 break;
10353 CASE_FLT_FN (BUILT_IN_FDIM):
10354 if (validate_arg (arg0, REAL_TYPE)
10355 && validate_arg (arg1, REAL_TYPE))
10356 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10357 break;
10359 CASE_FLT_FN (BUILT_IN_HYPOT):
10360 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10362 CASE_FLT_FN (BUILT_IN_CPOW):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10365 && validate_arg (arg1, COMPLEX_TYPE)
10366 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10367 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10368 break;
10370 CASE_FLT_FN (BUILT_IN_LDEXP):
10371 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10372 CASE_FLT_FN (BUILT_IN_SCALBN):
10373 CASE_FLT_FN (BUILT_IN_SCALBLN):
10374 return fold_builtin_load_exponent (loc, arg0, arg1,
10375 type, /*ldexp=*/false);
10377 CASE_FLT_FN (BUILT_IN_FREXP):
10378 return fold_builtin_frexp (loc, arg0, arg1, type);
10380 CASE_FLT_FN (BUILT_IN_MODF):
10381 return fold_builtin_modf (loc, arg0, arg1, type);
10383 case BUILT_IN_STRSTR:
10384 return fold_builtin_strstr (loc, arg0, arg1, type);
10386 case BUILT_IN_STRSPN:
10387 return fold_builtin_strspn (loc, arg0, arg1);
10389 case BUILT_IN_STRCSPN:
10390 return fold_builtin_strcspn (loc, arg0, arg1);
10392 case BUILT_IN_STRCHR:
10393 case BUILT_IN_INDEX:
10394 return fold_builtin_strchr (loc, arg0, arg1, type);
10396 case BUILT_IN_STRRCHR:
10397 case BUILT_IN_RINDEX:
10398 return fold_builtin_strrchr (loc, arg0, arg1, type);
10400 case BUILT_IN_STRCMP:
10401 return fold_builtin_strcmp (loc, arg0, arg1);
10403 case BUILT_IN_STRPBRK:
10404 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10406 case BUILT_IN_EXPECT:
10407 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10409 CASE_FLT_FN (BUILT_IN_POW):
10410 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10412 CASE_FLT_FN (BUILT_IN_POWI):
10413 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10415 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10416 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10418 CASE_FLT_FN (BUILT_IN_FMIN):
10419 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10421 CASE_FLT_FN (BUILT_IN_FMAX):
10422 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10424 case BUILT_IN_ISGREATER:
10425 return fold_builtin_unordered_cmp (loc, fndecl,
10426 arg0, arg1, UNLE_EXPR, LE_EXPR);
10427 case BUILT_IN_ISGREATEREQUAL:
10428 return fold_builtin_unordered_cmp (loc, fndecl,
10429 arg0, arg1, UNLT_EXPR, LT_EXPR);
10430 case BUILT_IN_ISLESS:
10431 return fold_builtin_unordered_cmp (loc, fndecl,
10432 arg0, arg1, UNGE_EXPR, GE_EXPR);
10433 case BUILT_IN_ISLESSEQUAL:
10434 return fold_builtin_unordered_cmp (loc, fndecl,
10435 arg0, arg1, UNGT_EXPR, GT_EXPR);
10436 case BUILT_IN_ISLESSGREATER:
10437 return fold_builtin_unordered_cmp (loc, fndecl,
10438 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10439 case BUILT_IN_ISUNORDERED:
10440 return fold_builtin_unordered_cmp (loc, fndecl,
10441 arg0, arg1, UNORDERED_EXPR,
10442 NOP_EXPR);
10444 /* We do the folding for va_start in the expander. */
10445 case BUILT_IN_VA_START:
10446 break;
10448 case BUILT_IN_OBJECT_SIZE:
10449 return fold_builtin_object_size (arg0, arg1);
10451 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10452 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10454 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10455 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10457 default:
10458 break;
10460 return NULL_TREE;
10463 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10464 and ARG2.
10465 This function returns NULL_TREE if no simplification was possible. */
10467 static tree
10468 fold_builtin_3 (location_t loc, tree fndecl,
10469 tree arg0, tree arg1, tree arg2)
10471 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10472 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10473 switch (fcode)
10476 CASE_FLT_FN (BUILT_IN_SINCOS):
10477 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10479 CASE_FLT_FN (BUILT_IN_FMA):
10480 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10483 CASE_FLT_FN (BUILT_IN_REMQUO):
10484 if (validate_arg (arg0, REAL_TYPE)
10485 && validate_arg (arg1, REAL_TYPE)
10486 && validate_arg (arg2, POINTER_TYPE))
10487 return do_mpfr_remquo (arg0, arg1, arg2);
10488 break;
10490 case BUILT_IN_STRNCMP:
10491 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10493 case BUILT_IN_MEMCHR:
10494 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10496 case BUILT_IN_BCMP:
10497 case BUILT_IN_MEMCMP:
10498 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10500 case BUILT_IN_EXPECT:
10501 return fold_builtin_expect (loc, arg0, arg1, arg2);
10503 case BUILT_IN_ADD_OVERFLOW:
10504 case BUILT_IN_SUB_OVERFLOW:
10505 case BUILT_IN_MUL_OVERFLOW:
10506 case BUILT_IN_SADD_OVERFLOW:
10507 case BUILT_IN_SADDL_OVERFLOW:
10508 case BUILT_IN_SADDLL_OVERFLOW:
10509 case BUILT_IN_SSUB_OVERFLOW:
10510 case BUILT_IN_SSUBL_OVERFLOW:
10511 case BUILT_IN_SSUBLL_OVERFLOW:
10512 case BUILT_IN_SMUL_OVERFLOW:
10513 case BUILT_IN_SMULL_OVERFLOW:
10514 case BUILT_IN_SMULLL_OVERFLOW:
10515 case BUILT_IN_UADD_OVERFLOW:
10516 case BUILT_IN_UADDL_OVERFLOW:
10517 case BUILT_IN_UADDLL_OVERFLOW:
10518 case BUILT_IN_USUB_OVERFLOW:
10519 case BUILT_IN_USUBL_OVERFLOW:
10520 case BUILT_IN_USUBLL_OVERFLOW:
10521 case BUILT_IN_UMUL_OVERFLOW:
10522 case BUILT_IN_UMULL_OVERFLOW:
10523 case BUILT_IN_UMULLL_OVERFLOW:
10524 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10526 default:
10527 break;
10529 return NULL_TREE;
10532 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10533 arguments. IGNORE is true if the result of the
10534 function call is ignored. This function returns NULL_TREE if no
10535 simplification was possible. */
10537 tree
10538 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10540 tree ret = NULL_TREE;
10542 switch (nargs)
10544 case 0:
10545 ret = fold_builtin_0 (loc, fndecl);
10546 break;
10547 case 1:
10548 ret = fold_builtin_1 (loc, fndecl, args[0]);
10549 break;
10550 case 2:
10551 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10552 break;
10553 case 3:
10554 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10555 break;
10556 default:
10557 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10558 break;
10560 if (ret)
10562 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10563 SET_EXPR_LOCATION (ret, loc);
10564 TREE_NO_WARNING (ret) = 1;
10565 return ret;
10567 return NULL_TREE;
10570 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10571 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10572 of arguments in ARGS to be omitted. OLDNARGS is the number of
10573 elements in ARGS. */
10575 static tree
10576 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10577 int skip, tree fndecl, int n, va_list newargs)
10579 int nargs = oldnargs - skip + n;
10580 tree *buffer;
10582 if (n > 0)
10584 int i, j;
10586 buffer = XALLOCAVEC (tree, nargs);
10587 for (i = 0; i < n; i++)
10588 buffer[i] = va_arg (newargs, tree);
10589 for (j = skip; j < oldnargs; j++, i++)
10590 buffer[i] = args[j];
10592 else
10593 buffer = args + skip;
10595 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10598 /* Return true if FNDECL shouldn't be folded right now.
10599 If a built-in function has an always_inline inline wrapper,
10600 defer folding it until after always_inline functions have
10601 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10602 might not be performed. */
10604 bool
10605 avoid_folding_inline_builtin (tree fndecl)
10607 return (DECL_DECLARED_INLINE_P (fndecl)
10608 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10609 && cfun
10610 && !cfun->always_inline_functions_inlined
10611 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10614 /* A wrapper function for builtin folding that prevents warnings for
10615 "statement without effect" and the like, caused by removing the
10616 call node before the warning is generated. */
10618 tree
10619 fold_call_expr (location_t loc, tree exp, bool ignore)
10621 tree ret = NULL_TREE;
10622 tree fndecl = get_callee_fndecl (exp);
10623 if (fndecl
10624 && TREE_CODE (fndecl) == FUNCTION_DECL
10625 && DECL_BUILT_IN (fndecl)
10626 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10627 yet. Defer folding until we see all the arguments
10628 (after inlining). */
10629 && !CALL_EXPR_VA_ARG_PACK (exp))
10631 int nargs = call_expr_nargs (exp);
10633 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10634 instead last argument is __builtin_va_arg_pack (). Defer folding
10635 even in that case, until arguments are finalized. */
10636 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10638 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10639 if (fndecl2
10640 && TREE_CODE (fndecl2) == FUNCTION_DECL
10641 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10642 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10643 return NULL_TREE;
10646 if (avoid_folding_inline_builtin (fndecl))
10647 return NULL_TREE;
10649 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10650 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10651 CALL_EXPR_ARGP (exp), ignore);
10652 else
10654 tree *args = CALL_EXPR_ARGP (exp);
10655 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10656 if (ret)
10657 return ret;
10660 return NULL_TREE;
10663 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10664 N arguments are passed in the array ARGARRAY. Return a folded
10665 expression or NULL_TREE if no simplification was possible. */
10667 tree
10668 fold_builtin_call_array (location_t loc, tree,
10669 tree fn,
10670 int n,
10671 tree *argarray)
10673 if (TREE_CODE (fn) != ADDR_EXPR)
10674 return NULL_TREE;
10676 tree fndecl = TREE_OPERAND (fn, 0);
10677 if (TREE_CODE (fndecl) == FUNCTION_DECL
10678 && DECL_BUILT_IN (fndecl))
10680 /* If last argument is __builtin_va_arg_pack (), arguments to this
10681 function are not finalized yet. Defer folding until they are. */
10682 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10684 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10685 if (fndecl2
10686 && TREE_CODE (fndecl2) == FUNCTION_DECL
10687 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10688 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10689 return NULL_TREE;
10691 if (avoid_folding_inline_builtin (fndecl))
10692 return NULL_TREE;
10693 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10694 return targetm.fold_builtin (fndecl, n, argarray, false);
10695 else
10696 return fold_builtin_n (loc, fndecl, argarray, n, false);
10699 return NULL_TREE;
10702 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10703 along with N new arguments specified as the "..." parameters. SKIP
10704 is the number of arguments in EXP to be omitted. This function is used
10705 to do varargs-to-varargs transformations. */
10707 static tree
10708 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10710 va_list ap;
10711 tree t;
10713 va_start (ap, n);
10714 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10715 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10716 va_end (ap);
10718 return t;
10721 /* Validate a single argument ARG against a tree code CODE representing
10722 a type. */
10724 static bool
10725 validate_arg (const_tree arg, enum tree_code code)
10727 if (!arg)
10728 return false;
10729 else if (code == POINTER_TYPE)
10730 return POINTER_TYPE_P (TREE_TYPE (arg));
10731 else if (code == INTEGER_TYPE)
10732 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10733 return code == TREE_CODE (TREE_TYPE (arg));
10736 /* This function validates the types of a function call argument list
10737 against a specified list of tree_codes. If the last specifier is a 0,
10738 that represents an ellipsis; otherwise the last specifier must be a
10739 VOID_TYPE.
10741 This is the GIMPLE version of validate_arglist. Eventually we want to
10742 completely convert builtins.c to work from GIMPLEs and the tree based
10743 validate_arglist will then be removed. */
10745 bool
10746 validate_gimple_arglist (const gcall *call, ...)
10748 enum tree_code code;
10749 bool res = 0;
10750 va_list ap;
10751 const_tree arg;
10752 size_t i;
10754 va_start (ap, call);
10755 i = 0;
10757 do
10759 code = (enum tree_code) va_arg (ap, int);
10760 switch (code)
10762 case 0:
10763 /* This signifies an ellipsis; any further arguments are all ok. */
10764 res = true;
10765 goto end;
10766 case VOID_TYPE:
10767 /* This signifies an endlink; if no arguments remain, return
10768 true, otherwise return false. */
10769 res = (i == gimple_call_num_args (call));
10770 goto end;
10771 default:
10772 /* If no parameters remain or the parameter's code does not
10773 match the specified code, return false. Otherwise continue
10774 checking any remaining arguments. */
10775 arg = gimple_call_arg (call, i++);
10776 if (!validate_arg (arg, code))
10777 goto end;
10778 break;
10781 while (1);
10783 /* We need gotos here since we can only have one VA_CLOSE in a
10784 function. */
10785 end: ;
10786 va_end (ap);
10788 return res;
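/* Illustrative (hypothetical) uses: a frexp-like builtin taking a
   double and an out-pointer would be checked with

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   while a varargs builtin would end the specifier list with 0 instead
   of VOID_TYPE to accept any trailing arguments.  */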
10791 /* Default target-specific builtin expander that does nothing. */
10793 rtx
10794 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10795 rtx target ATTRIBUTE_UNUSED,
10796 rtx subtarget ATTRIBUTE_UNUSED,
10797 machine_mode mode ATTRIBUTE_UNUSED,
10798 int ignore ATTRIBUTE_UNUSED)
10800 return NULL_RTX;
10803 /* Returns true if EXP represents data that would potentially reside
10804 in a readonly section. */
10806 bool
10807 readonly_data_expr (tree exp)
10809 STRIP_NOPS (exp);
10811 if (TREE_CODE (exp) != ADDR_EXPR)
10812 return false;
10814 exp = get_base_address (TREE_OPERAND (exp, 0));
10815 if (!exp)
10816 return false;
10818 /* Make sure we call decl_readonly_section only for trees it
10819 can handle (since it returns true for everything it doesn't
10820 understand). */
10821 if (TREE_CODE (exp) == STRING_CST
10822 || TREE_CODE (exp) == CONSTRUCTOR
10823 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10824 return decl_readonly_section (exp, 0);
10825 else
10826 return false;
10829 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10830 to the call, and TYPE is its return type.
10832 Return NULL_TREE if no simplification was possible, otherwise return the
10833 simplified form of the call as a tree.
10835 The simplified form may be a constant or other expression which
10836 computes the same value, but in a more efficient manner (including
10837 calls to other builtin functions).
10839 The call may contain arguments which need to be evaluated, but
10840 which are not useful to determine the result of the call. In
10841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10842 COMPOUND_EXPR will be an argument which must be evaluated.
10843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10844 COMPOUND_EXPR in the chain will contain the tree for the simplified
10845 form of the builtin function call. */
10847 static tree
10848 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10850 if (!validate_arg (s1, POINTER_TYPE)
10851 || !validate_arg (s2, POINTER_TYPE))
10852 return NULL_TREE;
10853 else
10855 tree fn;
10856 const char *p1, *p2;
10858 p2 = c_getstr (s2);
10859 if (p2 == NULL)
10860 return NULL_TREE;
10862 p1 = c_getstr (s1);
10863 if (p1 != NULL)
10865 const char *r = strstr (p1, p2);
10866 tree tem;
10868 if (r == NULL)
10869 return build_int_cst (TREE_TYPE (s1), 0);
10871 /* Return an offset into the constant string argument. */
10872 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10873 return fold_convert_loc (loc, type, tem);
10876 /* The argument is const char *, and the result is char *, so we need
10877 a type conversion here to avoid a warning. */
10878 if (p2[0] == '\0')
10879 return fold_convert_loc (loc, type, s1);
10881 if (p2[1] != '\0')
10882 return NULL_TREE;
10884 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10885 if (!fn)
10886 return NULL_TREE;
10888 /* New argument list transforming strstr(s1, s2) to
10889 strchr(s1, s2[0]). */
10890 return build_call_expr_loc (loc, fn, 2, s1,
10891 build_int_cst (integer_type_node, p2[0]));
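/* Examples of the cases above (illustrative):

     strstr ("hello", "ll") -> "hello" + 2       (both constant)
     strstr (s, "")         -> (char *) s
     strstr (s, "l")        -> strchr (s, 'l')   (one-char needle)  */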
10895 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10896 the call, and TYPE is its return type.
10898 Return NULL_TREE if no simplification was possible, otherwise return the
10899 simplified form of the call as a tree.
10901 The simplified form may be a constant or other expression which
10902 computes the same value, but in a more efficient manner (including
10903 calls to other builtin functions).
10905 The call may contain arguments which need to be evaluated, but
10906 which are not useful to determine the result of the call. In
10907 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10908 COMPOUND_EXPR will be an argument which must be evaluated.
10909 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10910 COMPOUND_EXPR in the chain will contain the tree for the simplified
10911 form of the builtin function call. */
10913 static tree
10914 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10916 if (!validate_arg (s1, POINTER_TYPE)
10917 || !validate_arg (s2, INTEGER_TYPE))
10918 return NULL_TREE;
10919 else
10921 const char *p1;
10923 if (TREE_CODE (s2) != INTEGER_CST)
10924 return NULL_TREE;
10926 p1 = c_getstr (s1);
10927 if (p1 != NULL)
10929 char c;
10930 const char *r;
10931 tree tem;
10933 if (target_char_cast (s2, &c))
10934 return NULL_TREE;
10936 r = strchr (p1, c);
10938 if (r == NULL)
10939 return build_int_cst (TREE_TYPE (s1), 0);
10941 /* Return an offset into the constant string argument. */
10942 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10943 return fold_convert_loc (loc, type, tem);
10945 return NULL_TREE;
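/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): strchr folding when the
   string and the character are both constant.  */
#if 0
#include <string.h>

static void
strchr_fold_demo (void)
{
  const char *a = strchr ("hello", 'l'); /* folds to "hello" + 2 */
  const char *b = strchr ("hello", 'z'); /* folds to a null pointer */
  (void) a; (void) b;
}
#endif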
10949 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10950 the call, and TYPE is its return type.
10952 Return NULL_TREE if no simplification was possible, otherwise return the
10953 simplified form of the call as a tree.
10955 The simplified form may be a constant or other expression which
10956 computes the same value, but in a more efficient manner (including
10957 calls to other builtin functions).
10959 The call may contain arguments which need to be evaluated, but
10960 which are not useful to determine the result of the call. In
10961 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10962 COMPOUND_EXPR will be an argument which must be evaluated.
10963 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10964 COMPOUND_EXPR in the chain will contain the tree for the simplified
10965 form of the builtin function call. */
10967 static tree
10968 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10970 if (!validate_arg (s1, POINTER_TYPE)
10971 || !validate_arg (s2, INTEGER_TYPE))
10972 return NULL_TREE;
10973 else
10975 tree fn;
10976 const char *p1;
10978 if (TREE_CODE (s2) != INTEGER_CST)
10979 return NULL_TREE;
10981 p1 = c_getstr (s1);
10982 if (p1 != NULL)
10984 char c;
10985 const char *r;
10986 tree tem;
10988 if (target_char_cast (s2, &c))
10989 return NULL_TREE;
10991 r = strrchr (p1, c);
10993 if (r == NULL)
10994 return build_int_cst (TREE_TYPE (s1), 0);
10996 /* Return an offset into the constant string argument. */
10997 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10998 return fold_convert_loc (loc, type, tem);
11001 if (! integer_zerop (s2))
11002 return NULL_TREE;
11004 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11005 if (!fn)
11006 return NULL_TREE;
11008 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11009 return build_call_expr_loc (loc, fn, 2, s1, s2);
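/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): strrchr folding, including
   the '\0' case that becomes a cheaper forward strchr scan.  */
#if 0
#include <string.h>

static void
strrchr_fold_demo (const char *s)
{
  const char *a = strrchr ("hello", 'l'); /* folds to "hello" + 3 */
  const char *b = strrchr (s, '\0');      /* folds to strchr (s, '\0') */
  (void) a; (void) b;
}
#endif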
11013 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11014 to the call, and TYPE is its return type.
11016 Return NULL_TREE if no simplification was possible, otherwise return the
11017 simplified form of the call as a tree.
11019 The simplified form may be a constant or other expression which
11020 computes the same value, but in a more efficient manner (including
11021 calls to other builtin functions).
11023 The call may contain arguments which need to be evaluated, but
11024 which are not useful to determine the result of the call. In
11025 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11026 COMPOUND_EXPR will be an argument which must be evaluated.
11027 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11028 COMPOUND_EXPR in the chain will contain the tree for the simplified
11029 form of the builtin function call. */
11031 static tree
11032 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11034 if (!validate_arg (s1, POINTER_TYPE)
11035 || !validate_arg (s2, POINTER_TYPE))
11036 return NULL_TREE;
11037 else
11039 tree fn;
11040 const char *p1, *p2;
11042 p2 = c_getstr (s2);
11043 if (p2 == NULL)
11044 return NULL_TREE;
11046 p1 = c_getstr (s1);
11047 if (p1 != NULL)
11049 const char *r = strpbrk (p1, p2);
11050 tree tem;
11052 if (r == NULL)
11053 return build_int_cst (TREE_TYPE (s1), 0);
11055 /* Return an offset into the constant string argument. */
11056 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11057 return fold_convert_loc (loc, type, tem);
11060 if (p2[0] == '\0')
11061 /* strpbrk(x, "") == NULL.
11062 Evaluate and ignore s1 in case it had side-effects. */
11063 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11065 if (p2[1] != '\0')
11066 return NULL_TREE; /* Really call strpbrk. */
11068 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11069 if (!fn)
11070 return NULL_TREE;
11072 /* New argument list transforming strpbrk(s1, s2) to
11073 strchr(s1, s2[0]). */
11074 return build_call_expr_loc (loc, fn, 2, s1,
11075 build_int_cst (integer_type_node, p2[0]));
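/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): the strpbrk foldings above.  */
#if 0
#include <string.h>

static void
strpbrk_fold_demo (const char *s)
{
  const char *a = strpbrk ("hello", "lo"); /* folds to "hello" + 2 */
  const char *b = strpbrk (s, "");         /* folds to a null pointer */
  const char *c = strpbrk (s, "x");        /* folds to strchr (s, 'x') */
  (void) a; (void) b; (void) c;
}
#endif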
11079 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11080 to the call.
11082 Return NULL_TREE if no simplification was possible, otherwise return the
11083 simplified form of the call as a tree.
11085 The simplified form may be a constant or other expression which
11086 computes the same value, but in a more efficient manner (including
11087 calls to other builtin functions).
11089 The call may contain arguments which need to be evaluated, but
11090 which are not useful to determine the result of the call. In
11091 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11092 COMPOUND_EXPR will be an argument which must be evaluated.
11093 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11094 COMPOUND_EXPR in the chain will contain the tree for the simplified
11095 form of the builtin function call. */
11097 static tree
11098 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11100 if (!validate_arg (s1, POINTER_TYPE)
11101 || !validate_arg (s2, POINTER_TYPE))
11102 return NULL_TREE;
11103 else
11105 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11107 /* If both arguments are constants, evaluate at compile-time. */
11108 if (p1 && p2)
11110 const size_t r = strspn (p1, p2);
11111 return build_int_cst (size_type_node, r);
11114 /* If either argument is "", the result is zero. */
11115 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11116 /* Evaluate and ignore both arguments in case either one has
11117 side-effects. */
11118 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11119 s1, s2);
11120 return NULL_TREE;
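/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): the strspn foldings above.  */
#if 0
#include <string.h>

static void
strspn_fold_demo (const char *s)
{
  size_t a = strspn ("aabbcc", "ab"); /* folds to the constant 4 */
  size_t b = strspn (s, "");          /* folds to 0 */
  size_t c = strspn ("", s);          /* folds to 0 */
  (void) a; (void) b; (void) c;
}
#endif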
11124 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11125 to the call.
11127 Return NULL_TREE if no simplification was possible, otherwise return the
11128 simplified form of the call as a tree.
11130 The simplified form may be a constant or other expression which
11131 computes the same value, but in a more efficient manner (including
11132 calls to other builtin functions).
11134 The call may contain arguments which need to be evaluated, but
11135 which are not useful to determine the result of the call. In
11136 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11137 COMPOUND_EXPR will be an argument which must be evaluated.
11138 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11139 COMPOUND_EXPR in the chain will contain the tree for the simplified
11140 form of the builtin function call. */
11142 static tree
11143 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11145 if (!validate_arg (s1, POINTER_TYPE)
11146 || !validate_arg (s2, POINTER_TYPE))
11147 return NULL_TREE;
11148 else
11150 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11152 /* If both arguments are constants, evaluate at compile-time. */
11153 if (p1 && p2)
11155 const size_t r = strcspn (p1, p2);
11156 return build_int_cst (size_type_node, r);
11159 /* If the first argument is "", the result is zero. */
11160 if (p1 && *p1 == '\0')
11162 /* Evaluate and ignore argument s2 in case it has
11163 side-effects. */
11164 return omit_one_operand_loc (loc, size_type_node,
11165 size_zero_node, s2);
11168 /* If the second argument is "", return __builtin_strlen(s1). */
11169 if (p2 && *p2 == '\0')
11171 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11173 /* If the replacement _DECL isn't initialized, don't do the
11174 transformation. */
11175 if (!fn)
11176 return NULL_TREE;
11178 return build_call_expr_loc (loc, fn, 1, s1);
11180 return NULL_TREE;
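/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): the strcspn foldings above,
   including the reduction to strlen for an empty reject set.  */
#if 0
#include <string.h>

static void
strcspn_fold_demo (const char *s)
{
  size_t a = strcspn ("aabbcc", "c"); /* folds to the constant 4 */
  size_t b = strcspn ("", s);         /* folds to 0 */
  size_t c = strcspn (s, "");         /* folds to strlen (s) */
  (void) a; (void) b; (void) c;
}
#endif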
11184 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11185 produced, false otherwise. This is done so that we don't output the error
11186 or warning twice or three times. */
11188 bool
11189 fold_builtin_next_arg (tree exp, bool va_start_p)
11191 tree fntype = TREE_TYPE (current_function_decl);
11192 int nargs = call_expr_nargs (exp);
11193 tree arg;
11194 /* There is a good chance the current input_location points inside the
11195 definition of the va_start macro (perhaps on the token for the
11196 builtin) in a system header, so warnings will not be emitted.
11197 Use the location in real source code. */
11198 source_location current_location =
11199 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11200 NULL);
11202 if (!stdarg_p (fntype))
11204 error ("%<va_start%> used in function with fixed args");
11205 return true;
11208 if (va_start_p)
11210 if (va_start_p && (nargs != 2))
11212 error ("wrong number of arguments to function %<va_start%>");
11213 return true;
11215 arg = CALL_EXPR_ARG (exp, 1);
11217 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11218 once we have checked the arguments and, if needed, issued a warning. */
11219 else
11221 if (nargs == 0)
11223 /* Evidently an out-of-date version of <stdarg.h>; can't validate
11224 va_start's second argument, but can still work as intended. */
11225 warning_at (current_location,
11226 OPT_Wvarargs,
11227 "%<__builtin_next_arg%> called without an argument");
11228 return true;
11230 else if (nargs > 1)
11232 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11233 return true;
11235 arg = CALL_EXPR_ARG (exp, 0);
11238 if (TREE_CODE (arg) == SSA_NAME)
11239 arg = SSA_NAME_VAR (arg);
11241 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11242 or __builtin_next_arg (0) the first time we see it, after checking
11243 the arguments and if needed issuing a warning. */
11244 if (!integer_zerop (arg))
11246 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11248 /* Strip off all nops for the sake of the comparison. This
11249 is not quite the same as STRIP_NOPS. It does more.
11250 We must also strip off INDIRECT_EXPR for C++ reference
11251 parameters. */
11252 while (CONVERT_EXPR_P (arg)
11253 || TREE_CODE (arg) == INDIRECT_REF)
11254 arg = TREE_OPERAND (arg, 0);
11255 if (arg != last_parm)
11257 /* FIXME: Sometimes with the tree optimizers we can see an
11258 argument that is not the last one even though the user wrote
11259 the last argument. We just warn and set the arg to be the last
11260 argument so that we will not get wrong code because of
11261 it. */
11262 warning_at (current_location,
11263 OPT_Wvarargs,
11264 "second parameter of %<va_start%> not last named argument");
11267 /* Undefined by C99 7.15.1.4p4 (va_start):
11268 "If the parameter parmN is declared with the register storage
11269 class, with a function or array type, or with a type that is
11270 not compatible with the type that results after application of
11271 the default argument promotions, the behavior is undefined." */
11273 else if (DECL_REGISTER (arg))
11275 warning_at (current_location,
11276 OPT_Wvarargs,
11277 "undefined behaviour when second parameter of "
11278 "%<va_start%> is declared with %<register%> storage");
11281 /* We want to verify the second parameter just once before the tree
11282 optimizers are run and then avoid keeping it in the tree,
11283 as otherwise we could warn even for correct code like:
11284 void foo (int i, ...)
11285 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11286 if (va_start_p)
11287 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11288 else
11289 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11291 return false;
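/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical names, compiled out): the usage this validation
   accepts and the kind it diagnoses with -Wvarargs.  */
#if 0
#include <stdarg.h>

static int
sum_ok (int n, ...)          /* accepted: N is the last named parameter */
{
  va_list ap;
  int s = 0;
  va_start (ap, n);
  while (n-- > 0)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}

static void
sum_bad (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);          /* warned: A is not the last named parameter */
  (void) b;
  va_end (ap);
}
#endif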
11295 /* Expand a call EXP to __builtin_object_size. */
11297 static rtx
11298 expand_builtin_object_size (tree exp)
11300 tree ost;
11301 int object_size_type;
11302 tree fndecl = get_callee_fndecl (exp);
11304 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11306 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11307 exp, fndecl);
11308 expand_builtin_trap ();
11309 return const0_rtx;
11312 ost = CALL_EXPR_ARG (exp, 1);
11313 STRIP_NOPS (ost);
11315 if (TREE_CODE (ost) != INTEGER_CST
11316 || tree_int_cst_sgn (ost) < 0
11317 || compare_tree_int (ost, 3) > 0)
11319 error ("%Klast argument of %D is not integer constant between 0 and 3",
11320 exp, fndecl);
11321 expand_builtin_trap ();
11322 return const0_rtx;
11325 object_size_type = tree_to_shwi (ost);
11327 return object_size_type < 2 ? constm1_rtx : const0_rtx;
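/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): what __builtin_object_size
   yields for known and unknown objects.  Types 0 and 1 report a maximum
   estimate, types 2 and 3 a minimum, which is why the fallback above is
   -1 for types 0/1 and 0 for types 2/3.  */
#if 0
static void
object_size_demo (char *p)
{
  char buf[16];
  unsigned long a = __builtin_object_size (buf, 0); /* known: 16 */
  unsigned long b = __builtin_object_size (p, 0);   /* unknown: (size_t) -1 */
  unsigned long c = __builtin_object_size (p, 2);   /* unknown: 0 */
  (void) a; (void) b; (void) c; (void) buf;
}
#endif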
11330 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11331 FCODE is the BUILT_IN_* to use.
11332 Return NULL_RTX if we failed; the caller should emit a normal call,
11333 otherwise try to get the result in TARGET, if convenient (and in
11334 mode MODE if that's convenient). */
11336 static rtx
11337 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11338 enum built_in_function fcode)
11340 tree dest, src, len, size;
11342 if (!validate_arglist (exp,
11343 POINTER_TYPE,
11344 fcode == BUILT_IN_MEMSET_CHK
11345 ? INTEGER_TYPE : POINTER_TYPE,
11346 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11347 return NULL_RTX;
11349 dest = CALL_EXPR_ARG (exp, 0);
11350 src = CALL_EXPR_ARG (exp, 1);
11351 len = CALL_EXPR_ARG (exp, 2);
11352 size = CALL_EXPR_ARG (exp, 3);
11354 if (! tree_fits_uhwi_p (size))
11355 return NULL_RTX;
11357 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11359 tree fn;
11361 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11363 warning_at (tree_nonartificial_location (exp),
11364 0, "%Kcall to %D will always overflow destination buffer",
11365 exp, get_callee_fndecl (exp));
11366 return NULL_RTX;
11369 fn = NULL_TREE;
11370 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11371 mem{cpy,pcpy,move,set} is available. */
11372 switch (fcode)
11374 case BUILT_IN_MEMCPY_CHK:
11375 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11376 break;
11377 case BUILT_IN_MEMPCPY_CHK:
11378 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11379 break;
11380 case BUILT_IN_MEMMOVE_CHK:
11381 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11382 break;
11383 case BUILT_IN_MEMSET_CHK:
11384 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11385 break;
11386 default:
11387 break;
11390 if (! fn)
11391 return NULL_RTX;
11393 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11394 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11395 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11396 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11398 else if (fcode == BUILT_IN_MEMSET_CHK)
11399 return NULL_RTX;
11400 else
11402 unsigned int dest_align = get_pointer_alignment (dest);
11404 /* If DEST is not a pointer type, call the normal function. */
11405 if (dest_align == 0)
11406 return NULL_RTX;
11408 /* If SRC and DEST are the same (and not volatile), do nothing. */
11409 if (operand_equal_p (src, dest, 0))
11411 tree expr;
11413 if (fcode != BUILT_IN_MEMPCPY_CHK)
11415 /* Evaluate and ignore LEN in case it has side-effects. */
11416 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11417 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11420 expr = fold_build_pointer_plus (dest, len);
11421 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11424 /* __memmove_chk special case. */
11425 if (fcode == BUILT_IN_MEMMOVE_CHK)
11427 unsigned int src_align = get_pointer_alignment (src);
11429 if (src_align == 0)
11430 return NULL_RTX;
11432 /* If SRC is known to be in a readonly section, we can use the
11433 normal __memcpy_chk. */
11434 if (readonly_data_expr (src))
11436 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11437 if (!fn)
11438 return NULL_RTX;
11439 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11440 dest, src, len, size);
11441 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11442 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11443 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11446 return NULL_RTX;
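/* Illustrative sketch, added for exposition (not part of the original
   source; hypothetical name, compiled out): the _chk calls that
   fortified headers emit, and how they reduce to the plain functions
   when the check is decidable at compile time.  */
#if 0
static void
memcpy_chk_demo (char *dst, const char *src)
{
  char buf[8];
  /* Known size, constant length, 8 <= 8: expands to plain memcpy.  */
  __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
  /* Unknown destination size: SIZE is (size_t) -1, likewise plain
     memcpy.  A constant length larger than a known size would instead
     warn that the call will always overflow the destination.  */
  __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
}
#endif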
11450 /* Emit warning if a buffer overflow is detected at compile time. */
11452 static void
11453 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11455 int is_strlen = 0;
11456 tree len, size;
11457 location_t loc = tree_nonartificial_location (exp);
11459 switch (fcode)
11461 case BUILT_IN_STRCPY_CHK:
11462 case BUILT_IN_STPCPY_CHK:
11463 /* For __strcat_chk the warning will be emitted only if overflowing
11464 by at least strlen (dest) + 1 bytes. */
11465 case BUILT_IN_STRCAT_CHK:
11466 len = CALL_EXPR_ARG (exp, 1);
11467 size = CALL_EXPR_ARG (exp, 2);
11468 is_strlen = 1;
11469 break;
11470 case BUILT_IN_STRNCAT_CHK:
11471 case BUILT_IN_STRNCPY_CHK:
11472 case BUILT_IN_STPNCPY_CHK:
11473 len = CALL_EXPR_ARG (exp, 2);
11474 size = CALL_EXPR_ARG (exp, 3);
11475 break;
11476 case BUILT_IN_SNPRINTF_CHK:
11477 case BUILT_IN_VSNPRINTF_CHK:
11478 len = CALL_EXPR_ARG (exp, 1);
11479 size = CALL_EXPR_ARG (exp, 3);
11480 break;
11481 default:
11482 gcc_unreachable ();
11485 if (!len || !size)
11486 return;
11488 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11489 return;
11491 if (is_strlen)
11493 len = c_strlen (len, 1);
11494 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11495 return;
11497 else if (fcode == BUILT_IN_STRNCAT_CHK)
11499 tree src = CALL_EXPR_ARG (exp, 1);
11500 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11501 return;
11502 src = c_strlen (src, 1);
11503 if (! src || ! tree_fits_uhwi_p (src))
11505 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11506 exp, get_callee_fndecl (exp));
11507 return;
11509 else if (tree_int_cst_lt (src, size))
11510 return;
11512 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11513 return;
11515 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11516 exp, get_callee_fndecl (exp));
11519 /* Emit warning if a buffer overflow is detected at compile time
11520 in __sprintf_chk/__vsprintf_chk calls. */
11522 static void
11523 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11525 tree size, len, fmt;
11526 const char *fmt_str;
11527 int nargs = call_expr_nargs (exp);
11529 /* Verify the required arguments in the original call. */
11531 if (nargs < 4)
11532 return;
11533 size = CALL_EXPR_ARG (exp, 2);
11534 fmt = CALL_EXPR_ARG (exp, 3);
11536 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11537 return;
11539 /* Check whether the format is a literal string constant. */
11540 fmt_str = c_getstr (fmt);
11541 if (fmt_str == NULL)
11542 return;
11544 if (!init_target_chars ())
11545 return;
11547 /* If the format doesn't contain % args or %%, we know its size. */
11548 if (strchr (fmt_str, target_percent) == 0)
11549 len = build_int_cstu (size_type_node, strlen (fmt_str));
11550 /* If the format is "%s" and first ... argument is a string literal,
11551 we know it too. */
11552 else if (fcode == BUILT_IN_SPRINTF_CHK
11553 && strcmp (fmt_str, target_percent_s) == 0)
11555 tree arg;
11557 if (nargs < 5)
11558 return;
11559 arg = CALL_EXPR_ARG (exp, 4);
11560 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11561 return;
11563 len = c_strlen (arg, 1);
11564 if (!len || ! tree_fits_uhwi_p (len))
11565 return;
11567 else
11568 return;
11570 if (! tree_int_cst_lt (len, size))
11571 warning_at (tree_nonartificial_location (exp),
11572 0, "%Kcall to %D will always overflow destination buffer",
11573 exp, get_callee_fndecl (exp));
11576 /* Emit a warning if free is called with the address of a variable. */
11578 static void
11579 maybe_emit_free_warning (tree exp)
11581 tree arg = CALL_EXPR_ARG (exp, 0);
11583 STRIP_NOPS (arg);
11584 if (TREE_CODE (arg) != ADDR_EXPR)
11585 return;
11587 arg = get_base_address (TREE_OPERAND (arg, 0));
11588 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11589 return;
11591 if (SSA_VAR_P (arg))
11592 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11593 "%Kattempt to free a non-heap object %qD", exp, arg);
11594 else
11595 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11596 "%Kattempt to free a non-heap object", exp);
11599 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11600 if possible. */
11602 static tree
11603 fold_builtin_object_size (tree ptr, tree ost)
11605 unsigned HOST_WIDE_INT bytes;
11606 int object_size_type;
11608 if (!validate_arg (ptr, POINTER_TYPE)
11609 || !validate_arg (ost, INTEGER_TYPE))
11610 return NULL_TREE;
11612 STRIP_NOPS (ost);
11614 if (TREE_CODE (ost) != INTEGER_CST
11615 || tree_int_cst_sgn (ost) < 0
11616 || compare_tree_int (ost, 3) > 0)
11617 return NULL_TREE;
11619 object_size_type = tree_to_shwi (ost);
11621 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11622 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11623 and (size_t) 0 for types 2 and 3. */
11624 if (TREE_SIDE_EFFECTS (ptr))
11625 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11627 if (TREE_CODE (ptr) == ADDR_EXPR)
11629 bytes = compute_builtin_object_size (ptr, object_size_type);
11630 if (wi::fits_to_tree_p (bytes, size_type_node))
11631 return build_int_cstu (size_type_node, bytes);
11633 else if (TREE_CODE (ptr) == SSA_NAME)
11635 /* If object size is not known yet, delay folding until
11636 later. Maybe subsequent passes will help determine
11637 it. */
11638 bytes = compute_builtin_object_size (ptr, object_size_type);
11639 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11640 && wi::fits_to_tree_p (bytes, size_type_node))
11641 return build_int_cstu (size_type_node, bytes);
11644 return NULL_TREE;
11647 /* Builtins with folding operations that operate on "..." arguments
11648 need special handling; we need to store the arguments in a convenient
11649 data structure before attempting any folding. Fortunately there are
11650 only a few builtins that fall into this category. FNDECL is the
11651 function, EXP is the CALL_EXPR for the call. */
11653 static tree
11654 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11656 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11657 tree ret = NULL_TREE;
11659 switch (fcode)
11661 case BUILT_IN_FPCLASSIFY:
11662 ret = fold_builtin_fpclassify (loc, args, nargs);
11663 break;
11665 default:
11666 break;
11668 if (ret)
11670 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11671 SET_EXPR_LOCATION (ret, loc);
11672 TREE_NO_WARNING (ret) = 1;
11673 return ret;
11675 return NULL_TREE;
11678 /* Initialize format string characters in the target charset. */
11680 bool
11681 init_target_chars (void)
11683 static bool init;
11684 if (!init)
11686 target_newline = lang_hooks.to_target_charset ('\n');
11687 target_percent = lang_hooks.to_target_charset ('%');
11688 target_c = lang_hooks.to_target_charset ('c');
11689 target_s = lang_hooks.to_target_charset ('s');
11690 if (target_newline == 0 || target_percent == 0 || target_c == 0
11691 || target_s == 0)
11692 return false;
11694 target_percent_c[0] = target_percent;
11695 target_percent_c[1] = target_c;
11696 target_percent_c[2] = '\0';
11698 target_percent_s[0] = target_percent;
11699 target_percent_s[1] = target_s;
11700 target_percent_s[2] = '\0';
11702 target_percent_s_newline[0] = target_percent;
11703 target_percent_s_newline[1] = target_s;
11704 target_percent_s_newline[2] = target_newline;
11705 target_percent_s_newline[3] = '\0';
11707 init = true;
11709 return true;
11712 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11713 and no overflow/underflow occurred. INEXACT is true if M was not
11714 exactly calculated. TYPE is the tree type for the result. This
11715 function assumes that the caller cleared the MPFR flags and then
11716 calculated M, so that any flag set before entering this function
11717 reflects that calculation. Return NULL_TREE if any checks fail. */
11719 static tree
11720 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11722 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11723 overflow/underflow occurred. If -frounding-math, proceed iff the
11724 result of calling FUNC was exact. */
11725 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11726 && (!flag_rounding_math || !inexact))
11728 REAL_VALUE_TYPE rr;
11730 real_from_mpfr (&rr, m, type, GMP_RNDN);
11731 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11732 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11733 but the mpfr_t is not, then we underflowed in the
11734 conversion. */
11735 if (real_isfinite (&rr)
11736 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11738 REAL_VALUE_TYPE rmode;
11740 real_convert (&rmode, TYPE_MODE (type), &rr);
11741 /* Proceed iff the specified mode can hold the value. */
11742 if (real_identical (&rmode, &rr))
11743 return build_real (type, rmode);
11746 return NULL_TREE;
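/* Illustrative sketch, added for exposition (not part of the original
   source; the helper name and its contract are hypothetical, and the
   block is compiled out): the clear-flags / compute / check discipline
   that do_mpfr_ckconv assumes, written against the public MPFR API.  */
#if 0
#include <mpfr.h>

static int
checked_sin (mpfr_ptr result, mpfr_srcptr x, int need_exact)
{
  int inexact;
  mpfr_clear_flags ();
  inexact = mpfr_sin (result, x, GMP_RNDN);
  /* Accept only a normal number with no overflow/underflow, and
     (under -frounding-math) only an exact result.  */
  if (!mpfr_number_p (result)
      || mpfr_overflow_p ()
      || mpfr_underflow_p ()
      || (need_exact && inexact))
    return 0;   /* caller falls back to a runtime libm call */
  return 1;
}
#endif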
11749 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11750 number and no overflow/underflow occurred. INEXACT is true if M
11751 was not exactly calculated. TYPE is the tree type for the result.
11752 This function assumes that the caller cleared the MPFR flags and
11753 then calculated M, so that any flag set before entering this
11754 function reflects that calculation. Return NULL_TREE if any checks
11755 fail; if FORCE_CONVERT is true, the checks are bypassed. */
11757 static tree
11758 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11760 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11761 overflow/underflow occurred. If -frounding-math, proceed iff the
11762 result of calling FUNC was exact. */
11763 if (force_convert
11764 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11765 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11766 && (!flag_rounding_math || !inexact)))
11768 REAL_VALUE_TYPE re, im;
11770 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11771 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11772 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11773 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11774 but the mpfr_t is not, then we underflowed in the
11775 conversion. */
11776 if (force_convert
11777 || (real_isfinite (&re) && real_isfinite (&im)
11778 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11779 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11781 REAL_VALUE_TYPE re_mode, im_mode;
11783 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11784 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11785 /* Proceed iff the specified mode can hold the value. */
11786 if (force_convert
11787 || (real_identical (&re_mode, &re)
11788 && real_identical (&im_mode, &im)))
11789 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11790 build_real (TREE_TYPE (type), im_mode));
11793 return NULL_TREE;
11796 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11797 FUNC on it and return the resulting value as a tree with type TYPE.
11798 If MIN and/or MAX are not NULL, then the supplied ARG must be
11799 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11800 acceptable values, otherwise they are not. The mpfr precision is
11801 set to the precision of TYPE. We assume that function FUNC returns
11802 zero if the result could be calculated exactly within the requested
11803 precision. */
11805 static tree
11806 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11807 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11808 bool inclusive)
11810 tree result = NULL_TREE;
11812 STRIP_NOPS (arg);
11814 /* To proceed, MPFR must exactly represent the target floating point
11815 format, which only happens when the target base equals two. */
11816 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11817 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11819 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11821 if (real_isfinite (ra)
11822 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11823 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11825 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11826 const int prec = fmt->p;
11827 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11828 int inexact;
11829 mpfr_t m;
11831 mpfr_init2 (m, prec);
11832 mpfr_from_real (m, ra, GMP_RNDN);
11833 mpfr_clear_flags ();
11834 inexact = func (m, m, rnd);
11835 result = do_mpfr_ckconv (m, type, inexact);
11836 mpfr_clear (m);
11840 return result;
11843 /* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
11844 function FUNC on them and return the resulting value as a tree with type TYPE.
11845 The mpfr precision is set to the precision of TYPE. We assume that
11846 function FUNC returns zero if the result could be calculated
11847 exactly within the requested precision. */
11849 static tree
11850 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11851 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11853 tree result = NULL_TREE;
11855 STRIP_NOPS (arg1);
11856 STRIP_NOPS (arg2);
11858 /* To proceed, MPFR must exactly represent the target floating point
11859 format, which only happens when the target base equals two. */
11860 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11861 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11862 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11864 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11865 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11867 if (real_isfinite (ra1) && real_isfinite (ra2))
11869 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11870 const int prec = fmt->p;
11871 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11872 int inexact;
11873 mpfr_t m1, m2;
11875 mpfr_inits2 (prec, m1, m2, NULL);
11876 mpfr_from_real (m1, ra1, GMP_RNDN);
11877 mpfr_from_real (m2, ra2, GMP_RNDN);
11878 mpfr_clear_flags ();
11879 inexact = func (m1, m1, m2, rnd);
11880 result = do_mpfr_ckconv (m1, type, inexact);
11881 mpfr_clears (m1, m2, NULL);
11885 return result;
11888 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
11889 mpfr function FUNC on them and return the resulting value as a tree with type TYPE.
11890 The mpfr precision is set to the precision of TYPE. We assume that
11891 function FUNC returns zero if the result could be calculated
11892 exactly within the requested precision. */
11894 static tree
11895 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11896 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11898 tree result = NULL_TREE;
11900 STRIP_NOPS (arg1);
11901 STRIP_NOPS (arg2);
11902 STRIP_NOPS (arg3);
11904 /* To proceed, MPFR must exactly represent the target floating point
11905 format, which only happens when the target base equals two. */
11906 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11907 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11908 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11909 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11911 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11912 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11913 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11915 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11917 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11918 const int prec = fmt->p;
11919 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11920 int inexact;
11921 mpfr_t m1, m2, m3;
11923 mpfr_inits2 (prec, m1, m2, m3, NULL);
11924 mpfr_from_real (m1, ra1, GMP_RNDN);
11925 mpfr_from_real (m2, ra2, GMP_RNDN);
11926 mpfr_from_real (m3, ra3, GMP_RNDN);
11927 mpfr_clear_flags ();
11928 inexact = func (m1, m1, m2, m3, rnd);
11929 result = do_mpfr_ckconv (m1, type, inexact);
11930 mpfr_clears (m1, m2, m3, NULL);
11934 return result;
11937 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11938 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11939 If ARG_SINP and ARG_COSP are NULL then the result is returned
11940 as a complex value.
11941 The type is taken from the type of ARG and is used for setting the
11942 precision of the calculation and results. */
11944 static tree
11945 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11947 tree const type = TREE_TYPE (arg);
11948 tree result = NULL_TREE;
11950 STRIP_NOPS (arg);
11952 /* To proceed, MPFR must exactly represent the target floating point
11953 format, which only happens when the target base equals two. */
11954 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11955 && TREE_CODE (arg) == REAL_CST
11956 && !TREE_OVERFLOW (arg))
11958 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11960 if (real_isfinite (ra))
11962 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11963 const int prec = fmt->p;
11964 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11965 tree result_s, result_c;
11966 int inexact;
11967 mpfr_t m, ms, mc;
11969 mpfr_inits2 (prec, m, ms, mc, NULL);
11970 mpfr_from_real (m, ra, GMP_RNDN);
11971 mpfr_clear_flags ();
11972 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11973 result_s = do_mpfr_ckconv (ms, type, inexact);
11974 result_c = do_mpfr_ckconv (mc, type, inexact);
11975 mpfr_clears (m, ms, mc, NULL);
11976 if (result_s && result_c)
11978 /* If we are to return the result as a complex value, do so. */
11979 if (!arg_sinp && !arg_cosp)
11980 return build_complex (build_complex_type (type),
11981 result_c, result_s);
11983 /* Dereference the sin/cos pointer arguments. */
11984 arg_sinp = build_fold_indirect_ref (arg_sinp);
11985 arg_cosp = build_fold_indirect_ref (arg_cosp);
11986 /* Proceed iff valid pointer types were passed in. */
11987 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11988 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11990 /* Set the values. */
11991 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11992 result_s);
11993 TREE_SIDE_EFFECTS (result_s) = 1;
11994 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11995 result_c);
11996 TREE_SIDE_EFFECTS (result_c) = 1;
11997 /* Combine the assignments into a compound expr. */
11998 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11999 result_s, result_c));
12004 return result;
12007 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12008 two-argument mpfr order N Bessel function FUNC on them and return
12009 the resulting value as a tree with type TYPE. The mpfr precision
12010 is set to the precision of TYPE. We assume that function FUNC
12011 returns zero if the result could be calculated exactly within the
12012 requested precision. */
12013 static tree
12014 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12015 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12016 const REAL_VALUE_TYPE *min, bool inclusive)
12018 tree result = NULL_TREE;
12020 STRIP_NOPS (arg1);
12021 STRIP_NOPS (arg2);
12023 /* To proceed, MPFR must exactly represent the target floating point
12024 format, which only happens when the target base equals two. */
12025 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12026 && tree_fits_shwi_p (arg1)
12027 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12029 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12030 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12032 if (n == (long)n
12033 && real_isfinite (ra)
12034 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12036 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12037 const int prec = fmt->p;
12038 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12039 int inexact;
12040 mpfr_t m;
12042 mpfr_init2 (m, prec);
12043 mpfr_from_real (m, ra, GMP_RNDN);
12044 mpfr_clear_flags ();
12045 inexact = func (m, n, m, rnd);
12046 result = do_mpfr_ckconv (m, type, inexact);
12047 mpfr_clear (m);
12051 return result;
12054 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12055 the pointer *(ARG_QUO) and return the result. The type is taken
12056 from the type of ARG0 and is used for setting the precision of the
12057 calculation and results. */
12059 static tree
12060 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12062 tree const type = TREE_TYPE (arg0);
12063 tree result = NULL_TREE;
12065 STRIP_NOPS (arg0);
12066 STRIP_NOPS (arg1);
12068 /* To proceed, MPFR must exactly represent the target floating point
12069 format, which only happens when the target base equals two. */
12070 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12071 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12072 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12074 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12075 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12077 if (real_isfinite (ra0) && real_isfinite (ra1))
12079 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12080 const int prec = fmt->p;
12081 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12082 tree result_rem;
12083 long integer_quo;
12084 mpfr_t m0, m1;
12086 mpfr_inits2 (prec, m0, m1, NULL);
12087 mpfr_from_real (m0, ra0, GMP_RNDN);
12088 mpfr_from_real (m1, ra1, GMP_RNDN);
12089 mpfr_clear_flags ();
12090 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12091 /* Remquo is independent of the rounding mode, so pass
12092 inexact=0 to do_mpfr_ckconv(). */
12093 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12094 mpfr_clears (m0, m1, NULL);
12095 if (result_rem)
12097 /* MPFR calculates quo in the host's long so it may
12098 return more bits in quo than the target int can hold
12099 if sizeof(host long) > sizeof(target int). This can
12100 happen even for native compilers in LP64 mode. In
12101 these cases, reduce the quo value modulo the largest
12102 number that the target int can hold while leaving one
12103 bit for the sign. */
12104 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12105 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12107 /* Dereference the quo pointer argument. */
12108 arg_quo = build_fold_indirect_ref (arg_quo);
12109 /* Proceed iff a valid pointer type was passed in. */
12110 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12112 /* Set the value. */
12113 tree result_quo
12114 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12115 build_int_cst (TREE_TYPE (arg_quo),
12116 integer_quo));
12117 TREE_SIDE_EFFECTS (result_quo) = 1;
12118 /* Combine the quo assignment with the rem. */
12119 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12120 result_quo, result_rem));
12125 return result;
12128 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12129 resulting value as a tree with type TYPE. The mpfr precision is
12130 set to the precision of TYPE. We assume that this mpfr function
12131 returns zero if the result could be calculated exactly within the
12132 requested precision. In addition, the integer pointer represented
12133 by ARG_SG will be dereferenced and set to the appropriate signgam
12134 (-1,1) value. */
12136 static tree
12137 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12139 tree result = NULL_TREE;
12141 STRIP_NOPS (arg);
12143 /* To proceed, MPFR must exactly represent the target floating point
12144 format, which only happens when the target base equals two. Also
12145 verify ARG is a constant and that ARG_SG is an int pointer. */
12146 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12147 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12148 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12149 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12151 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12153 /* In addition to NaN and Inf, the argument cannot be zero or a
12154 negative integer. */
12155 if (real_isfinite (ra)
12156 && ra->cl != rvc_zero
12157 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12159 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12160 const int prec = fmt->p;
12161 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12162 int inexact, sg;
12163 mpfr_t m;
12164 tree result_lg;
12166 mpfr_init2 (m, prec);
12167 mpfr_from_real (m, ra, GMP_RNDN);
12168 mpfr_clear_flags ();
12169 inexact = mpfr_lgamma (m, &sg, m, rnd);
12170 result_lg = do_mpfr_ckconv (m, type, inexact);
12171 mpfr_clear (m);
12172 if (result_lg)
12174 tree result_sg;
12176 /* Dereference the arg_sg pointer argument. */
12177 arg_sg = build_fold_indirect_ref (arg_sg);
12178 /* Assign the signgam value into *arg_sg. */
12179 result_sg = fold_build2 (MODIFY_EXPR,
12180 TREE_TYPE (arg_sg), arg_sg,
12181 build_int_cst (TREE_TYPE (arg_sg), sg));
12182 TREE_SIDE_EFFECTS (result_sg) = 1;
12183 /* Combine the signgam assignment with the lgamma result. */
12184 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12185 result_sg, result_lg));
12190 return result;
12193 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12194 function FUNC on it and return the resulting value as a tree with
12195 type TYPE. The mpfr precision is set to the precision of TYPE. We
12196 assume that function FUNC returns zero if the result could be
12197 calculated exactly within the requested precision. */
12199 static tree
12200 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12202 tree result = NULL_TREE;
12204 STRIP_NOPS (arg);
12206 /* To proceed, MPFR must exactly represent the target floating point
12207 format, which only happens when the target base equals two. */
12208 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12209 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12210 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12212 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12213 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12215 if (real_isfinite (re) && real_isfinite (im))
12217 const struct real_format *const fmt =
12218 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12219 const int prec = fmt->p;
12220 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12221 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12222 int inexact;
12223 mpc_t m;
12225 mpc_init2 (m, prec);
12226 mpfr_from_real (mpc_realref (m), re, rnd);
12227 mpfr_from_real (mpc_imagref (m), im, rnd);
12228 mpfr_clear_flags ();
12229 inexact = func (m, m, crnd);
12230 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12231 mpc_clear (m);
12235 return result;
12238 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12239 mpc function FUNC on it and return the resulting value as a tree
12240 with type TYPE. The mpfr precision is set to the precision of
12241 TYPE. We assume that function FUNC returns zero if the result
12242 could be calculated exactly within the requested precision. If
12243 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12244 in the arguments and/or results. */
12246 tree
12247 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12248 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12250 tree result = NULL_TREE;
12252 STRIP_NOPS (arg0);
12253 STRIP_NOPS (arg1);
12255 /* To proceed, MPFR must exactly represent the target floating point
12256 format, which only happens when the target base equals two. */
12257 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12258 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12259 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12260 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12261 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12263 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12264 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12265 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12266 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12268 if (do_nonfinite
12269 || (real_isfinite (re0) && real_isfinite (im0)
12270 && real_isfinite (re1) && real_isfinite (im1)))
12272 const struct real_format *const fmt =
12273 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12274 const int prec = fmt->p;
12275 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12276 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12277 int inexact;
12278 mpc_t m0, m1;
12280 mpc_init2 (m0, prec);
12281 mpc_init2 (m1, prec);
12282 mpfr_from_real (mpc_realref (m0), re0, rnd);
12283 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12284 mpfr_from_real (mpc_realref (m1), re1, rnd);
12285 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12286 mpfr_clear_flags ();
12287 inexact = func (m0, m0, m1, crnd);
12288 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12289 mpc_clear (m0);
12290 mpc_clear (m1);
12294 return result;
12297 /* A wrapper function for builtin folding that prevents warnings for
12298 "statement without effect" and the like, caused by removing the
12299 call node earlier than the warning is generated. */
12301 tree
12302 fold_call_stmt (gcall *stmt, bool ignore)
12304 tree ret = NULL_TREE;
12305 tree fndecl = gimple_call_fndecl (stmt);
12306 location_t loc = gimple_location (stmt);
12307 if (fndecl
12308 && TREE_CODE (fndecl) == FUNCTION_DECL
12309 && DECL_BUILT_IN (fndecl)
12310 && !gimple_call_va_arg_pack_p (stmt))
12312 int nargs = gimple_call_num_args (stmt);
12313 tree *args = (nargs > 0
12314 ? gimple_call_arg_ptr (stmt, 0)
12315 : &error_mark_node);
12317 if (avoid_folding_inline_builtin (fndecl))
12318 return NULL_TREE;
12319 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12321 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12323 else
12325 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12326 if (ret)
12328 /* Propagate location information from original call to
12329 expansion of builtin. Otherwise things like
12330 maybe_emit_chk_warning, that operate on the expansion
12331 of a builtin, will use the wrong location information. */
12332 if (gimple_has_location (stmt))
12334 tree realret = ret;
12335 if (TREE_CODE (ret) == NOP_EXPR)
12336 realret = TREE_OPERAND (ret, 0);
12337 if (CAN_HAVE_LOCATION_P (realret)
12338 && !EXPR_HAS_LOCATION (realret))
12339 SET_EXPR_LOCATION (realret, loc);
12340 return realret;
12342 return ret;
12346 return NULL_TREE;
12349 /* Look up the function in builtin_decl that corresponds to DECL
12350 and set ASMSPEC as its user assembler name. DECL must be a
12351 function decl that declares a builtin. */
12353 void
12354 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12356 tree builtin;
12357 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12358 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12359 && asmspec != 0);
12361 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12362 set_user_assembler_name (builtin, asmspec);
12363 switch (DECL_FUNCTION_CODE (decl))
12365 case BUILT_IN_MEMCPY:
12366 init_block_move_fn (asmspec);
12367 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12368 break;
12369 case BUILT_IN_MEMSET:
12370 init_block_clear_fn (asmspec);
12371 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12372 break;
12373 case BUILT_IN_MEMMOVE:
12374 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12375 break;
12376 case BUILT_IN_MEMCMP:
12377 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12378 break;
12379 case BUILT_IN_ABORT:
12380 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12381 break;
12382 case BUILT_IN_FFS:
12383 if (INT_TYPE_SIZE < BITS_PER_WORD)
12385 set_user_assembler_libfunc ("ffs", asmspec);
12386 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12387 MODE_INT, 0), "ffs");
12389 break;
12390 default:
12391 break;
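/* Illustrative sketch, added for exposition (not part of the original
   source; the symbol name my_memcpy is hypothetical, and the block is
   compiled out): the source-level redeclaration that reaches this
   function.  Renaming a builtin this way also redirects the compiler's
   own generated libcalls (e.g. block moves) to the new symbol.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif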
12395 /* Return true if DECL is a builtin that expands to a constant or similarly
12396 simple code. */
12397 bool
12398 is_simple_builtin (tree decl)
12400 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12401 switch (DECL_FUNCTION_CODE (decl))
12403 /* Builtins that expand to constants. */
12404 case BUILT_IN_CONSTANT_P:
12405 case BUILT_IN_EXPECT:
12406 case BUILT_IN_OBJECT_SIZE:
12407 case BUILT_IN_UNREACHABLE:
12408 /* Simple register moves or loads from stack. */
12409 case BUILT_IN_ASSUME_ALIGNED:
12410 case BUILT_IN_RETURN_ADDRESS:
12411 case BUILT_IN_EXTRACT_RETURN_ADDR:
12412 case BUILT_IN_FROB_RETURN_ADDR:
12413 case BUILT_IN_RETURN:
12414 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12415 case BUILT_IN_FRAME_ADDRESS:
12416 case BUILT_IN_VA_END:
12417 case BUILT_IN_STACK_SAVE:
12418 case BUILT_IN_STACK_RESTORE:
12419 /* Exception state returns or moves registers around. */
12420 case BUILT_IN_EH_FILTER:
12421 case BUILT_IN_EH_POINTER:
12422 case BUILT_IN_EH_COPY_VALUES:
12423 return true;
12425 default:
12426 return false;
12429 return false;
12432 /* Return true if DECL is a builtin that is not expensive, i.e., one that
12433 is most probably expanded inline into reasonably simple code. This is a
12434 superset of is_simple_builtin. */
12435 bool
12436 is_inexpensive_builtin (tree decl)
12438 if (!decl)
12439 return false;
12440 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12441 return true;
12442 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12443 switch (DECL_FUNCTION_CODE (decl))
12445 case BUILT_IN_ABS:
12446 case BUILT_IN_ALLOCA:
12447 case BUILT_IN_ALLOCA_WITH_ALIGN:
12448 case BUILT_IN_BSWAP16:
12449 case BUILT_IN_BSWAP32:
12450 case BUILT_IN_BSWAP64:
12451 case BUILT_IN_CLZ:
12452 case BUILT_IN_CLZIMAX:
12453 case BUILT_IN_CLZL:
12454 case BUILT_IN_CLZLL:
12455 case BUILT_IN_CTZ:
12456 case BUILT_IN_CTZIMAX:
12457 case BUILT_IN_CTZL:
12458 case BUILT_IN_CTZLL:
12459 case BUILT_IN_FFS:
12460 case BUILT_IN_FFSIMAX:
12461 case BUILT_IN_FFSL:
12462 case BUILT_IN_FFSLL:
12463 case BUILT_IN_IMAXABS:
12464 case BUILT_IN_FINITE:
12465 case BUILT_IN_FINITEF:
12466 case BUILT_IN_FINITEL:
12467 case BUILT_IN_FINITED32:
12468 case BUILT_IN_FINITED64:
12469 case BUILT_IN_FINITED128:
12470 case BUILT_IN_FPCLASSIFY:
12471 case BUILT_IN_ISFINITE:
12472 case BUILT_IN_ISINF_SIGN:
12473 case BUILT_IN_ISINF:
12474 case BUILT_IN_ISINFF:
12475 case BUILT_IN_ISINFL:
12476 case BUILT_IN_ISINFD32:
12477 case BUILT_IN_ISINFD64:
12478 case BUILT_IN_ISINFD128:
12479 case BUILT_IN_ISNAN:
12480 case BUILT_IN_ISNANF:
12481 case BUILT_IN_ISNANL:
12482 case BUILT_IN_ISNAND32:
12483 case BUILT_IN_ISNAND64:
12484 case BUILT_IN_ISNAND128:
12485 case BUILT_IN_ISNORMAL:
12486 case BUILT_IN_ISGREATER:
12487 case BUILT_IN_ISGREATEREQUAL:
12488 case BUILT_IN_ISLESS:
12489 case BUILT_IN_ISLESSEQUAL:
12490 case BUILT_IN_ISLESSGREATER:
12491 case BUILT_IN_ISUNORDERED:
12492 case BUILT_IN_VA_ARG_PACK:
12493 case BUILT_IN_VA_ARG_PACK_LEN:
12494 case BUILT_IN_VA_COPY:
12495 case BUILT_IN_TRAP:
12496 case BUILT_IN_SAVEREGS:
12497 case BUILT_IN_POPCOUNTL:
12498 case BUILT_IN_POPCOUNTLL:
12499 case BUILT_IN_POPCOUNTIMAX:
12500 case BUILT_IN_POPCOUNT:
12501 case BUILT_IN_PARITYL:
12502 case BUILT_IN_PARITYLL:
12503 case BUILT_IN_PARITYIMAX:
12504 case BUILT_IN_PARITY:
12505 case BUILT_IN_LABS:
12506 case BUILT_IN_LLABS:
12507 case BUILT_IN_PREFETCH:
12508 case BUILT_IN_ACC_ON_DEVICE:
12509 return true;
12511 default:
12512 return is_simple_builtin (decl);
12515 return false;