* testsuite/17_intro/static.cc: Ignore AIX TOC reload warnings.
[official-gcc.git] / gcc / builtins.c
blobe96245709c08d8c62dc43e5cca8eabc59848ac88
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "regs.h"
36 #include "hard-reg-set.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "libfuncs.h"
43 #include "recog.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "predict.h"
47 #include "tm_p.h"
48 #include "target.h"
49 #include "langhooks.h"
50 #include "basic-block.h"
51 #include "tree-ssanames.h"
52 #include "tree-dfa.h"
53 #include "value-prof.h"
54 #include "diagnostic-core.h"
55 #include "builtins.h"
56 #include "ubsan.h"
57 #include "cilk.h"
60 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
62 struct target_builtins default_target_builtins;
63 #if SWITCHABLE_TARGET
64 struct target_builtins *this_target_builtins = &default_target_builtins;
65 #endif
67 /* Define the names of the builtin function types and codes. */
68 const char *const built_in_class_names[BUILT_IN_LAST]
69 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
71 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
72 const char * built_in_names[(int) END_BUILTINS] =
74 #include "builtins.def"
76 #undef DEF_BUILTIN
78 /* Setup an array of _DECL trees, make sure each element is
79 initialized to NULL_TREE. */
80 builtin_info_type builtin_info;
82 /* Non-zero if __builtin_constant_p should be folded right away. */
83 bool force_folding_builtin_constant_p;
85 static const char *c_getstr (tree);
86 static rtx c_readstr (const char *, enum machine_mode);
87 static int target_char_cast (tree, char *);
88 static rtx get_memory_rtx (tree, tree);
89 static int apply_args_size (void);
90 static int apply_result_size (void);
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector (int, rtx);
93 #endif
94 static void expand_builtin_update_setjmp_buf (rtx);
95 static void expand_builtin_prefetch (tree);
96 static rtx expand_builtin_apply_args (void);
97 static rtx expand_builtin_apply_args_1 (void);
98 static rtx expand_builtin_apply (rtx, rtx, rtx);
99 static void expand_builtin_return (rtx);
100 static enum type_class type_to_class (tree);
101 static rtx expand_builtin_classify_type (tree);
102 static void expand_errno_check (tree, rtx);
103 static rtx expand_builtin_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
107 static rtx expand_builtin_interclass_mathfn (tree, rtx);
108 static rtx expand_builtin_sincos (tree);
109 static rtx expand_builtin_cexpi (tree, rtx);
110 static rtx expand_builtin_int_roundingfn (tree, rtx);
111 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
112 static rtx expand_builtin_next_arg (void);
113 static rtx expand_builtin_va_start (tree);
114 static rtx expand_builtin_va_end (tree);
115 static rtx expand_builtin_va_copy (tree);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_strcpy (tree, rtx);
125 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
126 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strncpy (tree, rtx);
128 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
131 static rtx expand_builtin_bzero (tree);
132 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_alloca (tree, bool);
134 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
135 static rtx expand_builtin_frame_address (tree, tree);
136 static tree stabilize_va_list_loc (location_t, tree, int);
137 static rtx expand_builtin_expect (tree, rtx);
138 static tree fold_builtin_constant_p (tree);
139 static tree fold_builtin_expect (location_t, tree, tree);
140 static tree fold_builtin_classify_type (tree);
141 static tree fold_builtin_strlen (location_t, tree, tree);
142 static tree fold_builtin_inf (location_t, tree, int);
143 static tree fold_builtin_nan (tree, tree, int);
144 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
145 static bool validate_arg (const_tree, enum tree_code code);
146 static bool integer_valued_real_p (tree);
147 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
148 static bool readonly_data_expr (tree);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_sqrt (location_t, tree, tree);
152 static tree fold_builtin_cbrt (location_t, tree, tree);
153 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_cos (location_t, tree, tree, tree);
156 static tree fold_builtin_cosh (location_t, tree, tree, tree);
157 static tree fold_builtin_tan (tree, tree);
158 static tree fold_builtin_trunc (location_t, tree, tree);
159 static tree fold_builtin_floor (location_t, tree, tree);
160 static tree fold_builtin_ceil (location_t, tree, tree);
161 static tree fold_builtin_round (location_t, tree, tree);
162 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
163 static tree fold_builtin_bitop (tree, tree);
164 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
165 static tree fold_builtin_strchr (location_t, tree, tree, tree);
166 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_strcmp (location_t, tree, tree);
169 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
170 static tree fold_builtin_signbit (location_t, tree, tree);
171 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_isascii (location_t, tree);
173 static tree fold_builtin_toascii (location_t, tree);
174 static tree fold_builtin_isdigit (location_t, tree);
175 static tree fold_builtin_fabs (location_t, tree, tree);
176 static tree fold_builtin_abs (location_t, tree, tree);
177 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 enum tree_code);
179 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
180 static tree fold_builtin_0 (location_t, tree, bool);
181 static tree fold_builtin_1 (location_t, tree, tree, bool);
182 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
183 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
184 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
185 static tree fold_builtin_varargs (location_t, tree, tree, bool);
187 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
188 static tree fold_builtin_strstr (location_t, tree, tree, tree);
189 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
190 static tree fold_builtin_strcat (location_t, tree, tree);
191 static tree fold_builtin_strncat (location_t, tree, tree, tree);
192 static tree fold_builtin_strspn (location_t, tree, tree);
193 static tree fold_builtin_strcspn (location_t, tree, tree);
194 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
195 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
197 static rtx expand_builtin_object_size (tree);
198 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
199 enum built_in_function);
200 static void maybe_emit_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_free_warning (tree);
203 static tree fold_builtin_object_size (tree, tree);
204 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
205 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
206 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
207 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
208 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
209 enum built_in_function);
210 static bool init_target_chars (void);
212 static unsigned HOST_WIDE_INT target_newline;
213 static unsigned HOST_WIDE_INT target_percent;
214 static unsigned HOST_WIDE_INT target_c;
215 static unsigned HOST_WIDE_INT target_s;
216 static char target_percent_c[3];
217 static char target_percent_s[3];
218 static char target_percent_s_newline[4];
219 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
220 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
221 static tree do_mpfr_arg2 (tree, tree, tree,
222 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
223 static tree do_mpfr_arg3 (tree, tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_sincos (tree, tree, tree);
226 static tree do_mpfr_bessel_n (tree, tree, tree,
227 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_remquo (tree, tree, tree);
230 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 static void expand_builtin_sync_synchronize (void);
233 /* Return true if NAME starts with __builtin_ or __sync_. */
235 static bool
236 is_builtin_name (const char *name)
238 if (strncmp (name, "__builtin_", 10) == 0)
239 return true;
240 if (strncmp (name, "__sync_", 7) == 0)
241 return true;
242 if (strncmp (name, "__atomic_", 9) == 0)
243 return true;
244 if (flag_enable_cilkplus
245 && (!strcmp (name, "__cilkrts_detach")
246 || !strcmp (name, "__cilkrts_pop_frame")))
247 return true;
248 return false;
252 /* Return true if DECL is a function symbol representing a built-in. */
254 bool
255 is_builtin_fn (tree decl)
257 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
260 /* By default we assume that c99 functions are present at the runtime,
261 but sincos is not. */
262 bool
263 default_libc_has_function (enum function_class fn_class)
265 if (fn_class == function_c94
266 || fn_class == function_c99_misc
267 || fn_class == function_c99_math_complex)
268 return true;
270 return false;
273 bool
274 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
276 return true;
279 bool
280 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
282 return false;
285 /* Return true if NODE should be considered for inline expansion regardless
286 of the optimization level. This means whenever a function is invoked with
287 its "internal" name, which normally contains the prefix "__builtin". */
289 static bool
290 called_as_built_in (tree node)
292 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
293 we want the name used to call the function, not the name it
294 will have. */
295 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
296 return is_builtin_name (name);
299 /* Compute values M and N such that M divides (address of EXP - N) and such
300 that N < M. If these numbers can be determined, store M in alignp and N in
301 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
302 *alignp and any bit-offset to *bitposp.
304 Note that the address (and thus the alignment) computed here is based
305 on the address to which a symbol resolves, whereas DECL_ALIGN is based
306 on the address at which an object is actually located. These two
307 addresses are not always the same. For example, on ARM targets,
308 the address &foo of a Thumb function foo() has the lowest bit set,
309 whereas foo() itself starts on an even address.
311 If ADDR_P is true we are taking the address of the memory reference EXP
312 and thus cannot rely on the access taking place. */
314 static bool
315 get_object_alignment_2 (tree exp, unsigned int *alignp,
316 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
318 HOST_WIDE_INT bitsize, bitpos;
319 tree offset;
320 enum machine_mode mode;
321 int unsignedp, volatilep;
322 unsigned int align = BITS_PER_UNIT;
323 bool known_alignment = false;
325 /* Get the innermost object and the constant (bitpos) and possibly
326 variable (offset) offset of the access. */
327 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
328 &mode, &unsignedp, &volatilep, true);
330 /* Extract alignment information from the innermost object and
331 possibly adjust bitpos and offset. */
332 if (TREE_CODE (exp) == FUNCTION_DECL)
334 /* Function addresses can encode extra information besides their
335 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
336 allows the low bit to be used as a virtual bit, we know
337 that the address itself must be at least 2-byte aligned. */
338 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
339 align = 2 * BITS_PER_UNIT;
341 else if (TREE_CODE (exp) == LABEL_DECL)
343 else if (TREE_CODE (exp) == CONST_DECL)
345 /* The alignment of a CONST_DECL is determined by its initializer. */
346 exp = DECL_INITIAL (exp);
347 align = TYPE_ALIGN (TREE_TYPE (exp));
348 #ifdef CONSTANT_ALIGNMENT
349 if (CONSTANT_CLASS_P (exp))
350 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
351 #endif
352 known_alignment = true;
354 else if (DECL_P (exp))
356 align = DECL_ALIGN (exp);
357 known_alignment = true;
359 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
361 align = TYPE_ALIGN (TREE_TYPE (exp));
363 else if (TREE_CODE (exp) == INDIRECT_REF
364 || TREE_CODE (exp) == MEM_REF
365 || TREE_CODE (exp) == TARGET_MEM_REF)
367 tree addr = TREE_OPERAND (exp, 0);
368 unsigned ptr_align;
369 unsigned HOST_WIDE_INT ptr_bitpos;
371 if (TREE_CODE (addr) == BIT_AND_EXPR
372 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
374 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
375 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
376 align *= BITS_PER_UNIT;
377 addr = TREE_OPERAND (addr, 0);
380 known_alignment
381 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
382 align = MAX (ptr_align, align);
384 /* The alignment of the pointer operand in a TARGET_MEM_REF
385 has to take the variable offset parts into account. */
386 if (TREE_CODE (exp) == TARGET_MEM_REF)
388 if (TMR_INDEX (exp))
390 unsigned HOST_WIDE_INT step = 1;
391 if (TMR_STEP (exp))
392 step = TREE_INT_CST_LOW (TMR_STEP (exp));
393 align = MIN (align, (step & -step) * BITS_PER_UNIT);
395 if (TMR_INDEX2 (exp))
396 align = BITS_PER_UNIT;
397 known_alignment = false;
400 /* When EXP is an actual memory reference then we can use
401 TYPE_ALIGN of a pointer indirection to derive alignment.
402 Do so only if get_pointer_alignment_1 did not reveal absolute
403 alignment knowledge and if using that alignment would
404 improve the situation. */
405 if (!addr_p && !known_alignment
406 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
407 align = TYPE_ALIGN (TREE_TYPE (exp));
408 else
410 /* Else adjust bitpos accordingly. */
411 bitpos += ptr_bitpos;
412 if (TREE_CODE (exp) == MEM_REF
413 || TREE_CODE (exp) == TARGET_MEM_REF)
414 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
417 else if (TREE_CODE (exp) == STRING_CST)
419 /* STRING_CST are the only constant objects we allow to be not
420 wrapped inside a CONST_DECL. */
421 align = TYPE_ALIGN (TREE_TYPE (exp));
422 #ifdef CONSTANT_ALIGNMENT
423 if (CONSTANT_CLASS_P (exp))
424 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
425 #endif
426 known_alignment = true;
429 /* If there is a non-constant offset part extract the maximum
430 alignment that can prevail. */
431 if (offset)
433 int trailing_zeros = tree_ctz (offset);
434 if (trailing_zeros < HOST_BITS_PER_INT)
436 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
437 if (inner)
438 align = MIN (align, inner);
442 *alignp = align;
443 *bitposp = bitpos & (*alignp - 1);
444 return known_alignment;
447 /* For a memory reference expression EXP compute values M and N such that M
448 divides (&EXP - N) and such that N < M. If these numbers can be determined,
449 store M in alignp and N in *BITPOSP and return true. Otherwise return false
450 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
452 bool
453 get_object_alignment_1 (tree exp, unsigned int *alignp,
454 unsigned HOST_WIDE_INT *bitposp)
456 return get_object_alignment_2 (exp, alignp, bitposp, false);
459 /* Return the alignment in bits of EXP, an object. */
461 unsigned int
462 get_object_alignment (tree exp)
464 unsigned HOST_WIDE_INT bitpos = 0;
465 unsigned int align;
467 get_object_alignment_1 (exp, &align, &bitpos);
469 /* align and bitpos now specify known low bits of the pointer.
470 ptr & (align - 1) == bitpos. */
472 if (bitpos != 0)
473 align = (bitpos & -bitpos);
474 return align;
477 /* For a pointer valued expression EXP compute values M and N such that M
478 divides (EXP - N) and such that N < M. If these numbers can be determined,
479 store M in alignp and N in *BITPOSP and return true. Return false if
480 the results are just a conservative approximation.
482 If EXP is not a pointer, false is returned too. */
484 bool
485 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
486 unsigned HOST_WIDE_INT *bitposp)
488 STRIP_NOPS (exp);
490 if (TREE_CODE (exp) == ADDR_EXPR)
491 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
492 alignp, bitposp, true);
493 else if (TREE_CODE (exp) == SSA_NAME
494 && POINTER_TYPE_P (TREE_TYPE (exp)))
496 unsigned int ptr_align, ptr_misalign;
497 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
499 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
501 *bitposp = ptr_misalign * BITS_PER_UNIT;
502 *alignp = ptr_align * BITS_PER_UNIT;
503 /* We cannot really tell whether this result is an approximation. */
504 return true;
506 else
508 *bitposp = 0;
509 *alignp = BITS_PER_UNIT;
510 return false;
513 else if (TREE_CODE (exp) == INTEGER_CST)
515 *alignp = BIGGEST_ALIGNMENT;
516 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
517 & (BIGGEST_ALIGNMENT - 1));
518 return true;
521 *bitposp = 0;
522 *alignp = BITS_PER_UNIT;
523 return false;
526 /* Return the alignment in bits of EXP, a pointer valued expression.
527 The alignment returned is, by default, the alignment of the thing that
528 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
530 Otherwise, look at the expression to see if we can do better, i.e., if the
531 expression is actually pointing at an object whose alignment is tighter. */
533 unsigned int
534 get_pointer_alignment (tree exp)
536 unsigned HOST_WIDE_INT bitpos = 0;
537 unsigned int align;
539 get_pointer_alignment_1 (exp, &align, &bitpos);
541 /* align and bitpos now specify known low bits of the pointer.
542 ptr & (align - 1) == bitpos. */
544 if (bitpos != 0)
545 align = (bitpos & -bitpos);
547 return align;
550 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
551 way, because it could contain a zero byte in the middle.
552 TREE_STRING_LENGTH is the size of the character array, not the string.
554 ONLY_VALUE should be nonzero if the result is not going to be emitted
555 into the instruction stream and zero if it is going to be expanded.
556 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
557 is returned, otherwise NULL, since
558 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
559 evaluate the side-effects.
561 The value returned is of type `ssizetype'.
563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
566 tree
567 c_strlen (tree src, int only_value)
569 tree offset_node;
570 HOST_WIDE_INT offset;
571 int max;
572 const char *ptr;
573 location_t loc;
575 STRIP_NOPS (src);
576 if (TREE_CODE (src) == COND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
579 tree len1, len2;
581 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
582 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
583 if (tree_int_cst_equal (len1, len2))
584 return len1;
587 if (TREE_CODE (src) == COMPOUND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 return c_strlen (TREE_OPERAND (src, 1), only_value);
591 loc = EXPR_LOC_OR_HERE (src);
593 src = string_constant (src, &offset_node);
594 if (src == 0)
595 return NULL_TREE;
597 max = TREE_STRING_LENGTH (src) - 1;
598 ptr = TREE_STRING_POINTER (src);
600 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 int i;
607 for (i = 0; i < max; i++)
608 if (ptr[i] == 0)
609 return NULL_TREE;
611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
618 return size_diffop_loc (loc, size_int (max), offset_node);
621 /* We have a known offset into the string. Start searching there for
622 a null character if we can represent it as a single HOST_WIDE_INT. */
623 if (offset_node == 0)
624 offset = 0;
625 else if (! tree_fits_shwi_p (offset_node))
626 offset = -1;
627 else
628 offset = tree_to_shwi (offset_node);
630 /* If the offset is known to be out of bounds, warn, and call strlen at
631 runtime. */
632 if (offset < 0 || offset > max)
634 /* Suppress multiple warnings for propagated constant strings. */
635 if (! TREE_NO_WARNING (src))
637 warning_at (loc, 0, "offset outside bounds of constant string");
638 TREE_NO_WARNING (src) = 1;
640 return NULL_TREE;
643 /* Use strlen to search for the first zero byte. Since any strings
644 constructed with build_string will have nulls appended, we win even
645 if we get handed something like (char[4])"abcd".
647 Since OFFSET is our starting index into the string, no further
648 calculation is needed. */
649 return ssize_int (strlen (ptr + offset));
652 /* Return a char pointer for a C string if it is a string constant
653 or sum of string constant and integer constant. */
655 static const char *
656 c_getstr (tree src)
658 tree offset_node;
660 src = string_constant (src, &offset_node);
661 if (src == 0)
662 return 0;
664 if (offset_node == 0)
665 return TREE_STRING_POINTER (src);
666 else if (!tree_fits_uhwi_p (offset_node)
667 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
668 return 0;
670 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
673 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
674 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
676 static rtx
677 c_readstr (const char *str, enum machine_mode mode)
679 HOST_WIDE_INT c[2];
680 HOST_WIDE_INT ch;
681 unsigned int i, j;
683 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
685 c[0] = 0;
686 c[1] = 0;
687 ch = 1;
688 for (i = 0; i < GET_MODE_SIZE (mode); i++)
690 j = i;
691 if (WORDS_BIG_ENDIAN)
692 j = GET_MODE_SIZE (mode) - i - 1;
693 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
694 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
695 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
696 j *= BITS_PER_UNIT;
697 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
699 if (ch)
700 ch = (unsigned char) str[i];
701 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
703 return immed_double_const (c[0], c[1], mode);
706 /* Cast a target constant CST to target CHAR and if that value fits into
707 host char type, return zero and put that value into variable pointed to by
708 P. */
710 static int
711 target_char_cast (tree cst, char *p)
713 unsigned HOST_WIDE_INT val, hostval;
715 if (TREE_CODE (cst) != INTEGER_CST
716 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
717 return 1;
719 val = TREE_INT_CST_LOW (cst);
720 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
721 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
723 hostval = val;
724 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
725 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
727 if (val != hostval)
728 return 1;
730 *p = hostval;
731 return 0;
734 /* Similar to save_expr, but assumes that arbitrary code is not executed
735 in between the multiple evaluations. In particular, we assume that a
736 non-addressable local variable will not be modified. */
738 static tree
739 builtin_save_expr (tree exp)
741 if (TREE_CODE (exp) == SSA_NAME
742 || (TREE_ADDRESSABLE (exp) == 0
743 && (TREE_CODE (exp) == PARM_DECL
744 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
745 return exp;
747 return save_expr (exp);
750 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
751 times to get the address of either a higher stack frame, or a return
752 address located within it (depending on FNDECL_CODE). */
754 static rtx
755 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
757 int i;
759 #ifdef INITIAL_FRAME_ADDRESS_RTX
760 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
761 #else
762 rtx tem;
764 /* For a zero count with __builtin_return_address, we don't care what
765 frame address we return, because target-specific definitions will
766 override us. Therefore frame pointer elimination is OK, and using
767 the soft frame pointer is OK.
769 For a nonzero count, or a zero count with __builtin_frame_address,
770 we require a stable offset from the current frame pointer to the
771 previous one, so we must use the hard frame pointer, and
772 we must disable frame pointer elimination. */
773 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
774 tem = frame_pointer_rtx;
775 else
777 tem = hard_frame_pointer_rtx;
779 /* Tell reload not to eliminate the frame pointer. */
780 crtl->accesses_prior_frames = 1;
782 #endif
784 /* Some machines need special handling before we can access
785 arbitrary frames. For example, on the SPARC, we must first flush
786 all register windows to the stack. */
787 #ifdef SETUP_FRAME_ADDRESSES
788 if (count > 0)
789 SETUP_FRAME_ADDRESSES ();
790 #endif
792 /* On the SPARC, the return address is not in the frame, it is in a
793 register. There is no way to access it off of the current frame
794 pointer, but it can be accessed off the previous frame pointer by
795 reading the value from the register window save area. */
796 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
797 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
798 count--;
799 #endif
801 /* Scan back COUNT frames to the specified frame. */
802 for (i = 0; i < count; i++)
804 /* Assume the dynamic chain pointer is in the word that the
805 frame address points to, unless otherwise specified. */
806 #ifdef DYNAMIC_CHAIN_ADDRESS
807 tem = DYNAMIC_CHAIN_ADDRESS (tem);
808 #endif
809 tem = memory_address (Pmode, tem);
810 tem = gen_frame_mem (Pmode, tem);
811 tem = copy_to_reg (tem);
814 /* For __builtin_frame_address, return what we've got. But, on
815 the SPARC for example, we may have to add a bias. */
816 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
817 #ifdef FRAME_ADDR_RTX
818 return FRAME_ADDR_RTX (tem);
819 #else
820 return tem;
821 #endif
823 /* For __builtin_return_address, get the return address from that frame. */
824 #ifdef RETURN_ADDR_RTX
825 tem = RETURN_ADDR_RTX (count, tem);
826 #else
827 tem = memory_address (Pmode,
828 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
829 tem = gen_frame_mem (Pmode, tem);
830 #endif
831 return tem;
834 /* Alias set used for setjmp buffer. */
835 static alias_set_type setjmp_alias_set = -1;
837 /* Construct the leading half of a __builtin_setjmp call. Control will
838 return to RECEIVER_LABEL. This is also called directly by the SJLJ
839 exception handling code. */
841 void
842 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
844 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
845 rtx stack_save;
846 rtx mem;
848 if (setjmp_alias_set == -1)
849 setjmp_alias_set = new_alias_set ();
851 buf_addr = convert_memory_address (Pmode, buf_addr);
853 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
855 /* We store the frame pointer and the address of receiver_label in
856 the buffer and use the rest of it for the stack save area, which
857 is machine-dependent. */
859 mem = gen_rtx_MEM (Pmode, buf_addr);
860 set_mem_alias_set (mem, setjmp_alias_set);
861 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
863 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
864 GET_MODE_SIZE (Pmode))),
865 set_mem_alias_set (mem, setjmp_alias_set);
867 emit_move_insn (validize_mem (mem),
868 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
870 stack_save = gen_rtx_MEM (sa_mode,
871 plus_constant (Pmode, buf_addr,
872 2 * GET_MODE_SIZE (Pmode)));
873 set_mem_alias_set (stack_save, setjmp_alias_set);
874 emit_stack_save (SAVE_NONLOCAL, &stack_save);
876 /* If there is further processing to do, do it. */
877 #ifdef HAVE_builtin_setjmp_setup
878 if (HAVE_builtin_setjmp_setup)
879 emit_insn (gen_builtin_setjmp_setup (buf_addr));
880 #endif
882 /* We have a nonlocal label. */
883 cfun->has_nonlocal_label = 1;
886 /* Construct the trailing part of a __builtin_setjmp call.  This is
887 also called directly by the SJLJ exception handling code.
888 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
890 void
891 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
893 rtx chain;
/* Control re-enters here after a longjmp-style transfer, so the hard
   frame pointer must be considered live at this point.  */
895 /* Mark the FP as used when we get here, so we have to make sure it's
896 marked as used by this function.  */
897 emit_use (hard_frame_pointer_rtx);
899 /* Mark the static chain as clobbered here so life information
900 doesn't get messed up for it.  */
901 chain = targetm.calls.static_chain (current_function_decl, true);
902 if (chain && REG_P (chain))
903 emit_clobber (chain);
905 /* Now put in the code to restore the frame pointer, and argument
906 pointer, if needed.  */
907 #ifdef HAVE_nonlocal_goto
/* A target-provided nonlocal_goto pattern is responsible for restoring
   the frame pointer itself, so the generic restore below is skipped.  */
908 if (! HAVE_nonlocal_goto)
909 #endif
910 /* First adjust our frame pointer to its actual value.  It was
911 previously set to the start of the virtual area corresponding to
912 the stacked variables when we branched here and now needs to be
913 adjusted to the actual hardware fp value.
915 Assignments to virtual registers are converted by
916 instantiate_virtual_regs into the corresponding assignment
917 to the underlying register (fp in this case) that makes
918 the original assignment true.
919 So the following insn will actually be decrementing fp by
920 STARTING_FRAME_OFFSET.  */
921 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
923 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
924 if (fixed_regs[ARG_POINTER_REGNUM])
926 #ifdef ELIMINABLE_REGS
927 /* If the argument pointer can be eliminated in favor of the
928 frame pointer, we don't need to restore it.  We assume here
929 that if such an elimination is present, it can always be used.
930 This is the case on all known machines; if we don't make this
931 assumption, we do unnecessary saving on many machines.  */
932 size_t i;
933 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
935 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
936 if (elim_regs[i].from == ARG_POINTER_REGNUM
937 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
938 break;
940 if (i == ARRAY_SIZE (elim_regs))
941 #endif
943 /* Now restore our arg pointer from the address at which it
944 was saved in our stack frame.  */
945 emit_move_insn (crtl->args.internal_arg_pointer,
946 copy_to_reg (get_arg_pointer_save_area ()));
949 #endif
/* Give the target a chance to emit receiver-specific fixup code; fall
   back to the nonlocal_goto_receiver pattern when there is no label.  */
951 #ifdef HAVE_builtin_setjmp_receiver
952 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
953 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
954 else
955 #endif
956 #ifdef HAVE_nonlocal_goto_receiver
957 if (HAVE_nonlocal_goto_receiver)
958 emit_insn (gen_nonlocal_goto_receiver ());
959 else
960 #endif
961 { /* Nothing */ }
963 /* We must not allow the code we just generated to be reordered by
964 scheduling.  Specifically, the update of the frame pointer must
965 happen immediately, not later.  Similarly, we must block
966 (frame-related) register values to be used across this code.  */
967 emit_insn (gen_blockage ())
970 /* __builtin_longjmp is passed a pointer to an array of five words (not
971 all will be used on all machines).  It operates similarly to the C
972 library function of the same name, but is more efficient.  Much of
973 the code below is copied from the handling of non-local gotos.  */
975 static void
976 expand_builtin_longjmp (rtx buf_addr, rtx value)
978 rtx fp, lab, stack, insn, last;
979 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
981 /* DRAP is needed for stack realign if longjmp is expanded to current
982 function */
983 if (SUPPORTS_STACK_ALIGNMENT)
984 crtl->need_drap = true;
986 if (setjmp_alias_set == -1)
987 setjmp_alias_set = new_alias_set ();
989 buf_addr = convert_memory_address (Pmode, buf_addr);
991 buf_addr = force_reg (Pmode, buf_addr);
993 /* We require that the user must pass a second argument of 1, because
994 that is what builtin_setjmp will return.  */
995 gcc_assert (value == const1_rtx);
997 last = get_last_insn ();
/* Prefer a target-specific builtin_longjmp pattern when available;
   otherwise open-code the transfer from the setjmp buffer.  */
998 #ifdef HAVE_builtin_longjmp
999 if (HAVE_builtin_longjmp)
1000 emit_insn (gen_builtin_longjmp (buf_addr));
1001 else
1002 #endif
/* Buffer layout (matching __builtin_setjmp): word 0 = frame pointer,
   word 1 = receiver label, word 2.. = saved stack area.  */
1004 fp = gen_rtx_MEM (Pmode, buf_addr);
1005 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1006 GET_MODE_SIZE (Pmode)));
1008 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1009 2 * GET_MODE_SIZE (Pmode)));
1010 set_mem_alias_set (fp, setjmp_alias_set);
1011 set_mem_alias_set (lab, setjmp_alias_set);
1012 set_mem_alias_set (stack, setjmp_alias_set);
1014 /* Pick up FP, label, and SP from the block and jump.  This code is
1015 from expand_goto in stmt.c; see there for detailed comments.  */
1016 #ifdef HAVE_nonlocal_goto
1017 if (HAVE_nonlocal_goto)
1018 /* We have to pass a value to the nonlocal_goto pattern that will
1019 get copied into the static_chain pointer, but it does not matter
1020 what that value is, because builtin_setjmp does not use it.  */
1021 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1022 else
1023 #endif
/* Copy the label out before clobbering memory/FP: after the frame
   pointer is overwritten, buffer addresses may no longer be valid.  */
1025 lab = copy_to_reg (lab);
1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1030 emit_move_insn (hard_frame_pointer_rtx, fp);
1031 emit_stack_restore (SAVE_NONLOCAL, stack);
1033 emit_use (hard_frame_pointer_rtx);
1034 emit_use (stack_pointer_rtx);
1035 emit_indirect_jump (lab);
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function.  However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only.  */
1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
/* LAST was recorded before expansion, so a jump must appear first.  */
1046 gcc_assert (insn != last);
1048 if (JUMP_P (insn))
1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1051 break;
1053 else if (CALL_P (insn))
1054 break;
1058 static inline bool
1059 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1061 return (iter->i < iter->n);
1064 /* This function validates the types of a function call argument list
1065 against a specified list of tree_codes.  If the last specifier is a 0,
1066 that represents an ellipses, otherwise the last specifier must be a
1067 VOID_TYPE.  */
1069 static bool
1070 validate_arglist (const_tree callexpr, ...)
1072 enum tree_code code;
1073 bool res = 0;
1074 va_list ap;
1075 const_call_expr_arg_iterator iter;
1076 const_tree arg;
1078 va_start (ap, callexpr);
1079 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Each variadic specifier is fetched as int and cast back, because
   enum arguments undergo default promotion through varargs.  */
1083 code = (enum tree_code) va_arg (ap, int);
1084 switch (code)
1086 case 0:
1087 /* This signifies an ellipses, any further arguments are all ok.  */
1088 res = true;
1089 goto end;
1090 case VOID_TYPE:
1091 /* This signifies an endlink, if no arguments remain, return
1092 true, otherwise return false.  */
1093 res = !more_const_call_expr_args_p (&iter);
1094 goto end;
1095 default:
1096 /* If no parameters remain or the parameter's code does not
1097 match the specified code, return false.  Otherwise continue
1098 checking any remaining arguments.  */
1099 arg = next_const_call_expr_arg (&iter);
1100 if (!validate_arg (arg, code))
1101 goto end;
1102 break;
1105 while (1);
1107 /* We need gotos here since we can only have one VA_CLOSE in a
1108 function.  */
/* Single exit point so va_end is executed on every path.  */
1109 end: ;
1110 va_end (ap);
1112 return res;
1115 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1116 and the address of the save area.  */
1118 static rtx
1119 expand_builtin_nonlocal_goto (tree exp)
1121 tree t_label, t_save_area;
1122 rtx r_label, r_save_area, r_fp, r_sp, insn;
/* Bail out if the argument list is not (pointer, pointer).  */
1124 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1125 return NULL_RTX;
1127 t_label = CALL_EXPR_ARG (exp, 0);
1128 t_save_area = CALL_EXPR_ARG (exp, 1);
1130 r_label = expand_normal (t_label);
1131 r_label = convert_memory_address (Pmode, r_label);
1132 r_save_area = expand_normal (t_save_area);
1133 r_save_area = convert_memory_address (Pmode, r_save_area);
1134 /* Copy the address of the save location to a register just in case it was
1135 based on the frame pointer.  */
1136 r_save_area = copy_to_reg (r_save_area);
/* Save-area layout: word 0 = frame pointer, word 1 = saved stack area.  */
1137 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1138 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1139 plus_constant (Pmode, r_save_area,
1140 GET_MODE_SIZE (Pmode)));
1142 crtl->has_nonlocal_goto = 1;
1144 #ifdef HAVE_nonlocal_goto
1145 /* ??? We no longer need to pass the static chain value, afaik.  */
1146 if (HAVE_nonlocal_goto)
1147 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1148 else
1149 #endif
/* Open-coded fallback: copy the label out first, then clobber memory
   and FP before restoring the containing function's frame state.  */
1151 r_label = copy_to_reg (r_label);
1153 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1154 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1156 /* Restore frame pointer for containing function.  */
1157 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1158 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1160 /* USE of hard_frame_pointer_rtx added for consistency;
1161 not clear if really needed.  */
1162 emit_use (hard_frame_pointer_rtx);
1163 emit_use (stack_pointer_rtx);
1165 /* If the architecture is using a GP register, we must
1166 conservatively assume that the target function makes use of it.
1167 The prologue of functions with nonlocal gotos must therefore
1168 initialize the GP register to the appropriate value, and we
1169 must then make sure that this value is live at the point
1170 of the jump.  (Note that this doesn't necessarily apply
1171 to targets with a nonlocal_goto pattern; they are free
1172 to implement it in their own way.  Note also that this is
1173 a no-op if the GP register is a global invariant.)  */
1174 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1175 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1176 emit_use (pic_offset_table_rtx);
1178 emit_indirect_jump (r_label);
1181 /* Search backwards to the jump insn and mark it as a
1182 non-local goto.  */
1183 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1185 if (JUMP_P (insn))
1187 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1188 break;
1190 else if (CALL_P (insn))
1191 break;
1194 return const0_rtx;
1197 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1198 (not all will be used on all machines) that was passed to __builtin_setjmp.
1199 It updates the stack pointer in that block to correspond to the current
1200 stack pointer. */
1202 static void
1203 expand_builtin_update_setjmp_buf (rtx buf_addr)
1205 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1206 rtx stack_save
1207 = gen_rtx_MEM (sa_mode,
1208 memory_address
1209 (sa_mode,
1210 plus_constant (Pmode, buf_addr,
1211 2 * GET_MODE_SIZE (Pmode))));
1213 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1216 /* Expand a call to __builtin_prefetch.  For a target that does not support
1217 data prefetch, evaluate the memory address argument in case it has side
1218 effects.  */
1220 static void
1221 expand_builtin_prefetch (tree exp)
1223 tree arg0, arg1, arg2;
1224 int nargs;
1225 rtx op0, op1, op2;
/* First argument must be a pointer; the trailing 0 allows extra args.  */
1227 if (!validate_arglist (exp, POINTER_TYPE, 0))
1228 return;
1230 arg0 = CALL_EXPR_ARG (exp, 0);
1232 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1233 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1234 locality).  */
1235 nargs = call_expr_nargs (exp);
1236 if (nargs > 1)
1237 arg1 = CALL_EXPR_ARG (exp, 1);
1238 else
1239 arg1 = integer_zero_node;
1240 if (nargs > 2)
1241 arg2 = CALL_EXPR_ARG (exp, 2);
1242 else
1243 arg2 = integer_three_node;
1245 /* Argument 0 is an address.  */
1246 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1248 /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1249 if (TREE_CODE (arg1) != INTEGER_CST)
/* Hard error for a non-constant, then recover with the default.  */
1251 error ("second argument to %<__builtin_prefetch%> must be a constant");
1252 arg1 = integer_zero_node;
1254 op1 = expand_normal (arg1);
1255 /* Argument 1 must be either zero or one.  */
1256 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
/* Out-of-range constant only warns; zero is substituted.  */
1258 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1259 " using zero");
1260 op1 = const0_rtx;
1263 /* Argument 2 (locality) must be a compile-time constant int.  */
1264 if (TREE_CODE (arg2) != INTEGER_CST)
1266 error ("third argument to %<__builtin_prefetch%> must be a constant");
1267 arg2 = integer_zero_node;
1269 op2 = expand_normal (arg2);
1270 /* Argument 2 must be 0, 1, 2, or 3.  */
1271 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1273 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1274 op2 = const0_rtx;
/* Emit the target's prefetch insn if it has one and it accepts these
   operands; otherwise fall through to side-effect-only evaluation.  */
1277 #ifdef HAVE_prefetch
1278 if (HAVE_prefetch)
1280 struct expand_operand ops[3];
1282 create_address_operand (&ops[0], op0);
1283 create_integer_operand (&ops[1], INTVAL (op1));
1284 create_integer_operand (&ops[2], INTVAL (op2));
1285 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1286 return;
1288 #endif
1290 /* Don't do anything with direct references to volatile memory, but
1291 generate code to handle other side effects.  */
1292 if (!MEM_P (op0) && side_effects_p (op0))
1293 emit_insn (op0);
1296 /* Get a MEM rtx for expression EXP which is the address of an operand
1297 to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1298 the maximum length of the block of memory that might be accessed or
1299 NULL if unknown.  */
1301 static rtx
1302 get_memory_rtx (tree exp, tree len)
1304 tree orig_exp = exp;
1305 rtx addr, mem;
1307 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1308 from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1309 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1310 exp = TREE_OPERAND (exp, 0);
/* The RTL address is expanded from the original (unstripped) tree.  */
1312 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1313 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1315 /* Get an expression we can use to find the attributes to assign to MEM.
1316 First remove any nops.  */
1317 while (CONVERT_EXPR_P (exp)
1318 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1319 exp = TREE_OPERAND (exp, 0);
1321 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1322 (as builtin stringops may alias with anything).  */
1323 exp = fold_build2 (MEM_REF,
1324 build_array_type (char_type_node,
1325 build_range_type (sizetype,
1326 size_one_node, len)),
1327 exp, build_int_cst (ptr_type_node, 0));
1329 /* If the MEM_REF has no acceptable address, try to get the base object
1330 from the original address we got, and build an all-aliasing
1331 unknown-sized access to that one.  */
1332 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1333 set_mem_attributes (mem, exp, 0);
1334 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1335 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1336 0))))
/* Fallback: rebuild the MEM_REF over the base object with an
   unbounded range (NULL upper bound).  */
1338 exp = build_fold_addr_expr (exp);
1339 exp = fold_build2 (MEM_REF,
1340 build_array_type (char_type_node,
1341 build_range_type (sizetype,
1342 size_zero_node,
1343 NULL)),
1344 exp, build_int_cst (ptr_type_node, 0));
1345 set_mem_attributes (mem, exp, 0);
/* Alias set 0 = may alias anything, as required for string builtins.  */
1347 set_mem_alias_set (mem, 0);
1348 return mem;
1351 /* Built-in functions to perform an untyped call and return.  */
/* Per-target caches: the mode in which each hard register is saved by
   __builtin_apply_args (apply_args_mode) or returned through
   __builtin_apply (apply_result_mode); VOIDmode marks unused entries.
   Both live in this_target_builtins so they follow target switching.  */
1353 #define apply_args_mode \
1354 (this_target_builtins->x_apply_args_mode)
1355 #define apply_result_mode \
1356 (this_target_builtins->x_apply_result_mode)
1358 /* Return the size required for the block returned by __builtin_apply_args,
1359 and initialize apply_args_mode.  */
1361 static int
1362 apply_args_size (void)
/* Computed once and cached; -1 marks "not yet computed".  */
1364 static int size = -1;
1365 int align;
1366 unsigned int regno;
1367 enum machine_mode mode;
1369 /* The values computed by this function never change.  */
1370 if (size < 0)
1372 /* The first value is the incoming arg-pointer.  */
1373 size = GET_MODE_SIZE (Pmode);
1375 /* The second value is the structure value address unless this is
1376 passed as an "invisible" first argument.  */
1377 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1378 size += GET_MODE_SIZE (Pmode);
/* Lay out each argument register at its natural alignment, recording
   the mode used so later save/restore loops agree on the layout.  */
1380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1381 if (FUNCTION_ARG_REGNO_P (regno))
1383 mode = targetm.calls.get_raw_arg_mode (regno);
1385 gcc_assert (mode != VOIDmode);
1387 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1388 if (size % align != 0)
1389 size = CEIL (size, align) * align;
1390 size += GET_MODE_SIZE (mode);
1391 apply_args_mode[regno] = mode;
1393 else
1395 apply_args_mode[regno] = VOIDmode;
1398 return size;
1401 /* Return the size required for the block returned by __builtin_apply,
1402 and initialize apply_result_mode.  */
1404 static int
1405 apply_result_size (void)
/* Computed once and cached; -1 marks "not yet computed".  */
1407 static int size = -1;
1408 int align, regno;
1409 enum machine_mode mode;
1411 /* The values computed by this function never change.  */
1412 if (size < 0)
1414 size = 0;
/* Lay out each possible value-return register at its natural
   alignment, recording the mode for the restore/save loops.  */
1416 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1417 if (targetm.calls.function_value_regno_p (regno))
1419 mode = targetm.calls.get_raw_result_mode (regno);
1421 gcc_assert (mode != VOIDmode);
1423 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1424 if (size % align != 0)
1425 size = CEIL (size, align) * align;
1426 size += GET_MODE_SIZE (mode);
1427 apply_result_mode[regno] = mode;
1429 else
1430 apply_result_mode[regno] = VOIDmode;
1432 /* Allow targets that use untyped_call and untyped_return to override
1433 the size so that machine-specific information can be stored here.  */
1434 #ifdef APPLY_RESULT_SIZE
1435 size = APPLY_RESULT_SIZE;
1436 #endif
1438 return size;
1441 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1442 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1443 the result block is used to save the values; otherwise it is used to
1444 restore the values.  */
1446 static rtx
1447 result_vector (int savep, rtx result)
1449 int regno, size, align, nelts;
1450 enum machine_mode mode;
1451 rtx reg, mem;
1452 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1454 size = nelts = 0;
/* Walk the registers laid out by apply_result_size, emitting one SET
   per live register: mem <- reg when saving, reg <- mem when restoring.  */
1455 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1456 if ((mode = apply_result_mode[regno]) != VOIDmode)
1458 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1459 if (size % align != 0)
1460 size = CEIL (size, align) * align;
/* When restoring, map the register through INCOMING_REGNO.  */
1461 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1462 mem = adjust_address (result, mode, size);
1463 savevec[nelts++] = (savep
1464 ? gen_rtx_SET (VOIDmode, mem, reg)
1465 : gen_rtx_SET (VOIDmode, reg, mem))
1466 size += GET_MODE_SIZE (mode);
1468 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1470 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1472 /* Save the state required to perform an untyped call with the same
1473 arguments as were passed to the current function.  */
1475 static rtx
1476 expand_builtin_apply_args_1 (void)
1478 rtx registers, tem;
1479 int size, align, regno;
1480 enum machine_mode mode;
1481 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1483 /* Create a block where the arg-pointer, structure value address,
1484 and argument registers can be saved.  */
1485 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1487 /* Walk past the arg-pointer and structure value address.  */
1488 size = GET_MODE_SIZE (Pmode);
1489 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1490 size += GET_MODE_SIZE (Pmode);
1492 /* Save each register used in calling a function to the block.  */
/* Layout must mirror apply_args_size: same modes, same alignment.  */
1493 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1494 if ((mode = apply_args_mode[regno]) != VOIDmode)
1496 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1497 if (size % align != 0)
1498 size = CEIL (size, align) * align;
1500 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1502 emit_move_insn (adjust_address (registers, mode, size), tem);
1503 size += GET_MODE_SIZE (mode);
1506 /* Save the arg pointer to the block.  */
1507 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1508 #ifdef STACK_GROWS_DOWNWARD
1509 /* We need the pointer as the caller actually passed them to us, not
1510 as we might have pretended they were passed.  Make sure it's a valid
1511 operand, as emit_move_insn isn't expected to handle a PLUS.  */
/* NOTE(review): the assignment target line ("tem") appears to have
   been lost in extraction before the following continuation.  */
1513 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1514 NULL_RTX);
1515 #endif
1516 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1518 size = GET_MODE_SIZE (Pmode);
1520 /* Save the structure value address unless this is passed as an
1521 "invisible" first argument.  */
1522 if (struct_incoming_value)
1524 emit_move_insn (adjust_address (registers, Pmode, size),
1525 copy_to_reg (struct_incoming_value));
1526 size += GET_MODE_SIZE (Pmode);
1529 /* Return the address of the block.  */
1530 return copy_addr_to_reg (XEXP (registers, 0));
1533 /* __builtin_apply_args returns block of memory allocated on
1534 the stack into which is stored the arg pointer, structure
1535 value address, static chain, and all the registers that might
1536 possibly be used in performing a function call.  The code is
1537 moved to the start of the function so the incoming values are
1538 saved.  */
1540 static rtx
1541 expand_builtin_apply_args (void)
1543 /* Don't do __builtin_apply_args more than once in a function.
1544 Save the result of the first call and reuse it.  */
1545 if (apply_args_value != 0)
1546 return apply_args_value;
1548 /* When this function is called, it means that registers must be
1549 saved on entry to this function.  So we migrate the
1550 call to the first insn of this function.  */
1551 rtx temp;
1552 rtx seq;
/* Generate the register-saving code into a detached sequence so it
   can be relocated to the start of the function below.  */
1554 start_sequence ();
1555 temp = expand_builtin_apply_args_1 ();
1556 seq = get_insns ();
1557 end_sequence ();
1559 apply_args_value = temp;
1561 /* Put the insns after the NOTE that starts the function.
1562 If this is inside a start_sequence, make the outer-level insn
1563 chain current, so the code is placed at the start of the
1564 function.  If internal_arg_pointer is a non-virtual pseudo,
1565 it needs to be placed after the function that initializes
1566 that pseudo.  */
1567 push_topmost_sequence ();
1568 if (REG_P (crtl->args.internal_arg_pointer)
1569 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1570 emit_insn_before (seq, parm_birth_insn);
1571 else
1572 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1573 pop_topmost_sequence ();
1574 return temp;
1578 /* Perform an untyped call and save the state required to perform an
1579 untyped return of whatever value was returned by the given function.  */
1581 static rtx
1582 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1584 int size, align, regno;
1585 enum machine_mode mode;
1586 rtx incoming_args, result, reg, dest, src, call_insn;
1587 rtx old_stack_level = 0;
1588 rtx call_fusage = 0;
1589 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1591 arguments = convert_memory_address (Pmode, arguments);
1593 /* Create a block where the return registers can be saved.  */
1594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1596 /* Fetch the arg pointer from the ARGUMENTS block.  */
1597 incoming_args = gen_reg_rtx (Pmode);
1598 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
/* On upward-growing stacks the argument block starts ARGSIZE below
   the saved arg pointer.  */
1599 #ifndef STACK_GROWS_DOWNWARD
1600 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1601 incoming_args, 0, OPTAB_LIB_WIDEN);
1602 #endif
1604 /* Push a new argument block and copy the arguments.  Do not allow
1605 the (potential) memcpy call below to interfere with our stack
1606 manipulations.  */
1607 do_pending_stack_adjust ();
1608 NO_DEFER_POP;
1610 /* Save the stack with nonlocal if available.  */
1611 #ifdef HAVE_save_stack_nonlocal
1612 if (HAVE_save_stack_nonlocal)
1613 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1614 else
1615 #endif
1616 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1618 /* Allocate a block of memory onto the stack and copy the memory
1619 arguments to the outgoing arguments address.  We can pass TRUE
1620 as the 4th argument because we just saved the stack pointer
1621 and will restore it right after the call.  */
1622 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1624 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1625 may have already set current_function_calls_alloca to true.
1626 current_function_calls_alloca won't be set if argsize is zero,
1627 so we have to guarantee need_drap is true here.  */
1628 if (SUPPORTS_STACK_ALIGNMENT)
1629 crtl->need_drap = true;
1631 dest = virtual_outgoing_args_rtx;
1632 #ifndef STACK_GROWS_DOWNWARD
1633 if (CONST_INT_P (argsize))
1634 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1635 else
1636 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1637 #endif
1638 dest = gen_rtx_MEM (BLKmode, dest);
1639 set_mem_align (dest, PARM_BOUNDARY);
1640 src = gen_rtx_MEM (BLKmode, incoming_args);
1641 set_mem_align (src, PARM_BOUNDARY);
1642 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1644 /* Refer to the argument block.  */
/* Called for its side effect of initializing apply_args_mode.  */
1645 apply_args_size ();
1646 arguments = gen_rtx_MEM (BLKmode, arguments);
1647 set_mem_align (arguments, PARM_BOUNDARY);
1649 /* Walk past the arg-pointer and structure value address.  */
1650 size = GET_MODE_SIZE (Pmode);
1651 if (struct_value)
1652 size += GET_MODE_SIZE (Pmode);
1654 /* Restore each of the registers previously saved.  Make USE insns
1655 for each of these registers for use in making the call.  */
1656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1657 if ((mode = apply_args_mode[regno]) != VOIDmode)
1659 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1660 if (size % align != 0)
1661 size = CEIL (size, align) * align;
1662 reg = gen_rtx_REG (mode, regno);
1663 emit_move_insn (reg, adjust_address (arguments, mode, size));
1664 use_reg (&call_fusage, reg);
1665 size += GET_MODE_SIZE (mode);
1668 /* Restore the structure value address unless this is passed as an
1669 "invisible" first argument.  */
1670 size = GET_MODE_SIZE (Pmode);
1671 if (struct_value)
1673 rtx value = gen_reg_rtx (Pmode);
1674 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1675 emit_move_insn (struct_value, value);
1676 if (REG_P (struct_value))
1677 use_reg (&call_fusage, struct_value);
1678 size += GET_MODE_SIZE (Pmode);
1681 /* All arguments and registers used for the call are set up by now!  */
1682 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1684 /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1685 and we don't want to load it into a register as an optimization,
1686 because prepare_call_address already did it if it should be done.  */
1687 if (GET_CODE (function) != SYMBOL_REF)
1688 function = memory_address (FUNCTION_MODE, function);
1690 /* Generate the actual call instruction and save the return value.  */
1691 #ifdef HAVE_untyped_call
1692 if (HAVE_untyped_call)
1693 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1694 result, result_vector (1, result)));
1695 else
1696 #endif
1697 #ifdef HAVE_call_value
1698 if (HAVE_call_value)
1700 rtx valreg = 0;
1702 /* Locate the unique return register.  It is not possible to
1703 express a call that sets more than one return register using
1704 call_value; use untyped_call for that.  In fact, untyped_call
1705 only needs to save the return registers in the given block.  */
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_result_mode[regno]) != VOIDmode)
1709 gcc_assert (!valreg); /* HAVE_untyped_call required.  */
1711 valreg = gen_rtx_REG (mode, regno);
1714 emit_call_insn (GEN_CALL_VALUE (valreg,
1715 gen_rtx_MEM (FUNCTION_MODE, function),
1716 const0_rtx, NULL_RTX, const0_rtx));
1718 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1720 else
1721 #endif
/* A target must provide at least one of the call patterns above.  */
1722 gcc_unreachable ();
1724 /* Find the CALL insn we just emitted, and attach the register usage
1725 information.  */
1726 call_insn = last_call_insn ();
1727 add_function_usage_to (call_insn, call_fusage);
1729 /* Restore the stack.  */
1730 #ifdef HAVE_save_stack_nonlocal
1731 if (HAVE_save_stack_nonlocal)
1732 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1733 else
1734 #endif
1735 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1736 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1738 OK_DEFER_POP;
1740 /* Return the address of the result block.  */
1741 result = copy_addr_to_reg (XEXP (result, 0));
1742 return convert_memory_address (ptr_mode, result);
1745 /* Perform an untyped return.  */
1747 static void
1748 expand_builtin_return (rtx result)
1750 int size, align, regno;
1751 enum machine_mode mode;
1752 rtx reg;
1753 rtx call_fusage = 0;
1755 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1757 apply_result_size ();
1758 result = gen_rtx_MEM (BLKmode, result);
/* Prefer the target's untyped_return pattern when it exists.  */
1760 #ifdef HAVE_untyped_return
1761 if (HAVE_untyped_return)
1763 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1764 emit_barrier ();
1765 return;
1767 #endif
1769 /* Restore the return value and note that each value is used.  */
1770 size = 0;
/* Layout must mirror apply_result_size: same modes, same alignment.  */
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate the USE insns in a separate sequence so they can all be
   emitted together just before the return.  */
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1787 /* Put the USE insns before the return.  */
1788 emit_insn (call_fusage);
1790 /* Return whatever values was restored by jumping directly to the end
1791 of the function.  */
1792 expand_naked_return ();
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1797 static enum type_class
1798 type_to_class (tree type)
1800 switch (TREE_CODE (type))
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1823 /* Expand a call EXP to __builtin_classify_type. */
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
1833 /* This helper macro, meant to be used in mathfn_built_in below,
1834 determines which among a set of three builtin math functions is
1835 appropriate for a given type mode.  The `F' and `L' cases are
1836 automatically generated from the `double' case.  */
/* Expands to three case labels (double, float, long double variants)
   and sets the locals fcode/fcodef/fcodel of the enclosing switch.  */
1837 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1838 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1839 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1840 fcodel = BUILT_IN_MATHFN##L ; break;
1841 /* Similar to above, but appends _R after any F/L suffix.  */
1842 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1843 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1844 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1845 fcodel = BUILT_IN_MATHFN##L_R ; break;
1847 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1848 if available.  If IMPLICIT is true use the implicit builtin declaration,
1849 otherwise use the explicit declaration.  If we can't do the conversion,
1850 return zero.  */
1852 static tree
1853 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
/* FCODE/FCODEF/FCODEL receive the double/float/long double variants of
   FN (set by the CASE_MATHFN* macros below); FCODE2 is the one finally
   selected according to TYPE.  */
1855 enum built_in_function fcode, fcodef, fcodel, fcode2;
1857 switch (fn)
1859 CASE_MATHFN (BUILT_IN_ACOS)
1860 CASE_MATHFN (BUILT_IN_ACOSH)
1861 CASE_MATHFN (BUILT_IN_ASIN)
1862 CASE_MATHFN (BUILT_IN_ASINH)
1863 CASE_MATHFN (BUILT_IN_ATAN)
1864 CASE_MATHFN (BUILT_IN_ATAN2)
1865 CASE_MATHFN (BUILT_IN_ATANH)
1866 CASE_MATHFN (BUILT_IN_CBRT)
1867 CASE_MATHFN (BUILT_IN_CEIL)
1868 CASE_MATHFN (BUILT_IN_CEXPI)
1869 CASE_MATHFN (BUILT_IN_COPYSIGN)
1870 CASE_MATHFN (BUILT_IN_COS)
1871 CASE_MATHFN (BUILT_IN_COSH)
1872 CASE_MATHFN (BUILT_IN_DREM)
1873 CASE_MATHFN (BUILT_IN_ERF)
1874 CASE_MATHFN (BUILT_IN_ERFC)
1875 CASE_MATHFN (BUILT_IN_EXP)
1876 CASE_MATHFN (BUILT_IN_EXP10)
1877 CASE_MATHFN (BUILT_IN_EXP2)
1878 CASE_MATHFN (BUILT_IN_EXPM1)
1879 CASE_MATHFN (BUILT_IN_FABS)
1880 CASE_MATHFN (BUILT_IN_FDIM)
1881 CASE_MATHFN (BUILT_IN_FLOOR)
1882 CASE_MATHFN (BUILT_IN_FMA)
1883 CASE_MATHFN (BUILT_IN_FMAX)
1884 CASE_MATHFN (BUILT_IN_FMIN)
1885 CASE_MATHFN (BUILT_IN_FMOD)
1886 CASE_MATHFN (BUILT_IN_FREXP)
1887 CASE_MATHFN (BUILT_IN_GAMMA)
1888 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1889 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1890 CASE_MATHFN (BUILT_IN_HYPOT)
1891 CASE_MATHFN (BUILT_IN_ILOGB)
1892 CASE_MATHFN (BUILT_IN_ICEIL)
1893 CASE_MATHFN (BUILT_IN_IFLOOR)
1894 CASE_MATHFN (BUILT_IN_INF)
1895 CASE_MATHFN (BUILT_IN_IRINT)
1896 CASE_MATHFN (BUILT_IN_IROUND)
1897 CASE_MATHFN (BUILT_IN_ISINF)
1898 CASE_MATHFN (BUILT_IN_J0)
1899 CASE_MATHFN (BUILT_IN_J1)
1900 CASE_MATHFN (BUILT_IN_JN)
1901 CASE_MATHFN (BUILT_IN_LCEIL)
1902 CASE_MATHFN (BUILT_IN_LDEXP)
1903 CASE_MATHFN (BUILT_IN_LFLOOR)
1904 CASE_MATHFN (BUILT_IN_LGAMMA)
1905 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1906 CASE_MATHFN (BUILT_IN_LLCEIL)
1907 CASE_MATHFN (BUILT_IN_LLFLOOR)
1908 CASE_MATHFN (BUILT_IN_LLRINT)
1909 CASE_MATHFN (BUILT_IN_LLROUND)
1910 CASE_MATHFN (BUILT_IN_LOG)
1911 CASE_MATHFN (BUILT_IN_LOG10)
1912 CASE_MATHFN (BUILT_IN_LOG1P)
1913 CASE_MATHFN (BUILT_IN_LOG2)
1914 CASE_MATHFN (BUILT_IN_LOGB)
1915 CASE_MATHFN (BUILT_IN_LRINT)
1916 CASE_MATHFN (BUILT_IN_LROUND)
1917 CASE_MATHFN (BUILT_IN_MODF)
1918 CASE_MATHFN (BUILT_IN_NAN)
1919 CASE_MATHFN (BUILT_IN_NANS)
1920 CASE_MATHFN (BUILT_IN_NEARBYINT)
1921 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1922 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1923 CASE_MATHFN (BUILT_IN_POW)
1924 CASE_MATHFN (BUILT_IN_POWI)
1925 CASE_MATHFN (BUILT_IN_POW10)
1926 CASE_MATHFN (BUILT_IN_REMAINDER)
1927 CASE_MATHFN (BUILT_IN_REMQUO)
1928 CASE_MATHFN (BUILT_IN_RINT)
1929 CASE_MATHFN (BUILT_IN_ROUND)
1930 CASE_MATHFN (BUILT_IN_SCALB)
1931 CASE_MATHFN (BUILT_IN_SCALBLN)
1932 CASE_MATHFN (BUILT_IN_SCALBN)
1933 CASE_MATHFN (BUILT_IN_SIGNBIT)
1934 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1935 CASE_MATHFN (BUILT_IN_SIN)
1936 CASE_MATHFN (BUILT_IN_SINCOS)
1937 CASE_MATHFN (BUILT_IN_SINH)
1938 CASE_MATHFN (BUILT_IN_SQRT)
1939 CASE_MATHFN (BUILT_IN_TAN)
1940 CASE_MATHFN (BUILT_IN_TANH)
1941 CASE_MATHFN (BUILT_IN_TGAMMA)
1942 CASE_MATHFN (BUILT_IN_TRUNC)
1943 CASE_MATHFN (BUILT_IN_Y0)
1944 CASE_MATHFN (BUILT_IN_Y1)
1945 CASE_MATHFN (BUILT_IN_YN)
1947 default:
/* FN is not a recognized math builtin; no conversion possible.  */
1948 return NULL_TREE;
/* Pick the variant matching TYPE's main variant; bail out for any
   type other than float, double or long double.  */
1951 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1952 fcode2 = fcode;
1953 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1954 fcode2 = fcodef;
1955 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1956 fcode2 = fcodel;
1957 else
1958 return NULL_TREE;
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1963 return builtin_decl_explicit (fcode2);
1966 /* Like mathfn_built_in_1(), but always use the implicit array.  */
1968 tree
1969 mathfn_built_in (tree type, enum built_in_function fn)
/* Delegate with implicit_p set, so only builtins the front end may
   use implicitly are returned.  */
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1974 /* If errno must be maintained, expand the RTL to check if the result,
1975 TARGET, of a built-in function call, EXP, is NaN, and if so set
1976 errno to EDOM.  */
1978 static void
1979 expand_errno_check (tree exp, rtx target)
1981 rtx lab = gen_label_rtx ();
1983 /* Test the result; if it is NaN, set errno=EDOM because
1984 the argument was not in the domain.  */
/* A self-comparison with EQ succeeds exactly when the value is not a
   NaN, so the (very likely) branch to LAB skips the errno store.  */
1985 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1986 NULL_RTX, NULL_RTX, lab,
1987 /* The jump is very likely.  */
1988 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1990 #ifdef TARGET_EDOM
1991 /* If this built-in doesn't throw an exception, set errno directly.  */
1992 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1994 #ifdef GEN_ERRNO_RTX
1995 rtx errno_rtx = GEN_ERRNO_RTX;
1996 #else
/* Fallback: address errno through its symbol directly.  */
1997 rtx errno_rtx
1998 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1999 #endif
2000 emit_move_insn (errno_rtx,
2001 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2002 emit_label (lab);
2003 return;
2005 #endif
2007 /* Make sure the library call isn't expanded as a tail call.  */
2008 CALL_EXPR_TAILCALL (exp) = 0;
2010 /* We can't set errno=EDOM directly; let the library call do it.
2011 Pop the arguments right away in case the call gets deleted.  */
2012 NO_DEFER_POP;
2013 expand_call (exp, target, 0);
2014 OK_DEFER_POP;
2015 emit_label (lab);
2018 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2019 Return NULL_RTX if a normal call should be emitted rather than expanding
2020 the function in-line.  EXP is the expression that is a call to the builtin
2021 function; if convenient, the result should be placed in TARGET.
2022 SUBTARGET may be used as the target for computing one of EXP's operands.  */
2024 static rtx
2025 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2027 optab builtin_optab;
2028 rtx op0, insns;
2029 tree fndecl = get_callee_fndecl (exp);
2030 enum machine_mode mode;
2031 bool errno_set = false;
2032 bool try_widening = false;
2033 tree arg;
2035 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2036 return NULL_RTX;
2038 arg = CALL_EXPR_ARG (exp, 0);
/* Map the builtin function code to its optab and note whether the
   expansion must preserve errno semantics.  */
2040 switch (DECL_FUNCTION_CODE (fndecl))
2042 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative arguments.  */
2043 errno_set = ! tree_expr_nonnegative_p (arg);
2044 try_widening = true;
2045 builtin_optab = sqrt_optab;
2046 break;
2047 CASE_FLT_FN (BUILT_IN_EXP):
2048 errno_set = true; builtin_optab = exp_optab; break;
2049 CASE_FLT_FN (BUILT_IN_EXP10):
2050 CASE_FLT_FN (BUILT_IN_POW10):
2051 errno_set = true; builtin_optab = exp10_optab; break;
2052 CASE_FLT_FN (BUILT_IN_EXP2):
2053 errno_set = true; builtin_optab = exp2_optab; break;
2054 CASE_FLT_FN (BUILT_IN_EXPM1):
2055 errno_set = true; builtin_optab = expm1_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOGB):
2057 errno_set = true; builtin_optab = logb_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOG):
2059 errno_set = true; builtin_optab = log_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOG10):
2061 errno_set = true; builtin_optab = log10_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG2):
2063 errno_set = true; builtin_optab = log2_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG1P):
2065 errno_set = true; builtin_optab = log1p_optab; break;
2066 CASE_FLT_FN (BUILT_IN_ASIN):
2067 builtin_optab = asin_optab; break;
2068 CASE_FLT_FN (BUILT_IN_ACOS):
2069 builtin_optab = acos_optab; break;
2070 CASE_FLT_FN (BUILT_IN_TAN):
2071 builtin_optab = tan_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ATAN):
2073 builtin_optab = atan_optab; break;
2074 CASE_FLT_FN (BUILT_IN_FLOOR):
2075 builtin_optab = floor_optab; break;
2076 CASE_FLT_FN (BUILT_IN_CEIL):
2077 builtin_optab = ceil_optab; break;
2078 CASE_FLT_FN (BUILT_IN_TRUNC):
2079 builtin_optab = btrunc_optab; break;
2080 CASE_FLT_FN (BUILT_IN_ROUND):
2081 builtin_optab = round_optab; break;
2082 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2083 builtin_optab = nearbyint_optab;
2084 if (flag_trapping_math)
2085 break;
2086 /* Else fallthrough and expand as rint.  */
2087 CASE_FLT_FN (BUILT_IN_RINT):
2088 builtin_optab = rint_optab; break;
2089 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2090 builtin_optab = significand_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Make a suitable register to place result in.  */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is only needed when -fmath-errno is in effect and
   NaNs exist in this mode (the check compares the result to itself).  */
2098 if (! flag_errno_math || ! HONOR_NANS (mode))
2099 errno_set = false;
2101 /* Before working hard, check whether the instruction is available, but try
2102 to widen the mode for specific operations.  */
2103 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2104 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2105 && (!errno_set || !optimize_insn_for_size_p ()))
2107 rtx result = gen_reg_rtx (mode);
2109 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 need to expand the argument again.  This way, we will not perform
2111 side-effects more the once.  */
2112 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2116 start_sequence ();
2118 /* Compute into RESULT.
2119 Set RESULT to wherever the result comes back.  */
2120 result = expand_unop (mode, builtin_optab, op0, result, 0);
2122 if (result != 0)
2124 if (errno_set)
2125 expand_errno_check (exp, result);
2127 /* Output the entire sequence.  */
2128 insns = get_insns ();
2129 end_sequence ();
2130 emit_insn (insns);
2131 return result;
2134 /* If we were unable to expand via the builtin, stop the sequence
2135 (without outputting the insns) and call to the library function
2136 with the stabilized argument list.  */
2137 end_sequence ();
2140 return expand_call (exp, target, target == const0_rtx);
2143 /* Expand a call to the builtin binary math functions (pow and atan2).
2144 Return NULL_RTX if a normal call should be emitted rather than expanding the
2145 function in-line.  EXP is the expression that is a call to the builtin
2146 function; if convenient, the result should be placed in TARGET.
2147 SUBTARGET may be used as the target for computing one of EXP's
2148 operands.  */
2150 static rtx
2151 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2153 optab builtin_optab;
2154 rtx op0, op1, insns, result;
2155 int op1_type = REAL_TYPE;
2156 tree fndecl = get_callee_fndecl (exp);
2157 tree arg0, arg1;
2158 enum machine_mode mode;
2159 bool errno_set = true;
/* The scalbn/scalbln/ldexp family takes an integer second argument;
   everything else takes two reals.  */
2161 switch (DECL_FUNCTION_CODE (fndecl))
2163 CASE_FLT_FN (BUILT_IN_SCALBN):
2164 CASE_FLT_FN (BUILT_IN_SCALBLN):
2165 CASE_FLT_FN (BUILT_IN_LDEXP):
2166 op1_type = INTEGER_TYPE;
2167 default:
2168 break;
2171 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2172 return NULL_RTX;
2174 arg0 = CALL_EXPR_ARG (exp, 0);
2175 arg1 = CALL_EXPR_ARG (exp, 1);
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_POW):
2180 builtin_optab = pow_optab; break;
2181 CASE_FLT_FN (BUILT_IN_ATAN2):
2182 builtin_optab = atan2_optab; break;
2183 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn/scalbln only map to the optab for radix-2 formats.  */
2184 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2185 return 0;
2186 builtin_optab = scalb_optab; break;
2187 CASE_FLT_FN (BUILT_IN_SCALBN):
2188 CASE_FLT_FN (BUILT_IN_SCALBLN):
2189 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2190 return 0;
2191 /* Fall through... */
2192 CASE_FLT_FN (BUILT_IN_LDEXP):
2193 builtin_optab = ldexp_optab; break;
2194 CASE_FLT_FN (BUILT_IN_FMOD):
2195 builtin_optab = fmod_optab; break;
2196 CASE_FLT_FN (BUILT_IN_REMAINDER):
2197 CASE_FLT_FN (BUILT_IN_DREM):
2198 builtin_optab = remainder_optab; break;
2199 default:
2200 gcc_unreachable ();
2203 /* Make a suitable register to place result in.  */
2204 mode = TYPE_MODE (TREE_TYPE (exp));
2206 /* Before working hard, check whether the instruction is available.  */
2207 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2208 return NULL_RTX;
2210 result = gen_reg_rtx (mode);
2212 if (! flag_errno_math || ! HONOR_NANS (mode))
2213 errno_set = false;
/* The errno check costs code size; punt to the library when
   optimizing for size.  */
2215 if (errno_set && optimize_insn_for_size_p ())
2216 return 0;
2218 /* Always stabilize the argument list.  */
2219 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2220 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2222 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2223 op1 = expand_normal (arg1);
2225 start_sequence ();
2227 /* Compute into RESULT.
2228 Set RESULT to wherever the result comes back.  */
2229 result = expand_binop (mode, builtin_optab, op0, op1,
2230 result, 0, OPTAB_DIRECT);
2232 /* If we were unable to expand via the builtin, stop the sequence
2233 (without outputting the insns) and call to the library function
2234 with the stabilized argument list.  */
2235 if (result == 0)
2237 end_sequence ();
2238 return expand_call (exp, target, target == const0_rtx);
2241 if (errno_set)
2242 expand_errno_check (exp, result);
2244 /* Output the entire sequence.  */
2245 insns = get_insns ();
2246 end_sequence ();
2247 emit_insn (insns);
2249 return result;
2252 /* Expand a call to the builtin trinary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line.  EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands.  */
2259 static rtx
2260 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2262 optab builtin_optab;
2263 rtx op0, op1, op2, insns, result;
2264 tree fndecl = get_callee_fndecl (exp);
2265 tree arg0, arg1, arg2;
2266 enum machine_mode mode;
2268 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2269 return NULL_RTX;
2271 arg0 = CALL_EXPR_ARG (exp, 0);
2272 arg1 = CALL_EXPR_ARG (exp, 1);
2273 arg2 = CALL_EXPR_ARG (exp, 2);
/* Currently fma is the only ternary math builtin handled here.  */
2275 switch (DECL_FUNCTION_CODE (fndecl))
2277 CASE_FLT_FN (BUILT_IN_FMA):
2278 builtin_optab = fma_optab; break;
2279 default:
2280 gcc_unreachable ();
2283 /* Make a suitable register to place result in.  */
2284 mode = TYPE_MODE (TREE_TYPE (exp));
2286 /* Before working hard, check whether the instruction is available.  */
2287 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2288 return NULL_RTX;
2290 result = gen_reg_rtx (mode);
2292 /* Always stabilize the argument list.  */
2293 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2294 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2295 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2297 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2298 op1 = expand_normal (arg1);
2299 op2 = expand_normal (arg2);
2301 start_sequence ();
2303 /* Compute into RESULT.
2304 Set RESULT to wherever the result comes back.  */
2305 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2306 result, 0);
2308 /* If we were unable to expand via the builtin, stop the sequence
2309 (without outputting the insns) and call to the library function
2310 with the stabilized argument list.  */
2311 if (result == 0)
2313 end_sequence ();
2314 return expand_call (exp, target, target == const0_rtx);
2317 /* Output the entire sequence.  */
2318 insns = get_insns ();
2319 end_sequence ();
2320 emit_insn (insns);
2322 return result;
2325 /* Expand a call to the builtin sin and cos math functions.
2326 Return NULL_RTX if a normal call should be emitted rather than expanding the
2327 function in-line.  EXP is the expression that is a call to the builtin
2328 function; if convenient, the result should be placed in TARGET.
2329 SUBTARGET may be used as the target for computing one of EXP's
2330 operands.  */
2332 static rtx
2333 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2335 optab builtin_optab;
2336 rtx op0, insns;
2337 tree fndecl = get_callee_fndecl (exp);
2338 enum machine_mode mode;
2339 tree arg;
2341 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2342 return NULL_RTX;
2344 arg = CALL_EXPR_ARG (exp, 0);
/* Prefer the combined sincos instruction; only one of the two
   outputs will be used below.  */
2346 switch (DECL_FUNCTION_CODE (fndecl))
2348 CASE_FLT_FN (BUILT_IN_SIN):
2349 CASE_FLT_FN (BUILT_IN_COS):
2350 builtin_optab = sincos_optab; break;
2351 default:
2352 gcc_unreachable ();
2355 /* Make a suitable register to place result in.  */
2356 mode = TYPE_MODE (TREE_TYPE (exp));
2358 /* Check if sincos insn is available, otherwise fallback
2359 to sin or cos insn.  */
2360 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2361 switch (DECL_FUNCTION_CODE (fndecl))
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 builtin_optab = sin_optab; break;
2365 CASE_FLT_FN (BUILT_IN_COS):
2366 builtin_optab = cos_optab; break;
2367 default:
2368 gcc_unreachable ();
2371 /* Before working hard, check whether the instruction is available.  */
2372 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2374 rtx result = gen_reg_rtx (mode);
2376 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2377 need to expand the argument again.  This way, we will not perform
2378 side-effects more the once.  */
2379 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 start_sequence ();
2385 /* Compute into RESULT.
2386 Set RESULT to wherever the result comes back.  */
2387 if (builtin_optab == sincos_optab)
2389 int ok;
/* sincos produces two values; request only the one we need and
   discard the other (the 0 target).  */
2391 switch (DECL_FUNCTION_CODE (fndecl))
2393 CASE_FLT_FN (BUILT_IN_SIN):
2394 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2395 break;
2396 CASE_FLT_FN (BUILT_IN_COS):
2397 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2398 break;
2399 default:
2400 gcc_unreachable ();
2402 gcc_assert (ok);
2404 else
2405 result = expand_unop (mode, builtin_optab, op0, result, 0);
2407 if (result != 0)
2409 /* Output the entire sequence.  */
2410 insns = get_insns ();
2411 end_sequence ();
2412 emit_insn (insns);
2413 return result;
2416 /* If we were unable to expand via the builtin, stop the sequence
2417 (without outputting the insns) and call to the library function
2418 with the stabilized argument list.  */
2419 end_sequence ();
2422 return expand_call (exp, target, target == const0_rtx);
2425 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2426 return an RTL instruction code that implements the functionality.
2427 If that isn't possible or available return CODE_FOR_nothing.  */
2429 static enum insn_code
2430 interclass_mathfn_icode (tree arg, tree fndecl)
2432 bool errno_set = false;
2433 optab builtin_optab = unknown_optab;
2434 enum machine_mode mode;
2436 switch (DECL_FUNCTION_CODE (fndecl))
2438 CASE_FLT_FN (BUILT_IN_ILOGB):
2439 errno_set = true; builtin_optab = ilogb_optab; break;
2440 CASE_FLT_FN (BUILT_IN_ISINF):
2441 builtin_optab = isinf_optab; break;
2442 case BUILT_IN_ISNORMAL:
2443 case BUILT_IN_ISFINITE:
2444 CASE_FLT_FN (BUILT_IN_FINITE):
2445 case BUILT_IN_FINITED32:
2446 case BUILT_IN_FINITED64:
2447 case BUILT_IN_FINITED128:
2448 case BUILT_IN_ISINFD32:
2449 case BUILT_IN_ISINFD64:
2450 case BUILT_IN_ISINFD128:
2451 /* These builtins have no optabs (yet).  */
2452 break;
2453 default:
2454 gcc_unreachable ();
2457 /* There's no easy way to detect the case we need to set EDOM.  */
2458 if (flag_errno_math && errno_set)
2459 return CODE_FOR_nothing;
2461 /* Optab mode depends on the mode of the input argument.  */
2462 mode = TYPE_MODE (TREE_TYPE (arg));
/* builtin_optab stays unknown_optab (false) for the no-optab cases.  */
2464 if (builtin_optab)
2465 return optab_handler (builtin_optab, mode);
2466 return CODE_FOR_nothing;
2469 /* Expand a call to one of the builtin math functions that operate on
2470 floating point argument and output an integer result (ilogb, isinf,
2471 isnan, etc).
2472 Return 0 if a normal call should be emitted rather than expanding the
2473 function in-line.  EXP is the expression that is a call to the builtin
2474 function; if convenient, the result should be placed in TARGET.  */
2476 static rtx
2477 expand_builtin_interclass_mathfn (tree exp, rtx target)
2479 enum insn_code icode = CODE_FOR_nothing;
2480 rtx op0;
2481 tree fndecl = get_callee_fndecl (exp);
2482 enum machine_mode mode;
2483 tree arg;
2485 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2486 return NULL_RTX;
2488 arg = CALL_EXPR_ARG (exp, 0);
2489 icode = interclass_mathfn_icode (arg, fndecl);
2490 mode = TYPE_MODE (TREE_TYPE (arg));
2492 if (icode != CODE_FOR_nothing)
2494 struct expand_operand ops[1];
2495 rtx last = get_last_insn ();
/* Remember the unstabilized argument so it can be restored if the
   insn cannot be emitted.  */
2496 tree orig_arg = arg;
2498 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2499 need to expand the argument again.  This way, we will not perform
2500 side-effects more the once.  */
2501 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2503 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2505 if (mode != GET_MODE (op0))
2506 op0 = convert_to_mode (mode, op0, 0);
2508 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2509 if (maybe_legitimize_operands (icode, 0, 1, ops)
2510 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2511 return ops[0].value;
/* Emission failed: discard partial insns and undo the SAVE_EXPR
   wrapping before falling back to a normal call.  */
2513 delete_insns_since (last);
2514 CALL_EXPR_ARG (exp, 0) = orig_arg;
2517 return NULL_RTX;
2520 /* Expand a call to the builtin sincos math function.
2521 Return NULL_RTX if a normal call should be emitted rather than expanding the
2522 function in-line.  EXP is the expression that is a call to the builtin
2523 function.  */
2525 static rtx
2526 expand_builtin_sincos (tree exp)
2528 rtx op0, op1, op2, target1, target2;
2529 enum machine_mode mode;
2530 tree arg, sinp, cosp;
2531 int result;
2532 location_t loc = EXPR_LOCATION (exp);
2533 tree alias_type, alias_off;
2535 if (!validate_arglist (exp, REAL_TYPE,
2536 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2537 return NULL_RTX;
2539 arg = CALL_EXPR_ARG (exp, 0);
2540 sinp = CALL_EXPR_ARG (exp, 1);
2541 cosp = CALL_EXPR_ARG (exp, 2);
2543 /* Make a suitable register to place result in.  */
2544 mode = TYPE_MODE (TREE_TYPE (arg));
2546 /* Check if sincos insn is available, otherwise emit the call.  */
2547 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2548 return NULL_RTX;
2550 target1 = gen_reg_rtx (mode);
2551 target2 = gen_reg_rtx (mode);
2553 op0 = expand_normal (arg);
/* Build MEM_REFs for *sinp and *cosp so the stores below carry the
   right alias information.  */
2554 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2555 alias_off = build_int_cst (alias_type, 0);
2556 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2557 sinp, alias_off));
2558 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2559 cosp, alias_off));
2561 /* Compute into target1 and target2.
2562 Set TARGET to wherever the result comes back.  */
2563 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2564 gcc_assert (result);
2566 /* Move target1 and target2 to the memory locations indicated
2567 by op1 and op2.  */
2568 emit_move_insn (op1, target1);
2569 emit_move_insn (op2, target2);
/* sincos returns void; const0_rtx just signals successful expansion.  */
2571 return const0_rtx;
2574 /* Expand a call to the internal cexpi builtin to the sincos math function.
2575 EXP is the expression that is a call to the builtin function; if convenient,
2576 the result should be placed in TARGET.  */
2578 static rtx
2579 expand_builtin_cexpi (tree exp, rtx target)
2581 tree fndecl = get_callee_fndecl (exp);
2582 tree arg, type;
2583 enum machine_mode mode;
2584 rtx op0, op1, op2;
2585 location_t loc = EXPR_LOCATION (exp);
2587 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2588 return NULL_RTX;
2590 arg = CALL_EXPR_ARG (exp, 0);
2591 type = TREE_TYPE (arg);
2592 mode = TYPE_MODE (TREE_TYPE (arg));
2594 /* Try expanding via a sincos optab, fall back to emitting a libcall
2595 to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2596 is only generated from sincos, cexp or if we have either of them.  */
2597 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2599 op1 = gen_reg_rtx (mode);
2600 op2 = gen_reg_rtx (mode);
2602 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2604 /* Compute into op1 and op2.  */
2605 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2607 else if (targetm.libc_has_function (function_sincos))
2609 tree call, fn = NULL_TREE;
2610 tree top1, top2;
2611 rtx op1a, op2a;
/* Pick the sincos variant matching the cexpi precision.  */
2613 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2614 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2616 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2617 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2619 else
2620 gcc_unreachable ();
/* Allocate stack temporaries for the sin and cos results and pass
   their addresses to sincos.  */
2622 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2623 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2624 op1a = copy_addr_to_reg (XEXP (op1, 0));
2625 op2a = copy_addr_to_reg (XEXP (op2, 0));
2626 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2627 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2629 /* Make sure not to fold the sincos call again.  */
2630 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2631 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2632 call, 3, arg, top1, top2));
2634 else
2636 tree call, fn = NULL_TREE, narg;
2637 tree ctype = build_complex_type (type);
2639 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2640 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2641 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2642 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2643 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2645 else
2646 gcc_unreachable ();
2648 /* If we don't have a decl for cexp create one.  This is the
2649 friendliest fallback if the user calls __builtin_cexpi
2650 without full target C99 function support.  */
2651 if (fn == NULL_TREE)
2653 tree fntype;
2654 const char *name = NULL;
2656 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2657 name = "cexpf";
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2659 name = "cexp";
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2661 name = "cexpl";
2663 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2664 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + x*i); build the complex argument.  */
2667 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2668 build_real (type, dconst0), arg);
2670 /* Make sure not to fold the cexp call again.  */
2671 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2672 return expand_expr (build_call_nary (ctype, call, 1, narg),
2673 target, VOIDmode, EXPAND_NORMAL);
2676 /* Now build the proper return type.  */
/* Real part is cos (op2), imaginary part is sin (op1).  */
2677 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2678 make_tree (TREE_TYPE (arg), op2),
2679 make_tree (TREE_TYPE (arg), op1)),
2680 target, VOIDmode, EXPAND_NORMAL);
2683 /* Conveniently construct a function call expression.  FNDECL names the
2684 function to be called, N is the number of arguments, and the "..."
2685 parameters are the argument expressions.  Unlike build_call_exr
2686 this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2688 static tree
2689 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2691 va_list ap;
2692 tree fntype = TREE_TYPE (fndecl);
2693 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2695 va_start (ap, n);
2696 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2697 va_end (ap);
2698 SET_EXPR_LOCATION (fn, loc);
2699 return fn;
2702 /* Expand a call to one of the builtin rounding functions gcc defines
2703 as an extension (lfloor and lceil).  As these are gcc extensions we
2704 do not need to worry about setting errno to EDOM.
2705 If expanding via optab fails, lower expression to (int)(floor(x)).
2706 EXP is the expression that is a call to the builtin function;
2707 if convenient, the result should be placed in TARGET.  */
2709 static rtx
2710 expand_builtin_int_roundingfn (tree exp, rtx target)
2712 convert_optab builtin_optab;
2713 rtx op0, insns, tmp;
2714 tree fndecl = get_callee_fndecl (exp);
2715 enum built_in_function fallback_fn;
2716 tree fallback_fndecl;
2717 enum machine_mode mode;
2718 tree arg;
2720 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2721 gcc_unreachable ();
2723 arg = CALL_EXPR_ARG (exp, 0);
2725 switch (DECL_FUNCTION_CODE (fndecl))
2727 CASE_FLT_FN (BUILT_IN_ICEIL):
2728 CASE_FLT_FN (BUILT_IN_LCEIL):
2729 CASE_FLT_FN (BUILT_IN_LLCEIL):
2730 builtin_optab = lceil_optab;
2731 fallback_fn = BUILT_IN_CEIL;
2732 break;
2734 CASE_FLT_FN (BUILT_IN_IFLOOR):
2735 CASE_FLT_FN (BUILT_IN_LFLOOR):
2736 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2737 builtin_optab = lfloor_optab;
2738 fallback_fn = BUILT_IN_FLOOR;
2739 break;
2741 default:
2742 gcc_unreachable ();
2745 /* Make a suitable register to place result in.  */
2746 mode = TYPE_MODE (TREE_TYPE (exp));
2748 target = gen_reg_rtx (mode);
2750 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2751 need to expand the argument again.  This way, we will not perform
2752 side-effects more the once.  */
2753 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2755 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2757 start_sequence ();
2759 /* Compute into TARGET.  */
2760 if (expand_sfix_optab (target, op0, builtin_optab))
2762 /* Output the entire sequence.  */
2763 insns = get_insns ();
2764 end_sequence ();
2765 emit_insn (insns);
2766 return target;
2769 /* If we were unable to expand via the builtin, stop the sequence
2770 (without outputting the insns).  */
2771 end_sequence ();
2773 /* Fall back to floating point rounding optab.  */
2774 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2776 /* For non-C99 targets we may end up without a fallback fndecl here
2777 if the user called __builtin_lfloor directly.  In this case emit
2778 a call to the floor/ceil variants nevertheless.  This should result
2779 in the best user experience for not full C99 targets.  */
2780 if (fallback_fndecl == NULL_TREE)
2782 tree fntype;
2783 const char *name = NULL;
/* Choose the library name matching the precision of the builtin.  */
2785 switch (DECL_FUNCTION_CODE (fndecl))
2787 case BUILT_IN_ICEIL:
2788 case BUILT_IN_LCEIL:
2789 case BUILT_IN_LLCEIL:
2790 name = "ceil";
2791 break;
2792 case BUILT_IN_ICEILF:
2793 case BUILT_IN_LCEILF:
2794 case BUILT_IN_LLCEILF:
2795 name = "ceilf";
2796 break;
2797 case BUILT_IN_ICEILL:
2798 case BUILT_IN_LCEILL:
2799 case BUILT_IN_LLCEILL:
2800 name = "ceill";
2801 break;
2802 case BUILT_IN_IFLOOR:
2803 case BUILT_IN_LFLOOR:
2804 case BUILT_IN_LLFLOOR:
2805 name = "floor";
2806 break;
2807 case BUILT_IN_IFLOORF:
2808 case BUILT_IN_LFLOORF:
2809 case BUILT_IN_LLFLOORF:
2810 name = "floorf";
2811 break;
2812 case BUILT_IN_IFLOORL:
2813 case BUILT_IN_LFLOORL:
2814 case BUILT_IN_LLFLOORL:
2815 name = "floorl";
2816 break;
2817 default:
2818 gcc_unreachable ();
2821 fntype = build_function_type_list (TREE_TYPE (arg),
2822 TREE_TYPE (arg), NULL_TREE);
2823 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the floor/ceil call, then convert its floating result.  */
2826 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2828 tmp = expand_normal (exp);
2829 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2831 /* Truncate the result of floating point optab to integer
2832 via expand_fix ().  */
2833 target = gen_reg_rtx (mode);
2834 expand_fix (target, tmp, 0);
2836 return target;
2839 /* Expand a call to one of the builtin math functions doing integer
2840 conversion (lrint).
2841 Return 0 if a normal call should be emitted rather than expanding the
2842 function in-line.  EXP is the expression that is a call to the builtin
2843 function; if convenient, the result should be placed in TARGET.  */
2845 static rtx
2846 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2848 convert_optab builtin_optab;
2849 rtx op0, insns;
2850 tree fndecl = get_callee_fndecl (exp);
2851 tree arg;
2852 enum machine_mode mode;
/* fallback_fn is a long-returning variant to call if the int-returning
   builtin itself cannot be expanded; BUILT_IN_NONE means none.  */
2853 enum built_in_function fallback_fn = BUILT_IN_NONE;
2855 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2856 gcc_unreachable ();
2858 arg = CALL_EXPR_ARG (exp, 0);
2860 switch (DECL_FUNCTION_CODE (fndecl))
2862 CASE_FLT_FN (BUILT_IN_IRINT):
2863 fallback_fn = BUILT_IN_LRINT;
2864 /* FALLTHRU */
2865 CASE_FLT_FN (BUILT_IN_LRINT):
2866 CASE_FLT_FN (BUILT_IN_LLRINT):
2867 builtin_optab = lrint_optab;
2868 break;
2870 CASE_FLT_FN (BUILT_IN_IROUND):
2871 fallback_fn = BUILT_IN_LROUND;
2872 /* FALLTHRU */
2873 CASE_FLT_FN (BUILT_IN_LROUND):
2874 CASE_FLT_FN (BUILT_IN_LLROUND):
2875 builtin_optab = lround_optab;
2876 break;
2878 default:
2879 gcc_unreachable ();
2882 /* There's no easy way to detect the case we need to set EDOM.  */
2883 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2884 return NULL_RTX;
2886 /* Make a suitable register to place result in.  */
2887 mode = TYPE_MODE (TREE_TYPE (exp));
2889 /* There's no easy way to detect the case we need to set EDOM.  */
2890 if (!flag_errno_math)
2892 rtx result = gen_reg_rtx (mode);
2894 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2895 need to expand the argument again.  This way, we will not perform
2896 side-effects more the once.  */
2897 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2899 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2901 start_sequence ();
2903 if (expand_sfix_optab (result, op0, builtin_optab))
2905 /* Output the entire sequence.  */
2906 insns = get_insns ();
2907 end_sequence ();
2908 emit_insn (insns);
2909 return result;
2912 /* If we were unable to expand via the builtin, stop the sequence
2913 (without outputting the insns) and call to the library function
2914 with the stabilized argument list.  */
2915 end_sequence ();
2918 if (fallback_fn != BUILT_IN_NONE)
2920 /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2921 targets, (int) round (x) should never be transformed into
2922 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2923 a call to lround in the hope that the target provides at least some
2924 C99 functions.  This should result in the best user experience for
2925 not full C99 targets.  */
2926 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2927 fallback_fn, 0);
2929 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2930 fallback_fndecl, 1, arg);
2932 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2933 target = maybe_emit_group_store (target, TREE_TYPE (exp));
/* Narrow the long result of the fallback to the int result mode.  */
2934 return convert_to_mode (mode, target, 0);
2937 return expand_call (exp, target, target == const0_rtx);
2940 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2941 a normal call should be emitted rather than expanding the function
2942 in-line. EXP is the expression that is a call to the builtin
2943 function; if convenient, the result should be placed in TARGET. */
2945 static rtx
2946 expand_builtin_powi (tree exp, rtx target)
2948 tree arg0, arg1;
2949 rtx op0, op1;
2950 enum machine_mode mode;
2951 enum machine_mode mode2;
2953 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2954 return NULL_RTX;
2956 arg0 = CALL_EXPR_ARG (exp, 0);
2957 arg1 = CALL_EXPR_ARG (exp, 1);
2958 mode = TYPE_MODE (TREE_TYPE (exp));
2960 /* Emit a libcall to libgcc. */
2962 /* Mode of the 2nd argument must match that of an int. */
2963 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2965 if (target == NULL_RTX)
2966 target = gen_reg_rtx (mode);
2968 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2969 if (GET_MODE (op0) != mode)
2970 op0 = convert_to_mode (mode, op0, 0);
2971 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2972 if (GET_MODE (op1) != mode2)
2973 op1 = convert_to_mode (mode2, op1, 0);
2975 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2976 target, LCT_CONST, mode, 2,
2977 op0, mode, op1, mode2);
2979 return target;
2982 /* Expand expression EXP which is a call to the strlen builtin. Return
2983 NULL_RTX if we failed the caller should emit a normal call, otherwise
2984 try to get the result in TARGET, if convenient. */
2986 static rtx
2987 expand_builtin_strlen (tree exp, rtx target,
2988 enum machine_mode target_mode)
2990 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2991 return NULL_RTX;
2992 else
2994 struct expand_operand ops[4];
2995 rtx pat;
2996 tree len;
2997 tree src = CALL_EXPR_ARG (exp, 0);
2998 rtx src_reg, before_strlen;
2999 enum machine_mode insn_mode = target_mode;
3000 enum insn_code icode = CODE_FOR_nothing;
3001 unsigned int align;
3003 /* If the length can be computed at compile-time, return it. */
3004 len = c_strlen (src, 0);
3005 if (len)
3006 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3008 /* If the length can be computed at compile-time and is constant
3009 integer, but there are side-effects in src, evaluate
3010 src for side-effects, then return len.
3011 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3012 can be optimized into: i++; x = 3; */
3013 len = c_strlen (src, 1);
3014 if (len && TREE_CODE (len) == INTEGER_CST)
3016 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3020 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3022 /* If SRC is not a pointer type, don't do this operation inline. */
3023 if (align == 0)
3024 return NULL_RTX;
3026 /* Bail out if we can't compute strlen in the right mode. */
3027 while (insn_mode != VOIDmode)
3029 icode = optab_handler (strlen_optab, insn_mode);
3030 if (icode != CODE_FOR_nothing)
3031 break;
3033 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3035 if (insn_mode == VOIDmode)
3036 return NULL_RTX;
3038 /* Make a place to hold the source address. We will not expand
3039 the actual source until we are sure that the expansion will
3040 not fail -- there are trees that cannot be expanded twice. */
3041 src_reg = gen_reg_rtx (Pmode);
3043 /* Mark the beginning of the strlen sequence so we can emit the
3044 source operand later. */
3045 before_strlen = get_last_insn ();
3047 create_output_operand (&ops[0], target, insn_mode);
3048 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3049 create_integer_operand (&ops[2], 0);
3050 create_integer_operand (&ops[3], align);
3051 if (!maybe_expand_insn (icode, 4, ops))
3052 return NULL_RTX;
3054 /* Now that we are assured of success, expand the source. */
3055 start_sequence ();
3056 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3057 if (pat != src_reg)
3059 #ifdef POINTERS_EXTEND_UNSIGNED
3060 if (GET_MODE (pat) != Pmode)
3061 pat = convert_to_mode (Pmode, pat,
3062 POINTERS_EXTEND_UNSIGNED);
3063 #endif
3064 emit_move_insn (src_reg, pat);
3066 pat = get_insns ();
3067 end_sequence ();
3069 if (before_strlen)
3070 emit_insn_after (pat, before_strlen);
3071 else
3072 emit_insn_before (pat, get_insns ());
3074 /* Return the value in the proper mode for this function. */
3075 if (GET_MODE (ops[0].value) == target_mode)
3076 target = ops[0].value;
3077 else if (target != 0)
3078 convert_move (target, ops[0].value, 0);
3079 else
3080 target = convert_to_mode (target_mode, ops[0].value, 0);
3082 return target;
3086 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3087 bytes from constant string DATA + OFFSET and return it as target
3088 constant. */
3090 static rtx
3091 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3092 enum machine_mode mode)
3094 const char *str = (const char *) data;
3096 gcc_assert (offset >= 0
3097 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3098 <= strlen (str) + 1));
3100 return c_readstr (str + offset, mode);
3103 /* LEN specify length of the block of memcpy/memset operation.
3104 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3105 In some cases we can make very likely guess on max size, then we
3106 set it into PROBABLE_MAX_SIZE. */
3108 static void
3109 determine_block_size (tree len, rtx len_rtx,
3110 unsigned HOST_WIDE_INT *min_size,
3111 unsigned HOST_WIDE_INT *max_size,
3112 unsigned HOST_WIDE_INT *probable_max_size)
3114 if (CONST_INT_P (len_rtx))
3116 *min_size = *max_size = UINTVAL (len_rtx);
3117 return;
3119 else
3121 double_int min, max;
3122 enum value_range_type range_type = VR_UNDEFINED;
3124 /* Determine bounds from the type. */
3125 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3126 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3127 else
3128 *min_size = 0;
3129 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3130 *probable_max_size = *max_size = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3131 else
3132 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3134 if (TREE_CODE (len) == SSA_NAME)
3135 range_type = get_range_info (len, &min, &max);
3136 if (range_type == VR_RANGE)
3138 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3139 *min_size = min.to_uhwi ();
3140 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3141 *probable_max_size = *max_size = max.to_uhwi ();
3143 else if (range_type == VR_ANTI_RANGE)
3145 /* Anti range 0...N lets us to determine minmal size to N+1. */
3146 if (min.is_zero ())
3148 if ((max + double_int_one).fits_uhwi ())
3149 *min_size = (max + double_int_one).to_uhwi ();
3151 /* Code like
3153 int n;
3154 if (n < 100)
3155 memcpy (a,b, n)
3157 Produce anti range allowing negative values of N. We still
3158 can use the information and make a guess that N is not negative.
3160 else if (!max.ule (double_int_one.lshift (30))
3161 && min.fits_uhwi ())
3162 *probable_max_size = min.to_uhwi () - 1;
3165 gcc_checking_assert (*max_size <=
3166 (unsigned HOST_WIDE_INT)
3167 GET_MODE_MASK (GET_MODE (len_rtx)));
3170 /* Expand a call EXP to the memcpy builtin.
3171 Return NULL_RTX if we failed, the caller should emit a normal call,
3172 otherwise try to get the result in TARGET, if convenient (and in
3173 mode MODE if that's convenient). */
3175 static rtx
3176 expand_builtin_memcpy (tree exp, rtx target)
3178 if (!validate_arglist (exp,
3179 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 return NULL_RTX;
3181 else
3183 tree dest = CALL_EXPR_ARG (exp, 0);
3184 tree src = CALL_EXPR_ARG (exp, 1);
3185 tree len = CALL_EXPR_ARG (exp, 2);
3186 const char *src_str;
3187 unsigned int src_align = get_pointer_alignment (src);
3188 unsigned int dest_align = get_pointer_alignment (dest);
3189 rtx dest_mem, src_mem, dest_addr, len_rtx;
3190 HOST_WIDE_INT expected_size = -1;
3191 unsigned int expected_align = 0;
3192 unsigned HOST_WIDE_INT min_size;
3193 unsigned HOST_WIDE_INT max_size;
3194 unsigned HOST_WIDE_INT probable_max_size;
3196 /* If DEST is not a pointer type, call the normal function. */
3197 if (dest_align == 0)
3198 return NULL_RTX;
3200 /* If either SRC is not a pointer type, don't do this
3201 operation in-line. */
3202 if (src_align == 0)
3203 return NULL_RTX;
3205 if (currently_expanding_gimple_stmt)
3206 stringop_block_profile (currently_expanding_gimple_stmt,
3207 &expected_align, &expected_size);
3209 if (expected_align < dest_align)
3210 expected_align = dest_align;
3211 dest_mem = get_memory_rtx (dest, len);
3212 set_mem_align (dest_mem, dest_align);
3213 len_rtx = expand_normal (len);
3214 determine_block_size (len, len_rtx, &min_size, &max_size,
3215 &probable_max_size);
3216 src_str = c_getstr (src);
3218 /* If SRC is a string constant and block move would be done
3219 by pieces, we can avoid loading the string from memory
3220 and only stored the computed constants. */
3221 if (src_str
3222 && CONST_INT_P (len_rtx)
3223 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3224 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false))
3228 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3229 builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false, 0);
3232 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3233 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3234 return dest_mem;
3237 src_mem = get_memory_rtx (src, len);
3238 set_mem_align (src_mem, src_align);
3240 /* Copy word part most expediently. */
3241 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3242 CALL_EXPR_TAILCALL (exp)
3243 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3244 expected_align, expected_size,
3245 min_size, max_size, probable_max_size);
3247 if (dest_addr == 0)
3249 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3250 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3252 return dest_addr;
3256 /* Expand a call EXP to the mempcpy builtin.
3257 Return NULL_RTX if we failed; the caller should emit a normal call,
3258 otherwise try to get the result in TARGET, if convenient (and in
3259 mode MODE if that's convenient). If ENDP is 0 return the
3260 destination pointer, if ENDP is 1 return the end pointer ala
3261 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3262 stpcpy. */
3264 static rtx
3265 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3267 if (!validate_arglist (exp,
3268 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269 return NULL_RTX;
3270 else
3272 tree dest = CALL_EXPR_ARG (exp, 0);
3273 tree src = CALL_EXPR_ARG (exp, 1);
3274 tree len = CALL_EXPR_ARG (exp, 2);
3275 return expand_builtin_mempcpy_args (dest, src, len,
3276 target, mode, /*endp=*/ 1);
3280 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3281 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3282 so that this can also be called without constructing an actual CALL_EXPR.
3283 The other arguments and return value are the same as for
3284 expand_builtin_mempcpy. */
3286 static rtx
3287 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3288 rtx target, enum machine_mode mode, int endp)
3290 /* If return value is ignored, transform mempcpy into memcpy. */
3291 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3293 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3294 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3295 dest, src, len);
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3298 else
3300 const char *src_str;
3301 unsigned int src_align = get_pointer_alignment (src);
3302 unsigned int dest_align = get_pointer_alignment (dest);
3303 rtx dest_mem, src_mem, len_rtx;
3305 /* If either SRC or DEST is not a pointer type, don't do this
3306 operation in-line. */
3307 if (dest_align == 0 || src_align == 0)
3308 return NULL_RTX;
3310 /* If LEN is not constant, call the normal function. */
3311 if (! tree_fits_uhwi_p (len))
3312 return NULL_RTX;
3314 len_rtx = expand_normal (len);
3315 src_str = c_getstr (src);
3317 /* If SRC is a string constant and block move would be done
3318 by pieces, we can avoid loading the string from memory
3319 and only stored the computed constants. */
3320 if (src_str
3321 && CONST_INT_P (len_rtx)
3322 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3323 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3324 CONST_CAST (char *, src_str),
3325 dest_align, false))
3327 dest_mem = get_memory_rtx (dest, len);
3328 set_mem_align (dest_mem, dest_align);
3329 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3330 builtin_memcpy_read_str,
3331 CONST_CAST (char *, src_str),
3332 dest_align, false, endp);
3333 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3334 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3335 return dest_mem;
3338 if (CONST_INT_P (len_rtx)
3339 && can_move_by_pieces (INTVAL (len_rtx),
3340 MIN (dest_align, src_align)))
3342 dest_mem = get_memory_rtx (dest, len);
3343 set_mem_align (dest_mem, dest_align);
3344 src_mem = get_memory_rtx (src, len);
3345 set_mem_align (src_mem, src_align);
3346 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3347 MIN (dest_align, src_align), endp);
3348 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3349 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3350 return dest_mem;
3353 return NULL_RTX;
3357 #ifndef HAVE_movstr
3358 # define HAVE_movstr 0
3359 # define CODE_FOR_movstr CODE_FOR_nothing
3360 #endif
3362 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3363 we failed, the caller should emit a normal call, otherwise try to
3364 get the result in TARGET, if convenient. If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 stpcpy. */
3369 static rtx
3370 expand_movstr (tree dest, tree src, rtx target, int endp)
3372 struct expand_operand ops[3];
3373 rtx dest_mem;
3374 rtx src_mem;
3376 if (!HAVE_movstr)
3377 return NULL_RTX;
3379 dest_mem = get_memory_rtx (dest, NULL);
3380 src_mem = get_memory_rtx (src, NULL);
3381 if (!endp)
3383 target = force_reg (Pmode, XEXP (dest_mem, 0));
3384 dest_mem = replace_equiv_address (dest_mem, target);
3387 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3388 create_fixed_operand (&ops[1], dest_mem);
3389 create_fixed_operand (&ops[2], src_mem);
3390 expand_insn (CODE_FOR_movstr, 3, ops);
3392 if (endp && target != const0_rtx)
3394 target = ops[0].value;
3395 /* movstr is supposed to set end to the address of the NUL
3396 terminator. If the caller requested a mempcpy-like return value,
3397 adjust it. */
3398 if (endp == 1)
3400 rtx tem = plus_constant (GET_MODE (target),
3401 gen_lowpart (GET_MODE (target), target), 1);
3402 emit_move_insn (target, force_operand (tem, NULL_RTX));
3405 return target;
3408 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3409 NULL_RTX if we failed the caller should emit a normal call, otherwise
3410 try to get the result in TARGET, if convenient (and in mode MODE if that's
3411 convenient). */
3413 static rtx
3414 expand_builtin_strcpy (tree exp, rtx target)
3416 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 1);
3420 return expand_builtin_strcpy_args (dest, src, target);
3422 return NULL_RTX;
3425 /* Helper function to do the actual work for expand_builtin_strcpy. The
3426 arguments to the builtin_strcpy call DEST and SRC are broken out
3427 so that this can also be called without constructing an actual CALL_EXPR.
3428 The other arguments and return value are the same as for
3429 expand_builtin_strcpy. */
3431 static rtx
3432 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3434 return expand_movstr (dest, src, target, /*endp=*/0);
3437 /* Expand a call EXP to the stpcpy builtin.
3438 Return NULL_RTX if we failed the caller should emit a normal call,
3439 otherwise try to get the result in TARGET, if convenient (and in
3440 mode MODE if that's convenient). */
3442 static rtx
3443 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3445 tree dst, src;
3446 location_t loc = EXPR_LOCATION (exp);
3448 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3451 dst = CALL_EXPR_ARG (exp, 0);
3452 src = CALL_EXPR_ARG (exp, 1);
3454 /* If return value is ignored, transform stpcpy into strcpy. */
3455 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3457 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3458 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3459 return expand_expr (result, target, mode, EXPAND_NORMAL);
3461 else
3463 tree len, lenp1;
3464 rtx ret;
3466 /* Ensure we get an actual string whose length can be evaluated at
3467 compile-time, not an expression containing a string. This is
3468 because the latter will potentially produce pessimized code
3469 when used to produce the return value. */
3470 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3471 return expand_movstr (dst, src, target, /*endp=*/2);
3473 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3474 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3475 target, mode, /*endp=*/2);
3477 if (ret)
3478 return ret;
3480 if (TREE_CODE (len) == INTEGER_CST)
3482 rtx len_rtx = expand_normal (len);
3484 if (CONST_INT_P (len_rtx))
3486 ret = expand_builtin_strcpy_args (dst, src, target);
3488 if (ret)
3490 if (! target)
3492 if (mode != VOIDmode)
3493 target = gen_reg_rtx (mode);
3494 else
3495 target = gen_reg_rtx (GET_MODE (ret));
3497 if (GET_MODE (target) != GET_MODE (ret))
3498 ret = gen_lowpart (GET_MODE (target), ret);
3500 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3501 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3502 gcc_assert (ret);
3504 return target;
3509 return expand_movstr (dst, src, target, /*endp=*/2);
3513 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3514 bytes from constant string DATA + OFFSET and return it as target
3515 constant. */
3518 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3519 enum machine_mode mode)
3521 const char *str = (const char *) data;
3523 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3524 return const0_rtx;
3526 return c_readstr (str + offset, mode);
3529 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3530 NULL_RTX if we failed the caller should emit a normal call. */
3532 static rtx
3533 expand_builtin_strncpy (tree exp, rtx target)
3535 location_t loc = EXPR_LOCATION (exp);
3537 if (validate_arglist (exp,
3538 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3540 tree dest = CALL_EXPR_ARG (exp, 0);
3541 tree src = CALL_EXPR_ARG (exp, 1);
3542 tree len = CALL_EXPR_ARG (exp, 2);
3543 tree slen = c_strlen (src, 1);
3545 /* We must be passed a constant len and src parameter. */
3546 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3547 return NULL_RTX;
3549 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3551 /* We're required to pad with trailing zeros if the requested
3552 len is greater than strlen(s2)+1. In that case try to
3553 use store_by_pieces, if it fails, punt. */
3554 if (tree_int_cst_lt (slen, len))
3556 unsigned int dest_align = get_pointer_alignment (dest);
3557 const char *p = c_getstr (src);
3558 rtx dest_mem;
3560 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3561 || !can_store_by_pieces (tree_to_uhwi (len),
3562 builtin_strncpy_read_str,
3563 CONST_CAST (char *, p),
3564 dest_align, false))
3565 return NULL_RTX;
3567 dest_mem = get_memory_rtx (dest, len);
3568 store_by_pieces (dest_mem, tree_to_uhwi (len),
3569 builtin_strncpy_read_str,
3570 CONST_CAST (char *, p), dest_align, false, 0);
3571 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3572 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3573 return dest_mem;
3576 return NULL_RTX;
3579 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3580 bytes from constant string DATA + OFFSET and return it as target
3581 constant. */
3584 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3585 enum machine_mode mode)
3587 const char *c = (const char *) data;
3588 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3590 memset (p, *c, GET_MODE_SIZE (mode));
3592 return c_readstr (p, mode);
3595 /* Callback routine for store_by_pieces. Return the RTL of a register
3596 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3597 char value given in the RTL register data. For example, if mode is
3598 4 bytes wide, return the RTL for 0x01010101*data. */
3600 static rtx
3601 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3602 enum machine_mode mode)
3604 rtx target, coeff;
3605 size_t size;
3606 char *p;
3608 size = GET_MODE_SIZE (mode);
3609 if (size == 1)
3610 return (rtx) data;
3612 p = XALLOCAVEC (char, size);
3613 memset (p, 1, size);
3614 coeff = c_readstr (p, mode);
3616 target = convert_to_mode (mode, (rtx) data, 1);
3617 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3618 return force_reg (mode, target);
3621 /* Expand expression EXP, which is a call to the memset builtin. Return
3622 NULL_RTX if we failed the caller should emit a normal call, otherwise
3623 try to get the result in TARGET, if convenient (and in mode MODE if that's
3624 convenient). */
3626 static rtx
3627 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3629 if (!validate_arglist (exp,
3630 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3631 return NULL_RTX;
3632 else
3634 tree dest = CALL_EXPR_ARG (exp, 0);
3635 tree val = CALL_EXPR_ARG (exp, 1);
3636 tree len = CALL_EXPR_ARG (exp, 2);
3637 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3641 /* Helper function to do the actual work for expand_builtin_memset. The
3642 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3643 so that this can also be called without constructing an actual CALL_EXPR.
3644 The other arguments and return value are the same as for
3645 expand_builtin_memset. */
3647 static rtx
3648 expand_builtin_memset_args (tree dest, tree val, tree len,
3649 rtx target, enum machine_mode mode, tree orig_exp)
3651 tree fndecl, fn;
3652 enum built_in_function fcode;
3653 enum machine_mode val_mode;
3654 char c;
3655 unsigned int dest_align;
3656 rtx dest_mem, dest_addr, len_rtx;
3657 HOST_WIDE_INT expected_size = -1;
3658 unsigned int expected_align = 0;
3659 unsigned HOST_WIDE_INT min_size;
3660 unsigned HOST_WIDE_INT max_size;
3661 unsigned HOST_WIDE_INT probable_max_size;
3663 dest_align = get_pointer_alignment (dest);
3665 /* If DEST is not a pointer type, don't do this operation in-line. */
3666 if (dest_align == 0)
3667 return NULL_RTX;
3669 if (currently_expanding_gimple_stmt)
3670 stringop_block_profile (currently_expanding_gimple_stmt,
3671 &expected_align, &expected_size);
3673 if (expected_align < dest_align)
3674 expected_align = dest_align;
3676 /* If the LEN parameter is zero, return DEST. */
3677 if (integer_zerop (len))
3679 /* Evaluate and ignore VAL in case it has side-effects. */
3680 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3681 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3684 /* Stabilize the arguments in case we fail. */
3685 dest = builtin_save_expr (dest);
3686 val = builtin_save_expr (val);
3687 len = builtin_save_expr (len);
3689 len_rtx = expand_normal (len);
3690 determine_block_size (len, len_rtx, &min_size, &max_size,
3691 &probable_max_size);
3692 dest_mem = get_memory_rtx (dest, len);
3693 val_mode = TYPE_MODE (unsigned_char_type_node);
3695 if (TREE_CODE (val) != INTEGER_CST)
3697 rtx val_rtx;
3699 val_rtx = expand_normal (val);
3700 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3702 /* Assume that we can memset by pieces if we can store
3703 * the coefficients by pieces (in the required modes).
3704 * We can't pass builtin_memset_gen_str as that emits RTL. */
3705 c = 1;
3706 if (tree_fits_uhwi_p (len)
3707 && can_store_by_pieces (tree_to_uhwi (len),
3708 builtin_memset_read_str, &c, dest_align,
3709 true))
3711 val_rtx = force_reg (val_mode, val_rtx);
3712 store_by_pieces (dest_mem, tree_to_uhwi (len),
3713 builtin_memset_gen_str, val_rtx, dest_align,
3714 true, 0);
3716 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3717 dest_align, expected_align,
3718 expected_size, min_size, max_size,
3719 probable_max_size))
3720 goto do_libcall;
3722 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3723 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3724 return dest_mem;
3727 if (target_char_cast (val, &c))
3728 goto do_libcall;
3730 if (c)
3732 if (tree_fits_uhwi_p (len)
3733 && can_store_by_pieces (tree_to_uhwi (len),
3734 builtin_memset_read_str, &c, dest_align,
3735 true))
3736 store_by_pieces (dest_mem, tree_to_uhwi (len),
3737 builtin_memset_read_str, &c, dest_align, true, 0);
3738 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3739 gen_int_mode (c, val_mode),
3740 dest_align, expected_align,
3741 expected_size, min_size, max_size,
3742 probable_max_size))
3743 goto do_libcall;
3745 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3746 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3747 return dest_mem;
3750 set_mem_align (dest_mem, dest_align);
3751 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3752 CALL_EXPR_TAILCALL (orig_exp)
3753 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3754 expected_align, expected_size,
3755 min_size, max_size,
3756 probable_max_size);
3758 if (dest_addr == 0)
3760 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3761 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3764 return dest_addr;
3766 do_libcall:
3767 fndecl = get_callee_fndecl (orig_exp);
3768 fcode = DECL_FUNCTION_CODE (fndecl);
3769 if (fcode == BUILT_IN_MEMSET)
3770 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3771 dest, val, len);
3772 else if (fcode == BUILT_IN_BZERO)
3773 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3774 dest, len);
3775 else
3776 gcc_unreachable ();
3777 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3778 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3779 return expand_call (fn, target, target == const0_rtx);
3782 /* Expand expression EXP, which is a call to the bzero builtin. Return
3783 NULL_RTX if we failed the caller should emit a normal call. */
3785 static rtx
3786 expand_builtin_bzero (tree exp)
3788 tree dest, size;
3789 location_t loc = EXPR_LOCATION (exp);
3791 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792 return NULL_RTX;
3794 dest = CALL_EXPR_ARG (exp, 0);
3795 size = CALL_EXPR_ARG (exp, 1);
3797 /* New argument list transforming bzero(ptr x, int y) to
3798 memset(ptr x, int 0, size_t y). This is done this way
3799 so that if it isn't expanded inline, we fallback to
3800 calling bzero instead of memset. */
3802 return expand_builtin_memset_args (dest, integer_zero_node,
3803 fold_convert_loc (loc,
3804 size_type_node, size),
3805 const0_rtx, VOIDmode, exp);
3808 /* Expand expression EXP, which is a call to the memcmp built-in function.
3809 Return NULL_RTX if we failed and the caller should emit a normal call,
3810 otherwise try to get the result in TARGET, if convenient (and in mode
3811 MODE, if that's convenient). */
3813 static rtx
3814 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3815 ATTRIBUTE_UNUSED enum machine_mode mode)
3817 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3819 if (!validate_arglist (exp,
3820 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3821 return NULL_RTX;
3823 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3824 implementing memcmp because it will stop if it encounters two
3825 zero bytes. */
3826 #if defined HAVE_cmpmemsi
3828 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3829 rtx result;
3830 rtx insn;
3831 tree arg1 = CALL_EXPR_ARG (exp, 0);
3832 tree arg2 = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3835 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3836 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3837 enum machine_mode insn_mode;
3839 if (HAVE_cmpmemsi)
3840 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3841 else
3842 return NULL_RTX;
3844 /* If we don't have POINTER_TYPE, call the function. */
3845 if (arg1_align == 0 || arg2_align == 0)
3846 return NULL_RTX;
3848 /* Make a place to write the result of the instruction. */
3849 result = target;
3850 if (! (result != 0
3851 && REG_P (result) && GET_MODE (result) == insn_mode
3852 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3853 result = gen_reg_rtx (insn_mode);
3855 arg1_rtx = get_memory_rtx (arg1, len);
3856 arg2_rtx = get_memory_rtx (arg2, len);
3857 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3859 /* Set MEM_SIZE as appropriate. */
3860 if (CONST_INT_P (arg3_rtx))
3862 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3863 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3866 if (HAVE_cmpmemsi)
3867 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3868 GEN_INT (MIN (arg1_align, arg2_align)));
3869 else
3870 gcc_unreachable ();
3872 if (insn)
3873 emit_insn (insn);
3874 else
3875 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3876 TYPE_MODE (integer_type_node), 3,
3877 XEXP (arg1_rtx, 0), Pmode,
3878 XEXP (arg2_rtx, 0), Pmode,
3879 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3880 TYPE_UNSIGNED (sizetype)),
3881 TYPE_MODE (sizetype));
3883 /* Return the value in the proper mode for this function. */
3884 mode = TYPE_MODE (TREE_TYPE (exp));
3885 if (GET_MODE (result) == mode)
3886 return result;
3887 else if (target != 0)
3889 convert_move (target, result, 0);
3890 return target;
3892 else
3893 return convert_to_mode (mode, result, 0);
3895 #endif /* HAVE_cmpmemsi. */
3897 return NULL_RTX;
3900 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3901 if we failed the caller should emit a normal call, otherwise try to get
3902 the result in TARGET, if convenient. */
3904 static rtx
3905 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3907 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3908 return NULL_RTX;
3910 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3911 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3912 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3914 rtx arg1_rtx, arg2_rtx;
3915 rtx result, insn = NULL_RTX;
3916 tree fndecl, fn;
3917 tree arg1 = CALL_EXPR_ARG (exp, 0);
3918 tree arg2 = CALL_EXPR_ARG (exp, 1);
3920 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3921 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3923 /* If we don't have POINTER_TYPE, call the function. */
3924 if (arg1_align == 0 || arg2_align == 0)
3925 return NULL_RTX;
3927 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3928 arg1 = builtin_save_expr (arg1);
3929 arg2 = builtin_save_expr (arg2);
3931 arg1_rtx = get_memory_rtx (arg1, NULL);
3932 arg2_rtx = get_memory_rtx (arg2, NULL);
3934 #ifdef HAVE_cmpstrsi
3935 /* Try to call cmpstrsi. */
3936 if (HAVE_cmpstrsi)
3938 enum machine_mode insn_mode
3939 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3941 /* Make a place to write the result of the instruction. */
3942 result = target;
3943 if (! (result != 0
3944 && REG_P (result) && GET_MODE (result) == insn_mode
3945 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3946 result = gen_reg_rtx (insn_mode);
3948 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3949 GEN_INT (MIN (arg1_align, arg2_align)));
3951 #endif
3952 #ifdef HAVE_cmpstrnsi
3953 /* Try to determine at least one length and call cmpstrnsi. */
3954 if (!insn && HAVE_cmpstrnsi)
3956 tree len;
3957 rtx arg3_rtx;
3959 enum machine_mode insn_mode
3960 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3961 tree len1 = c_strlen (arg1, 1);
3962 tree len2 = c_strlen (arg2, 1);
3964 if (len1)
3965 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3966 if (len2)
3967 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3969 /* If we don't have a constant length for the first, use the length
3970 of the second, if we know it. We don't require a constant for
3971 this case; some cost analysis could be done if both are available
3972 but neither is constant. For now, assume they're equally cheap,
3973 unless one has side effects. If both strings have constant lengths,
3974 use the smaller. */
3976 if (!len1)
3977 len = len2;
3978 else if (!len2)
3979 len = len1;
3980 else if (TREE_SIDE_EFFECTS (len1))
3981 len = len2;
3982 else if (TREE_SIDE_EFFECTS (len2))
3983 len = len1;
3984 else if (TREE_CODE (len1) != INTEGER_CST)
3985 len = len2;
3986 else if (TREE_CODE (len2) != INTEGER_CST)
3987 len = len1;
3988 else if (tree_int_cst_lt (len1, len2))
3989 len = len1;
3990 else
3991 len = len2;
3993 /* If both arguments have side effects, we cannot optimize. */
3994 if (!len || TREE_SIDE_EFFECTS (len))
3995 goto do_libcall;
3997 arg3_rtx = expand_normal (len);
3999 /* Make a place to write the result of the instruction. */
4000 result = target;
4001 if (! (result != 0
4002 && REG_P (result) && GET_MODE (result) == insn_mode
4003 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4004 result = gen_reg_rtx (insn_mode);
4006 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4007 GEN_INT (MIN (arg1_align, arg2_align)));
4009 #endif
4011 if (insn)
4013 enum machine_mode mode;
4014 emit_insn (insn);
4016 /* Return the value in the proper mode for this function. */
4017 mode = TYPE_MODE (TREE_TYPE (exp));
4018 if (GET_MODE (result) == mode)
4019 return result;
4020 if (target == 0)
4021 return convert_to_mode (mode, result, 0);
4022 convert_move (target, result, 0);
4023 return target;
4026 /* Expand the library call ourselves using a stabilized argument
4027 list to avoid re-evaluating the function's arguments twice. */
4028 #ifdef HAVE_cmpstrnsi
4029 do_libcall:
4030 #endif
4031 fndecl = get_callee_fndecl (exp);
4032 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4033 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4034 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4035 return expand_call (fn, target, target == const0_rtx);
4037 #endif
4038 return NULL_RTX;
4041 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4042 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4043 the result in TARGET, if convenient. */
4045 static rtx
4046 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4047 ATTRIBUTE_UNUSED enum machine_mode mode)
4049 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4051 if (!validate_arglist (exp,
4052 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4053 return NULL_RTX;
4055 /* If c_strlen can determine an expression for one of the string
4056 lengths, and it doesn't have side effects, then emit cmpstrnsi
4057 using length MIN(strlen(string)+1, arg3). */
4058 #ifdef HAVE_cmpstrnsi
4059 if (HAVE_cmpstrnsi)
4061 tree len, len1, len2;
4062 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4063 rtx result, insn;
4064 tree fndecl, fn;
4065 tree arg1 = CALL_EXPR_ARG (exp, 0);
4066 tree arg2 = CALL_EXPR_ARG (exp, 1);
4067 tree arg3 = CALL_EXPR_ARG (exp, 2);
4069 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4070 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4071 enum machine_mode insn_mode
4072 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4074 len1 = c_strlen (arg1, 1);
4075 len2 = c_strlen (arg2, 1);
4077 if (len1)
4078 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4079 if (len2)
4080 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4082 /* If we don't have a constant length for the first, use the length
4083 of the second, if we know it. We don't require a constant for
4084 this case; some cost analysis could be done if both are available
4085 but neither is constant. For now, assume they're equally cheap,
4086 unless one has side effects. If both strings have constant lengths,
4087 use the smaller. */
4089 if (!len1)
4090 len = len2;
4091 else if (!len2)
4092 len = len1;
4093 else if (TREE_SIDE_EFFECTS (len1))
4094 len = len2;
4095 else if (TREE_SIDE_EFFECTS (len2))
4096 len = len1;
4097 else if (TREE_CODE (len1) != INTEGER_CST)
4098 len = len2;
4099 else if (TREE_CODE (len2) != INTEGER_CST)
4100 len = len1;
4101 else if (tree_int_cst_lt (len1, len2))
4102 len = len1;
4103 else
4104 len = len2;
4106 /* If both arguments have side effects, we cannot optimize. */
4107 if (!len || TREE_SIDE_EFFECTS (len))
4108 return NULL_RTX;
4110 /* The actual new length parameter is MIN(len,arg3). */
4111 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4112 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4114 /* If we don't have POINTER_TYPE, call the function. */
4115 if (arg1_align == 0 || arg2_align == 0)
4116 return NULL_RTX;
4118 /* Make a place to write the result of the instruction. */
4119 result = target;
4120 if (! (result != 0
4121 && REG_P (result) && GET_MODE (result) == insn_mode
4122 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4123 result = gen_reg_rtx (insn_mode);
4125 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4126 arg1 = builtin_save_expr (arg1);
4127 arg2 = builtin_save_expr (arg2);
4128 len = builtin_save_expr (len);
4130 arg1_rtx = get_memory_rtx (arg1, len);
4131 arg2_rtx = get_memory_rtx (arg2, len);
4132 arg3_rtx = expand_normal (len);
4133 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4134 GEN_INT (MIN (arg1_align, arg2_align)));
4135 if (insn)
4137 emit_insn (insn);
4139 /* Return the value in the proper mode for this function. */
4140 mode = TYPE_MODE (TREE_TYPE (exp));
4141 if (GET_MODE (result) == mode)
4142 return result;
4143 if (target == 0)
4144 return convert_to_mode (mode, result, 0);
4145 convert_move (target, result, 0);
4146 return target;
4149 /* Expand the library call ourselves using a stabilized argument
4150 list to avoid re-evaluating the function's arguments twice. */
4151 fndecl = get_callee_fndecl (exp);
4152 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4153 arg1, arg2, len);
4154 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4155 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4156 return expand_call (fn, target, target == const0_rtx);
4158 #endif
4159 return NULL_RTX;
4162 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4163 if that's convenient. */
4166 expand_builtin_saveregs (void)
4168 rtx val, seq;
4170 /* Don't do __builtin_saveregs more than once in a function.
4171 Save the result of the first call and reuse it. */
4172 if (saveregs_value != 0)
4173 return saveregs_value;
4175 /* When this function is called, it means that registers must be
4176 saved on entry to this function. So we migrate the call to the
4177 first insn of this function. */
4179 start_sequence ();
4181 /* Do whatever the machine needs done in this case. */
4182 val = targetm.calls.expand_builtin_saveregs ();
4184 seq = get_insns ();
4185 end_sequence ();
4187 saveregs_value = val;
4189 /* Put the insns after the NOTE that starts the function. If this
4190 is inside a start_sequence, make the outer-level insn chain current, so
4191 the code is placed at the start of the function. */
4192 push_topmost_sequence ();
4193 emit_insn_after (seq, entry_of_function ());
4194 pop_topmost_sequence ();
4196 return val;
4199 /* Expand a call to __builtin_next_arg. */
4201 static rtx
4202 expand_builtin_next_arg (void)
4204 /* Checking arguments is already done in fold_builtin_next_arg
4205 that must be called before this function. */
4206 return expand_binop (ptr_mode, add_optab,
4207 crtl->args.internal_arg_pointer,
4208 crtl->args.arg_offset_rtx,
4209 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4212 /* Make it easier for the backends by protecting the valist argument
4213 from multiple evaluations. */
4215 static tree
4216 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4218 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4220 /* The current way of determining the type of valist is completely
4221 bogus. We should have the information on the va builtin instead. */
4222 if (!vatype)
4223 vatype = targetm.fn_abi_va_list (cfun->decl);
4225 if (TREE_CODE (vatype) == ARRAY_TYPE)
4227 if (TREE_SIDE_EFFECTS (valist))
4228 valist = save_expr (valist);
4230 /* For this case, the backends will be expecting a pointer to
4231 vatype, but it's possible we've actually been given an array
4232 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4233 So fix it. */
4234 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4236 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4237 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4240 else
4242 tree pt = build_pointer_type (vatype);
4244 if (! needs_lvalue)
4246 if (! TREE_SIDE_EFFECTS (valist))
4247 return valist;
4249 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4250 TREE_SIDE_EFFECTS (valist) = 1;
4253 if (TREE_SIDE_EFFECTS (valist))
4254 valist = save_expr (valist);
4255 valist = fold_build2_loc (loc, MEM_REF,
4256 vatype, valist, build_int_cst (pt, 0));
4259 return valist;
4262 /* The "standard" definition of va_list is void*. */
4264 tree
4265 std_build_builtin_va_list (void)
4267 return ptr_type_node;
4270 /* The "standard" abi va_list is va_list_type_node. */
4272 tree
4273 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4275 return va_list_type_node;
4278 /* The "standard" type of va_list is va_list_type_node. */
4280 tree
4281 std_canonical_va_list_type (tree type)
4283 tree wtype, htype;
4285 if (INDIRECT_REF_P (type))
4286 type = TREE_TYPE (type);
4287 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4288 type = TREE_TYPE (type);
4289 wtype = va_list_type_node;
4290 htype = type;
4291 /* Treat structure va_list types. */
4292 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4293 htype = TREE_TYPE (htype);
4294 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4296 /* If va_list is an array type, the argument may have decayed
4297 to a pointer type, e.g. by being passed to another function.
4298 In that case, unwrap both types so that we can compare the
4299 underlying records. */
4300 if (TREE_CODE (htype) == ARRAY_TYPE
4301 || POINTER_TYPE_P (htype))
4303 wtype = TREE_TYPE (wtype);
4304 htype = TREE_TYPE (htype);
4307 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4308 return va_list_type_node;
4310 return NULL_TREE;
4313 /* The "standard" implementation of va_start: just assign `nextarg' to
4314 the variable. */
4316 void
4317 std_expand_builtin_va_start (tree valist, rtx nextarg)
4319 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4320 convert_move (va_r, nextarg, 0);
4323 /* Expand EXP, a call to __builtin_va_start. */
4325 static rtx
4326 expand_builtin_va_start (tree exp)
4328 rtx nextarg;
4329 tree valist;
4330 location_t loc = EXPR_LOCATION (exp);
4332 if (call_expr_nargs (exp) < 2)
4334 error_at (loc, "too few arguments to function %<va_start%>");
4335 return const0_rtx;
4338 if (fold_builtin_next_arg (exp, true))
4339 return const0_rtx;
4341 nextarg = expand_builtin_next_arg ();
4342 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4344 if (targetm.expand_builtin_va_start)
4345 targetm.expand_builtin_va_start (valist, nextarg);
4346 else
4347 std_expand_builtin_va_start (valist, nextarg);
4349 return const0_rtx;
4352 /* Expand EXP, a call to __builtin_va_end. */
4354 static rtx
4355 expand_builtin_va_end (tree exp)
4357 tree valist = CALL_EXPR_ARG (exp, 0);
4359 /* Evaluate for side effects, if needed. I hate macros that don't
4360 do that. */
4361 if (TREE_SIDE_EFFECTS (valist))
4362 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4364 return const0_rtx;
4367 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4368 builtin rather than just as an assignment in stdarg.h because of the
4369 nastiness of array-type va_list types. */
4371 static rtx
4372 expand_builtin_va_copy (tree exp)
4374 tree dst, src, t;
4375 location_t loc = EXPR_LOCATION (exp);
4377 dst = CALL_EXPR_ARG (exp, 0);
4378 src = CALL_EXPR_ARG (exp, 1);
4380 dst = stabilize_va_list_loc (loc, dst, 1);
4381 src = stabilize_va_list_loc (loc, src, 0);
4383 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4385 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4387 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4388 TREE_SIDE_EFFECTS (t) = 1;
4389 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4391 else
4393 rtx dstb, srcb, size;
4395 /* Evaluate to pointers. */
4396 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4397 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4398 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4399 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4401 dstb = convert_memory_address (Pmode, dstb);
4402 srcb = convert_memory_address (Pmode, srcb);
4404 /* "Dereference" to BLKmode memories. */
4405 dstb = gen_rtx_MEM (BLKmode, dstb);
4406 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4407 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4408 srcb = gen_rtx_MEM (BLKmode, srcb);
4409 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4410 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4412 /* Copy. */
4413 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4416 return const0_rtx;
4419 /* Expand a call to one of the builtin functions __builtin_frame_address or
4420 __builtin_return_address. */
4422 static rtx
4423 expand_builtin_frame_address (tree fndecl, tree exp)
4425 /* The argument must be a nonnegative integer constant.
4426 It counts the number of frames to scan up the stack.
4427 The value is the return address saved in that frame. */
4428 if (call_expr_nargs (exp) == 0)
4429 /* Warning about missing arg was already issued. */
4430 return const0_rtx;
4431 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4434 error ("invalid argument to %<__builtin_frame_address%>");
4435 else
4436 error ("invalid argument to %<__builtin_return_address%>");
4437 return const0_rtx;
4439 else
4441 rtx tem
4442 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4443 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4445 /* Some ports cannot access arbitrary stack frames. */
4446 if (tem == NULL)
4448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4449 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4450 else
4451 warning (0, "unsupported argument to %<__builtin_return_address%>");
4452 return const0_rtx;
4455 /* For __builtin_frame_address, return what we've got. */
4456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4457 return tem;
4459 if (!REG_P (tem)
4460 && ! CONSTANT_P (tem))
4461 tem = copy_addr_to_reg (tem);
4462 return tem;
4466 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4467 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4468 is the same as for allocate_dynamic_stack_space. */
4470 static rtx
4471 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4473 rtx op0;
4474 rtx result;
4475 bool valid_arglist;
4476 unsigned int align;
4477 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4478 == BUILT_IN_ALLOCA_WITH_ALIGN);
4480 valid_arglist
4481 = (alloca_with_align
4482 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4483 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4485 if (!valid_arglist)
4486 return NULL_RTX;
4488 /* Compute the argument. */
4489 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4491 /* Compute the alignment. */
4492 align = (alloca_with_align
4493 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4494 : BIGGEST_ALIGNMENT);
4496 /* Allocate the desired space. */
4497 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4498 result = convert_memory_address (ptr_mode, result);
4500 return result;
4503 /* Expand a call to bswap builtin in EXP.
4504 Return NULL_RTX if a normal call should be emitted rather than expanding the
4505 function in-line. If convenient, the result should be placed in TARGET.
4506 SUBTARGET may be used as the target for computing one of EXP's operands. */
4508 static rtx
4509 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4510 rtx subtarget)
4512 tree arg;
4513 rtx op0;
4515 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4516 return NULL_RTX;
4518 arg = CALL_EXPR_ARG (exp, 0);
4519 op0 = expand_expr (arg,
4520 subtarget && GET_MODE (subtarget) == target_mode
4521 ? subtarget : NULL_RTX,
4522 target_mode, EXPAND_NORMAL);
4523 if (GET_MODE (op0) != target_mode)
4524 op0 = convert_to_mode (target_mode, op0, 1);
4526 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4528 gcc_assert (target);
4530 return convert_to_mode (target_mode, target, 1);
4533 /* Expand a call to a unary builtin in EXP.
4534 Return NULL_RTX if a normal call should be emitted rather than expanding the
4535 function in-line. If convenient, the result should be placed in TARGET.
4536 SUBTARGET may be used as the target for computing one of EXP's operands. */
4538 static rtx
4539 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4540 rtx subtarget, optab op_optab)
4542 rtx op0;
4544 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4545 return NULL_RTX;
4547 /* Compute the argument. */
4548 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4549 (subtarget
4550 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4551 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4552 VOIDmode, EXPAND_NORMAL);
4553 /* Compute op, into TARGET if possible.
4554 Set TARGET to wherever the result comes back. */
4555 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4556 op_optab, op0, target, op_optab != clrsb_optab);
4557 gcc_assert (target);
4559 return convert_to_mode (target_mode, target, 0);
4562 /* Expand a call to __builtin_expect. We just return our argument
4563 as the builtin_expect semantic should've been already executed by
4564 tree branch prediction pass. */
4566 static rtx
4567 expand_builtin_expect (tree exp, rtx target)
4569 tree arg;
4571 if (call_expr_nargs (exp) < 2)
4572 return const0_rtx;
4573 arg = CALL_EXPR_ARG (exp, 0);
4575 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4576 /* When guessing was done, the hints should be already stripped away. */
4577 gcc_assert (!flag_guess_branch_prob
4578 || optimize == 0 || seen_error ());
4579 return target;
4582 /* Expand a call to __builtin_assume_aligned. We just return our first
4583 argument as the builtin_assume_aligned semantic should've been already
4584 executed by CCP. */
4586 static rtx
4587 expand_builtin_assume_aligned (tree exp, rtx target)
4589 if (call_expr_nargs (exp) < 2)
4590 return const0_rtx;
4591 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4592 EXPAND_NORMAL);
4593 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4594 && (call_expr_nargs (exp) < 3
4595 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4596 return target;
4599 void
4600 expand_builtin_trap (void)
4602 #ifdef HAVE_trap
4603 if (HAVE_trap)
4605 rtx insn = emit_insn (gen_trap ());
4606 /* For trap insns when not accumulating outgoing args force
4607 REG_ARGS_SIZE note to prevent crossjumping of calls with
4608 different args sizes. */
4609 if (!ACCUMULATE_OUTGOING_ARGS)
4610 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4612 else
4613 #endif
4614 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4615 emit_barrier ();
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4629 /* Expand EXP, a call to fabs, fabsf or fabsl.
4630 Return NULL_RTX if a normal call should be emitted rather than expanding
4631 the function inline. If convenient, the result should be placed
4632 in TARGET. SUBTARGET may be used as the target for computing
4633 the operand. */
4635 static rtx
4636 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4638 enum machine_mode mode;
4639 tree arg;
4640 rtx op0;
4642 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4643 return NULL_RTX;
4645 arg = CALL_EXPR_ARG (exp, 0);
4646 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4647 mode = TYPE_MODE (TREE_TYPE (arg));
4648 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4649 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4652 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4653 Return NULL is a normal call should be emitted rather than expanding the
4654 function inline. If convenient, the result should be placed in TARGET.
4655 SUBTARGET may be used as the target for computing the operand. */
4657 static rtx
4658 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4660 rtx op0, op1;
4661 tree arg;
4663 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4664 return NULL_RTX;
4666 arg = CALL_EXPR_ARG (exp, 0);
4667 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4669 arg = CALL_EXPR_ARG (exp, 1);
4670 op1 = expand_normal (arg);
4672 return expand_copysign (op0, op1, target);
4675 /* Create a new constant string literal and return a char* pointer to it.
4676 The STRING_CST value is the LEN characters at STR. */
4677 tree
4678 build_string_literal (int len, const char *str)
4680 tree t, elem, index, type;
4682 t = build_string (len, str);
4683 elem = build_type_variant (char_type_node, 1, 0);
4684 index = build_index_type (size_int (len - 1));
4685 type = build_array_type (elem, index);
4686 TREE_TYPE (t) = type;
4687 TREE_CONSTANT (t) = 1;
4688 TREE_READONLY (t) = 1;
4689 TREE_STATIC (t) = 1;
4691 type = build_pointer_type (elem);
4692 t = build1 (ADDR_EXPR, type,
4693 build4 (ARRAY_REF, elem,
4694 t, integer_zero_node, NULL_TREE, NULL_TREE));
4695 return t;
4698 /* Expand a call to __builtin___clear_cache. */
4700 static rtx
4701 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4703 #ifndef HAVE_clear_cache
4704 #ifdef CLEAR_INSN_CACHE
4705 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4706 does something. Just do the default expansion to a call to
4707 __clear_cache(). */
4708 return NULL_RTX;
4709 #else
4710 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4711 does nothing. There is no need to call it. Do nothing. */
4712 return const0_rtx;
4713 #endif /* CLEAR_INSN_CACHE */
4714 #else
4715 /* We have a "clear_cache" insn, and it will handle everything. */
4716 tree begin, end;
4717 rtx begin_rtx, end_rtx;
4719 /* We must not expand to a library call. If we did, any
4720 fallback library function in libgcc that might contain a call to
4721 __builtin___clear_cache() would recurse infinitely. */
4722 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4724 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4725 return const0_rtx;
4728 if (HAVE_clear_cache)
4730 struct expand_operand ops[2];
4732 begin = CALL_EXPR_ARG (exp, 0);
4733 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4735 end = CALL_EXPR_ARG (exp, 1);
4736 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4738 create_address_operand (&ops[0], begin_rtx);
4739 create_address_operand (&ops[1], end_rtx);
4740 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4741 return const0_rtx;
4743 return const0_rtx;
4744 #endif /* HAVE_clear_cache */
4747 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4749 static rtx
4750 round_trampoline_addr (rtx tramp)
4752 rtx temp, addend, mask;
4754 /* If we don't need too much alignment, we'll have been guaranteed
4755 proper alignment by get_trampoline_type. */
4756 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4757 return tramp;
4759 /* Round address up to desired boundary. */
4760 temp = gen_reg_rtx (Pmode);
4761 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4762 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4764 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4765 temp, 0, OPTAB_LIB_WIDEN);
4766 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4767 temp, 0, OPTAB_LIB_WIDEN);
4769 return tramp;
4772 static rtx
4773 expand_builtin_init_trampoline (tree exp, bool onstack)
4775 tree t_tramp, t_func, t_chain;
4776 rtx m_tramp, r_tramp, r_chain, tmp;
4778 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4779 POINTER_TYPE, VOID_TYPE))
4780 return NULL_RTX;
4782 t_tramp = CALL_EXPR_ARG (exp, 0);
4783 t_func = CALL_EXPR_ARG (exp, 1);
4784 t_chain = CALL_EXPR_ARG (exp, 2);
4786 r_tramp = expand_normal (t_tramp);
4787 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4788 MEM_NOTRAP_P (m_tramp) = 1;
4790 /* If ONSTACK, the TRAMP argument should be the address of a field
4791 within the local function's FRAME decl. Either way, let's see if
4792 we can fill in the MEM_ATTRs for this memory. */
4793 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4794 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4796 /* Creator of a heap trampoline is responsible for making sure the
4797 address is aligned to at least STACK_BOUNDARY. Normally malloc
4798 will ensure this anyhow. */
4799 tmp = round_trampoline_addr (r_tramp);
4800 if (tmp != r_tramp)
4802 m_tramp = change_address (m_tramp, BLKmode, tmp);
4803 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4804 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4807 /* The FUNC argument should be the address of the nested function.
4808 Extract the actual function decl to pass to the hook. */
4809 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4810 t_func = TREE_OPERAND (t_func, 0);
4811 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4813 r_chain = expand_normal (t_chain);
4815 /* Generate insns to initialize the trampoline. */
4816 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4818 if (onstack)
4820 trampolines_created = 1;
4822 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4823 "trampoline generated for nested function %qD", t_func);
4826 return const0_rtx;
4829 static rtx
4830 expand_builtin_adjust_trampoline (tree exp)
4832 rtx tramp;
4834 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4835 return NULL_RTX;
4837 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4838 tramp = round_trampoline_addr (tramp);
4839 if (targetm.calls.trampoline_adjust_address)
4840 tramp = targetm.calls.trampoline_adjust_address (tramp);
4842 return tramp;
4845 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4846 function. The function first checks whether the back end provides
4847 an insn to implement signbit for the respective mode. If not, it
4848 checks whether the floating point format of the value is such that
4849 the sign bit can be extracted. If that is not the case, the
4850 function returns NULL_RTX to indicate that a normal call should be
4851 emitted rather than expanding the function in-line. EXP is the
4852 expression that is a call to the builtin function; if convenient,
4853 the result should be placed in TARGET. */
4854 static rtx
4855 expand_builtin_signbit (tree exp, rtx target)
4857 const struct real_format *fmt;
4858 enum machine_mode fmode, imode, rmode;
4859 tree arg;
4860 int word, bitpos;
4861 enum insn_code icode;
4862 rtx temp;
4863 location_t loc = EXPR_LOCATION (exp);
4865 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4866 return NULL_RTX;
4868 arg = CALL_EXPR_ARG (exp, 0);
4869 fmode = TYPE_MODE (TREE_TYPE (arg));
4870 rmode = TYPE_MODE (TREE_TYPE (exp));
4871 fmt = REAL_MODE_FORMAT (fmode);
4873 arg = builtin_save_expr (arg);
4875 /* Expand the argument yielding a RTX expression. */
4876 temp = expand_normal (arg);
4878 /* Check if the back end provides an insn that handles signbit for the
4879 argument's mode. */
4880 icode = optab_handler (signbit_optab, fmode);
4881 if (icode != CODE_FOR_nothing)
4883 rtx last = get_last_insn ();
4884 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4885 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4886 return target;
4887 delete_insns_since (last);
4890 /* For floating point formats without a sign bit, implement signbit
4891 as "ARG < 0.0". */
4892 bitpos = fmt->signbit_ro;
4893 if (bitpos < 0)
4895 /* But we can't do this if the format supports signed zero. */
4896 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4897 return NULL_RTX;
4899 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4900 build_real (TREE_TYPE (arg), dconst0));
4901 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4904 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4906 imode = int_mode_for_mode (fmode);
4907 if (imode == BLKmode)
4908 return NULL_RTX;
4909 temp = gen_lowpart (imode, temp);
4911 else
4913 imode = word_mode;
4914 /* Handle targets with different FP word orders. */
4915 if (FLOAT_WORDS_BIG_ENDIAN)
4916 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4917 else
4918 word = bitpos / BITS_PER_WORD;
4919 temp = operand_subword_force (temp, word, fmode);
4920 bitpos = bitpos % BITS_PER_WORD;
4923 /* Force the intermediate word_mode (or narrower) result into a
4924 register. This avoids attempting to create paradoxical SUBREGs
4925 of floating point modes below. */
4926 temp = force_reg (imode, temp);
4928 /* If the bitpos is within the "result mode" lowpart, the operation
4929 can be implement with a single bitwise AND. Otherwise, we need
4930 a right shift and an AND. */
4932 if (bitpos < GET_MODE_BITSIZE (rmode))
4934 double_int mask = double_int_zero.set_bit (bitpos);
4936 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4937 temp = gen_lowpart (rmode, temp);
4938 temp = expand_binop (rmode, and_optab, temp,
4939 immed_double_int_const (mask, rmode),
4940 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4942 else
4944 /* Perform a logical right shift to place the signbit in the least
4945 significant bit, then truncate the result to the desired mode
4946 and mask just this bit. */
4947 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4948 temp = gen_lowpart (rmode, temp);
4949 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4950 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4953 return temp;
4956 /* Expand fork or exec calls. TARGET is the desired target of the
4957 call. EXP is the call. FN is the
4958 identificator of the actual function. IGNORE is nonzero if the
4959 value is to be ignored. */
4961 static rtx
4962 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4964 tree id, decl;
4965 tree call;
4967 /* If we are not profiling, just call the function. */
4968 if (!profile_arc_flag)
4969 return NULL_RTX;
4971 /* Otherwise call the wrapper. This should be equivalent for the rest of
4972 compiler, so the code does not diverge, and the wrapper may run the
4973 code necessary for keeping the profiling sane. */
4975 switch (DECL_FUNCTION_CODE (fn))
4977 case BUILT_IN_FORK:
4978 id = get_identifier ("__gcov_fork");
4979 break;
4981 case BUILT_IN_EXECL:
4982 id = get_identifier ("__gcov_execl");
4983 break;
4985 case BUILT_IN_EXECV:
4986 id = get_identifier ("__gcov_execv");
4987 break;
4989 case BUILT_IN_EXECLP:
4990 id = get_identifier ("__gcov_execlp");
4991 break;
4993 case BUILT_IN_EXECLE:
4994 id = get_identifier ("__gcov_execle");
4995 break;
4997 case BUILT_IN_EXECVP:
4998 id = get_identifier ("__gcov_execvp");
4999 break;
5001 case BUILT_IN_EXECVE:
5002 id = get_identifier ("__gcov_execve");
5003 break;
5005 default:
5006 gcc_unreachable ();
5009 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5010 FUNCTION_DECL, id, TREE_TYPE (fn));
5011 DECL_EXTERNAL (decl) = 1;
5012 TREE_PUBLIC (decl) = 1;
5013 DECL_ARTIFICIAL (decl) = 1;
5014 TREE_NOTHROW (decl) = 1;
5015 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5016 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5017 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5018 return expand_call (call, target, ignore);
5023 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5024 the pointer in these functions is void*, the tree optimizers may remove
5025 casts. The mode computed in expand_builtin isn't reliable either, due
5026 to __sync_bool_compare_and_swap.
5028 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5029 group of builtins. This gives us log2 of the mode size. */
5031 static inline enum machine_mode
5032 get_builtin_sync_mode (int fcode_diff)
5034 /* The size is not negotiable, so ask not to get BLKmode in return
5035 if the target indicates that a smaller size would be better. */
5036 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5039 /* Expand the memory expression LOC and return the appropriate memory operand
5040 for the builtin_sync operations. */
5042 static rtx
5043 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5045 rtx addr, mem;
5047 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5048 addr = convert_memory_address (Pmode, addr);
5050 /* Note that we explicitly do not want any alias information for this
5051 memory, so that we kill all other live memories. Otherwise we don't
5052 satisfy the full barrier semantics of the intrinsic. */
5053 mem = validize_mem (gen_rtx_MEM (mode, addr));
5055 /* The alignment needs to be at least according to that of the mode. */
5056 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5057 get_pointer_alignment (loc)));
5058 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5059 MEM_VOLATILE_P (mem) = 1;
5061 return mem;
5064 /* Make sure an argument is in the right mode.
5065 EXP is the tree argument.
5066 MODE is the mode it should be in. */
5068 static rtx
5069 expand_expr_force_mode (tree exp, enum machine_mode mode)
5071 rtx val;
5072 enum machine_mode old_mode;
5074 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5075 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5076 of CONST_INTs, where we know the old_mode only from the call argument. */
5078 old_mode = GET_MODE (val);
5079 if (old_mode == VOIDmode)
5080 old_mode = TYPE_MODE (TREE_TYPE (exp));
5081 val = convert_modes (mode, old_mode, val, 1);
5082 return val;
5086 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5087 EXP is the CALL_EXPR. CODE is the rtx code
5088 that corresponds to the arithmetic or logical operation from the name;
5089 an exception here is that NOT actually means NAND. TARGET is an optional
5090 place for us to store the results; AFTER is true if this is the
5091 fetch_and_xxx form. */
5093 static rtx
5094 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5095 enum rtx_code code, bool after,
5096 rtx target)
5098 rtx val, mem;
5099 location_t loc = EXPR_LOCATION (exp);
5101 if (code == NOT && warn_sync_nand)
5103 tree fndecl = get_callee_fndecl (exp);
5104 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5106 static bool warned_f_a_n, warned_n_a_f;
5108 switch (fcode)
5110 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5111 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5112 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5113 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5114 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5115 if (warned_f_a_n)
5116 break;
5118 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5119 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5120 warned_f_a_n = true;
5121 break;
5123 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5124 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5125 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5126 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5127 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5128 if (warned_n_a_f)
5129 break;
5131 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5132 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5133 warned_n_a_f = true;
5134 break;
5136 default:
5137 gcc_unreachable ();
5141 /* Expand the operands. */
5142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5143 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5145 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5146 after);
5149 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5150 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5151 true if this is the boolean form. TARGET is a place for us to store the
5152 results; this is NOT optional if IS_BOOL is true. */
5154 static rtx
5155 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5156 bool is_bool, rtx target)
5158 rtx old_val, new_val, mem;
5159 rtx *pbool, *poval;
5161 /* Expand the operands. */
5162 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5163 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5164 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5166 pbool = poval = NULL;
5167 if (target != const0_rtx)
5169 if (is_bool)
5170 pbool = &target;
5171 else
5172 poval = &target;
5174 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5175 false, MEMMODEL_SEQ_CST,
5176 MEMMODEL_SEQ_CST))
5177 return NULL_RTX;
5179 return target;
5182 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5183 general form is actually an atomic exchange, and some targets only
5184 support a reduced form with the second argument being a constant 1.
5185 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5186 the results. */
5188 static rtx
5189 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5190 rtx target)
5192 rtx val, mem;
5194 /* Expand the operands. */
5195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5196 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5198 return expand_sync_lock_test_and_set (target, mem, val);
5201 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5203 static void
5204 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5206 rtx mem;
5208 /* Expand the operands. */
5209 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5211 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5214 /* Given an integer representing an ``enum memmodel'', verify its
5215 correctness and return the memory model enum. */
5217 static enum memmodel
5218 get_memmodel (tree exp)
5220 rtx op;
5221 unsigned HOST_WIDE_INT val;
5223 /* If the parameter is not a constant, it's a run time value so we'll just
5224 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5225 if (TREE_CODE (exp) != INTEGER_CST)
5226 return MEMMODEL_SEQ_CST;
5228 op = expand_normal (exp);
5230 val = INTVAL (op);
5231 if (targetm.memmodel_check)
5232 val = targetm.memmodel_check (val);
5233 else if (val & ~MEMMODEL_MASK)
5235 warning (OPT_Winvalid_memory_model,
5236 "Unknown architecture specifier in memory model to builtin.");
5237 return MEMMODEL_SEQ_CST;
5240 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5242 warning (OPT_Winvalid_memory_model,
5243 "invalid memory model argument to builtin");
5244 return MEMMODEL_SEQ_CST;
5247 return (enum memmodel) val;
5250 /* Expand the __atomic_exchange intrinsic:
5251 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5252 EXP is the CALL_EXPR.
5253 TARGET is an optional place for us to store the results. */
5255 static rtx
5256 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5258 rtx val, mem;
5259 enum memmodel model;
5261 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5262 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5264 error ("invalid memory model for %<__atomic_exchange%>");
5265 return NULL_RTX;
5268 if (!flag_inline_atomics)
5269 return NULL_RTX;
5271 /* Expand the operands. */
5272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5275 return expand_atomic_exchange (target, mem, val, model);
5278 /* Expand the __atomic_compare_exchange intrinsic:
5279 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5280 TYPE desired, BOOL weak,
5281 enum memmodel success,
5282 enum memmodel failure)
5283 EXP is the CALL_EXPR.
5284 TARGET is an optional place for us to store the results. */
5286 static rtx
5287 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5288 rtx target)
5290 rtx expect, desired, mem, oldval;
5291 enum memmodel success, failure;
5292 tree weak;
5293 bool is_weak;
5295 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5296 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5298 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5299 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5301 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5302 return NULL_RTX;
5305 if (failure > success)
5307 error ("failure memory model cannot be stronger than success "
5308 "memory model for %<__atomic_compare_exchange%>");
5309 return NULL_RTX;
5312 if (!flag_inline_atomics)
5313 return NULL_RTX;
5315 /* Expand the operands. */
5316 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5318 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5319 expect = convert_memory_address (Pmode, expect);
5320 expect = gen_rtx_MEM (mode, expect);
5321 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5323 weak = CALL_EXPR_ARG (exp, 3);
5324 is_weak = false;
5325 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5326 is_weak = true;
5328 oldval = expect;
5329 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5330 &oldval, mem, oldval, desired,
5331 is_weak, success, failure))
5332 return NULL_RTX;
5334 if (oldval != expect)
5335 emit_move_insn (expect, oldval);
5337 return target;
5340 /* Expand the __atomic_load intrinsic:
5341 TYPE __atomic_load (TYPE *object, enum memmodel)
5342 EXP is the CALL_EXPR.
5343 TARGET is an optional place for us to store the results. */
5345 static rtx
5346 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5348 rtx mem;
5349 enum memmodel model;
5351 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5352 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5353 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5355 error ("invalid memory model for %<__atomic_load%>");
5356 return NULL_RTX;
5359 if (!flag_inline_atomics)
5360 return NULL_RTX;
5362 /* Expand the operand. */
5363 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5365 return expand_atomic_load (target, mem, model);
5369 /* Expand the __atomic_store intrinsic:
5370 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5371 EXP is the CALL_EXPR.
5372 TARGET is an optional place for us to store the results. */
5374 static rtx
5375 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5377 rtx mem, val;
5378 enum memmodel model;
5380 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5381 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5382 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5383 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5385 error ("invalid memory model for %<__atomic_store%>");
5386 return NULL_RTX;
5389 if (!flag_inline_atomics)
5390 return NULL_RTX;
5392 /* Expand the operands. */
5393 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5394 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5396 return expand_atomic_store (mem, val, model, false);
5399 /* Expand the __atomic_fetch_XXX intrinsic:
5400 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5401 EXP is the CALL_EXPR.
5402 TARGET is an optional place for us to store the results.
5403 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5404 FETCH_AFTER is true if returning the result of the operation.
5405 FETCH_AFTER is false if returning the value before the operation.
5406 IGNORE is true if the result is not used.
5407 EXT_CALL is the correct builtin for an external call if this cannot be
5408 resolved to an instruction sequence. */
5410 static rtx
5411 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5412 enum rtx_code code, bool fetch_after,
5413 bool ignore, enum built_in_function ext_call)
5415 rtx val, mem, ret;
5416 enum memmodel model;
5417 tree fndecl;
5418 tree addr;
5420 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5422 /* Expand the operands. */
5423 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5424 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5426 /* Only try generating instructions if inlining is turned on. */
5427 if (flag_inline_atomics)
5429 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5430 if (ret)
5431 return ret;
5434 /* Return if a different routine isn't needed for the library call. */
5435 if (ext_call == BUILT_IN_NONE)
5436 return NULL_RTX;
5438 /* Change the call to the specified function. */
5439 fndecl = get_callee_fndecl (exp);
5440 addr = CALL_EXPR_FN (exp);
5441 STRIP_NOPS (addr);
5443 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5444 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5446 /* Expand the call here so we can emit trailing code. */
5447 ret = expand_call (exp, target, ignore);
5449 /* Replace the original function just in case it matters. */
5450 TREE_OPERAND (addr, 0) = fndecl;
5452 /* Then issue the arithmetic correction to return the right result. */
5453 if (!ignore)
5455 if (code == NOT)
5457 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5458 OPTAB_LIB_WIDEN);
5459 ret = expand_simple_unop (mode, NOT, ret, target, true);
5461 else
5462 ret = expand_simple_binop (mode, code, ret, val, target, true,
5463 OPTAB_LIB_WIDEN);
5465 return ret;
5469 #ifndef HAVE_atomic_clear
5470 # define HAVE_atomic_clear 0
5471 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5472 #endif
5474 /* Expand an atomic clear operation.
5475 void _atomic_clear (BOOL *obj, enum memmodel)
5476 EXP is the call expression. */
5478 static rtx
5479 expand_builtin_atomic_clear (tree exp)
5481 enum machine_mode mode;
5482 rtx mem, ret;
5483 enum memmodel model;
5485 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5486 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5487 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5489 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5490 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5492 error ("invalid memory model for %<__atomic_store%>");
5493 return const0_rtx;
5496 if (HAVE_atomic_clear)
5498 emit_insn (gen_atomic_clear (mem, model));
5499 return const0_rtx;
5502 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5503 Failing that, a store is issued by __atomic_store. The only way this can
5504 fail is if the bool type is larger than a word size. Unlikely, but
5505 handle it anyway for completeness. Assume a single threaded model since
5506 there is no atomic support in this case, and no barriers are required. */
5507 ret = expand_atomic_store (mem, const0_rtx, model, true);
5508 if (!ret)
5509 emit_move_insn (mem, const0_rtx);
5510 return const0_rtx;
5513 /* Expand an atomic test_and_set operation.
5514 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5515 EXP is the call expression. */
5517 static rtx
5518 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5520 rtx mem;
5521 enum memmodel model;
5522 enum machine_mode mode;
5524 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5525 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5526 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5528 return expand_atomic_test_and_set (target, mem, model);
5532 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5533 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5535 static tree
5536 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5538 int size;
5539 enum machine_mode mode;
5540 unsigned int mode_align, type_align;
5542 if (TREE_CODE (arg0) != INTEGER_CST)
5543 return NULL_TREE;
5545 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5546 mode = mode_for_size (size, MODE_INT, 0);
5547 mode_align = GET_MODE_ALIGNMENT (mode);
5549 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5550 type_align = mode_align;
5551 else
5553 tree ttype = TREE_TYPE (arg1);
5555 /* This function is usually invoked and folded immediately by the front
5556 end before anything else has a chance to look at it. The pointer
5557 parameter at this point is usually cast to a void *, so check for that
5558 and look past the cast. */
5559 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5560 && VOID_TYPE_P (TREE_TYPE (ttype)))
5561 arg1 = TREE_OPERAND (arg1, 0);
5563 ttype = TREE_TYPE (arg1);
5564 gcc_assert (POINTER_TYPE_P (ttype));
5566 /* Get the underlying type of the object. */
5567 ttype = TREE_TYPE (ttype);
5568 type_align = TYPE_ALIGN (ttype);
5571 /* If the object has smaller alignment, the the lock free routines cannot
5572 be used. */
5573 if (type_align < mode_align)
5574 return boolean_false_node;
5576 /* Check if a compare_and_swap pattern exists for the mode which represents
5577 the required size. The pattern is not allowed to fail, so the existence
5578 of the pattern indicates support is present. */
5579 if (can_compare_and_swap_p (mode, true))
5580 return boolean_true_node;
5581 else
5582 return boolean_false_node;
5585 /* Return true if the parameters to call EXP represent an object which will
5586 always generate lock free instructions. The first argument represents the
5587 size of the object, and the second parameter is a pointer to the object
5588 itself. If NULL is passed for the object, then the result is based on
5589 typical alignment for an object of the specified size. Otherwise return
5590 false. */
5592 static rtx
5593 expand_builtin_atomic_always_lock_free (tree exp)
5595 tree size;
5596 tree arg0 = CALL_EXPR_ARG (exp, 0);
5597 tree arg1 = CALL_EXPR_ARG (exp, 1);
5599 if (TREE_CODE (arg0) != INTEGER_CST)
5601 error ("non-constant argument 1 to __atomic_always_lock_free");
5602 return const0_rtx;
5605 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5606 if (size == boolean_true_node)
5607 return const1_rtx;
5608 return const0_rtx;
5611 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5612 is lock free on this architecture. */
5614 static tree
5615 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5617 if (!flag_inline_atomics)
5618 return NULL_TREE;
5620 /* If it isn't always lock free, don't generate a result. */
5621 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5622 return boolean_true_node;
5624 return NULL_TREE;
5627 /* Return true if the parameters to call EXP represent an object which will
5628 always generate lock free instructions. The first argument represents the
5629 size of the object, and the second parameter is a pointer to the object
5630 itself. If NULL is passed for the object, then the result is based on
5631 typical alignment for an object of the specified size. Otherwise return
5632 NULL*/
5634 static rtx
5635 expand_builtin_atomic_is_lock_free (tree exp)
5637 tree size;
5638 tree arg0 = CALL_EXPR_ARG (exp, 0);
5639 tree arg1 = CALL_EXPR_ARG (exp, 1);
5641 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5643 error ("non-integer argument 1 to __atomic_is_lock_free");
5644 return NULL_RTX;
5647 if (!flag_inline_atomics)
5648 return NULL_RTX;
5650 /* If the value is known at compile time, return the RTX for it. */
5651 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5652 if (size == boolean_true_node)
5653 return const1_rtx;
5655 return NULL_RTX;
5658 /* Expand the __atomic_thread_fence intrinsic:
5659 void __atomic_thread_fence (enum memmodel)
5660 EXP is the CALL_EXPR. */
5662 static void
5663 expand_builtin_atomic_thread_fence (tree exp)
5665 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5666 expand_mem_thread_fence (model);
5669 /* Expand the __atomic_signal_fence intrinsic:
5670 void __atomic_signal_fence (enum memmodel)
5671 EXP is the CALL_EXPR. */
5673 static void
5674 expand_builtin_atomic_signal_fence (tree exp)
5676 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5677 expand_mem_signal_fence (model);
5680 /* Expand the __sync_synchronize intrinsic. */
5682 static void
5683 expand_builtin_sync_synchronize (void)
5685 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5688 static rtx
5689 expand_builtin_thread_pointer (tree exp, rtx target)
5691 enum insn_code icode;
5692 if (!validate_arglist (exp, VOID_TYPE))
5693 return const0_rtx;
5694 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5695 if (icode != CODE_FOR_nothing)
5697 struct expand_operand op;
5698 if (!REG_P (target) || GET_MODE (target) != Pmode)
5699 target = gen_reg_rtx (Pmode);
5700 create_output_operand (&op, target, Pmode);
5701 expand_insn (icode, 1, &op);
5702 return target;
5704 error ("__builtin_thread_pointer is not supported on this target");
5705 return const0_rtx;
5708 static void
5709 expand_builtin_set_thread_pointer (tree exp)
5711 enum insn_code icode;
5712 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5713 return;
5714 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5715 if (icode != CODE_FOR_nothing)
5717 struct expand_operand op;
5718 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5719 Pmode, EXPAND_NORMAL);
5720 create_input_operand (&op, val, Pmode);
5721 expand_insn (icode, 1, &op);
5722 return;
5724 error ("__builtin_set_thread_pointer is not supported on this target");
5728 /* Emit code to restore the current value of stack. */
5730 static void
5731 expand_stack_restore (tree var)
5733 rtx prev, sa = expand_normal (var);
5735 sa = convert_memory_address (Pmode, sa);
5737 prev = get_last_insn ();
5738 emit_stack_restore (SAVE_BLOCK, sa);
5739 fixup_args_size_notes (prev, get_last_insn (), 0);
5743 /* Emit code to save the current value of stack. */
5745 static rtx
5746 expand_stack_save (void)
5748 rtx ret = NULL_RTX;
5750 do_pending_stack_adjust ();
5751 emit_stack_save (SAVE_BLOCK, &ret);
5752 return ret;
5755 /* Expand an expression EXP that calls a built-in function,
5756 with result going to TARGET if that's convenient
5757 (and in mode MODE if that's convenient).
5758 SUBTARGET may be used as the target for computing one of EXP's operands.
5759 IGNORE is nonzero if the value is to be ignored. */
5762 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5763 int ignore)
5765 tree fndecl = get_callee_fndecl (exp);
5766 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5767 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5768 int flags;
5770 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5771 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5773 /* When not optimizing, generate calls to library functions for a certain
5774 set of builtins. */
5775 if (!optimize
5776 && !called_as_built_in (fndecl)
5777 && fcode != BUILT_IN_FORK
5778 && fcode != BUILT_IN_EXECL
5779 && fcode != BUILT_IN_EXECV
5780 && fcode != BUILT_IN_EXECLP
5781 && fcode != BUILT_IN_EXECLE
5782 && fcode != BUILT_IN_EXECVP
5783 && fcode != BUILT_IN_EXECVE
5784 && fcode != BUILT_IN_ALLOCA
5785 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5786 && fcode != BUILT_IN_FREE
5787 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5788 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5789 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5790 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5791 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5792 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5793 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5794 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5795 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5796 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5797 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND)
5798 return expand_call (exp, target, ignore);
5800 /* The built-in function expanders test for target == const0_rtx
5801 to determine whether the function's result will be ignored. */
5802 if (ignore)
5803 target = const0_rtx;
5805 /* If the result of a pure or const built-in function is ignored, and
5806 none of its arguments are volatile, we can avoid expanding the
5807 built-in call and just evaluate the arguments for side-effects. */
5808 if (target == const0_rtx
5809 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5810 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5812 bool volatilep = false;
5813 tree arg;
5814 call_expr_arg_iterator iter;
5816 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5817 if (TREE_THIS_VOLATILE (arg))
5819 volatilep = true;
5820 break;
5823 if (! volatilep)
5825 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5826 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5827 return const0_rtx;
5831 switch (fcode)
5833 CASE_FLT_FN (BUILT_IN_FABS):
5834 case BUILT_IN_FABSD32:
5835 case BUILT_IN_FABSD64:
5836 case BUILT_IN_FABSD128:
5837 target = expand_builtin_fabs (exp, target, subtarget);
5838 if (target)
5839 return target;
5840 break;
5842 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5843 target = expand_builtin_copysign (exp, target, subtarget);
5844 if (target)
5845 return target;
5846 break;
5848 /* Just do a normal library call if we were unable to fold
5849 the values. */
5850 CASE_FLT_FN (BUILT_IN_CABS):
5851 break;
5853 CASE_FLT_FN (BUILT_IN_EXP):
5854 CASE_FLT_FN (BUILT_IN_EXP10):
5855 CASE_FLT_FN (BUILT_IN_POW10):
5856 CASE_FLT_FN (BUILT_IN_EXP2):
5857 CASE_FLT_FN (BUILT_IN_EXPM1):
5858 CASE_FLT_FN (BUILT_IN_LOGB):
5859 CASE_FLT_FN (BUILT_IN_LOG):
5860 CASE_FLT_FN (BUILT_IN_LOG10):
5861 CASE_FLT_FN (BUILT_IN_LOG2):
5862 CASE_FLT_FN (BUILT_IN_LOG1P):
5863 CASE_FLT_FN (BUILT_IN_TAN):
5864 CASE_FLT_FN (BUILT_IN_ASIN):
5865 CASE_FLT_FN (BUILT_IN_ACOS):
5866 CASE_FLT_FN (BUILT_IN_ATAN):
5867 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5868 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5869 because of possible accuracy problems. */
5870 if (! flag_unsafe_math_optimizations)
5871 break;
5872 CASE_FLT_FN (BUILT_IN_SQRT):
5873 CASE_FLT_FN (BUILT_IN_FLOOR):
5874 CASE_FLT_FN (BUILT_IN_CEIL):
5875 CASE_FLT_FN (BUILT_IN_TRUNC):
5876 CASE_FLT_FN (BUILT_IN_ROUND):
5877 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5878 CASE_FLT_FN (BUILT_IN_RINT):
5879 target = expand_builtin_mathfn (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5884 CASE_FLT_FN (BUILT_IN_FMA):
5885 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5886 if (target)
5887 return target;
5888 break;
5890 CASE_FLT_FN (BUILT_IN_ILOGB):
5891 if (! flag_unsafe_math_optimizations)
5892 break;
5893 CASE_FLT_FN (BUILT_IN_ISINF):
5894 CASE_FLT_FN (BUILT_IN_FINITE):
5895 case BUILT_IN_ISFINITE:
5896 case BUILT_IN_ISNORMAL:
5897 target = expand_builtin_interclass_mathfn (exp, target);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_FLT_FN (BUILT_IN_ICEIL):
5903 CASE_FLT_FN (BUILT_IN_LCEIL):
5904 CASE_FLT_FN (BUILT_IN_LLCEIL):
5905 CASE_FLT_FN (BUILT_IN_LFLOOR):
5906 CASE_FLT_FN (BUILT_IN_IFLOOR):
5907 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5908 target = expand_builtin_int_roundingfn (exp, target);
5909 if (target)
5910 return target;
5911 break;
5913 CASE_FLT_FN (BUILT_IN_IRINT):
5914 CASE_FLT_FN (BUILT_IN_LRINT):
5915 CASE_FLT_FN (BUILT_IN_LLRINT):
5916 CASE_FLT_FN (BUILT_IN_IROUND):
5917 CASE_FLT_FN (BUILT_IN_LROUND):
5918 CASE_FLT_FN (BUILT_IN_LLROUND):
5919 target = expand_builtin_int_roundingfn_2 (exp, target);
5920 if (target)
5921 return target;
5922 break;
5924 CASE_FLT_FN (BUILT_IN_POWI):
5925 target = expand_builtin_powi (exp, target);
5926 if (target)
5927 return target;
5928 break;
5930 CASE_FLT_FN (BUILT_IN_ATAN2):
5931 CASE_FLT_FN (BUILT_IN_LDEXP):
5932 CASE_FLT_FN (BUILT_IN_SCALB):
5933 CASE_FLT_FN (BUILT_IN_SCALBN):
5934 CASE_FLT_FN (BUILT_IN_SCALBLN):
5935 if (! flag_unsafe_math_optimizations)
5936 break;
5938 CASE_FLT_FN (BUILT_IN_FMOD):
5939 CASE_FLT_FN (BUILT_IN_REMAINDER):
5940 CASE_FLT_FN (BUILT_IN_DREM):
5941 CASE_FLT_FN (BUILT_IN_POW):
5942 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5943 if (target)
5944 return target;
5945 break;
5947 CASE_FLT_FN (BUILT_IN_CEXPI):
5948 target = expand_builtin_cexpi (exp, target);
5949 gcc_assert (target);
5950 return target;
5952 CASE_FLT_FN (BUILT_IN_SIN):
5953 CASE_FLT_FN (BUILT_IN_COS):
5954 if (! flag_unsafe_math_optimizations)
5955 break;
5956 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5957 if (target)
5958 return target;
5959 break;
5961 CASE_FLT_FN (BUILT_IN_SINCOS):
5962 if (! flag_unsafe_math_optimizations)
5963 break;
5964 target = expand_builtin_sincos (exp);
5965 if (target)
5966 return target;
5967 break;
5969 case BUILT_IN_APPLY_ARGS:
5970 return expand_builtin_apply_args ();
5972 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5973 FUNCTION with a copy of the parameters described by
5974 ARGUMENTS, and ARGSIZE. It returns a block of memory
5975 allocated on the stack into which is stored all the registers
5976 that might possibly be used for returning the result of a
5977 function. ARGUMENTS is the value returned by
5978 __builtin_apply_args. ARGSIZE is the number of bytes of
5979 arguments that must be copied. ??? How should this value be
5980 computed? We'll also need a safe worst case value for varargs
5981 functions. */
5982 case BUILT_IN_APPLY:
5983 if (!validate_arglist (exp, POINTER_TYPE,
5984 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5985 && !validate_arglist (exp, REFERENCE_TYPE,
5986 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5987 return const0_rtx;
5988 else
5990 rtx ops[3];
5992 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5993 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5994 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5996 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5999 /* __builtin_return (RESULT) causes the function to return the
6000 value described by RESULT. RESULT is address of the block of
6001 memory returned by __builtin_apply. */
6002 case BUILT_IN_RETURN:
6003 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6004 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6005 return const0_rtx;
6007 case BUILT_IN_SAVEREGS:
6008 return expand_builtin_saveregs ();
6010 case BUILT_IN_VA_ARG_PACK:
6011 /* All valid uses of __builtin_va_arg_pack () are removed during
6012 inlining. */
6013 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6014 return const0_rtx;
6016 case BUILT_IN_VA_ARG_PACK_LEN:
6017 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6018 inlining. */
6019 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6020 return const0_rtx;
6022 /* Return the address of the first anonymous stack arg. */
6023 case BUILT_IN_NEXT_ARG:
6024 if (fold_builtin_next_arg (exp, false))
6025 return const0_rtx;
6026 return expand_builtin_next_arg ();
6028 case BUILT_IN_CLEAR_CACHE:
6029 target = expand_builtin___clear_cache (exp);
6030 if (target)
6031 return target;
6032 break;
6034 case BUILT_IN_CLASSIFY_TYPE:
6035 return expand_builtin_classify_type (exp);
6037 case BUILT_IN_CONSTANT_P:
6038 return const0_rtx;
6040 case BUILT_IN_FRAME_ADDRESS:
6041 case BUILT_IN_RETURN_ADDRESS:
6042 return expand_builtin_frame_address (fndecl, exp);
6044 /* Returns the address of the area where the structure is returned.
6045 0 otherwise. */
6046 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6047 if (call_expr_nargs (exp) != 0
6048 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6049 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6050 return const0_rtx;
6051 else
6052 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6054 case BUILT_IN_ALLOCA:
6055 case BUILT_IN_ALLOCA_WITH_ALIGN:
6056 /* If the allocation stems from the declaration of a variable-sized
6057 object, it cannot accumulate. */
6058 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6059 if (target)
6060 return target;
6061 break;
6063 case BUILT_IN_STACK_SAVE:
6064 return expand_stack_save ();
6066 case BUILT_IN_STACK_RESTORE:
6067 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6068 return const0_rtx;
6070 case BUILT_IN_BSWAP16:
6071 case BUILT_IN_BSWAP32:
6072 case BUILT_IN_BSWAP64:
6073 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6074 if (target)
6075 return target;
6076 break;
6078 CASE_INT_FN (BUILT_IN_FFS):
6079 target = expand_builtin_unop (target_mode, exp, target,
6080 subtarget, ffs_optab);
6081 if (target)
6082 return target;
6083 break;
6085 CASE_INT_FN (BUILT_IN_CLZ):
6086 target = expand_builtin_unop (target_mode, exp, target,
6087 subtarget, clz_optab);
6088 if (target)
6089 return target;
6090 break;
6092 CASE_INT_FN (BUILT_IN_CTZ):
6093 target = expand_builtin_unop (target_mode, exp, target,
6094 subtarget, ctz_optab);
6095 if (target)
6096 return target;
6097 break;
6099 CASE_INT_FN (BUILT_IN_CLRSB):
6100 target = expand_builtin_unop (target_mode, exp, target,
6101 subtarget, clrsb_optab);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_INT_FN (BUILT_IN_POPCOUNT):
6107 target = expand_builtin_unop (target_mode, exp, target,
6108 subtarget, popcount_optab);
6109 if (target)
6110 return target;
6111 break;
6113 CASE_INT_FN (BUILT_IN_PARITY):
6114 target = expand_builtin_unop (target_mode, exp, target,
6115 subtarget, parity_optab);
6116 if (target)
6117 return target;
6118 break;
6120 case BUILT_IN_STRLEN:
6121 target = expand_builtin_strlen (exp, target, target_mode);
6122 if (target)
6123 return target;
6124 break;
6126 case BUILT_IN_STRCPY:
6127 target = expand_builtin_strcpy (exp, target);
6128 if (target)
6129 return target;
6130 break;
6132 case BUILT_IN_STRNCPY:
6133 target = expand_builtin_strncpy (exp, target);
6134 if (target)
6135 return target;
6136 break;
6138 case BUILT_IN_STPCPY:
6139 target = expand_builtin_stpcpy (exp, target, mode);
6140 if (target)
6141 return target;
6142 break;
6144 case BUILT_IN_MEMCPY:
6145 target = expand_builtin_memcpy (exp, target);
6146 if (target)
6147 return target;
6148 break;
6150 case BUILT_IN_MEMPCPY:
6151 target = expand_builtin_mempcpy (exp, target, mode);
6152 if (target)
6153 return target;
6154 break;
6156 case BUILT_IN_MEMSET:
6157 target = expand_builtin_memset (exp, target, mode);
6158 if (target)
6159 return target;
6160 break;
6162 case BUILT_IN_BZERO:
6163 target = expand_builtin_bzero (exp);
6164 if (target)
6165 return target;
6166 break;
6168 case BUILT_IN_STRCMP:
6169 target = expand_builtin_strcmp (exp, target);
6170 if (target)
6171 return target;
6172 break;
6174 case BUILT_IN_STRNCMP:
6175 target = expand_builtin_strncmp (exp, target, mode);
6176 if (target)
6177 return target;
6178 break;
6180 case BUILT_IN_BCMP:
6181 case BUILT_IN_MEMCMP:
6182 target = expand_builtin_memcmp (exp, target, mode);
6183 if (target)
6184 return target;
6185 break;
6187 case BUILT_IN_SETJMP:
6188 /* This should have been lowered to the builtins below. */
6189 gcc_unreachable ();
6191 case BUILT_IN_SETJMP_SETUP:
6192 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6193 and the receiver label. */
6194 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6196 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6197 VOIDmode, EXPAND_NORMAL);
6198 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6199 rtx label_r = label_rtx (label);
6201 /* This is copied from the handling of non-local gotos. */
6202 expand_builtin_setjmp_setup (buf_addr, label_r);
6203 nonlocal_goto_handler_labels
6204 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6205 nonlocal_goto_handler_labels);
6206 /* ??? Do not let expand_label treat us as such since we would
6207 not want to be both on the list of non-local labels and on
6208 the list of forced labels. */
6209 FORCED_LABEL (label) = 0;
6210 return const0_rtx;
6212 break;
6214 case BUILT_IN_SETJMP_DISPATCHER:
6215 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6216 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6218 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6219 rtx label_r = label_rtx (label);
6221 /* Remove the dispatcher label from the list of non-local labels
6222 since the receiver labels have been added to it above. */
6223 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6224 return const0_rtx;
6226 break;
6228 case BUILT_IN_SETJMP_RECEIVER:
6229 /* __builtin_setjmp_receiver is passed the receiver label. */
6230 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6232 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6233 rtx label_r = label_rtx (label);
6235 expand_builtin_setjmp_receiver (label_r);
6236 return const0_rtx;
6238 break;
6240 /* __builtin_longjmp is passed a pointer to an array of five words.
6241 It's similar to the C library longjmp function but works with
6242 __builtin_setjmp above. */
6243 case BUILT_IN_LONGJMP:
6244 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6246 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6247 VOIDmode, EXPAND_NORMAL);
6248 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6250 if (value != const1_rtx)
6252 error ("%<__builtin_longjmp%> second argument must be 1");
6253 return const0_rtx;
6256 expand_builtin_longjmp (buf_addr, value);
6257 return const0_rtx;
6259 break;
6261 case BUILT_IN_NONLOCAL_GOTO:
6262 target = expand_builtin_nonlocal_goto (exp);
6263 if (target)
6264 return target;
6265 break;
6267 /* This updates the setjmp buffer that is its argument with the value
6268 of the current stack pointer. */
6269 case BUILT_IN_UPDATE_SETJMP_BUF:
6270 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6272 rtx buf_addr
6273 = expand_normal (CALL_EXPR_ARG (exp, 0));
6275 expand_builtin_update_setjmp_buf (buf_addr);
6276 return const0_rtx;
6278 break;
6280 case BUILT_IN_TRAP:
6281 expand_builtin_trap ();
6282 return const0_rtx;
6284 case BUILT_IN_UNREACHABLE:
6285 expand_builtin_unreachable ();
6286 return const0_rtx;
6288 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6289 case BUILT_IN_SIGNBITD32:
6290 case BUILT_IN_SIGNBITD64:
6291 case BUILT_IN_SIGNBITD128:
6292 target = expand_builtin_signbit (exp, target);
6293 if (target)
6294 return target;
6295 break;
6297 /* Various hooks for the DWARF 2 __throw routine. */
6298 case BUILT_IN_UNWIND_INIT:
6299 expand_builtin_unwind_init ();
6300 return const0_rtx;
6301 case BUILT_IN_DWARF_CFA:
6302 return virtual_cfa_rtx;
6303 #ifdef DWARF2_UNWIND_INFO
6304 case BUILT_IN_DWARF_SP_COLUMN:
6305 return expand_builtin_dwarf_sp_column ();
6306 case BUILT_IN_INIT_DWARF_REG_SIZES:
6307 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6308 return const0_rtx;
6309 #endif
6310 case BUILT_IN_FROB_RETURN_ADDR:
6311 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6312 case BUILT_IN_EXTRACT_RETURN_ADDR:
6313 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6314 case BUILT_IN_EH_RETURN:
6315 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6316 CALL_EXPR_ARG (exp, 1));
6317 return const0_rtx;
6318 #ifdef EH_RETURN_DATA_REGNO
6319 case BUILT_IN_EH_RETURN_DATA_REGNO:
6320 return expand_builtin_eh_return_data_regno (exp);
6321 #endif
6322 case BUILT_IN_EXTEND_POINTER:
6323 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6324 case BUILT_IN_EH_POINTER:
6325 return expand_builtin_eh_pointer (exp);
6326 case BUILT_IN_EH_FILTER:
6327 return expand_builtin_eh_filter (exp);
6328 case BUILT_IN_EH_COPY_VALUES:
6329 return expand_builtin_eh_copy_values (exp);
6331 case BUILT_IN_VA_START:
6332 return expand_builtin_va_start (exp);
6333 case BUILT_IN_VA_END:
6334 return expand_builtin_va_end (exp);
6335 case BUILT_IN_VA_COPY:
6336 return expand_builtin_va_copy (exp);
6337 case BUILT_IN_EXPECT:
6338 return expand_builtin_expect (exp, target);
6339 case BUILT_IN_ASSUME_ALIGNED:
6340 return expand_builtin_assume_aligned (exp, target);
6341 case BUILT_IN_PREFETCH:
6342 expand_builtin_prefetch (exp);
6343 return const0_rtx;
6345 case BUILT_IN_INIT_TRAMPOLINE:
6346 return expand_builtin_init_trampoline (exp, true);
6347 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6348 return expand_builtin_init_trampoline (exp, false);
6349 case BUILT_IN_ADJUST_TRAMPOLINE:
6350 return expand_builtin_adjust_trampoline (exp);
6352 case BUILT_IN_FORK:
6353 case BUILT_IN_EXECL:
6354 case BUILT_IN_EXECV:
6355 case BUILT_IN_EXECLP:
6356 case BUILT_IN_EXECLE:
6357 case BUILT_IN_EXECVP:
6358 case BUILT_IN_EXECVE:
6359 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6360 if (target)
6361 return target;
6362 break;
6364 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6365 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6366 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6367 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6368 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6369 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6370 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6371 if (target)
6372 return target;
6373 break;
6375 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6376 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6377 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6378 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6379 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6380 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6381 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6382 if (target)
6383 return target;
6384 break;
6386 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6387 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6388 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6389 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6390 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6391 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6392 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6393 if (target)
6394 return target;
6395 break;
6397 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6398 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6399 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6400 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6401 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6402 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6403 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6404 if (target)
6405 return target;
6406 break;
6408 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6409 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6410 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6411 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6412 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6413 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6414 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6415 if (target)
6416 return target;
6417 break;
6419 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6420 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6421 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6422 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6423 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6424 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6425 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6431 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6432 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6433 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6434 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6435 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6436 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6437 if (target)
6438 return target;
6439 break;
6441 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6442 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6443 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6444 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6445 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6447 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6448 if (target)
6449 return target;
6450 break;
6452 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6453 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6454 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6455 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6456 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6458 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6464 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6465 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6466 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6467 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6469 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6475 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6476 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6477 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6478 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6480 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6486 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6487 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6490 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6491 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6497 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6498 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6501 if (mode == VOIDmode)
6502 mode = TYPE_MODE (boolean_type_node);
6503 if (!target || !register_operand (target, mode))
6504 target = gen_reg_rtx (mode);
6506 mode = get_builtin_sync_mode
6507 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6508 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6509 if (target)
6510 return target;
6511 break;
6513 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6514 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6515 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6518 mode = get_builtin_sync_mode
6519 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6520 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6521 if (target)
6522 return target;
6523 break;
6525 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6526 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6527 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6530 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6531 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6532 if (target)
6533 return target;
6534 break;
6536 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6537 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6538 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6539 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6540 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6541 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6542 expand_builtin_sync_lock_release (mode, exp);
6543 return const0_rtx;
6545 case BUILT_IN_SYNC_SYNCHRONIZE:
6546 expand_builtin_sync_synchronize ();
6547 return const0_rtx;
6549 case BUILT_IN_ATOMIC_EXCHANGE_1:
6550 case BUILT_IN_ATOMIC_EXCHANGE_2:
6551 case BUILT_IN_ATOMIC_EXCHANGE_4:
6552 case BUILT_IN_ATOMIC_EXCHANGE_8:
6553 case BUILT_IN_ATOMIC_EXCHANGE_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6555 target = expand_builtin_atomic_exchange (mode, exp, target);
6556 if (target)
6557 return target;
6558 break;
6560 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6561 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6562 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6566 unsigned int nargs, z;
6567 vec<tree, va_gc> *vec;
6569 mode =
6570 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6571 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6572 if (target)
6573 return target;
6575 /* If this is turned into an external library call, the weak parameter
6576 must be dropped to match the expected parameter list. */
6577 nargs = call_expr_nargs (exp);
6578 vec_alloc (vec, nargs - 1);
6579 for (z = 0; z < 3; z++)
6580 vec->quick_push (CALL_EXPR_ARG (exp, z));
6581 /* Skip the boolean weak parameter. */
6582 for (z = 4; z < 6; z++)
6583 vec->quick_push (CALL_EXPR_ARG (exp, z));
6584 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6585 break;
6588 case BUILT_IN_ATOMIC_LOAD_1:
6589 case BUILT_IN_ATOMIC_LOAD_2:
6590 case BUILT_IN_ATOMIC_LOAD_4:
6591 case BUILT_IN_ATOMIC_LOAD_8:
6592 case BUILT_IN_ATOMIC_LOAD_16:
6593 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6594 target = expand_builtin_atomic_load (mode, exp, target);
6595 if (target)
6596 return target;
6597 break;
6599 case BUILT_IN_ATOMIC_STORE_1:
6600 case BUILT_IN_ATOMIC_STORE_2:
6601 case BUILT_IN_ATOMIC_STORE_4:
6602 case BUILT_IN_ATOMIC_STORE_8:
6603 case BUILT_IN_ATOMIC_STORE_16:
6604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6605 target = expand_builtin_atomic_store (mode, exp);
6606 if (target)
6607 return const0_rtx;
6608 break;
6610 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6611 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6612 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6613 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6614 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6616 enum built_in_function lib;
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6618 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6619 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6620 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6621 ignore, lib);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6627 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6628 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6629 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6630 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6632 enum built_in_function lib;
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6634 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6635 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6636 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6637 ignore, lib);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_ATOMIC_AND_FETCH_1:
6643 case BUILT_IN_ATOMIC_AND_FETCH_2:
6644 case BUILT_IN_ATOMIC_AND_FETCH_4:
6645 case BUILT_IN_ATOMIC_AND_FETCH_8:
6646 case BUILT_IN_ATOMIC_AND_FETCH_16:
6648 enum built_in_function lib;
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6650 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6651 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6652 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6653 ignore, lib);
6654 if (target)
6655 return target;
6656 break;
6658 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6659 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6660 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6661 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6662 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6664 enum built_in_function lib;
6665 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6666 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6667 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6668 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6669 ignore, lib);
6670 if (target)
6671 return target;
6672 break;
6674 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6675 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6676 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6677 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6678 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6680 enum built_in_function lib;
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6682 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6683 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6684 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6685 ignore, lib);
6686 if (target)
6687 return target;
6688 break;
6690 case BUILT_IN_ATOMIC_OR_FETCH_1:
6691 case BUILT_IN_ATOMIC_OR_FETCH_2:
6692 case BUILT_IN_ATOMIC_OR_FETCH_4:
6693 case BUILT_IN_ATOMIC_OR_FETCH_8:
6694 case BUILT_IN_ATOMIC_OR_FETCH_16:
6696 enum built_in_function lib;
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6698 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6699 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6700 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6701 ignore, lib);
6702 if (target)
6703 return target;
6704 break;
6706 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6707 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6708 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6709 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6710 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6712 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6713 ignore, BUILT_IN_NONE);
6714 if (target)
6715 return target;
6716 break;
6718 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6719 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6720 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6721 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6722 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6724 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6725 ignore, BUILT_IN_NONE);
6726 if (target)
6727 return target;
6728 break;
6730 case BUILT_IN_ATOMIC_FETCH_AND_1:
6731 case BUILT_IN_ATOMIC_FETCH_AND_2:
6732 case BUILT_IN_ATOMIC_FETCH_AND_4:
6733 case BUILT_IN_ATOMIC_FETCH_AND_8:
6734 case BUILT_IN_ATOMIC_FETCH_AND_16:
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6736 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6737 ignore, BUILT_IN_NONE);
6738 if (target)
6739 return target;
6740 break;
6742 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6743 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6744 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6745 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6746 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6748 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6749 ignore, BUILT_IN_NONE);
6750 if (target)
6751 return target;
6752 break;
6754 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6755 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6756 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6757 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6758 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6760 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6761 ignore, BUILT_IN_NONE);
6762 if (target)
6763 return target;
6764 break;
6766 case BUILT_IN_ATOMIC_FETCH_OR_1:
6767 case BUILT_IN_ATOMIC_FETCH_OR_2:
6768 case BUILT_IN_ATOMIC_FETCH_OR_4:
6769 case BUILT_IN_ATOMIC_FETCH_OR_8:
6770 case BUILT_IN_ATOMIC_FETCH_OR_16:
6771 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6772 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6773 ignore, BUILT_IN_NONE);
6774 if (target)
6775 return target;
6776 break;
6778 case BUILT_IN_ATOMIC_TEST_AND_SET:
6779 return expand_builtin_atomic_test_and_set (exp, target);
6781 case BUILT_IN_ATOMIC_CLEAR:
6782 return expand_builtin_atomic_clear (exp);
6784 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6785 return expand_builtin_atomic_always_lock_free (exp);
6787 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6788 target = expand_builtin_atomic_is_lock_free (exp);
6789 if (target)
6790 return target;
6791 break;
6793 case BUILT_IN_ATOMIC_THREAD_FENCE:
6794 expand_builtin_atomic_thread_fence (exp);
6795 return const0_rtx;
6797 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6798 expand_builtin_atomic_signal_fence (exp);
6799 return const0_rtx;
6801 case BUILT_IN_OBJECT_SIZE:
6802 return expand_builtin_object_size (exp);
6804 case BUILT_IN_MEMCPY_CHK:
6805 case BUILT_IN_MEMPCPY_CHK:
6806 case BUILT_IN_MEMMOVE_CHK:
6807 case BUILT_IN_MEMSET_CHK:
6808 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6809 if (target)
6810 return target;
6811 break;
6813 case BUILT_IN_STRCPY_CHK:
6814 case BUILT_IN_STPCPY_CHK:
6815 case BUILT_IN_STRNCPY_CHK:
6816 case BUILT_IN_STPNCPY_CHK:
6817 case BUILT_IN_STRCAT_CHK:
6818 case BUILT_IN_STRNCAT_CHK:
6819 case BUILT_IN_SNPRINTF_CHK:
6820 case BUILT_IN_VSNPRINTF_CHK:
6821 maybe_emit_chk_warning (exp, fcode);
6822 break;
6824 case BUILT_IN_SPRINTF_CHK:
6825 case BUILT_IN_VSPRINTF_CHK:
6826 maybe_emit_sprintf_chk_warning (exp, fcode);
6827 break;
6829 case BUILT_IN_FREE:
6830 if (warn_free_nonheap_object)
6831 maybe_emit_free_warning (exp);
6832 break;
6834 case BUILT_IN_THREAD_POINTER:
6835 return expand_builtin_thread_pointer (exp, target);
6837 case BUILT_IN_SET_THREAD_POINTER:
6838 expand_builtin_set_thread_pointer (exp);
6839 return const0_rtx;
6841 case BUILT_IN_CILK_DETACH:
6842 expand_builtin_cilk_detach (exp);
6843 return const0_rtx;
6845 case BUILT_IN_CILK_POP_FRAME:
6846 expand_builtin_cilk_pop_frame (exp);
6847 return const0_rtx;
6849 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6850 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6851 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6852 return expand_normal (CALL_EXPR_ARG (exp, 0));
6854 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6855 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6856 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6857 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6858 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6859 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6860 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6861 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6862 /* We allow user CHKP builtins if Pointer Bounds
6863 Checker is off. */
6864 if (!flag_check_pointer_bounds)
6866 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6867 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
6868 return expand_normal (CALL_EXPR_ARG (exp, 0));
6869 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6870 return expand_normal (size_zero_node);
6871 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6872 return expand_normal (size_int (-1));
6873 else
6874 return const0_rtx;
6876 /* FALLTHROUGH */
6878 case BUILT_IN_CHKP_BNDMK:
6879 case BUILT_IN_CHKP_BNDSTX:
6880 case BUILT_IN_CHKP_BNDCL:
6881 case BUILT_IN_CHKP_BNDCU:
6882 case BUILT_IN_CHKP_BNDLDX:
6883 case BUILT_IN_CHKP_BNDRET:
6884 case BUILT_IN_CHKP_INTERSECT:
6885 case BUILT_IN_CHKP_ARG_BND:
6886 case BUILT_IN_CHKP_NARROW:
6887 case BUILT_IN_CHKP_EXTRACT_LOWER:
6888 case BUILT_IN_CHKP_EXTRACT_UPPER:
6889 /* Software implementation of pointers checker is NYI.
6890 Target support is required. */
6891 error ("Your target platform does not support -fcheck-pointers");
6892 break;
6894 default: /* just do library call, if unknown builtin */
6895 break;
6898 /* The switch statement above can drop through to cause the function
6899 to be called normally. */
6900 return expand_call (exp, target, ignore);
6903 /* Determine whether a tree node represents a call to a built-in
6904 function. If the tree T is a call to a built-in function with
6905 the right number of arguments of the appropriate types, return
6906 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6907 Otherwise the return value is END_BUILTINS. */
6909 enum built_in_function
6910 builtin_mathfn_code (const_tree t)
6912 const_tree fndecl, arg, parmlist;
6913 const_tree argtype, parmtype;
6914 const_call_expr_arg_iterator iter;
6916 if (TREE_CODE (t) != CALL_EXPR
6917 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6918 return END_BUILTINS;
6920 fndecl = get_callee_fndecl (t);
6921 if (fndecl == NULL_TREE
6922 || TREE_CODE (fndecl) != FUNCTION_DECL
6923 || ! DECL_BUILT_IN (fndecl)
6924 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6925 return END_BUILTINS;
6927 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6928 init_const_call_expr_arg_iterator (t, &iter);
6929 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6931 /* If a function doesn't take a variable number of arguments,
6932 the last element in the list will have type `void'. */
6933 parmtype = TREE_VALUE (parmlist);
6934 if (VOID_TYPE_P (parmtype))
6936 if (more_const_call_expr_args_p (&iter))
6937 return END_BUILTINS;
6938 return DECL_FUNCTION_CODE (fndecl);
6941 if (! more_const_call_expr_args_p (&iter))
6942 return END_BUILTINS;
6944 arg = next_const_call_expr_arg (&iter);
6945 argtype = TREE_TYPE (arg);
6947 if (SCALAR_FLOAT_TYPE_P (parmtype))
6949 if (! SCALAR_FLOAT_TYPE_P (argtype))
6950 return END_BUILTINS;
6952 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6954 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6955 return END_BUILTINS;
6957 else if (POINTER_TYPE_P (parmtype))
6959 if (! POINTER_TYPE_P (argtype))
6960 return END_BUILTINS;
6962 else if (INTEGRAL_TYPE_P (parmtype))
6964 if (! INTEGRAL_TYPE_P (argtype))
6965 return END_BUILTINS;
6967 else
6968 return END_BUILTINS;
6971 /* Variable-length argument list. */
6972 return DECL_FUNCTION_CODE (fndecl);
6975 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6976 evaluate to a constant. */
6978 static tree
6979 fold_builtin_constant_p (tree arg)
6981 /* We return 1 for a numeric type that's known to be a constant
6982 value at compile-time or for an aggregate type that's a
6983 literal constant. */
6984 STRIP_NOPS (arg);
6986 /* If we know this is a constant, emit the constant of one. */
6987 if (CONSTANT_CLASS_P (arg)
6988 || (TREE_CODE (arg) == CONSTRUCTOR
6989 && TREE_CONSTANT (arg)))
6990 return integer_one_node;
6991 if (TREE_CODE (arg) == ADDR_EXPR)
6993 tree op = TREE_OPERAND (arg, 0);
6994 if (TREE_CODE (op) == STRING_CST
6995 || (TREE_CODE (op) == ARRAY_REF
6996 && integer_zerop (TREE_OPERAND (op, 1))
6997 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6998 return integer_one_node;
7001 /* If this expression has side effects, show we don't know it to be a
7002 constant. Likewise if it's a pointer or aggregate type since in
7003 those case we only want literals, since those are only optimized
7004 when generating RTL, not later.
7005 And finally, if we are compiling an initializer, not code, we
7006 need to return a definite result now; there's not going to be any
7007 more optimization done. */
7008 if (TREE_SIDE_EFFECTS (arg)
7009 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7010 || POINTER_TYPE_P (TREE_TYPE (arg))
7011 || cfun == 0
7012 || folding_initializer
7013 || force_folding_builtin_constant_p)
7014 return integer_zero_node;
7016 return NULL_TREE;
7019 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7020 return it as a truthvalue. */
7022 static tree
7023 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7025 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7027 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7028 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7029 ret_type = TREE_TYPE (TREE_TYPE (fn));
7030 pred_type = TREE_VALUE (arg_types);
7031 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7033 pred = fold_convert_loc (loc, pred_type, pred);
7034 expected = fold_convert_loc (loc, expected_type, expected);
7035 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7037 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7038 build_int_cst (ret_type, 0));
7041 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7042 NULL_TREE if no simplification is possible. */
7044 static tree
7045 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7047 tree inner, fndecl, inner_arg0;
7048 enum tree_code code;
7050 /* Distribute the expected value over short-circuiting operators.
7051 See through the cast from truthvalue_type_node to long. */
7052 inner_arg0 = arg0;
7053 while (TREE_CODE (inner_arg0) == NOP_EXPR
7054 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7055 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7056 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7058 /* If this is a builtin_expect within a builtin_expect keep the
7059 inner one. See through a comparison against a constant. It
7060 might have been added to create a thruthvalue. */
7061 inner = inner_arg0;
7063 if (COMPARISON_CLASS_P (inner)
7064 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7065 inner = TREE_OPERAND (inner, 0);
7067 if (TREE_CODE (inner) == CALL_EXPR
7068 && (fndecl = get_callee_fndecl (inner))
7069 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7070 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7071 return arg0;
7073 inner = inner_arg0;
7074 code = TREE_CODE (inner);
7075 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7077 tree op0 = TREE_OPERAND (inner, 0);
7078 tree op1 = TREE_OPERAND (inner, 1);
7080 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7081 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7082 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7084 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7087 /* If the argument isn't invariant then there's nothing else we can do. */
7088 if (!TREE_CONSTANT (inner_arg0))
7089 return NULL_TREE;
7091 /* If we expect that a comparison against the argument will fold to
7092 a constant return the constant. In practice, this means a true
7093 constant or the address of a non-weak symbol. */
7094 inner = inner_arg0;
7095 STRIP_NOPS (inner);
7096 if (TREE_CODE (inner) == ADDR_EXPR)
7100 inner = TREE_OPERAND (inner, 0);
7102 while (TREE_CODE (inner) == COMPONENT_REF
7103 || TREE_CODE (inner) == ARRAY_REF);
7104 if ((TREE_CODE (inner) == VAR_DECL
7105 || TREE_CODE (inner) == FUNCTION_DECL)
7106 && DECL_WEAK (inner))
7107 return NULL_TREE;
7110 /* Otherwise, ARG0 already has the proper type for the return value. */
7111 return arg0;
7114 /* Fold a call to __builtin_classify_type with argument ARG. */
7116 static tree
7117 fold_builtin_classify_type (tree arg)
7119 if (arg == 0)
7120 return build_int_cst (integer_type_node, no_type_class);
7122 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7125 /* Fold a call to __builtin_strlen with argument ARG. */
7127 static tree
7128 fold_builtin_strlen (location_t loc, tree type, tree arg)
7130 if (!validate_arg (arg, POINTER_TYPE))
7131 return NULL_TREE;
7132 else
7134 tree len = c_strlen (arg, 0);
7136 if (len)
7137 return fold_convert_loc (loc, type, len);
7139 return NULL_TREE;
7143 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7145 static tree
7146 fold_builtin_inf (location_t loc, tree type, int warn)
7148 REAL_VALUE_TYPE real;
7150 /* __builtin_inff is intended to be usable to define INFINITY on all
7151 targets. If an infinity is not available, INFINITY expands "to a
7152 positive constant of type float that overflows at translation
7153 time", footnote "In this case, using INFINITY will violate the
7154 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7155 Thus we pedwarn to ensure this constraint violation is
7156 diagnosed. */
7157 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7158 pedwarn (loc, 0, "target format does not support infinity");
7160 real_inf (&real);
7161 return build_real (type, real);
7164 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7166 static tree
7167 fold_builtin_nan (tree arg, tree type, int quiet)
7169 REAL_VALUE_TYPE real;
7170 const char *str;
7172 if (!validate_arg (arg, POINTER_TYPE))
7173 return NULL_TREE;
7174 str = c_getstr (arg);
7175 if (!str)
7176 return NULL_TREE;
7178 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7179 return NULL_TREE;
7181 return build_real (type, real);
7184 /* Return true if the floating point expression T has an integer value.
7185 We also allow +Inf, -Inf and NaN to be considered integer values. */
7187 static bool
7188 integer_valued_real_p (tree t)
7190 switch (TREE_CODE (t))
7192 case FLOAT_EXPR:
7193 return true;
7195 case ABS_EXPR:
7196 case SAVE_EXPR:
7197 return integer_valued_real_p (TREE_OPERAND (t, 0));
7199 case COMPOUND_EXPR:
7200 case MODIFY_EXPR:
7201 case BIND_EXPR:
7202 return integer_valued_real_p (TREE_OPERAND (t, 1));
7204 case PLUS_EXPR:
7205 case MINUS_EXPR:
7206 case MULT_EXPR:
7207 case MIN_EXPR:
7208 case MAX_EXPR:
7209 return integer_valued_real_p (TREE_OPERAND (t, 0))
7210 && integer_valued_real_p (TREE_OPERAND (t, 1));
7212 case COND_EXPR:
7213 return integer_valued_real_p (TREE_OPERAND (t, 1))
7214 && integer_valued_real_p (TREE_OPERAND (t, 2));
7216 case REAL_CST:
7217 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7219 case NOP_EXPR:
7221 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7222 if (TREE_CODE (type) == INTEGER_TYPE)
7223 return true;
7224 if (TREE_CODE (type) == REAL_TYPE)
7225 return integer_valued_real_p (TREE_OPERAND (t, 0));
7226 break;
7229 case CALL_EXPR:
7230 switch (builtin_mathfn_code (t))
7232 CASE_FLT_FN (BUILT_IN_CEIL):
7233 CASE_FLT_FN (BUILT_IN_FLOOR):
7234 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7235 CASE_FLT_FN (BUILT_IN_RINT):
7236 CASE_FLT_FN (BUILT_IN_ROUND):
7237 CASE_FLT_FN (BUILT_IN_TRUNC):
7238 return true;
7240 CASE_FLT_FN (BUILT_IN_FMIN):
7241 CASE_FLT_FN (BUILT_IN_FMAX):
7242 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7243 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7245 default:
7246 break;
7248 break;
7250 default:
7251 break;
7253 return false;
7256 /* FNDECL is assumed to be a builtin where truncation can be propagated
7257 across (for instance floor((double)f) == (double)floorf (f).
7258 Do the transformation for a call with argument ARG. */
7260 static tree
7261 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7263 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7265 if (!validate_arg (arg, REAL_TYPE))
7266 return NULL_TREE;
7268 /* Integer rounding functions are idempotent. */
7269 if (fcode == builtin_mathfn_code (arg))
7270 return arg;
7272 /* If argument is already integer valued, and we don't need to worry
7273 about setting errno, there's no need to perform rounding. */
7274 if (! flag_errno_math && integer_valued_real_p (arg))
7275 return arg;
7277 if (optimize)
7279 tree arg0 = strip_float_extensions (arg);
7280 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7281 tree newtype = TREE_TYPE (arg0);
7282 tree decl;
7284 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7285 && (decl = mathfn_built_in (newtype, fcode)))
7286 return fold_convert_loc (loc, ftype,
7287 build_call_expr_loc (loc, decl, 1,
7288 fold_convert_loc (loc,
7289 newtype,
7290 arg0)));
7292 return NULL_TREE;
7295 /* FNDECL is assumed to be builtin which can narrow the FP type of
7296 the argument, for instance lround((double)f) -> lroundf (f).
7297 Do the transformation for a call with argument ARG. */
7299 static tree
7300 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7302 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7304 if (!validate_arg (arg, REAL_TYPE))
7305 return NULL_TREE;
7307 /* If argument is already integer valued, and we don't need to worry
7308 about setting errno, there's no need to perform rounding. */
7309 if (! flag_errno_math && integer_valued_real_p (arg))
7310 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7311 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7313 if (optimize)
7315 tree ftype = TREE_TYPE (arg);
7316 tree arg0 = strip_float_extensions (arg);
7317 tree newtype = TREE_TYPE (arg0);
7318 tree decl;
7320 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7321 && (decl = mathfn_built_in (newtype, fcode)))
7322 return build_call_expr_loc (loc, decl, 1,
7323 fold_convert_loc (loc, newtype, arg0));
7326 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7327 sizeof (int) == sizeof (long). */
7328 if (TYPE_PRECISION (integer_type_node)
7329 == TYPE_PRECISION (long_integer_type_node))
7331 tree newfn = NULL_TREE;
7332 switch (fcode)
7334 CASE_FLT_FN (BUILT_IN_ICEIL):
7335 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7336 break;
7338 CASE_FLT_FN (BUILT_IN_IFLOOR):
7339 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7340 break;
7342 CASE_FLT_FN (BUILT_IN_IROUND):
7343 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7344 break;
7346 CASE_FLT_FN (BUILT_IN_IRINT):
7347 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7348 break;
7350 default:
7351 break;
7354 if (newfn)
7356 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7357 return fold_convert_loc (loc,
7358 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7362 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7363 sizeof (long long) == sizeof (long). */
7364 if (TYPE_PRECISION (long_long_integer_type_node)
7365 == TYPE_PRECISION (long_integer_type_node))
7367 tree newfn = NULL_TREE;
7368 switch (fcode)
7370 CASE_FLT_FN (BUILT_IN_LLCEIL):
7371 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7372 break;
7374 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7375 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7376 break;
7378 CASE_FLT_FN (BUILT_IN_LLROUND):
7379 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7380 break;
7382 CASE_FLT_FN (BUILT_IN_LLRINT):
7383 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7384 break;
7386 default:
7387 break;
7390 if (newfn)
7392 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7393 return fold_convert_loc (loc,
7394 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7398 return NULL_TREE;
7401 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7402 return type. Return NULL_TREE if no simplification can be made. */
7404 static tree
7405 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7407 tree res;
7409 if (!validate_arg (arg, COMPLEX_TYPE)
7410 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7411 return NULL_TREE;
7413 /* Calculate the result when the argument is a constant. */
7414 if (TREE_CODE (arg) == COMPLEX_CST
7415 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7416 type, mpfr_hypot)))
7417 return res;
7419 if (TREE_CODE (arg) == COMPLEX_EXPR)
7421 tree real = TREE_OPERAND (arg, 0);
7422 tree imag = TREE_OPERAND (arg, 1);
7424 /* If either part is zero, cabs is fabs of the other. */
7425 if (real_zerop (real))
7426 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7427 if (real_zerop (imag))
7428 return fold_build1_loc (loc, ABS_EXPR, type, real);
7430 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7431 if (flag_unsafe_math_optimizations
7432 && operand_equal_p (real, imag, OEP_PURE_SAME))
7434 const REAL_VALUE_TYPE sqrt2_trunc
7435 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7436 STRIP_NOPS (real);
7437 return fold_build2_loc (loc, MULT_EXPR, type,
7438 fold_build1_loc (loc, ABS_EXPR, type, real),
7439 build_real (type, sqrt2_trunc));
7443 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7444 if (TREE_CODE (arg) == NEGATE_EXPR
7445 || TREE_CODE (arg) == CONJ_EXPR)
7446 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7448 /* Don't do this when optimizing for size. */
7449 if (flag_unsafe_math_optimizations
7450 && optimize && optimize_function_for_speed_p (cfun))
7452 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7454 if (sqrtfn != NULL_TREE)
7456 tree rpart, ipart, result;
7458 arg = builtin_save_expr (arg);
7460 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7461 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7463 rpart = builtin_save_expr (rpart);
7464 ipart = builtin_save_expr (ipart);
7466 result = fold_build2_loc (loc, PLUS_EXPR, type,
7467 fold_build2_loc (loc, MULT_EXPR, type,
7468 rpart, rpart),
7469 fold_build2_loc (loc, MULT_EXPR, type,
7470 ipart, ipart));
7472 return build_call_expr_loc (loc, sqrtfn, 1, result);
7476 return NULL_TREE;
7479 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7480 complex tree type of the result. If NEG is true, the imaginary
7481 zero is negative. */
7483 static tree
7484 build_complex_cproj (tree type, bool neg)
7486 REAL_VALUE_TYPE rinf, rzero = dconst0;
7488 real_inf (&rinf);
7489 rzero.sign = neg;
7490 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7491 build_real (TREE_TYPE (type), rzero));
7494 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7495 return type. Return NULL_TREE if no simplification can be made. */
7497 static tree
7498 fold_builtin_cproj (location_t loc, tree arg, tree type)
7500 if (!validate_arg (arg, COMPLEX_TYPE)
7501 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7502 return NULL_TREE;
7504 /* If there are no infinities, return arg. */
7505 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7506 return non_lvalue_loc (loc, arg);
7508 /* Calculate the result when the argument is a constant. */
7509 if (TREE_CODE (arg) == COMPLEX_CST)
7511 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7512 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7514 if (real_isinf (real) || real_isinf (imag))
7515 return build_complex_cproj (type, imag->sign);
7516 else
7517 return arg;
7519 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7521 tree real = TREE_OPERAND (arg, 0);
7522 tree imag = TREE_OPERAND (arg, 1);
7524 STRIP_NOPS (real);
7525 STRIP_NOPS (imag);
7527 /* If the real part is inf and the imag part is known to be
7528 nonnegative, return (inf + 0i). Remember side-effects are
7529 possible in the imag part. */
7530 if (TREE_CODE (real) == REAL_CST
7531 && real_isinf (TREE_REAL_CST_PTR (real))
7532 && tree_expr_nonnegative_p (imag))
7533 return omit_one_operand_loc (loc, type,
7534 build_complex_cproj (type, false),
7535 arg);
7537 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7538 Remember side-effects are possible in the real part. */
7539 if (TREE_CODE (imag) == REAL_CST
7540 && real_isinf (TREE_REAL_CST_PTR (imag)))
7541 return
7542 omit_one_operand_loc (loc, type,
7543 build_complex_cproj (type, TREE_REAL_CST_PTR
7544 (imag)->sign), arg);
7547 return NULL_TREE;
7550 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7551 Return NULL_TREE if no simplification can be made. */
7553 static tree
7554 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7557 enum built_in_function fcode;
7558 tree res;
7560 if (!validate_arg (arg, REAL_TYPE))
7561 return NULL_TREE;
7563 /* Calculate the result when the argument is a constant. */
7564 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7565 return res;
7567 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7568 fcode = builtin_mathfn_code (arg);
7569 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7571 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7572 arg = fold_build2_loc (loc, MULT_EXPR, type,
7573 CALL_EXPR_ARG (arg, 0),
7574 build_real (type, dconsthalf));
7575 return build_call_expr_loc (loc, expfn, 1, arg);
7578 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7579 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7581 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7583 if (powfn)
7585 tree arg0 = CALL_EXPR_ARG (arg, 0);
7586 tree tree_root;
7587 /* The inner root was either sqrt or cbrt. */
7588 /* This was a conditional expression but it triggered a bug
7589 in Sun C 5.5. */
7590 REAL_VALUE_TYPE dconstroot;
7591 if (BUILTIN_SQRT_P (fcode))
7592 dconstroot = dconsthalf;
7593 else
7594 dconstroot = dconst_third ();
7596 /* Adjust for the outer root. */
7597 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7598 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7599 tree_root = build_real (type, dconstroot);
7600 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7604 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7605 if (flag_unsafe_math_optimizations
7606 && (fcode == BUILT_IN_POW
7607 || fcode == BUILT_IN_POWF
7608 || fcode == BUILT_IN_POWL))
7610 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7611 tree arg0 = CALL_EXPR_ARG (arg, 0);
7612 tree arg1 = CALL_EXPR_ARG (arg, 1);
7613 tree narg1;
7614 if (!tree_expr_nonnegative_p (arg0))
7615 arg0 = build1 (ABS_EXPR, type, arg0);
7616 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7617 build_real (type, dconsthalf));
7618 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7621 return NULL_TREE;
7624 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7625 Return NULL_TREE if no simplification can be made. */
7627 static tree
7628 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7630 const enum built_in_function fcode = builtin_mathfn_code (arg);
7631 tree res;
7633 if (!validate_arg (arg, REAL_TYPE))
7634 return NULL_TREE;
7636 /* Calculate the result when the argument is a constant. */
7637 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7638 return res;
7640 if (flag_unsafe_math_optimizations)
7642 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7643 if (BUILTIN_EXPONENT_P (fcode))
7645 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7646 const REAL_VALUE_TYPE third_trunc =
7647 real_value_truncate (TYPE_MODE (type), dconst_third ());
7648 arg = fold_build2_loc (loc, MULT_EXPR, type,
7649 CALL_EXPR_ARG (arg, 0),
7650 build_real (type, third_trunc));
7651 return build_call_expr_loc (loc, expfn, 1, arg);
7654 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7655 if (BUILTIN_SQRT_P (fcode))
7657 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7659 if (powfn)
7661 tree arg0 = CALL_EXPR_ARG (arg, 0);
7662 tree tree_root;
7663 REAL_VALUE_TYPE dconstroot = dconst_third ();
7665 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7666 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7667 tree_root = build_real (type, dconstroot);
7668 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7672 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7673 if (BUILTIN_CBRT_P (fcode))
7675 tree arg0 = CALL_EXPR_ARG (arg, 0);
7676 if (tree_expr_nonnegative_p (arg0))
7678 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7680 if (powfn)
7682 tree tree_root;
7683 REAL_VALUE_TYPE dconstroot;
7685 real_arithmetic (&dconstroot, MULT_EXPR,
7686 dconst_third_ptr (), dconst_third_ptr ());
7687 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7688 tree_root = build_real (type, dconstroot);
7689 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7694 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7695 if (fcode == BUILT_IN_POW
7696 || fcode == BUILT_IN_POWF
7697 || fcode == BUILT_IN_POWL)
7699 tree arg00 = CALL_EXPR_ARG (arg, 0);
7700 tree arg01 = CALL_EXPR_ARG (arg, 1);
7701 if (tree_expr_nonnegative_p (arg00))
7703 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7704 const REAL_VALUE_TYPE dconstroot
7705 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7706 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7707 build_real (type, dconstroot));
7708 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7712 return NULL_TREE;
7715 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7716 TYPE is the type of the return value. Return NULL_TREE if no
7717 simplification can be made. */
7719 static tree
7720 fold_builtin_cos (location_t loc,
7721 tree arg, tree type, tree fndecl)
7723 tree res, narg;
7725 if (!validate_arg (arg, REAL_TYPE))
7726 return NULL_TREE;
7728 /* Calculate the result when the argument is a constant. */
7729 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7730 return res;
7732 /* Optimize cos(-x) into cos (x). */
7733 if ((narg = fold_strip_sign_ops (arg)))
7734 return build_call_expr_loc (loc, fndecl, 1, narg);
7736 return NULL_TREE;
7739 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7740 Return NULL_TREE if no simplification can be made. */
7742 static tree
7743 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7745 if (validate_arg (arg, REAL_TYPE))
7747 tree res, narg;
7749 /* Calculate the result when the argument is a constant. */
7750 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7751 return res;
7753 /* Optimize cosh(-x) into cosh (x). */
7754 if ((narg = fold_strip_sign_ops (arg)))
7755 return build_call_expr_loc (loc, fndecl, 1, narg);
7758 return NULL_TREE;
7761 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7762 argument ARG. TYPE is the type of the return value. Return
7763 NULL_TREE if no simplification can be made. */
7765 static tree
7766 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7767 bool hyper)
7769 if (validate_arg (arg, COMPLEX_TYPE)
7770 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7772 tree tmp;
7774 /* Calculate the result when the argument is a constant. */
7775 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7776 return tmp;
7778 /* Optimize fn(-x) into fn(x). */
7779 if ((tmp = fold_strip_sign_ops (arg)))
7780 return build_call_expr_loc (loc, fndecl, 1, tmp);
7783 return NULL_TREE;
7786 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7787 Return NULL_TREE if no simplification can be made. */
7789 static tree
7790 fold_builtin_tan (tree arg, tree type)
7792 enum built_in_function fcode;
7793 tree res;
7795 if (!validate_arg (arg, REAL_TYPE))
7796 return NULL_TREE;
7798 /* Calculate the result when the argument is a constant. */
7799 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7800 return res;
7802 /* Optimize tan(atan(x)) = x. */
7803 fcode = builtin_mathfn_code (arg);
7804 if (flag_unsafe_math_optimizations
7805 && (fcode == BUILT_IN_ATAN
7806 || fcode == BUILT_IN_ATANF
7807 || fcode == BUILT_IN_ATANL))
7808 return CALL_EXPR_ARG (arg, 0);
7810 return NULL_TREE;
7813 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7814 NULL_TREE if no simplification can be made. */
7816 static tree
7817 fold_builtin_sincos (location_t loc,
7818 tree arg0, tree arg1, tree arg2)
7820 tree type;
7821 tree res, fn, call;
7823 if (!validate_arg (arg0, REAL_TYPE)
7824 || !validate_arg (arg1, POINTER_TYPE)
7825 || !validate_arg (arg2, POINTER_TYPE))
7826 return NULL_TREE;
7828 type = TREE_TYPE (arg0);
7830 /* Calculate the result when the argument is a constant. */
7831 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7832 return res;
7834 /* Canonicalize sincos to cexpi. */
7835 if (!targetm.libc_has_function (function_c99_math_complex))
7836 return NULL_TREE;
7837 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7838 if (!fn)
7839 return NULL_TREE;
7841 call = build_call_expr_loc (loc, fn, 1, arg0);
7842 call = builtin_save_expr (call);
7844 return build2 (COMPOUND_EXPR, void_type_node,
7845 build2 (MODIFY_EXPR, void_type_node,
7846 build_fold_indirect_ref_loc (loc, arg1),
7847 build1 (IMAGPART_EXPR, type, call)),
7848 build2 (MODIFY_EXPR, void_type_node,
7849 build_fold_indirect_ref_loc (loc, arg2),
7850 build1 (REALPART_EXPR, type, call)));
7853 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7854 NULL_TREE if no simplification can be made. */
7856 static tree
7857 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7859 tree rtype;
7860 tree realp, imagp, ifn;
7861 tree res;
7863 if (!validate_arg (arg0, COMPLEX_TYPE)
7864 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7865 return NULL_TREE;
7867 /* Calculate the result when the argument is a constant. */
7868 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7869 return res;
7871 rtype = TREE_TYPE (TREE_TYPE (arg0));
7873 /* In case we can figure out the real part of arg0 and it is constant zero
7874 fold to cexpi. */
7875 if (!targetm.libc_has_function (function_c99_math_complex))
7876 return NULL_TREE;
7877 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7878 if (!ifn)
7879 return NULL_TREE;
7881 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7882 && real_zerop (realp))
7884 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7885 return build_call_expr_loc (loc, ifn, 1, narg);
7888 /* In case we can easily decompose real and imaginary parts split cexp
7889 to exp (r) * cexpi (i). */
7890 if (flag_unsafe_math_optimizations
7891 && realp)
7893 tree rfn, rcall, icall;
7895 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7896 if (!rfn)
7897 return NULL_TREE;
7899 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7900 if (!imagp)
7901 return NULL_TREE;
7903 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7904 icall = builtin_save_expr (icall);
7905 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7906 rcall = builtin_save_expr (rcall);
7907 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7908 fold_build2_loc (loc, MULT_EXPR, rtype,
7909 rcall,
7910 fold_build1_loc (loc, REALPART_EXPR,
7911 rtype, icall)),
7912 fold_build2_loc (loc, MULT_EXPR, rtype,
7913 rcall,
7914 fold_build1_loc (loc, IMAGPART_EXPR,
7915 rtype, icall)));
7918 return NULL_TREE;
7921 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7922 Return NULL_TREE if no simplification can be made. */
7924 static tree
7925 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7927 if (!validate_arg (arg, REAL_TYPE))
7928 return NULL_TREE;
7930 /* Optimize trunc of constant value. */
7931 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7933 REAL_VALUE_TYPE r, x;
7934 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7936 x = TREE_REAL_CST (arg);
7937 real_trunc (&r, TYPE_MODE (type), &x);
7938 return build_real (type, r);
7941 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7944 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7945 Return NULL_TREE if no simplification can be made. */
7947 static tree
7948 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7950 if (!validate_arg (arg, REAL_TYPE))
7951 return NULL_TREE;
7953 /* Optimize floor of constant value. */
7954 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7956 REAL_VALUE_TYPE x;
7958 x = TREE_REAL_CST (arg);
7959 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7961 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7962 REAL_VALUE_TYPE r;
7964 real_floor (&r, TYPE_MODE (type), &x);
7965 return build_real (type, r);
7969 /* Fold floor (x) where x is nonnegative to trunc (x). */
7970 if (tree_expr_nonnegative_p (arg))
7972 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7973 if (truncfn)
7974 return build_call_expr_loc (loc, truncfn, 1, arg);
7977 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7980 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7981 Return NULL_TREE if no simplification can be made. */
7983 static tree
7984 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7986 if (!validate_arg (arg, REAL_TYPE))
7987 return NULL_TREE;
7989 /* Optimize ceil of constant value. */
7990 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7992 REAL_VALUE_TYPE x;
7994 x = TREE_REAL_CST (arg);
7995 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7997 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7998 REAL_VALUE_TYPE r;
8000 real_ceil (&r, TYPE_MODE (type), &x);
8001 return build_real (type, r);
8005 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8008 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8009 Return NULL_TREE if no simplification can be made. */
8011 static tree
8012 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8014 if (!validate_arg (arg, REAL_TYPE))
8015 return NULL_TREE;
8017 /* Optimize round of constant value. */
8018 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8020 REAL_VALUE_TYPE x;
8022 x = TREE_REAL_CST (arg);
8023 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8025 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8026 REAL_VALUE_TYPE r;
8028 real_round (&r, TYPE_MODE (type), &x);
8029 return build_real (type, r);
8033 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8036 /* Fold function call to builtin lround, lroundf or lroundl (or the
8037 corresponding long long versions) and other rounding functions. ARG
8038 is the argument to the call. Return NULL_TREE if no simplification
8039 can be made. */
8041 static tree
8042 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8044 if (!validate_arg (arg, REAL_TYPE))
8045 return NULL_TREE;
8047 /* Optimize lround of constant value. */
8048 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8050 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite constants are folded; NaN/Inf are left for run time.  */
8052 if (real_isfinite (&x))
8054 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8055 tree ftype = TREE_TYPE (arg);
8056 double_int val;
8057 REAL_VALUE_TYPE r;
/* Apply the rounding mode implied by the particular builtin:
   floor, ceil or round-to-nearest.  */
8059 switch (DECL_FUNCTION_CODE (fndecl))
8061 CASE_FLT_FN (BUILT_IN_IFLOOR):
8062 CASE_FLT_FN (BUILT_IN_LFLOOR):
8063 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8064 real_floor (&r, TYPE_MODE (ftype), &x);
8065 break;
8067 CASE_FLT_FN (BUILT_IN_ICEIL):
8068 CASE_FLT_FN (BUILT_IN_LCEIL):
8069 CASE_FLT_FN (BUILT_IN_LLCEIL):
8070 real_ceil (&r, TYPE_MODE (ftype), &x);
8071 break;
8073 CASE_FLT_FN (BUILT_IN_IROUND):
8074 CASE_FLT_FN (BUILT_IN_LROUND):
8075 CASE_FLT_FN (BUILT_IN_LLROUND):
8076 real_round (&r, TYPE_MODE (ftype), &x);
8077 break;
8079 default:
8080 gcc_unreachable ();
/* Convert the rounded value to the integer return type, and fold
   only when it fits that type without overflow.  */
8083 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8084 if (double_int_fits_to_tree_p (itype, val))
8085 return double_int_to_tree (itype, val);
/* Non-constant argument: lfloor of a nonnegative value is plain
   truncation towards zero.  */
8089 switch (DECL_FUNCTION_CODE (fndecl))
8091 CASE_FLT_FN (BUILT_IN_LFLOOR):
8092 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8093 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8094 if (tree_expr_nonnegative_p (arg))
8095 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8096 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8097 break;
8098 default:;
/* Otherwise defer to the generic real-to-integer math folder.  */
8101 return fold_fixed_mathfn (loc, fndecl, arg);
8104 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8105 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8106 the argument to the call. Return NULL_TREE if no simplification can
8107 be made. */
8109 static tree
8110 fold_builtin_bitop (tree fndecl, tree arg)
8112 if (!validate_arg (arg, INTEGER_TYPE))
8113 return NULL_TREE;
8115 /* Optimize for constant argument. */
8116 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8118 HOST_WIDE_INT hi, width, result;
8119 unsigned HOST_WIDE_INT lo;
8120 tree type;
/* The constant is represented as a pair of host words: LO holds the
   low HOST_BITS_PER_WIDE_INT bits, HI the high ones.  */
8122 type = TREE_TYPE (arg);
8123 width = TYPE_PRECISION (type);
8124 lo = TREE_INT_CST_LOW (arg);
8126 /* Clear all the bits that are beyond the type's precision. */
8127 if (width > HOST_BITS_PER_WIDE_INT)
8129 hi = TREE_INT_CST_HIGH (arg);
8130 if (width < HOST_BITS_PER_DOUBLE_INT)
8131 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8133 else
8135 hi = 0;
8136 if (width < HOST_BITS_PER_WIDE_INT)
8137 lo &= ~(HOST_WIDE_INT_M1U << width);
8140 switch (DECL_FUNCTION_CODE (fndecl))
/* ffs: 1-based index of the least significant set bit, 0 if none.  */
8142 CASE_INT_FN (BUILT_IN_FFS):
8143 if (lo != 0)
8144 result = ffs_hwi (lo);
8145 else if (hi != 0)
8146 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8147 else
8148 result = 0;
8149 break;
/* clz: leading zero count; at zero, fold only if the target defines
   a value for it, otherwise leave the call alone.  */
8151 CASE_INT_FN (BUILT_IN_CLZ):
8152 if (hi != 0)
8153 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8154 else if (lo != 0)
8155 result = width - floor_log2 (lo) - 1;
8156 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8157 result = width;
8158 break;
8160 CASE_INT_FN (BUILT_IN_CTZ):
8161 if (lo != 0)
8162 result = ctz_hwi (lo);
8163 else if (hi != 0)
8164 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8165 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8166 result = width;
8167 break;
/* clrsb: if the sign bit is set, complement first so the count of
   leading redundant sign bits reduces to a leading-zero count.  */
8169 CASE_INT_FN (BUILT_IN_CLRSB):
8170 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8171 return NULL_TREE;
8172 if (width > HOST_BITS_PER_WIDE_INT
8173 && (hi & ((unsigned HOST_WIDE_INT) 1
8174 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8176 hi = ~hi & ~(HOST_WIDE_INT_M1U
8177 << (width - HOST_BITS_PER_WIDE_INT - 1));
8178 lo = ~lo;
8180 else if (width <= HOST_BITS_PER_WIDE_INT
8181 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8182 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8183 if (hi != 0)
8184 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8185 else if (lo != 0)
8186 result = width - floor_log2 (lo) - 2;
8187 else
8188 result = width - 1;
8189 break;
/* popcount: clear the lowest set bit until none remain.  */
8191 CASE_INT_FN (BUILT_IN_POPCOUNT):
8192 result = 0;
8193 while (lo)
8194 result++, lo &= lo - 1;
8195 while (hi)
8196 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8197 break;
/* parity: same counting loop as popcount, reduced modulo 2.  */
8199 CASE_INT_FN (BUILT_IN_PARITY):
8200 result = 0;
8201 while (lo)
8202 result++, lo &= lo - 1;
8203 while (hi)
8204 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8205 result &= 1;
8206 break;
8208 default:
8209 gcc_unreachable ();
8212 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8215 return NULL_TREE;
8218 /* Fold function call to builtin_bswap and the short, long and long long
8219 variants. Return NULL_TREE if no simplification can be made. */
8220 static tree
8221 fold_builtin_bswap (tree fndecl, tree arg)
8223 if (! validate_arg (arg, INTEGER_TYPE))
8224 return NULL_TREE;
8226 /* Optimize constant value. */
8227 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
/* LO/HI hold the input constant as two host words; R_LO/R_HI
   accumulate the byte-reversed result.  */
8229 HOST_WIDE_INT hi, width, r_hi = 0;
8230 unsigned HOST_WIDE_INT lo, r_lo = 0;
8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8233 width = TYPE_PRECISION (type);
8234 lo = TREE_INT_CST_LOW (arg);
8235 hi = TREE_INT_CST_HIGH (arg);
8237 switch (DECL_FUNCTION_CODE (fndecl))
8239 case BUILT_IN_BSWAP16:
8240 case BUILT_IN_BSWAP32:
8241 case BUILT_IN_BSWAP64:
8243 int s;
/* Move the byte at bit offset S to the mirrored offset D,
   selecting the source/destination word as needed.  */
8245 for (s = 0; s < width; s += 8)
8247 int d = width - s - 8;
8248 unsigned HOST_WIDE_INT byte;
8250 if (s < HOST_BITS_PER_WIDE_INT)
8251 byte = (lo >> s) & 0xff;
8252 else
8253 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8255 if (d < HOST_BITS_PER_WIDE_INT)
8256 r_lo |= byte << d;
8257 else
8258 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8262 break;
8264 default:
8265 gcc_unreachable ();
/* Narrow results fit in one word; otherwise build a wide constant.  */
8268 if (width < HOST_BITS_PER_WIDE_INT)
8269 return build_int_cst (type, r_lo);
8270 else
8271 return build_int_cst_wide (type, r_lo, r_hi);
8274 return NULL_TREE;
8277 /* A subroutine of fold_builtin to fold the various logarithmic
8278 functions. Return NULL_TREE if no simplification can me made.
8279 FUNC is the corresponding MPFR logarithm function.  FUNC also
8280 identifies WHICH logarithm this is (log, log2 or log10) for the
8281 inverse-function simplifications below.  */
8281 static tree
8282 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8283 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8285 if (validate_arg (arg, REAL_TYPE))
8287 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8288 tree res;
8289 const enum built_in_function fcode = builtin_mathfn_code (arg);
8291 /* Calculate the result when the argument is a constant. */
8292 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8293 return res;
8295 /* Special case, optimize logN(expN(x)) = x. */
8296 if (flag_unsafe_math_optimizations
8297 && ((func == mpfr_log
8298 && (fcode == BUILT_IN_EXP
8299 || fcode == BUILT_IN_EXPF
8300 || fcode == BUILT_IN_EXPL))
8301 || (func == mpfr_log2
8302 && (fcode == BUILT_IN_EXP2
8303 || fcode == BUILT_IN_EXP2F
8304 || fcode == BUILT_IN_EXP2L))
8305 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8306 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8308 /* Optimize logN(func()) for various exponential functions. We
8309 want to determine the value "x" and the power "exponent" in
8310 order to transform logN(x**exponent) into exponent*logN(x). */
8311 if (flag_unsafe_math_optimizations)
8313 tree exponent = 0, x = 0;
8315 switch (fcode)
8317 CASE_FLT_FN (BUILT_IN_EXP):
8318 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8319 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8320 dconst_e ()));
8321 exponent = CALL_EXPR_ARG (arg, 0);
8322 break;
8323 CASE_FLT_FN (BUILT_IN_EXP2):
8324 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8325 x = build_real (type, dconst2);
8326 exponent = CALL_EXPR_ARG (arg, 0);
8327 break;
8328 CASE_FLT_FN (BUILT_IN_EXP10):
8329 CASE_FLT_FN (BUILT_IN_POW10):
8330 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8332 REAL_VALUE_TYPE dconst10;
8333 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8334 x = build_real (type, dconst10);
8336 exponent = CALL_EXPR_ARG (arg, 0);
8337 break;
8338 CASE_FLT_FN (BUILT_IN_SQRT):
8339 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8340 x = CALL_EXPR_ARG (arg, 0);
8341 exponent = build_real (type, dconsthalf);
8342 break;
8343 CASE_FLT_FN (BUILT_IN_CBRT):
8344 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8345 x = CALL_EXPR_ARG (arg, 0);
8346 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8347 dconst_third ()));
8348 break;
8349 CASE_FLT_FN (BUILT_IN_POW):
8350 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8351 x = CALL_EXPR_ARG (arg, 0);
8352 exponent = CALL_EXPR_ARG (arg, 1);
8353 break;
8354 default:
8355 break;
8358 /* Now perform the optimization. */
8359 if (x && exponent)
8361 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8362 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8367 return NULL_TREE;
8370 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8371 NULL_TREE if no simplification can be made. */
8373 static tree
8374 fold_builtin_hypot (location_t loc, tree fndecl,
8375 tree arg0, tree arg1, tree type)
8377 tree res, narg0, narg1;
8379 if (!validate_arg (arg0, REAL_TYPE)
8380 || !validate_arg (arg1, REAL_TYPE))
8381 return NULL_TREE;
8383 /* Calculate the result when the argument is a constant. */
8384 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8385 return res;
8387 /* If either argument to hypot has a negate or abs, strip that off.
8388 E.g. hypot(-x,fabs(y)) -> hypot(x,y).  Valid because hypot only
8389 depends on the magnitudes of its arguments.  */
8389 narg0 = fold_strip_sign_ops (arg0);
8390 narg1 = fold_strip_sign_ops (arg1);
8391 if (narg0 || narg1)
8393 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8394 narg1 ? narg1 : arg1);
8397 /* If either argument is zero, hypot is fabs of the other. */
8398 if (real_zerop (arg0))
8399 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8400 else if (real_zerop (arg1))
8401 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8403 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8404 if (flag_unsafe_math_optimizations
8405 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8407 const REAL_VALUE_TYPE sqrt2_trunc
8408 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ())
8409 return fold_build2_loc (loc, MULT_EXPR, type,
8410 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8411 build_real (type, sqrt2_trunc));
8414 return NULL_TREE;
8418 /* Fold a builtin function call to pow, powf, or powl. Return
8419 NULL_TREE if no simplification can be made. */
8420 static tree
8421 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8423 tree res;
8425 if (!validate_arg (arg0, REAL_TYPE)
8426 || !validate_arg (arg1, REAL_TYPE))
8427 return NULL_TREE;
8429 /* Calculate the result when the argument is a constant. */
8430 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8431 return res;
8433 /* Optimize pow(1.0,y) = 1.0. */
8434 if (real_onep (arg0))
8435 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Simplifications keyed off a constant exponent.  */
8437 if (TREE_CODE (arg1) == REAL_CST
8438 && !TREE_OVERFLOW (arg1))
8440 REAL_VALUE_TYPE cint;
8441 REAL_VALUE_TYPE c;
8442 HOST_WIDE_INT n;
8444 c = TREE_REAL_CST (arg1);
8446 /* Optimize pow(x,0.0) = 1.0. */
8447 if (REAL_VALUES_EQUAL (c, dconst0))
8448 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8449 arg0);
8451 /* Optimize pow(x,1.0) = x. */
8452 if (REAL_VALUES_EQUAL (c, dconst1))
8453 return arg0;
8455 /* Optimize pow(x,-1.0) = 1.0/x. */
8456 if (REAL_VALUES_EQUAL (c, dconstm1))
8457 return fold_build2_loc (loc, RDIV_EXPR, type,
8458 build_real (type, dconst1), arg0);
8460 /* Optimize pow(x,0.5) = sqrt(x). */
8461 if (flag_unsafe_math_optimizations
8462 && REAL_VALUES_EQUAL (c, dconsthalf))
8464 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8466 if (sqrtfn != NULL_TREE)
8467 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8470 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8471 if (flag_unsafe_math_optimizations)
8473 const REAL_VALUE_TYPE dconstroot
8474 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8476 if (REAL_VALUES_EQUAL (c, dconstroot))
8478 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8479 if (cbrtfn != NULL_TREE)
8480 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8484 /* Check for an integer exponent.  Round-trip through an integer and
   compare to detect whether C is exactly integral.  */
8485 n = real_to_integer (&c);
8486 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8487 if (real_identical (&c, &cint))
8489 /* Attempt to evaluate pow at compile-time, unless this should
8490 raise an exception. */
8491 if (TREE_CODE (arg0) == REAL_CST
8492 && !TREE_OVERFLOW (arg0)
8493 && (n > 0
8494 || (!flag_trapping_math && !flag_errno_math)
8495 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8497 REAL_VALUE_TYPE x;
8498 bool inexact;
8500 x = TREE_REAL_CST (arg0);
8501 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8502 if (flag_unsafe_math_optimizations || !inexact)
8503 return build_real (type, x);
8506 /* Strip sign ops from even integer powers.  Sign of the base
   cannot affect the result of an even power.  */
8507 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8509 tree narg0 = fold_strip_sign_ops (arg0);
8510 if (narg0)
8511 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
/* Simplifications keyed off the form of the base expression.  */
8516 if (flag_unsafe_math_optimizations)
8518 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8520 /* Optimize pow(expN(x),y) = expN(x*y). */
8521 if (BUILTIN_EXPONENT_P (fcode))
8523 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8524 tree arg = CALL_EXPR_ARG (arg0, 0);
8525 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8526 return build_call_expr_loc (loc, expfn, 1, arg);
8529 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8530 if (BUILTIN_SQRT_P (fcode))
8532 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8533 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8534 build_real (type, dconsthalf));
8535 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8538 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8539 if (BUILTIN_CBRT_P (fcode))
8541 tree arg = CALL_EXPR_ARG (arg0, 0);
8542 if (tree_expr_nonnegative_p (arg))
8544 const REAL_VALUE_TYPE dconstroot
8545 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8546 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8547 build_real (type, dconstroot));
8548 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8552 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8553 if (fcode == BUILT_IN_POW
8554 || fcode == BUILT_IN_POWF
8555 || fcode == BUILT_IN_POWL)
8557 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8558 if (tree_expr_nonnegative_p (arg00))
8560 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8561 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8562 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8567 return NULL_TREE;
8570 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8571 Return NULL_TREE if no simplification can be made. */
8572 static tree
8573 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8574 tree arg0, tree arg1, tree type)
8576 if (!validate_arg (arg0, REAL_TYPE)
8577 || !validate_arg (arg1, INTEGER_TYPE))
8578 return NULL_TREE;
8580 /* Optimize pow(1.0,y) = 1.0. */
8581 if (real_onep (arg0))
8582 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Simplifications keyed off an exponent known at compile time.  */
8584 if (tree_fits_shwi_p (arg1))
8586 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8588 /* Evaluate powi at compile-time. */
8589 if (TREE_CODE (arg0) == REAL_CST
8590 && !TREE_OVERFLOW (arg0))
8592 REAL_VALUE_TYPE x;
8593 x = TREE_REAL_CST (arg0);
8594 real_powi (&x, TYPE_MODE (type), &x, c);
8595 return build_real (type, x);
8598 /* Optimize pow(x,0) = 1.0. */
8599 if (c == 0)
8600 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8601 arg0);
8603 /* Optimize pow(x,1) = x. */
8604 if (c == 1)
8605 return arg0;
8607 /* Optimize pow(x,-1) = 1.0/x. */
8608 if (c == -1)
8609 return fold_build2_loc (loc, RDIV_EXPR, type,
8610 build_real (type, dconst1), arg0);
8613 return NULL_TREE;
8616 /* A subroutine of fold_builtin to fold the various exponent
8617 functions. Return NULL_TREE if no simplification can be made.
8618 FUNC is the corresponding MPFR exponent function; it also identifies
8619 which exponential this is (exp, exp2 or exp10) for the inverse
8620 simplification below.  */
8620 static tree
8621 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8622 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8624 if (validate_arg (arg, REAL_TYPE))
8626 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8627 tree res;
8629 /* Calculate the result when the argument is a constant. */
8630 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8631 return res;
8633 /* Optimize expN(logN(x)) = x. */
8634 if (flag_unsafe_math_optimizations)
8636 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match each exponential against its own logarithm only.  */
8638 if ((func == mpfr_exp
8639 && (fcode == BUILT_IN_LOG
8640 || fcode == BUILT_IN_LOGF
8641 || fcode == BUILT_IN_LOGL))
8642 || (func == mpfr_exp2
8643 && (fcode == BUILT_IN_LOG2
8644 || fcode == BUILT_IN_LOG2F
8645 || fcode == BUILT_IN_LOG2L))
8646 || (func == mpfr_exp10
8647 && (fcode == BUILT_IN_LOG10
8648 || fcode == BUILT_IN_LOG10F
8649 || fcode == BUILT_IN_LOG10L)))
8650 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8654 return NULL_TREE;
8657 /* Return true if VAR is a VAR_DECL or a component thereof. */
8659 static bool
8660 var_decl_component_p (tree var)
8662 tree inner = var;
8663 while (handled_component_p (inner))
8664 inner = TREE_OPERAND (inner, 0);
8665 return SSA_VAR_P (inner);
8668 /* Fold function call to builtin memset. Return
8669 NULL_TREE if no simplification can be made. */
8671 static tree
8672 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8673 tree type, bool ignore)
8675 tree var, ret, etype;
8676 unsigned HOST_WIDE_INT length, cval;
8678 if (! validate_arg (dest, POINTER_TYPE)
8679 || ! validate_arg (c, INTEGER_TYPE)
8680 || ! validate_arg (len, INTEGER_TYPE))
8681 return NULL_TREE;
/* Only constant lengths can be simplified.  */
8683 if (! tree_fits_uhwi_p (len))
8684 return NULL_TREE;
8686 /* If the LEN parameter is zero, return DEST. */
8687 if (integer_zerop (len))
8688 return omit_one_operand_loc (loc, type, dest, c);
8690 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8691 return NULL_TREE;
/* The remaining transform rewrites the memset as a single scalar
   store, so DEST must be the address of a non-volatile variable.  */
8693 var = dest;
8694 STRIP_NOPS (var);
8695 if (TREE_CODE (var) != ADDR_EXPR)
8696 return NULL_TREE;
8698 var = TREE_OPERAND (var, 0);
8699 if (TREE_THIS_VOLATILE (var))
8700 return NULL_TREE;
8702 etype = TREE_TYPE (var);
8703 if (TREE_CODE (etype) == ARRAY_TYPE)
8704 etype = TREE_TYPE (etype);
8706 if (!INTEGRAL_TYPE_P (etype)
8707 && !POINTER_TYPE_P (etype))
8708 return NULL_TREE;
8710 if (! var_decl_component_p (var))
8711 return NULL_TREE;
/* The store must cover exactly the accessed object and the pointer
   must be sufficiently aligned for a full-width access.  */
8713 length = tree_to_uhwi (len);
8714 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8715 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8716 return NULL_TREE;
8718 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8719 return NULL_TREE;
8721 if (integer_zerop (c))
8722 cval = 0;
8723 else
8725 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8726 return NULL_TREE;
/* Replicate the fill byte across all (up to 8) bytes of CVAL; the
   final "(cval << 31) << 1" doubles the shift without invoking a
   64-bit shift on 32-bit hosts.  */
8728 cval = TREE_INT_CST_LOW (c);
8729 cval &= 0xff;
8730 cval |= cval << 8;
8731 cval |= cval << 16;
8732 cval |= (cval << 31) << 1;
8735 ret = build_int_cst_type (etype, cval);
8736 var = build_fold_indirect_ref_loc (loc,
8737 fold_convert_loc (loc,
8738 build_pointer_type (etype),
8739 dest));
8740 ret = build2 (MODIFY_EXPR, etype, var, ret);
8741 if (ignore)
8742 return ret;
/* memset returns DEST; keep the store as a side effect.  */
8744 return omit_one_operand_loc (loc, type, dest, ret);
8747 /* Fold function call to builtin memset. Return
8748 NULL_TREE if no simplification can be made. */
8750 static tree
8751 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8753 if (! validate_arg (dest, POINTER_TYPE)
8754 || ! validate_arg (size, INTEGER_TYPE))
8755 return NULL_TREE;
8757 if (!ignore)
8758 return NULL_TREE;
8760 /* New argument list transforming bzero(ptr x, int y) to
8761 memset(ptr x, int 0, size_t y). This is done this way
8762 so that if it isn't expanded inline, we fallback to
8763 calling bzero instead of memset. */
8765 return fold_builtin_memset (loc, dest, integer_zero_node,
8766 fold_convert_loc (loc, size_type_node, size),
8767 void_type_node, ignore);
8770 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8771 NULL_TREE if no simplification can be made.
8772 If ENDP is 0, return DEST (like memcpy).
8773 If ENDP is 1, return DEST+LEN (like mempcpy).
8774 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8775 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8776 (memmove). */
8778 static tree
8779 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8780 tree len, tree type, bool ignore, int endp)
8782 tree destvar, srcvar, expr;
8784 if (! validate_arg (dest, POINTER_TYPE)
8785 || ! validate_arg (src, POINTER_TYPE)
8786 || ! validate_arg (len, INTEGER_TYPE))
8787 return NULL_TREE;
8789 /* If the LEN parameter is zero, return DEST. */
8790 if (integer_zerop (len))
8791 return omit_one_operand_loc (loc, type, dest, src);
8793 /* If SRC and DEST are the same (and not volatile), return
8794 DEST{,+LEN,+LEN-1}. */
8795 if (operand_equal_p (src, dest, 0))
8796 expr = len;
8797 else
8799 tree srctype, desttype;
8800 unsigned int src_align, dest_align;
8801 tree off0;
/* memmove (ENDP == 3): try to prove the regions cannot overlap so
   the call can be strength-reduced to memcpy; otherwise give up.  */
8803 if (endp == 3)
8805 src_align = get_pointer_alignment (src);
8806 dest_align = get_pointer_alignment (dest);
8808 /* Both DEST and SRC must be pointer types.
8809 ??? This is what old code did. Is the testing for pointer types
8810 really mandatory?
8812 If either SRC is readonly or length is 1, we can use memcpy. */
8813 if (!dest_align || !src_align)
8814 return NULL_TREE;
8815 if (readonly_data_expr (src)
8816 || (tree_fits_uhwi_p (len)
8817 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8818 >= tree_to_uhwi (len))))
8820 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8821 if (!fn)
8822 return NULL_TREE;
8823 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8826 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8827 if (TREE_CODE (src) == ADDR_EXPR
8828 && TREE_CODE (dest) == ADDR_EXPR)
8830 tree src_base, dest_base, fn;
8831 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8832 HOST_WIDE_INT size = -1;
8833 HOST_WIDE_INT maxsize = -1;
8835 srcvar = TREE_OPERAND (src, 0);
8836 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8837 &size, &maxsize);
8838 destvar = TREE_OPERAND (dest, 0);
8839 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8840 &size, &maxsize);
8841 if (tree_fits_uhwi_p (len))
8842 maxsize = tree_to_uhwi (len);
8843 else
8844 maxsize = -1;
8845 src_offset /= BITS_PER_UNIT;
8846 dest_offset /= BITS_PER_UNIT;
/* Same decl/SSA base: overlap is decided by the byte ranges.  */
8847 if (SSA_VAR_P (src_base)
8848 && SSA_VAR_P (dest_base))
8850 if (operand_equal_p (src_base, dest_base, 0)
8851 && ranges_overlap_p (src_offset, maxsize,
8852 dest_offset, maxsize))
8853 return NULL_TREE;
/* Both bases are MEM_REFs off the same pointer: fold their constant
   offsets into the byte offsets and redo the range check.  */
8855 else if (TREE_CODE (src_base) == MEM_REF
8856 && TREE_CODE (dest_base) == MEM_REF)
8858 double_int off;
8859 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8860 TREE_OPERAND (dest_base, 0), 0))
8861 return NULL_TREE;
8862 off = mem_ref_offset (src_base) +
8863 double_int::from_shwi (src_offset);
8864 if (!off.fits_shwi ())
8865 return NULL_TREE;
8866 src_offset = off.low;
8867 off = mem_ref_offset (dest_base) +
8868 double_int::from_shwi (dest_offset);
8869 if (!off.fits_shwi ())
8870 return NULL_TREE;
8871 dest_offset = off.low;
8872 if (ranges_overlap_p (src_offset, maxsize,
8873 dest_offset, maxsize))
8874 return NULL_TREE;
8876 else
8877 return NULL_TREE;
8879 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8880 if (!fn)
8881 return NULL_TREE;
8882 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8885 /* If the destination and source do not alias optimize into
8886 memcpy as well. */
8887 if ((is_gimple_min_invariant (dest)
8888 || TREE_CODE (dest) == SSA_NAME)
8889 && (is_gimple_min_invariant (src)
8890 || TREE_CODE (src) == SSA_NAME))
8892 ao_ref destr, srcr;
8893 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8894 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8895 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8897 tree fn;
8898 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8899 if (!fn)
8900 return NULL_TREE;
8901 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8905 return NULL_TREE;
/* Non-memmove path: try to rewrite the copy as a single scalar
   assignment *dest = *src when LEN covers exactly one object.  */
8908 if (!tree_fits_shwi_p (len))
8909 return NULL_TREE;
8910 /* FIXME:
8911 This logic lose for arguments like (type *)malloc (sizeof (type)),
8912 since we strip the casts of up to VOID return value from malloc.
8913 Perhaps we ought to inherit type from non-VOID argument here? */
8914 STRIP_NOPS (src);
8915 STRIP_NOPS (dest);
8916 if (!POINTER_TYPE_P (TREE_TYPE (src))
8917 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8918 return NULL_TREE;
8919 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8920 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8922 tree tem = TREE_OPERAND (src, 0);
8923 STRIP_NOPS (tem);
8924 if (tem != TREE_OPERAND (src, 0))
8925 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8927 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8929 tree tem = TREE_OPERAND (dest, 0);
8930 STRIP_NOPS (tem);
8931 if (tem != TREE_OPERAND (dest, 0))
8932 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If the pointed-to type is an array whose size differs from LEN,
   retry with the element type instead.  */
8934 srctype = TREE_TYPE (TREE_TYPE (src));
8935 if (TREE_CODE (srctype) == ARRAY_TYPE
8936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8938 srctype = TREE_TYPE (srctype);
8939 STRIP_NOPS (src);
8940 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8942 desttype = TREE_TYPE (TREE_TYPE (dest));
8943 if (TREE_CODE (desttype) == ARRAY_TYPE
8944 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8946 desttype = TREE_TYPE (desttype);
8947 STRIP_NOPS (dest);
8948 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8950 if (TREE_ADDRESSABLE (srctype)
8951 || TREE_ADDRESSABLE (desttype))
8952 return NULL_TREE;
8954 src_align = get_pointer_alignment (src);
8955 dest_align = get_pointer_alignment (dest);
8956 if (dest_align < TYPE_ALIGN (desttype)
8957 || src_align < TYPE_ALIGN (srctype))
8958 return NULL_TREE;
/* DEST may be used both in the assignment and as the return value.  */
8960 if (!ignore)
8961 dest = builtin_save_expr (dest);
8963 /* Build accesses at offset zero with a ref-all character type. */
8964 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8965 ptr_mode, true), 0);
8967 destvar = dest;
8968 STRIP_NOPS (destvar);
8969 if (TREE_CODE (destvar) == ADDR_EXPR
8970 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8971 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8972 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8973 else
8974 destvar = NULL_TREE;
8976 srcvar = src;
8977 STRIP_NOPS (srcvar);
8978 if (TREE_CODE (srcvar) == ADDR_EXPR
8979 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8980 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8982 if (!destvar
8983 || src_align >= TYPE_ALIGN (desttype))
8984 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8985 srcvar, off0)
8986 else if (!STRICT_ALIGNMENT)
8988 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8989 src_align);
8990 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8992 else
8993 srcvar = NULL_TREE;
8995 else
8996 srcvar = NULL_TREE;
8998 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8999 return NULL_TREE;
/* Exactly one side resolved: access the other side through the
   resolved side's type, building an aligned variant if needed.  */
9001 if (srcvar == NULL_TREE)
9003 STRIP_NOPS (src);
9004 if (src_align >= TYPE_ALIGN (desttype))
9005 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9006 else
9008 if (STRICT_ALIGNMENT)
9009 return NULL_TREE;
9010 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9011 src_align);
9012 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9015 else if (destvar == NULL_TREE)
9017 STRIP_NOPS (dest);
9018 if (dest_align >= TYPE_ALIGN (srctype))
9019 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9020 else
9022 if (STRICT_ALIGNMENT)
9023 return NULL_TREE;
9024 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9025 dest_align);
9026 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9030 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9033 if (ignore)
9034 return expr;
/* Construct the return value according to ENDP (see header comment):
   DEST, DEST+LEN or DEST+LEN-1, keeping EXPR for its side effect.  */
9036 if (endp == 0 || endp == 3)
9037 return omit_one_operand_loc (loc, type, dest, expr);
9039 if (expr == len)
9040 expr = NULL_TREE;
9042 if (endp == 2)
9043 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9044 ssize_int (1));
9046 dest = fold_build_pointer_plus_loc (loc, dest, len);
9047 dest = fold_convert_loc (loc, type, dest);
9048 if (expr)
9049 dest = omit_one_operand_loc (loc, type, dest, expr);
9050 return dest;
9053 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9054 If LEN is not NULL, it represents the length of the string to be
9055 copied. Return NULL_TREE if no simplification can be made. */
9057 tree
9058 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9060 tree fn;
9062 if (!validate_arg (dest, POINTER_TYPE)
9063 || !validate_arg (src, POINTER_TYPE))
9064 return NULL_TREE;
9066 /* If SRC and DEST are the same (and not volatile), return DEST. */
9067 if (operand_equal_p (src, dest, 0))
9068 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* The memcpy transform below may grow code; skip it when optimizing
   for size.  */
9070 if (optimize_function_for_size_p (cfun))
9071 return NULL_TREE;
9073 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9074 if (!fn)
9075 return NULL_TREE;
/* Compute the string length if the caller did not supply it; it must
   be free of side effects to be evaluated here.  */
9077 if (!len)
9079 len = c_strlen (src, 1);
9080 if (! len || TREE_SIDE_EFFECTS (len))
9081 return NULL_TREE;
/* Copy LEN + 1 bytes to include the terminating NUL.  */
9084 len = fold_convert_loc (loc, size_type_node, len);
9085 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9086 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9087 build_call_expr_loc (loc, fn, 3, dest, src, len));
9090 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9091 Return NULL_TREE if no simplification can be made. */
9093 static tree
9094 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9096 tree fn, len, lenp1, call, type;
9098 if (!validate_arg (dest, POINTER_TYPE)
9099 || !validate_arg (src, POINTER_TYPE))
9100 return NULL_TREE;
/* The transform requires a compile-time-constant source length.  */
9102 len = c_strlen (src, 1);
9103 if (!len
9104 || TREE_CODE (len) != INTEGER_CST)
9105 return NULL_TREE;
9107 if (optimize_function_for_size_p (cfun)
9108 /* If length is zero it's small enough. */
9109 && !integer_zerop (len))
9110 return NULL_TREE;
9112 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9113 if (!fn)
9114 return NULL_TREE;
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
9116 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9117 fold_convert_loc (loc, size_type_node, len),
9118 build_int_cst (size_type_node, 1));
9119 /* We use dest twice in building our expression. Save it from
9120 multiple expansions. */
9121 dest = builtin_save_expr (dest);
9122 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
/* stpcpy returns a pointer to the terminating NUL, i.e. DEST + LEN;
   keep the memcpy call for its side effect.  */
9124 type = TREE_TYPE (TREE_TYPE (fndecl));
9125 dest = fold_build_pointer_plus_loc (loc, dest, len);
9126 dest = fold_convert_loc (loc, type, dest);
9127 dest = omit_one_operand_loc (loc, type, dest, call);
9128 return dest;
9131 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9132 If SLEN is not NULL, it represents the length of the source string.
9133 Return NULL_TREE if no simplification can be made. */
9135 tree
9136 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9137 tree src, tree len, tree slen)
9139 tree fn;
9141 if (!validate_arg (dest, POINTER_TYPE)
9142 || !validate_arg (src, POINTER_TYPE)
9143 || !validate_arg (len, INTEGER_TYPE))
9144 return NULL_TREE;
9146 /* If the LEN parameter is zero, return DEST. */
9147 if (integer_zerop (len))
9148 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9150 /* We can't compare slen with len as constants below if len is not a
9151 constant. */
9152 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9153 return NULL_TREE;
9155 if (!slen)
9156 slen = c_strlen (src, 1);
9158 /* Now, we must be passed a constant src ptr parameter. */
9159 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9160 return NULL_TREE;
/* SLEN + 1 accounts for the terminating NUL of SRC.  */
9162 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9164 /* We do not support simplification of this case, though we do
9165 support it when expanding trees into RTL. */
9166 /* FIXME: generate a call to __builtin_memset. */
9167 if (tree_int_cst_lt (slen, len))
9168 return NULL_TREE;
9170 /* OK transform into builtin memcpy.  Safe because the whole source
   string (including NUL) fits within LEN, so no zero padding would
   be needed.  */
9171 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9172 if (!fn)
9173 return NULL_TREE;
9175 len = fold_convert_loc (loc, size_type_node, len);
9176 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9177 build_call_expr_loc (loc, fn, 3, dest, src, len));
9180 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9181 arguments to the call, and TYPE is its return type.
9182 Return NULL_TREE if no simplification can be made. */
9184 static tree
9185 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9187 if (!validate_arg (arg1, POINTER_TYPE)
9188 || !validate_arg (arg2, INTEGER_TYPE)
9189 || !validate_arg (len, INTEGER_TYPE))
9190 return NULL_TREE;
9191 else
9193 const char *p1;
/* Fold only when both the character and the length are constants.  */
9195 if (TREE_CODE (arg2) != INTEGER_CST
9196 || !tree_fits_uhwi_p (len))
9197 return NULL_TREE;
/* ARG1 must be a constant string and LEN must not read past its
   terminating NUL, so the host-side memchr below is valid.  */
9199 p1 = c_getstr (arg1);
9200 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9202 char c;
9203 const char *r;
9204 tree tem;
/* Convert the target character constant to a host char.  */
9206 if (target_char_cast (arg2, &c))
9207 return NULL_TREE;
9209 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9211 if (r == NULL)
9212 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: result is ARG1 advanced by the match offset.  */
9214 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9215 return fold_convert_loc (loc, type, tem);
9217 return NULL_TREE;
 9221 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
 9222    Return NULL_TREE if no simplification can be made.  */
 9224 static tree
 9225 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
 9227   const char *p1, *p2;
 9229   if (!validate_arg (arg1, POINTER_TYPE)
 9230       || !validate_arg (arg2, POINTER_TYPE)
 9231       || !validate_arg (len, INTEGER_TYPE))
 9232     return NULL_TREE;
 9234   /* If the LEN parameter is zero, return zero.  */
 9235   if (integer_zerop (len))
 9236     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
 9237 				  arg1, arg2);
 9239   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
 9240   if (operand_equal_p (arg1, arg2, 0))
 9241     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
 9243   p1 = c_getstr (arg1);
 9244   p2 = c_getstr (arg2);
 9246   /* If all arguments are constant, and the value of len is not greater
 9247      than the lengths of arg1 and arg2, evaluate at compile-time.  */
 9248   if (tree_fits_uhwi_p (len) && p1 && p2
 9249       && compare_tree_int (len, strlen (p1) + 1) <= 0
 9250       && compare_tree_int (len, strlen (p2) + 1) <= 0)
 9252       const int r = memcmp (p1, p2, tree_to_uhwi (len));
       /* Normalize the host memcmp result to -1/0/1 so the folded value
	  does not depend on the host library's return magnitude.  */
 9254       if (r > 0)
 9255 	return integer_one_node;
 9256       else if (r < 0)
 9257 	return integer_minus_one_node;
 9258       else
 9259 	return integer_zero_node;
 9262   /* If len parameter is one, return an expression corresponding to
 9263      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
 9264   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
 9266       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9267       tree cst_uchar_ptr_node
 9268 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9270       tree ind1
 9271 	= fold_convert_loc (loc, integer_type_node,
 9272 			    build1 (INDIRECT_REF, cst_uchar_node,
 9273 				    fold_convert_loc (loc,
 9274 						      cst_uchar_ptr_node,
 9275 						      arg1)));
 9276       tree ind2
 9277 	= fold_convert_loc (loc, integer_type_node,
 9278 			    build1 (INDIRECT_REF, cst_uchar_node,
 9279 				    fold_convert_loc (loc,
 9280 						      cst_uchar_ptr_node,
 9281 						      arg2)));
 9282       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
 9285   return NULL_TREE;
 9288 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
 9289    Return NULL_TREE if no simplification can be made.  */
 9291 static tree
 9292 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
 9294   const char *p1, *p2;
 9296   if (!validate_arg (arg1, POINTER_TYPE)
 9297       || !validate_arg (arg2, POINTER_TYPE))
 9298     return NULL_TREE;
 9300   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
 9301   if (operand_equal_p (arg1, arg2, 0))
 9302     return integer_zero_node;
 9304   p1 = c_getstr (arg1);
 9305   p2 = c_getstr (arg2);
   /* Both strings constant: evaluate with the host strcmp, normalizing
      the result to -1/0/1.  */
 9307   if (p1 && p2)
 9309       const int i = strcmp (p1, p2);
 9310       if (i < 0)
 9311 	return integer_minus_one_node;
 9312       else if (i > 0)
 9313 	return integer_one_node;
 9314       else
 9315 	return integer_zero_node;
 9318   /* If the second arg is "", return *(const unsigned char*)arg1.  */
 9319   if (p2 && *p2 == '\0')
 9321       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9322       tree cst_uchar_ptr_node
 9323 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9325       return fold_convert_loc (loc, integer_type_node,
 9326 			       build1 (INDIRECT_REF, cst_uchar_node,
 9327 				       fold_convert_loc (loc,
 9328 							 cst_uchar_ptr_node,
 9329 							 arg1)));
 9332   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
 9333   if (p1 && *p1 == '\0')
 9335       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9336       tree cst_uchar_ptr_node
 9337 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9339       tree temp
 9340 	= fold_convert_loc (loc, integer_type_node,
 9341 			    build1 (INDIRECT_REF, cst_uchar_node,
 9342 				    fold_convert_loc (loc,
 9343 						      cst_uchar_ptr_node,
 9344 						      arg2)));
 9345       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
 9348   return NULL_TREE;
 9351 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
 9352    Return NULL_TREE if no simplification can be made.  */
 9354 static tree
 9355 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
 9357   const char *p1, *p2;
 9359   if (!validate_arg (arg1, POINTER_TYPE)
 9360       || !validate_arg (arg2, POINTER_TYPE)
 9361       || !validate_arg (len, INTEGER_TYPE))
 9362     return NULL_TREE;
 9364   /* If the LEN parameter is zero, return zero.  */
 9365   if (integer_zerop (len))
 9366     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
 9367 				  arg1, arg2);
 9369   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
 9370   if (operand_equal_p (arg1, arg2, 0))
 9371     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
 9373   p1 = c_getstr (arg1);
 9374   p2 = c_getstr (arg2);
   /* Both strings and the length constant: evaluate with the host
      strncmp, normalizing the result to -1/0/1.  */
 9376   if (tree_fits_uhwi_p (len) && p1 && p2)
 9378       const int i = strncmp (p1, p2, tree_to_uhwi (len));
 9379       if (i > 0)
 9380 	return integer_one_node;
 9381       else if (i < 0)
 9382 	return integer_minus_one_node;
 9383       else
 9384 	return integer_zero_node;
 9387   /* If the second arg is "", and the length is greater than zero,
 9388      return *(const unsigned char*)arg1.  */
 9389   if (p2 && *p2 == '\0'
 9390       && TREE_CODE (len) == INTEGER_CST
 9391       && tree_int_cst_sgn (len) == 1)
 9393       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9394       tree cst_uchar_ptr_node
 9395 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9397       return fold_convert_loc (loc, integer_type_node,
 9398 			       build1 (INDIRECT_REF, cst_uchar_node,
 9399 				       fold_convert_loc (loc,
 9400 							 cst_uchar_ptr_node,
 9401 							 arg1)));
 9404   /* If the first arg is "", and the length is greater than zero,
 9405      return -*(const unsigned char*)arg2.  */
 9406   if (p1 && *p1 == '\0'
 9407       && TREE_CODE (len) == INTEGER_CST
 9408       && tree_int_cst_sgn (len) == 1)
 9410       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9411       tree cst_uchar_ptr_node
 9412 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9414       tree temp = fold_convert_loc (loc, integer_type_node,
 9415 				    build1 (INDIRECT_REF, cst_uchar_node,
 9416 					    fold_convert_loc (loc,
 9417 							      cst_uchar_ptr_node,
 9418 							      arg2)));
 9419       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
 9422   /* If len parameter is one, return an expression corresponding to
 9423      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
 9424   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
 9426       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
 9427       tree cst_uchar_ptr_node
 9428 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
 9430       tree ind1 = fold_convert_loc (loc, integer_type_node,
 9431 				    build1 (INDIRECT_REF, cst_uchar_node,
 9432 					    fold_convert_loc (loc,
 9433 							      cst_uchar_ptr_node,
 9434 							      arg1)));
 9435       tree ind2 = fold_convert_loc (loc, integer_type_node,
 9436 				    build1 (INDIRECT_REF, cst_uchar_node,
 9437 					    fold_convert_loc (loc,
 9438 							      cst_uchar_ptr_node,
 9439 							      arg2)));
 9440       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
 9443   return NULL_TREE;
 9446 /* Fold function call to builtin signbit, signbitf or signbitl with argument
 9447    ARG.  Return NULL_TREE if no simplification can be made.  */
 9449 static tree
 9450 fold_builtin_signbit (location_t loc, tree arg, tree type)
 9452   if (!validate_arg (arg, REAL_TYPE))
 9453     return NULL_TREE;
 9455   /* If ARG is a compile-time constant, determine the result.  */
 9456   if (TREE_CODE (arg) == REAL_CST
 9457       && !TREE_OVERFLOW (arg))
 9459       REAL_VALUE_TYPE c;
 9461       c = TREE_REAL_CST (arg);
       /* REAL_VALUE_NEGATIVE tests the sign bit, so -0.0 and negative
	  NaNs count as negative here.  */
 9462       return (REAL_VALUE_NEGATIVE (c)
 9463 	      ? build_one_cst (type)
 9464 	      : build_zero_cst (type));
 9467   /* If ARG is non-negative, the result is always zero.  */
 9468   if (tree_expr_nonnegative_p (arg))
 9469     return omit_one_operand_loc (loc, type, integer_zero_node, arg);
 9471   /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
 9472   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
 9473     return fold_convert (type,
 9474 			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
 9475 			build_real (TREE_TYPE (arg), dconst0)));
 9477   return NULL_TREE;
 9480 /* Fold function call to builtin copysign, copysignf or copysignl with
 9481    arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
 9482    be made.  */
 9484 static tree
 9485 fold_builtin_copysign (location_t loc, tree fndecl,
 9486 		       tree arg1, tree arg2, tree type)
 9488   tree tem;
 9490   if (!validate_arg (arg1, REAL_TYPE)
 9491       || !validate_arg (arg2, REAL_TYPE))
 9492     return NULL_TREE;
 9494   /* copysign(X,X) is X.  */
 9495   if (operand_equal_p (arg1, arg2, 0))
 9496     return fold_convert_loc (loc, type, arg1);
 9498   /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
 9499   if (TREE_CODE (arg1) == REAL_CST
 9500       && TREE_CODE (arg2) == REAL_CST
 9501       && !TREE_OVERFLOW (arg1)
 9502       && !TREE_OVERFLOW (arg2))
 9504       REAL_VALUE_TYPE c1, c2;
 9506       c1 = TREE_REAL_CST (arg1);
 9507       c2 = TREE_REAL_CST (arg2);
 9508       /* c1.sign := c2.sign.  The magnitude of the result comes from C1.  */
 9509       real_copysign (&c1, &c2);
 9510       return build_real (type, c1);
 9513   /* copysign(X, Y) is fabs(X) when Y is always non-negative.
 9514      Remember to evaluate Y for side-effects.  */
 9515   if (tree_expr_nonnegative_p (arg2))
 9516     return omit_one_operand_loc (loc, type,
 9517 			     fold_build1_loc (loc, ABS_EXPR, type, arg1),
 9518 			     arg2);
   /* Operations that only change the sign of ARG1 (e.g. negation) are
      irrelevant, since copysign overrides the sign anyway; strip them
      and re-emit the call on the simplified argument.  */
 9520   /* Strip sign changing operations for the first argument.  */
 9521   tem = fold_strip_sign_ops (arg1);
 9522   if (tem)
 9523     return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
 9525   return NULL_TREE;
 9528 /* Fold a call to builtin isascii with argument ARG.
      Return NULL_TREE if ARG is not an integer.  */
 9530 static tree
 9531 fold_builtin_isascii (location_t loc, tree arg)
 9533   if (!validate_arg (arg, INTEGER_TYPE))
 9534     return NULL_TREE;
 9535   else
 9537       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  The mask is
	 complemented in unsigned HOST_WIDE_INT so every bit above the
	 low seven is tested.  */
 9538       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
 9539 			 build_int_cst (integer_type_node,
 9540 					~ (unsigned HOST_WIDE_INT) 0x7f));
 9541       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
 9542 			      arg, integer_zero_node);
 9546 /* Fold a call to builtin toascii with argument ARG.
      Return NULL_TREE if ARG is not an integer.  */
 9548 static tree
 9549 fold_builtin_toascii (location_t loc, tree arg)
 9551   if (!validate_arg (arg, INTEGER_TYPE))
 9552     return NULL_TREE;
 9554   /* Transform toascii(c) -> (c & 0x7f).  */
 9555   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
 9556 			  build_int_cst (integer_type_node, 0x7f));
 9559 /* Fold a call to builtin isdigit with argument ARG.
      Return NULL_TREE if ARG is not an integer or the target character
      for '0' cannot be determined.  */
 9561 static tree
 9562 fold_builtin_isdigit (location_t loc, tree arg)
 9564   if (!validate_arg (arg, INTEGER_TYPE))
 9565     return NULL_TREE;
 9566   else
 9568       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
 9569       /* According to the C standard, isdigit is unaffected by locale.
 9570 	 However, it definitely is affected by the target character set.  */
 9571       unsigned HOST_WIDE_INT target_digit0
 9572 	= lang_hooks.to_target_charset ('0');
       /* A zero result means the front end could not map '0' into the
	  target character set; give up rather than fold wrongly.  */
 9574       if (target_digit0 == 0)
 9575 	return NULL_TREE;
 9577       arg = fold_convert_loc (loc, unsigned_type_node, arg);
 9578       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
 9579 			 build_int_cst (unsigned_type_node, target_digit0));
 9580       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
 9581 			      build_int_cst (unsigned_type_node, 9));
 9585 /* Fold a call to fabs, fabsf or fabsl with argument ARG.
      Constant arguments are folded outright; otherwise emit ABS_EXPR.  */
 9587 static tree
 9588 fold_builtin_fabs (location_t loc, tree arg, tree type)
 9590   if (!validate_arg (arg, REAL_TYPE))
 9591     return NULL_TREE;
 9593   arg = fold_convert_loc (loc, type, arg);
 9594   if (TREE_CODE (arg) == REAL_CST)
 9595     return fold_abs_const (arg, type);
 9596   return fold_build1_loc (loc, ABS_EXPR, type, arg);
 9599 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.
      Constant arguments are folded outright; otherwise emit ABS_EXPR.  */
 9601 static tree
 9602 fold_builtin_abs (location_t loc, tree arg, tree type)
 9604   if (!validate_arg (arg, INTEGER_TYPE))
 9605     return NULL_TREE;
 9607   arg = fold_convert_loc (loc, type, arg);
 9608   if (TREE_CODE (arg) == INTEGER_CST)
 9609     return fold_abs_const (arg, type);
 9610   return fold_build1_loc (loc, ABS_EXPR, type, arg);
 9613 /* Fold a fma operation with arguments ARG[012].
      Only folds when all three operands are REAL_CST, by evaluating the
      fused multiply-add via MPFR; otherwise return NULL_TREE.  */
 9615 tree
 9616 fold_fma (location_t loc ATTRIBUTE_UNUSED,
 9617 	  tree type, tree arg0, tree arg1, tree arg2)
 9619   if (TREE_CODE (arg0) == REAL_CST
 9620       && TREE_CODE (arg1) == REAL_CST
 9621       && TREE_CODE (arg2) == REAL_CST)
 9622     return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
 9624   return NULL_TREE;
 9627 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012].
      Constant arguments fold to a constant; otherwise lower to FMA_EXPR
      only when the target has a direct fma pattern.  */
 9629 static tree
 9630 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
 9632   if (validate_arg (arg0, REAL_TYPE)
 9633       && validate_arg (arg1, REAL_TYPE)
 9634       && validate_arg (arg2, REAL_TYPE))
 9636       tree tem = fold_fma (loc, type, arg0, arg1, arg2);
 9637       if (tem)
 9638 	return tem;
 9640       /* ??? Only expand to FMA_EXPR if it's directly supported.  */
 9641       if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
 9642         return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
 9644   return NULL_TREE;
 9647 /* Fold a call to builtin fmin or fmax.  MAX selects between the two
      builtins: true for fmax, false for fmin.  */
 9649 static tree
 9650 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
 9651 			tree type, bool max)
 9653   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
 9655       /* Calculate the result when the argument is a constant.  */
 9656       tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
 9658       if (res)
 9659 	return res;
 9661       /* If either argument is NaN, return the other one.  Avoid the
 9662 	 transformation if we get (and honor) a signalling NaN.  Using
 9663 	 omit_one_operand() ensures we create a non-lvalue.  */
 9664       if (TREE_CODE (arg0) == REAL_CST
 9665 	  && real_isnan (&TREE_REAL_CST (arg0))
 9666 	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
 9667 	      || ! TREE_REAL_CST (arg0).signalling))
 9668 	return omit_one_operand_loc (loc, type, arg1, arg0);
 9669       if (TREE_CODE (arg1) == REAL_CST
 9670 	  && real_isnan (&TREE_REAL_CST (arg1))
 9671 	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
 9672 	      || ! TREE_REAL_CST (arg1).signalling))
 9673 	return omit_one_operand_loc (loc, type, arg0, arg1);
       /* OEP_PURE_SAME allows matching pure (but not side-effecting)
	  subexpressions when comparing the two arguments.  */
 9675       /* Transform fmin/fmax(x,x) -> x.  */
 9676       if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
 9677         return omit_one_operand_loc (loc, type, arg0, arg1);
 9679       /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
 9680 	 functions to return the numeric arg if the other one is NaN.
 9681 	 These tree codes don't honor that, so only transform if
 9682 	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
 9683 	 handled, so we don't have to worry about it either.  */
 9684       if (flag_finite_math_only)
 9685 	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
 9686 			    fold_convert_loc (loc, type, arg0),
 9687 			    fold_convert_loc (loc, type, arg1));
 9689   return NULL_TREE;
 9692 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
 9694 static tree
 9695 fold_builtin_carg (location_t loc, tree arg, tree type)
 9697   if (validate_arg (arg, COMPLEX_TYPE)
 9698       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
 9700       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
 9702       if (atan2_fn)
 9704 	  /* Save ARG so its real and imaginary parts below do not
	     evaluate any side effects twice.  */
	  tree new_arg = builtin_save_expr (arg);
 9705 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
 9706 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
 9707 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
 9711   return NULL_TREE;
 9714 /* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
      REAL_TYPE for logb, integer for ilogb.  Only constant arguments
      are folded.  */
 9716 static tree
 9717 fold_builtin_logb (location_t loc, tree arg, tree rettype)
 9719   if (! validate_arg (arg, REAL_TYPE))
 9720     return NULL_TREE;
 9722   STRIP_NOPS (arg);
 9724   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
 9726       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
 9728       switch (value->cl)
 9730 	case rvc_nan:
 9731 	case rvc_inf:
 9732 	  /* If arg is Inf or NaN and we're logb, return it.  */
 9733 	  if (TREE_CODE (rettype) == REAL_TYPE)
 9735 	      /* For logb(-Inf) we have to return +Inf.  */
 9736 	      if (real_isinf (value) && real_isneg (value))
 9738 		  REAL_VALUE_TYPE tem;
 9739 		  real_inf (&tem);
 9740 		  return build_real (rettype, tem);
 9742 	      return fold_convert_loc (loc, rettype, arg);
 9744 	  /* Fall through... */
 9745 	case rvc_zero:
 9746 	  /* Zero may set errno and/or raise an exception for logb, also
 9747 	     for ilogb we don't know FP_ILOGB0.  */
 9748 	  return NULL_TREE;
 9749 	case rvc_normal:
 9750 	  /* For normal numbers, proceed iff radix == 2.  In GCC,
 9751 	     normalized significands are in the range [0.5, 1.0).  We
 9752 	     want the exponent as if they were [1.0, 2.0) so get the
 9753 	     exponent and subtract 1.  */
 9754 	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
 9755 	    return fold_convert_loc (loc, rettype,
 9756 				     build_int_cst (integer_type_node,
 9757 						    REAL_EXP (value)-1));
 9758 	  break;
 9762   return NULL_TREE;
 9765 /* Fold a call to builtin significand, if radix == 2.  Only constant
      arguments are folded; RETTYPE is the call's return type.  */
 9767 static tree
 9768 fold_builtin_significand (location_t loc, tree arg, tree rettype)
 9770   if (! validate_arg (arg, REAL_TYPE))
 9771     return NULL_TREE;
 9773   STRIP_NOPS (arg);
 9775   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
 9777       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
 9779       switch (value->cl)
 9781 	case rvc_zero:
 9782 	case rvc_nan:
 9783 	case rvc_inf:
 9784 	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
 9785 	  return fold_convert_loc (loc, rettype, arg);
 9786 	case rvc_normal:
 9787 	  /* For normal numbers, proceed iff radix == 2.  */
 9788 	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
 9790 	      REAL_VALUE_TYPE result = *value;
 9791 	      /* In GCC, normalized significands are in the range [0.5,
 9792 		 1.0).  We want them to be [1.0, 2.0) so set the
 9793 		 exponent to 1.  */
 9794 	      SET_REAL_EXP (&result, 1);
 9795 	      return build_real (rettype, result);
 9797 	  break;
 9801   return NULL_TREE;
 9804 /* Fold a call to builtin frexp, we can assume the base is 2.
      ARG0 is the value, ARG1 the int* exponent out-parameter; only a
      constant ARG0 is folded, into (*arg1 = exp, frac).  */
 9806 static tree
 9807 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
 9809   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
 9810     return NULL_TREE;
 9812   STRIP_NOPS (arg0);
 9814   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
 9815     return NULL_TREE;
 9817   arg1 = build_fold_indirect_ref_loc (loc, arg1);
 9819   /* Proceed if a valid pointer type was passed in.  */
 9820   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
 9822       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
 9823       tree frac, exp;
 9825       switch (value->cl)
 9827 	case rvc_zero:
 9828 	  /* For +-0, return (*exp = 0, +-0).  */
 9829 	  exp = integer_zero_node;
 9830 	  frac = arg0;
 9831 	  break;
 9832 	case rvc_nan:
 9833 	case rvc_inf:
 9834 	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
 9835 	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
 9836 	case rvc_normal:
 9838 	    /* Since the frexp function always expects base 2, and in
 9839 	       GCC normalized significands are already in the range
 9840 	       [0.5, 1.0), we have exactly what frexp wants.  */
 9841 	    REAL_VALUE_TYPE frac_rvt = *value;
 9842 	    SET_REAL_EXP (&frac_rvt, 0);
 9843 	    frac = build_real (rettype, frac_rvt);
 9844 	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
 9846 	  break;
 9847 	default:
 9848 	  gcc_unreachable ();
 9851       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
 9852       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
       /* The store to *ARG1 must not be optimized away.  */
 9853       TREE_SIDE_EFFECTS (arg1) = 1;
 9854       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
 9857   return NULL_TREE;
 9860 /* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
 9861    then we can assume the base is two.  If it's false, then we have to
 9862    check the mode of the TYPE parameter in certain cases.  */
 9864 static tree
 9865 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
 9866 			    tree type, bool ldexp)
 9868   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
 9870       STRIP_NOPS (arg0);
 9871       STRIP_NOPS (arg1);
 9873       /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
 9874       if (real_zerop (arg0) || integer_zerop (arg1)
 9875 	  || (TREE_CODE (arg0) == REAL_CST
 9876 	      && !real_isfinite (&TREE_REAL_CST (arg0))))
 9877 	return omit_one_operand_loc (loc, type, arg0, arg1);
 9879       /* If both arguments are constant, then try to evaluate it.  */
 9880       if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
 9881 	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
 9882 	  && tree_fits_shwi_p (arg1))
 9884 	  /* Bound the maximum adjustment to twice the range of the
 9885 	     mode's valid exponents.  Use abs to ensure the range is
 9886 	     positive as a sanity check.  */
 9887 	  const long max_exp_adj = 2 *
 9888 	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
 9889 		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
 9891 	  /* Get the user-requested adjustment.  */
 9892 	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
 9894 	  /* The requested adjustment must be inside this range.  This
 9895 	     is a preliminary cap to avoid things like overflow, we
 9896 	     may still fail to compute the result for other reasons.  */
 9897 	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
 9899 	      REAL_VALUE_TYPE initial_result;
 9901 	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
 9903 	      /* Ensure we didn't overflow.  */
 9904 	      if (! real_isinf (&initial_result))
 9906 		  const REAL_VALUE_TYPE trunc_result
 9907 		    = real_value_truncate (TYPE_MODE (type), initial_result);
 9909 		  /* Only proceed if the target mode can hold the
 9910 		     resulting value.  */
 9911 		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
 9912 		    return build_real (type, trunc_result);
 9918   return NULL_TREE;
 9921 /* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
      out-parameter receiving the integral part; only a constant ARG0 is
      folded, into (*arg1 = trunc, frac).  */
 9923 static tree
 9924 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
 9926   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
 9927     return NULL_TREE;
 9929   STRIP_NOPS (arg0);
 9931   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
 9932     return NULL_TREE;
 9934   arg1 = build_fold_indirect_ref_loc (loc, arg1);
 9936   /* Proceed if a valid pointer type was passed in.  */
 9937   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
 9939       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
 9940       REAL_VALUE_TYPE trunc, frac;
 9942       switch (value->cl)
 9944 	case rvc_nan:
 9945 	case rvc_zero:
 9946 	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
 9947 	  trunc = frac = *value;
 9948 	  break;
 9949 	case rvc_inf:
 9950 	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
 9951 	  frac = dconst0;
 9952 	  frac.sign = value->sign;
 9953 	  trunc = *value;
 9954 	  break;
 9955 	case rvc_normal:
 9956 	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
 9957 	  real_trunc (&trunc, VOIDmode, value);
 9958 	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
 9959 	  /* If the original number was negative and already
 9960 	     integral, then the fractional part is -0.0.  */
 9961 	  if (value->sign && frac.cl == rvc_zero)
 9962 	    frac.sign = value->sign;
 9963 	  break;
 9966       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
 9967       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
 9968 			  build_real (rettype, trunc));
       /* The store to *ARG1 must not be optimized away.  */
 9969       TREE_SIDE_EFFECTS (arg1) = 1;
 9970       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
 9971 			  build_real (rettype, frac));
 9974   return NULL_TREE;
 9977 /* Given a location LOC, an interclass builtin function decl FNDECL
 9978    and its single argument ARG, return an folded expression computing
 9979    the same, or NULL_TREE if we either couldn't or didn't want to fold
 9980    (the latter happen if there's an RTL instruction available).  */
 9982 static tree
 9983 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
 9985   enum machine_mode mode;
 9987   if (!validate_arg (arg, REAL_TYPE))
 9988     return NULL_TREE;
   /* If the target can expand this classification directly in RTL,
      prefer that over the generic comparisons built below.  */
 9990   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
 9991     return NULL_TREE;
 9993   mode = TYPE_MODE (TREE_TYPE (arg));
 9995   /* If there is no optab, try generic code.  */
 9996   switch (DECL_FUNCTION_CODE (fndecl))
 9998       tree result;
 10000     CASE_FLT_FN (BUILT_IN_ISINF):
 10002 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
 10003 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
 10004 	tree const type = TREE_TYPE (arg);
 10005 	REAL_VALUE_TYPE r;
 10006 	char buf[128];
 10008 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
 10009 	real_from_string (&r, buf);
 10010 	result = build_call_expr (isgr_fn, 2,
 10011 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
 10012 				  build_real (type, r));
 10013 	return result;
 10015     CASE_FLT_FN (BUILT_IN_FINITE):
 10016     case BUILT_IN_ISFINITE:
 10018 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
 10019 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
 10020 	tree const type = TREE_TYPE (arg);
 10021 	REAL_VALUE_TYPE r;
 10022 	char buf[128];
 10024 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
 10025 	real_from_string (&r, buf);
 10026 	result = build_call_expr (isle_fn, 2,
 10027 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
 10028 				  build_real (type, r));
 10029 	/*result = fold_build2_loc (loc, UNGT_EXPR,
 10030 				  TREE_TYPE (TREE_TYPE (fndecl)),
 10031 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
 10032 				  build_real (type, r));
 10033 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
 10034 				  TREE_TYPE (TREE_TYPE (fndecl)),
 10035 				  result);*/
 10036 	return result;
 10038     case BUILT_IN_ISNORMAL:
 10040 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
 10041 	   islessequal(fabs(x),DBL_MAX).  */
 10042 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
 10043 	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
 10044 	tree const type = TREE_TYPE (arg);
 10045 	REAL_VALUE_TYPE rmax, rmin;
 10046 	char buf[128];
 10048 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
 10049 	real_from_string (&rmax, buf);
	/* Smallest normalized value of the mode: 0x1p(emin-1).  */
 10050 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
 10051 	real_from_string (&rmin, buf);
	/* Save fabs(ARG) so the two comparisons share one evaluation.  */
 10052 	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
 10053 	result = build_call_expr (isle_fn, 2, arg,
 10054 				  build_real (type, rmax));
 10055 	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
 10056 			      build_call_expr (isge_fn, 2, arg,
 10057 					       build_real (type, rmin)));
 10058 	return result;
 10060     default:
 10061       break;
 10064   return NULL_TREE;
 10067 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
 10068    ARG is the argument for the call.  BUILTIN_INDEX selects which
    classification builtin is being folded.  */
 10070 static tree
 10071 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
 10073   tree type = TREE_TYPE (TREE_TYPE (fndecl));
 10074   REAL_VALUE_TYPE r;
 10076   if (!validate_arg (arg, REAL_TYPE))
 10077     return NULL_TREE;
 10079   switch (builtin_index)
 10081     case BUILT_IN_ISINF:
       /* Without infinities in the mode the answer is trivially 0.  */
 10082       if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
 10083 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
 10085       if (TREE_CODE (arg) == REAL_CST)
 10087 	  r = TREE_REAL_CST (arg);
 10088 	  if (real_isinf (&r))
 10089 	    return real_compare (GT_EXPR, &r, &dconst0)
 10090 		   ? integer_one_node : integer_minus_one_node;
 10091 	  else
 10092 	    return integer_zero_node;
 10095       return NULL_TREE;
 10097     case BUILT_IN_ISINF_SIGN:
 10099 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
 10100 	/* In a boolean context, GCC will fold the inner COND_EXPR to
 10101 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
 10102 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
 10103 	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
 10104 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
 10105 	tree tmp = NULL_TREE;
 10107 	arg = builtin_save_expr (arg);
 10109 	if (signbit_fn && isinf_fn)
 10111 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
 10112 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
 10114 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
 10115 					signbit_call, integer_zero_node);
 10116 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
 10117 				      isinf_call, integer_zero_node);
 10119 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
 10120 			       integer_minus_one_node, integer_one_node);
 10121 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
 10122 			       isinf_call, tmp,
 10123 			       integer_zero_node);
 10126 	return tmp;
 10129     case BUILT_IN_ISFINITE:
       /* With neither NaNs nor infinities every value is finite.  */
 10130       if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
 10131 	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
 10132 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
 10134       if (TREE_CODE (arg) == REAL_CST)
 10136 	  r = TREE_REAL_CST (arg);
 10137 	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
 10140       return NULL_TREE;
 10142     case BUILT_IN_ISNAN:
 10143       if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
 10144 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
 10146       if (TREE_CODE (arg) == REAL_CST)
 10148 	  r = TREE_REAL_CST (arg);
 10149 	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
 10152       /* isnan(x) -> x unordered with itself; save ARG to evaluate it
	 only once.  */
       arg = builtin_save_expr (arg);
 10153       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
 10155     default:
 10156       gcc_unreachable ();
 10160 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
 10161    This builtin will generate code to return the appropriate floating
 10162    point classification depending on the value of the floating point
 10163    number passed in.  The possible return values must be supplied as
 10164    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
 10165    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
 10166    one floating point argument which is "type generic".  */
 10168 static tree
 10169 fold_builtin_fpclassify (location_t loc, tree exp)
 10171   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
 10172     arg, type, res, tmp;
 10173   enum machine_mode mode;
 10174   REAL_VALUE_TYPE r;
 10175   char buf[128];
 10177   /* Verify the required arguments in the original call.  */
 10178   if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
 10179 			 INTEGER_TYPE, INTEGER_TYPE,
 10180 			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
 10181     return NULL_TREE;
 10183   fp_nan = CALL_EXPR_ARG (exp, 0);
 10184   fp_infinite = CALL_EXPR_ARG (exp, 1);
 10185   fp_normal = CALL_EXPR_ARG (exp, 2);
 10186   fp_subnormal = CALL_EXPR_ARG (exp, 3);
 10187   fp_zero = CALL_EXPR_ARG (exp, 4);
 10188   arg = CALL_EXPR_ARG (exp, 5);
 10189   type = TREE_TYPE (arg);
 10190   mode = TYPE_MODE (type);
   /* All comparisons below are against fabs(ARG), saved so it is
      evaluated only once.  */
 10191   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
 10193   /* fpclassify(x) ->
 10194        isnan(x) ? FP_NAN :
 10195          (fabs(x) == Inf ? FP_INFINITE :
 10196 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
 10197 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
   /* The conditional chain is built innermost-first: start with the
      zero/subnormal decision, then wrap normal, infinite and NaN.  */
 10199   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
 10200 		     build_real (type, dconst0));
 10201   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
 10202 		     tmp, fp_zero, fp_subnormal);
   /* Smallest normalized value of the mode: 0x1p(emin-1).  */
 10204   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
 10205   real_from_string (&r, buf);
 10206   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
 10207 		     arg, build_real (type, r));
 10208   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
 10210   if (HONOR_INFINITIES (mode))
 10212       real_inf (&r);
 10213       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
 10214 			 build_real (type, r));
 10215       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
 10216 			 fp_infinite, res);
 10219   if (HONOR_NANS (mode))
 10221       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
 10222       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
 10225   return res;
 10228 /* Fold a call to an unordered comparison function such as
 10229    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
 10230    being called and ARG0 and ARG1 are the arguments for the call.
 10231    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
 10232    the opposite of the desired result.  UNORDERED_CODE is used
 10233    for modes that can hold NaNs and ORDERED_CODE is used for
 10234    the rest.  */
 10236 static tree
 10237 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
 10238 			    enum tree_code unordered_code,
 10239 			    enum tree_code ordered_code)
 10241   tree type = TREE_TYPE (TREE_TYPE (fndecl));
 10242   enum tree_code code;
 10243   tree type0, type1;
 10244   enum tree_code code0, code1;
 10245   tree cmp_type = NULL_TREE;
 10247   type0 = TREE_TYPE (arg0);
 10248   type1 = TREE_TYPE (arg1);
 10250   code0 = TREE_CODE (type0);
 10251   code1 = TREE_CODE (type1);
   /* Pick the common real type both operands are converted to.
      NOTE(review): if neither operand is a REAL_TYPE, cmp_type stays
      NULL_TREE here — presumably callers guarantee at least one real
      operand; verify against the call sites.  */
 10253   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
 10254     /* Choose the wider of two real types.  */
 10255     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
 10256       ? type0 : type1;
 10257   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
 10258     cmp_type = type0;
 10259   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
 10260     cmp_type = type1;
 10262   arg0 = fold_convert_loc (loc, cmp_type, arg0);
 10263   arg1 = fold_convert_loc (loc, cmp_type, arg1);
 10265   if (unordered_code == UNORDERED_EXPR)
       /* __builtin_isunordered itself: trivially 0 when the mode has
	  no NaNs.  */
 10267       if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
 10268 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
 10269       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
   /* The codes passed in express the OPPOSITE of the desired result,
      so build that comparison and negate it.  */
 10272   code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
 10273 						   : ordered_code;
 10274   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
 10275 		      fold_build2_loc (loc, code, type, arg0, arg1));
10278 /* Fold a call to built-in function FNDECL with 0 arguments.
10279 IGNORE is true if the result of the function call is ignored. This
10280 function returns NULL_TREE if no simplification was possible. */
10282 static tree
10283 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10285 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10286 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10287 switch (fcode)
10289 CASE_FLT_FN (BUILT_IN_INF):
10290 case BUILT_IN_INFD32:
10291 case BUILT_IN_INFD64:
10292 case BUILT_IN_INFD128:
10293 return fold_builtin_inf (loc, type, true);
10295 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10296 return fold_builtin_inf (loc, type, false);
10298 case BUILT_IN_CLASSIFY_TYPE:
10299 return fold_builtin_classify_type (NULL_TREE);
10301 case BUILT_IN_UNREACHABLE:
10302 if (flag_sanitize & SANITIZE_UNREACHABLE
10303 && (current_function_decl == NULL
10304 || !lookup_attribute ("no_sanitize_undefined",
10305 DECL_ATTRIBUTES (current_function_decl))))
10306 return ubsan_instrument_unreachable (loc);
10307 break;
10309 default:
10310 break;
10312 return NULL_TREE;
10315 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10316 IGNORE is true if the result of the function call is ignored. This
10317 function returns NULL_TREE if no simplification was possible. */
10319 static tree
10320 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10322 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10323 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10324 switch (fcode)
10326 case BUILT_IN_CONSTANT_P:
10328 tree val = fold_builtin_constant_p (arg0);
10330 /* Gimplification will pull the CALL_EXPR for the builtin out of
10331 an if condition. When not optimizing, we'll not CSE it back.
10332 To avoid link error types of regressions, return false now. */
10333 if (!val && !optimize)
10334 val = integer_zero_node;
10336 return val;
10339 case BUILT_IN_CLASSIFY_TYPE:
10340 return fold_builtin_classify_type (arg0);
10342 case BUILT_IN_STRLEN:
10343 return fold_builtin_strlen (loc, type, arg0);
10345 CASE_FLT_FN (BUILT_IN_FABS):
10346 case BUILT_IN_FABSD32:
10347 case BUILT_IN_FABSD64:
10348 case BUILT_IN_FABSD128:
10349 return fold_builtin_fabs (loc, arg0, type);
10351 case BUILT_IN_ABS:
10352 case BUILT_IN_LABS:
10353 case BUILT_IN_LLABS:
10354 case BUILT_IN_IMAXABS:
10355 return fold_builtin_abs (loc, arg0, type);
10357 CASE_FLT_FN (BUILT_IN_CONJ):
10358 if (validate_arg (arg0, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10360 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10361 break;
10363 CASE_FLT_FN (BUILT_IN_CREAL):
10364 if (validate_arg (arg0, COMPLEX_TYPE)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10366 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10367 break;
10369 CASE_FLT_FN (BUILT_IN_CIMAG):
10370 if (validate_arg (arg0, COMPLEX_TYPE)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10372 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10373 break;
10375 CASE_FLT_FN (BUILT_IN_CCOS):
10376 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10378 CASE_FLT_FN (BUILT_IN_CCOSH):
10379 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10381 CASE_FLT_FN (BUILT_IN_CPROJ):
10382 return fold_builtin_cproj (loc, arg0, type);
10384 CASE_FLT_FN (BUILT_IN_CSIN):
10385 if (validate_arg (arg0, COMPLEX_TYPE)
10386 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10387 return do_mpc_arg1 (arg0, type, mpc_sin);
10388 break;
10390 CASE_FLT_FN (BUILT_IN_CSINH):
10391 if (validate_arg (arg0, COMPLEX_TYPE)
10392 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10393 return do_mpc_arg1 (arg0, type, mpc_sinh);
10394 break;
10396 CASE_FLT_FN (BUILT_IN_CTAN):
10397 if (validate_arg (arg0, COMPLEX_TYPE)
10398 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10399 return do_mpc_arg1 (arg0, type, mpc_tan);
10400 break;
10402 CASE_FLT_FN (BUILT_IN_CTANH):
10403 if (validate_arg (arg0, COMPLEX_TYPE)
10404 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10405 return do_mpc_arg1 (arg0, type, mpc_tanh);
10406 break;
10408 CASE_FLT_FN (BUILT_IN_CLOG):
10409 if (validate_arg (arg0, COMPLEX_TYPE)
10410 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10411 return do_mpc_arg1 (arg0, type, mpc_log);
10412 break;
10414 CASE_FLT_FN (BUILT_IN_CSQRT):
10415 if (validate_arg (arg0, COMPLEX_TYPE)
10416 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10417 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10418 break;
10420 CASE_FLT_FN (BUILT_IN_CASIN):
10421 if (validate_arg (arg0, COMPLEX_TYPE)
10422 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10423 return do_mpc_arg1 (arg0, type, mpc_asin);
10424 break;
10426 CASE_FLT_FN (BUILT_IN_CACOS):
10427 if (validate_arg (arg0, COMPLEX_TYPE)
10428 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10429 return do_mpc_arg1 (arg0, type, mpc_acos);
10430 break;
10432 CASE_FLT_FN (BUILT_IN_CATAN):
10433 if (validate_arg (arg0, COMPLEX_TYPE)
10434 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10435 return do_mpc_arg1 (arg0, type, mpc_atan);
10436 break;
10438 CASE_FLT_FN (BUILT_IN_CASINH):
10439 if (validate_arg (arg0, COMPLEX_TYPE)
10440 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10441 return do_mpc_arg1 (arg0, type, mpc_asinh);
10442 break;
10444 CASE_FLT_FN (BUILT_IN_CACOSH):
10445 if (validate_arg (arg0, COMPLEX_TYPE)
10446 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10447 return do_mpc_arg1 (arg0, type, mpc_acosh);
10448 break;
10450 CASE_FLT_FN (BUILT_IN_CATANH):
10451 if (validate_arg (arg0, COMPLEX_TYPE)
10452 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10453 return do_mpc_arg1 (arg0, type, mpc_atanh);
10454 break;
10456 CASE_FLT_FN (BUILT_IN_CABS):
10457 return fold_builtin_cabs (loc, arg0, type, fndecl);
10459 CASE_FLT_FN (BUILT_IN_CARG):
10460 return fold_builtin_carg (loc, arg0, type);
10462 CASE_FLT_FN (BUILT_IN_SQRT):
10463 return fold_builtin_sqrt (loc, arg0, type);
10465 CASE_FLT_FN (BUILT_IN_CBRT):
10466 return fold_builtin_cbrt (loc, arg0, type);
10468 CASE_FLT_FN (BUILT_IN_ASIN):
10469 if (validate_arg (arg0, REAL_TYPE))
10470 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10471 &dconstm1, &dconst1, true);
10472 break;
10474 CASE_FLT_FN (BUILT_IN_ACOS):
10475 if (validate_arg (arg0, REAL_TYPE))
10476 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10477 &dconstm1, &dconst1, true);
10478 break;
10480 CASE_FLT_FN (BUILT_IN_ATAN):
10481 if (validate_arg (arg0, REAL_TYPE))
10482 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10483 break;
10485 CASE_FLT_FN (BUILT_IN_ASINH):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10488 break;
10490 CASE_FLT_FN (BUILT_IN_ACOSH):
10491 if (validate_arg (arg0, REAL_TYPE))
10492 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10493 &dconst1, NULL, true);
10494 break;
10496 CASE_FLT_FN (BUILT_IN_ATANH):
10497 if (validate_arg (arg0, REAL_TYPE))
10498 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10499 &dconstm1, &dconst1, false);
10500 break;
10502 CASE_FLT_FN (BUILT_IN_SIN):
10503 if (validate_arg (arg0, REAL_TYPE))
10504 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10505 break;
10507 CASE_FLT_FN (BUILT_IN_COS):
10508 return fold_builtin_cos (loc, arg0, type, fndecl);
10510 CASE_FLT_FN (BUILT_IN_TAN):
10511 return fold_builtin_tan (arg0, type);
10513 CASE_FLT_FN (BUILT_IN_CEXP):
10514 return fold_builtin_cexp (loc, arg0, type);
10516 CASE_FLT_FN (BUILT_IN_CEXPI):
10517 if (validate_arg (arg0, REAL_TYPE))
10518 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10519 break;
10521 CASE_FLT_FN (BUILT_IN_SINH):
10522 if (validate_arg (arg0, REAL_TYPE))
10523 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10524 break;
10526 CASE_FLT_FN (BUILT_IN_COSH):
10527 return fold_builtin_cosh (loc, arg0, type, fndecl);
10529 CASE_FLT_FN (BUILT_IN_TANH):
10530 if (validate_arg (arg0, REAL_TYPE))
10531 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10532 break;
10534 CASE_FLT_FN (BUILT_IN_ERF):
10535 if (validate_arg (arg0, REAL_TYPE))
10536 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10537 break;
10539 CASE_FLT_FN (BUILT_IN_ERFC):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10542 break;
10544 CASE_FLT_FN (BUILT_IN_TGAMMA):
10545 if (validate_arg (arg0, REAL_TYPE))
10546 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10547 break;
10549 CASE_FLT_FN (BUILT_IN_EXP):
10550 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10552 CASE_FLT_FN (BUILT_IN_EXP2):
10553 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10555 CASE_FLT_FN (BUILT_IN_EXP10):
10556 CASE_FLT_FN (BUILT_IN_POW10):
10557 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10559 CASE_FLT_FN (BUILT_IN_EXPM1):
10560 if (validate_arg (arg0, REAL_TYPE))
10561 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10562 break;
10564 CASE_FLT_FN (BUILT_IN_LOG):
10565 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10567 CASE_FLT_FN (BUILT_IN_LOG2):
10568 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10570 CASE_FLT_FN (BUILT_IN_LOG10):
10571 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10573 CASE_FLT_FN (BUILT_IN_LOG1P):
10574 if (validate_arg (arg0, REAL_TYPE))
10575 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10576 &dconstm1, NULL, false);
10577 break;
10579 CASE_FLT_FN (BUILT_IN_J0):
10580 if (validate_arg (arg0, REAL_TYPE))
10581 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10582 NULL, NULL, 0);
10583 break;
10585 CASE_FLT_FN (BUILT_IN_J1):
10586 if (validate_arg (arg0, REAL_TYPE))
10587 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10588 NULL, NULL, 0);
10589 break;
10591 CASE_FLT_FN (BUILT_IN_Y0):
10592 if (validate_arg (arg0, REAL_TYPE))
10593 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10594 &dconst0, NULL, false);
10595 break;
10597 CASE_FLT_FN (BUILT_IN_Y1):
10598 if (validate_arg (arg0, REAL_TYPE))
10599 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10600 &dconst0, NULL, false);
10601 break;
10603 CASE_FLT_FN (BUILT_IN_NAN):
10604 case BUILT_IN_NAND32:
10605 case BUILT_IN_NAND64:
10606 case BUILT_IN_NAND128:
10607 return fold_builtin_nan (arg0, type, true);
10609 CASE_FLT_FN (BUILT_IN_NANS):
10610 return fold_builtin_nan (arg0, type, false);
10612 CASE_FLT_FN (BUILT_IN_FLOOR):
10613 return fold_builtin_floor (loc, fndecl, arg0);
10615 CASE_FLT_FN (BUILT_IN_CEIL):
10616 return fold_builtin_ceil (loc, fndecl, arg0);
10618 CASE_FLT_FN (BUILT_IN_TRUNC):
10619 return fold_builtin_trunc (loc, fndecl, arg0);
10621 CASE_FLT_FN (BUILT_IN_ROUND):
10622 return fold_builtin_round (loc, fndecl, arg0);
10624 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10625 CASE_FLT_FN (BUILT_IN_RINT):
10626 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10628 CASE_FLT_FN (BUILT_IN_ICEIL):
10629 CASE_FLT_FN (BUILT_IN_LCEIL):
10630 CASE_FLT_FN (BUILT_IN_LLCEIL):
10631 CASE_FLT_FN (BUILT_IN_LFLOOR):
10632 CASE_FLT_FN (BUILT_IN_IFLOOR):
10633 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10634 CASE_FLT_FN (BUILT_IN_IROUND):
10635 CASE_FLT_FN (BUILT_IN_LROUND):
10636 CASE_FLT_FN (BUILT_IN_LLROUND):
10637 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10639 CASE_FLT_FN (BUILT_IN_IRINT):
10640 CASE_FLT_FN (BUILT_IN_LRINT):
10641 CASE_FLT_FN (BUILT_IN_LLRINT):
10642 return fold_fixed_mathfn (loc, fndecl, arg0);
10644 case BUILT_IN_BSWAP16:
10645 case BUILT_IN_BSWAP32:
10646 case BUILT_IN_BSWAP64:
10647 return fold_builtin_bswap (fndecl, arg0);
10649 CASE_INT_FN (BUILT_IN_FFS):
10650 CASE_INT_FN (BUILT_IN_CLZ):
10651 CASE_INT_FN (BUILT_IN_CTZ):
10652 CASE_INT_FN (BUILT_IN_CLRSB):
10653 CASE_INT_FN (BUILT_IN_POPCOUNT):
10654 CASE_INT_FN (BUILT_IN_PARITY):
10655 return fold_builtin_bitop (fndecl, arg0);
10657 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10658 return fold_builtin_signbit (loc, arg0, type);
10660 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10661 return fold_builtin_significand (loc, arg0, type);
10663 CASE_FLT_FN (BUILT_IN_ILOGB):
10664 CASE_FLT_FN (BUILT_IN_LOGB):
10665 return fold_builtin_logb (loc, arg0, type);
10667 case BUILT_IN_ISASCII:
10668 return fold_builtin_isascii (loc, arg0);
10670 case BUILT_IN_TOASCII:
10671 return fold_builtin_toascii (loc, arg0);
10673 case BUILT_IN_ISDIGIT:
10674 return fold_builtin_isdigit (loc, arg0);
10676 CASE_FLT_FN (BUILT_IN_FINITE):
10677 case BUILT_IN_FINITED32:
10678 case BUILT_IN_FINITED64:
10679 case BUILT_IN_FINITED128:
10680 case BUILT_IN_ISFINITE:
10682 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10683 if (ret)
10684 return ret;
10685 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10688 CASE_FLT_FN (BUILT_IN_ISINF):
10689 case BUILT_IN_ISINFD32:
10690 case BUILT_IN_ISINFD64:
10691 case BUILT_IN_ISINFD128:
10693 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10694 if (ret)
10695 return ret;
10696 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10699 case BUILT_IN_ISNORMAL:
10700 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10702 case BUILT_IN_ISINF_SIGN:
10703 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10705 CASE_FLT_FN (BUILT_IN_ISNAN):
10706 case BUILT_IN_ISNAND32:
10707 case BUILT_IN_ISNAND64:
10708 case BUILT_IN_ISNAND128:
10709 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10711 case BUILT_IN_PRINTF:
10712 case BUILT_IN_PRINTF_UNLOCKED:
10713 case BUILT_IN_VPRINTF:
10714 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10716 case BUILT_IN_FREE:
10717 if (integer_zerop (arg0))
10718 return build_empty_stmt (loc);
10719 break;
10721 default:
10722 break;
10725 return NULL_TREE;
10729 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10730 IGNORE is true if the result of the function call is ignored. This
10731 function returns NULL_TREE if no simplification was possible. */
10733 static tree
10734 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10736 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10739 switch (fcode)
10741 CASE_FLT_FN (BUILT_IN_JN):
10742 if (validate_arg (arg0, INTEGER_TYPE)
10743 && validate_arg (arg1, REAL_TYPE))
10744 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10745 break;
10747 CASE_FLT_FN (BUILT_IN_YN):
10748 if (validate_arg (arg0, INTEGER_TYPE)
10749 && validate_arg (arg1, REAL_TYPE))
10750 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10751 &dconst0, false);
10752 break;
10754 CASE_FLT_FN (BUILT_IN_DREM):
10755 CASE_FLT_FN (BUILT_IN_REMAINDER):
10756 if (validate_arg (arg0, REAL_TYPE)
10757 && validate_arg (arg1, REAL_TYPE))
10758 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10759 break;
10761 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10762 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10763 if (validate_arg (arg0, REAL_TYPE)
10764 && validate_arg (arg1, POINTER_TYPE))
10765 return do_mpfr_lgamma_r (arg0, arg1, type);
10766 break;
10768 CASE_FLT_FN (BUILT_IN_ATAN2):
10769 if (validate_arg (arg0, REAL_TYPE)
10770 && validate_arg (arg1, REAL_TYPE))
10771 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10772 break;
10774 CASE_FLT_FN (BUILT_IN_FDIM):
10775 if (validate_arg (arg0, REAL_TYPE)
10776 && validate_arg (arg1, REAL_TYPE))
10777 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10778 break;
10780 CASE_FLT_FN (BUILT_IN_HYPOT):
10781 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10783 CASE_FLT_FN (BUILT_IN_CPOW):
10784 if (validate_arg (arg0, COMPLEX_TYPE)
10785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10786 && validate_arg (arg1, COMPLEX_TYPE)
10787 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10788 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10789 break;
10791 CASE_FLT_FN (BUILT_IN_LDEXP):
10792 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10793 CASE_FLT_FN (BUILT_IN_SCALBN):
10794 CASE_FLT_FN (BUILT_IN_SCALBLN):
10795 return fold_builtin_load_exponent (loc, arg0, arg1,
10796 type, /*ldexp=*/false);
10798 CASE_FLT_FN (BUILT_IN_FREXP):
10799 return fold_builtin_frexp (loc, arg0, arg1, type);
10801 CASE_FLT_FN (BUILT_IN_MODF):
10802 return fold_builtin_modf (loc, arg0, arg1, type);
10804 case BUILT_IN_BZERO:
10805 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10807 case BUILT_IN_FPUTS:
10808 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10810 case BUILT_IN_FPUTS_UNLOCKED:
10811 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10813 case BUILT_IN_STRSTR:
10814 return fold_builtin_strstr (loc, arg0, arg1, type);
10816 case BUILT_IN_STRCAT:
10817 return fold_builtin_strcat (loc, arg0, arg1);
10819 case BUILT_IN_STRSPN:
10820 return fold_builtin_strspn (loc, arg0, arg1);
10822 case BUILT_IN_STRCSPN:
10823 return fold_builtin_strcspn (loc, arg0, arg1);
10825 case BUILT_IN_STRCHR:
10826 case BUILT_IN_INDEX:
10827 return fold_builtin_strchr (loc, arg0, arg1, type);
10829 case BUILT_IN_STRRCHR:
10830 case BUILT_IN_RINDEX:
10831 return fold_builtin_strrchr (loc, arg0, arg1, type);
10833 case BUILT_IN_STRCPY:
10834 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10836 case BUILT_IN_STPCPY:
10837 if (ignore)
10839 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10840 if (!fn)
10841 break;
10843 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10845 else
10846 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10847 break;
10849 case BUILT_IN_STRCMP:
10850 return fold_builtin_strcmp (loc, arg0, arg1);
10852 case BUILT_IN_STRPBRK:
10853 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10855 case BUILT_IN_EXPECT:
10856 return fold_builtin_expect (loc, arg0, arg1);
10858 CASE_FLT_FN (BUILT_IN_POW):
10859 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10861 CASE_FLT_FN (BUILT_IN_POWI):
10862 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10864 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10865 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10867 CASE_FLT_FN (BUILT_IN_FMIN):
10868 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10870 CASE_FLT_FN (BUILT_IN_FMAX):
10871 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10873 case BUILT_IN_ISGREATER:
10874 return fold_builtin_unordered_cmp (loc, fndecl,
10875 arg0, arg1, UNLE_EXPR, LE_EXPR);
10876 case BUILT_IN_ISGREATEREQUAL:
10877 return fold_builtin_unordered_cmp (loc, fndecl,
10878 arg0, arg1, UNLT_EXPR, LT_EXPR);
10879 case BUILT_IN_ISLESS:
10880 return fold_builtin_unordered_cmp (loc, fndecl,
10881 arg0, arg1, UNGE_EXPR, GE_EXPR);
10882 case BUILT_IN_ISLESSEQUAL:
10883 return fold_builtin_unordered_cmp (loc, fndecl,
10884 arg0, arg1, UNGT_EXPR, GT_EXPR);
10885 case BUILT_IN_ISLESSGREATER:
10886 return fold_builtin_unordered_cmp (loc, fndecl,
10887 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10888 case BUILT_IN_ISUNORDERED:
10889 return fold_builtin_unordered_cmp (loc, fndecl,
10890 arg0, arg1, UNORDERED_EXPR,
10891 NOP_EXPR);
10893 /* We do the folding for va_start in the expander. */
10894 case BUILT_IN_VA_START:
10895 break;
10897 case BUILT_IN_SPRINTF:
10898 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10900 case BUILT_IN_OBJECT_SIZE:
10901 return fold_builtin_object_size (arg0, arg1);
10903 case BUILT_IN_PRINTF:
10904 case BUILT_IN_PRINTF_UNLOCKED:
10905 case BUILT_IN_VPRINTF:
10906 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10908 case BUILT_IN_PRINTF_CHK:
10909 case BUILT_IN_VPRINTF_CHK:
10910 if (!validate_arg (arg0, INTEGER_TYPE)
10911 || TREE_SIDE_EFFECTS (arg0))
10912 return NULL_TREE;
10913 else
10914 return fold_builtin_printf (loc, fndecl,
10915 arg1, NULL_TREE, ignore, fcode);
10916 break;
10918 case BUILT_IN_FPRINTF:
10919 case BUILT_IN_FPRINTF_UNLOCKED:
10920 case BUILT_IN_VFPRINTF:
10921 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10922 ignore, fcode);
10924 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10925 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10927 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10928 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10930 default:
10931 break;
10933 return NULL_TREE;
10936 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10937 and ARG2. IGNORE is true if the result of the function call is ignored.
10938 This function returns NULL_TREE if no simplification was possible. */
10940 static tree
10941 fold_builtin_3 (location_t loc, tree fndecl,
10942 tree arg0, tree arg1, tree arg2, bool ignore)
10944 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10945 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10946 switch (fcode)
10949 CASE_FLT_FN (BUILT_IN_SINCOS):
10950 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10952 CASE_FLT_FN (BUILT_IN_FMA):
10953 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10954 break;
10956 CASE_FLT_FN (BUILT_IN_REMQUO):
10957 if (validate_arg (arg0, REAL_TYPE)
10958 && validate_arg (arg1, REAL_TYPE)
10959 && validate_arg (arg2, POINTER_TYPE))
10960 return do_mpfr_remquo (arg0, arg1, arg2);
10961 break;
10963 case BUILT_IN_MEMSET:
10964 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10966 case BUILT_IN_BCOPY:
10967 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10968 void_type_node, true, /*endp=*/3);
10970 case BUILT_IN_MEMCPY:
10971 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10972 type, ignore, /*endp=*/0);
10974 case BUILT_IN_MEMPCPY:
10975 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10976 type, ignore, /*endp=*/1);
10978 case BUILT_IN_MEMMOVE:
10979 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10980 type, ignore, /*endp=*/3);
10982 case BUILT_IN_STRNCAT:
10983 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10985 case BUILT_IN_STRNCPY:
10986 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10988 case BUILT_IN_STRNCMP:
10989 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10991 case BUILT_IN_MEMCHR:
10992 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10994 case BUILT_IN_BCMP:
10995 case BUILT_IN_MEMCMP:
10996 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10998 case BUILT_IN_SPRINTF:
10999 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11001 case BUILT_IN_SNPRINTF:
11002 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11004 case BUILT_IN_STRCPY_CHK:
11005 case BUILT_IN_STPCPY_CHK:
11006 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11007 ignore, fcode);
11009 case BUILT_IN_STRCAT_CHK:
11010 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11012 case BUILT_IN_PRINTF_CHK:
11013 case BUILT_IN_VPRINTF_CHK:
11014 if (!validate_arg (arg0, INTEGER_TYPE)
11015 || TREE_SIDE_EFFECTS (arg0))
11016 return NULL_TREE;
11017 else
11018 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11019 break;
11021 case BUILT_IN_FPRINTF:
11022 case BUILT_IN_FPRINTF_UNLOCKED:
11023 case BUILT_IN_VFPRINTF:
11024 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11025 ignore, fcode);
11027 case BUILT_IN_FPRINTF_CHK:
11028 case BUILT_IN_VFPRINTF_CHK:
11029 if (!validate_arg (arg1, INTEGER_TYPE)
11030 || TREE_SIDE_EFFECTS (arg1))
11031 return NULL_TREE;
11032 else
11033 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11034 ignore, fcode);
11036 default:
11037 break;
11039 return NULL_TREE;
11042 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11043 ARG2, and ARG3. IGNORE is true if the result of the function call is
11044 ignored. This function returns NULL_TREE if no simplification was
11045 possible. */
11047 static tree
11048 fold_builtin_4 (location_t loc, tree fndecl,
11049 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11051 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11053 switch (fcode)
11055 case BUILT_IN_MEMCPY_CHK:
11056 case BUILT_IN_MEMPCPY_CHK:
11057 case BUILT_IN_MEMMOVE_CHK:
11058 case BUILT_IN_MEMSET_CHK:
11059 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11060 NULL_TREE, ignore,
11061 DECL_FUNCTION_CODE (fndecl));
11063 case BUILT_IN_STRNCPY_CHK:
11064 case BUILT_IN_STPNCPY_CHK:
11065 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11066 ignore, fcode);
11068 case BUILT_IN_STRNCAT_CHK:
11069 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11071 case BUILT_IN_SNPRINTF:
11072 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11074 case BUILT_IN_FPRINTF_CHK:
11075 case BUILT_IN_VFPRINTF_CHK:
11076 if (!validate_arg (arg1, INTEGER_TYPE)
11077 || TREE_SIDE_EFFECTS (arg1))
11078 return NULL_TREE;
11079 else
11080 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11081 ignore, fcode);
11082 break;
11084 default:
11085 break;
11087 return NULL_TREE;
11090 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11091 arguments, where NARGS <= 4. IGNORE is true if the result of the
11092 function call is ignored. This function returns NULL_TREE if no
11093 simplification was possible. Note that this only folds builtins with
11094 fixed argument patterns. Foldings that do varargs-to-varargs
11095 transformations, or that match calls with more than 4 arguments,
11096 need to be handled with fold_builtin_varargs instead. */
11098 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11100 static tree
11101 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11103 tree ret = NULL_TREE;
11105 switch (nargs)
11107 case 0:
11108 ret = fold_builtin_0 (loc, fndecl, ignore);
11109 break;
11110 case 1:
11111 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11112 break;
11113 case 2:
11114 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11115 break;
11116 case 3:
11117 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11118 break;
11119 case 4:
11120 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11121 ignore);
11122 break;
11123 default:
11124 break;
11126 if (ret)
11128 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11129 SET_EXPR_LOCATION (ret, loc);
11130 TREE_NO_WARNING (ret) = 1;
11131 return ret;
11133 return NULL_TREE;
11136 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11137 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11138 of arguments in ARGS to be omitted. OLDNARGS is the number of
11139 elements in ARGS. */
11141 static tree
11142 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11143 int skip, tree fndecl, int n, va_list newargs)
11145 int nargs = oldnargs - skip + n;
11146 tree *buffer;
11148 if (n > 0)
11150 int i, j;
11152 buffer = XALLOCAVEC (tree, nargs);
11153 for (i = 0; i < n; i++)
11154 buffer[i] = va_arg (newargs, tree);
11155 for (j = skip; j < oldnargs; j++, i++)
11156 buffer[i] = args[j];
11158 else
11159 buffer = args + skip;
11161 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11164 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11165 list ARGS along with N new arguments specified as the "..."
11166 parameters. SKIP is the number of arguments in ARGS to be omitted.
11167 OLDNARGS is the number of elements in ARGS. */
11169 static tree
11170 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11171 int skip, tree fndecl, int n, ...)
11173 va_list ap;
11174 tree t;
11176 va_start (ap, n);
11177 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11178 va_end (ap);
11180 return t;
11183 /* Return true if FNDECL shouldn't be folded right now.
11184 If a built-in function has an inline attribute always_inline
11185 wrapper, defer folding it after always_inline functions have
11186 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11187 might not be performed. */
11189 bool
11190 avoid_folding_inline_builtin (tree fndecl)
11192 return (DECL_DECLARED_INLINE_P (fndecl)
11193 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11194 && cfun
11195 && !cfun->always_inline_functions_inlined
11196 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11199 /* A wrapper function for builtin folding that prevents warnings for
11200 "statement without effect" and the like, caused by removing the
11201 call node earlier than the warning is generated. */
11203 tree
11204 fold_call_expr (location_t loc, tree exp, bool ignore)
11206 tree ret = NULL_TREE;
11207 tree fndecl = get_callee_fndecl (exp);
11208 if (fndecl
11209 && TREE_CODE (fndecl) == FUNCTION_DECL
11210 && DECL_BUILT_IN (fndecl)
11211 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11212 yet. Defer folding until we see all the arguments
11213 (after inlining). */
11214 && !CALL_EXPR_VA_ARG_PACK (exp))
11216 int nargs = call_expr_nargs (exp);
11218 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11219 instead last argument is __builtin_va_arg_pack (). Defer folding
11220 even in that case, until arguments are finalized. */
11221 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11223 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11224 if (fndecl2
11225 && TREE_CODE (fndecl2) == FUNCTION_DECL
11226 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11227 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11228 return NULL_TREE;
11231 if (avoid_folding_inline_builtin (fndecl))
11232 return NULL_TREE;
11234 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11235 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11236 CALL_EXPR_ARGP (exp), ignore);
11237 else
11239 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11241 tree *args = CALL_EXPR_ARGP (exp);
11242 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11244 if (!ret)
11245 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11246 if (ret)
11247 return ret;
11250 return NULL_TREE;
11253 /* Conveniently construct a function call expression. FNDECL names the
11254 function to be called and N arguments are passed in the array
11255 ARGARRAY. */
11257 tree
11258 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11260 tree fntype = TREE_TYPE (fndecl);
11261 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11263 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11266 /* Conveniently construct a function call expression. FNDECL names the
11267 function to be called and the arguments are passed in the vector
11268 VEC. */
11270 tree
11271 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11273 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11274 vec_safe_address (vec));
11278 /* Conveniently construct a function call expression. FNDECL names the
11279 function to be called, N is the number of arguments, and the "..."
11280 parameters are the argument expressions. */
11282 tree
11283 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11285 va_list ap;
11286 tree *argarray = XALLOCAVEC (tree, n);
11287 int i;
11289 va_start (ap, n);
11290 for (i = 0; i < n; i++)
11291 argarray[i] = va_arg (ap, tree);
11292 va_end (ap);
11293 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11296 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11297 varargs macros aren't supported by all bootstrap compilers. */
11299 tree
11300 build_call_expr (tree fndecl, int n, ...)
11302 va_list ap;
11303 tree *argarray = XALLOCAVEC (tree, n);
11304 int i;
11306 va_start (ap, n);
11307 for (i = 0; i < n; i++)
11308 argarray[i] = va_arg (ap, tree);
11309 va_end (ap);
11310 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11313 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11314 N arguments are passed in the array ARGARRAY. */
11316 tree
11317 fold_builtin_call_array (location_t loc, tree type,
11318 tree fn,
11319 int n,
11320 tree *argarray)
11322 tree ret = NULL_TREE;
11323 tree exp;
11325 if (TREE_CODE (fn) == ADDR_EXPR)
11327 tree fndecl = TREE_OPERAND (fn, 0);
11328 if (TREE_CODE (fndecl) == FUNCTION_DECL
11329 && DECL_BUILT_IN (fndecl))
11331 /* If last argument is __builtin_va_arg_pack (), arguments to this
11332 function are not finalized yet. Defer folding until they are. */
11333 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11335 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11336 if (fndecl2
11337 && TREE_CODE (fndecl2) == FUNCTION_DECL
11338 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11339 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11340 return build_call_array_loc (loc, type, fn, n, argarray);
11342 if (avoid_folding_inline_builtin (fndecl))
11343 return build_call_array_loc (loc, type, fn, n, argarray);
11344 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11346 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11347 if (ret)
11348 return ret;
11350 return build_call_array_loc (loc, type, fn, n, argarray);
11352 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11354 /* First try the transformations that don't require consing up
11355 an exp. */
11356 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11357 if (ret)
11358 return ret;
11361 /* If we got this far, we need to build an exp. */
11362 exp = build_call_array_loc (loc, type, fn, n, argarray);
11363 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11364 return ret ? ret : exp;
11368 return build_call_array_loc (loc, type, fn, n, argarray);
11371 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11372 along with N new arguments specified as the "..." parameters. SKIP
11373 is the number of arguments in EXP to be omitted. This function is used
11374 to do varargs-to-varargs transformations. */
11376 static tree
11377 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11379 va_list ap;
11380 tree t;
11382 va_start (ap, n);
11383 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11384 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11385 va_end (ap);
11387 return t;
11390 /* Validate a single argument ARG against a tree code CODE representing
11391 a type. */
11393 static bool
11394 validate_arg (const_tree arg, enum tree_code code)
11396 if (!arg)
11397 return false;
11398 else if (code == POINTER_TYPE)
11399 return POINTER_TYPE_P (TREE_TYPE (arg));
11400 else if (code == INTEGER_TYPE)
11401 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11402 return code == TREE_CODE (TREE_TYPE (arg));
11405 /* This function validates the types of a function call argument list
11406 against a specified list of tree_codes. If the last specifier is a 0,
11407 that represents an ellipses, otherwise the last specifier must be a
11408 VOID_TYPE.
11410 This is the GIMPLE version of validate_arglist. Eventually we want to
11411 completely convert builtins.c to work from GIMPLEs and the tree based
11412 validate_arglist will then be removed. */
11414 bool
11415 validate_gimple_arglist (const_gimple call, ...)
11417 enum tree_code code;
11418 bool res = 0;
11419 va_list ap;
11420 const_tree arg;
11421 size_t i;
11423 va_start (ap, call);
11424 i = 0;
11428 code = (enum tree_code) va_arg (ap, int);
11429 switch (code)
11431 case 0:
11432 /* This signifies an ellipses, any further arguments are all ok. */
11433 res = true;
11434 goto end;
11435 case VOID_TYPE:
11436 /* This signifies an endlink, if no arguments remain, return
11437 true, otherwise return false. */
11438 res = (i == gimple_call_num_args (call));
11439 goto end;
11440 default:
11441 /* If no parameters remain or the parameter's code does not
11442 match the specified code, return false. Otherwise continue
11443 checking any remaining arguments. */
11444 arg = gimple_call_arg (call, i++);
11445 if (!validate_arg (arg, code))
11446 goto end;
11447 break;
11450 while (1);
11452 /* We need gotos here since we can only have one VA_CLOSE in a
11453 function. */
11454 end: ;
11455 va_end (ap);
11457 return res;
11460 /* Default target-specific builtin expander that does nothing. */
11463 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11464 rtx target ATTRIBUTE_UNUSED,
11465 rtx subtarget ATTRIBUTE_UNUSED,
11466 enum machine_mode mode ATTRIBUTE_UNUSED,
11467 int ignore ATTRIBUTE_UNUSED)
11469 return NULL_RTX;
11472 /* Returns true is EXP represents data that would potentially reside
11473 in a readonly section. */
11475 static bool
11476 readonly_data_expr (tree exp)
11478 STRIP_NOPS (exp);
11480 if (TREE_CODE (exp) != ADDR_EXPR)
11481 return false;
11483 exp = get_base_address (TREE_OPERAND (exp, 0));
11484 if (!exp)
11485 return false;
11487 /* Make sure we call decl_readonly_section only for trees it
11488 can handle (since it returns true for everything it doesn't
11489 understand). */
11490 if (TREE_CODE (exp) == STRING_CST
11491 || TREE_CODE (exp) == CONSTRUCTOR
11492 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11493 return decl_readonly_section (exp, 0);
11494 else
11495 return false;
11498 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11499 to the call, and TYPE is its return type.
11501 Return NULL_TREE if no simplification was possible, otherwise return the
11502 simplified form of the call as a tree.
11504 The simplified form may be a constant or other expression which
11505 computes the same value, but in a more efficient manner (including
11506 calls to other builtin functions).
11508 The call may contain arguments which need to be evaluated, but
11509 which are not useful to determine the result of the call. In
11510 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11511 COMPOUND_EXPR will be an argument which must be evaluated.
11512 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11513 COMPOUND_EXPR in the chain will contain the tree for the simplified
11514 form of the builtin function call. */
11516 static tree
11517 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11519 if (!validate_arg (s1, POINTER_TYPE)
11520 || !validate_arg (s2, POINTER_TYPE))
11521 return NULL_TREE;
11522 else
11524 tree fn;
11525 const char *p1, *p2;
11527 p2 = c_getstr (s2);
11528 if (p2 == NULL)
11529 return NULL_TREE;
11531 p1 = c_getstr (s1);
11532 if (p1 != NULL)
11534 const char *r = strstr (p1, p2);
11535 tree tem;
11537 if (r == NULL)
11538 return build_int_cst (TREE_TYPE (s1), 0);
11540 /* Return an offset into the constant string argument. */
11541 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11542 return fold_convert_loc (loc, type, tem);
11545 /* The argument is const char *, and the result is char *, so we need
11546 a type conversion here to avoid a warning. */
11547 if (p2[0] == '\0')
11548 return fold_convert_loc (loc, type, s1);
11550 if (p2[1] != '\0')
11551 return NULL_TREE;
11553 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11554 if (!fn)
11555 return NULL_TREE;
11557 /* New argument list transforming strstr(s1, s2) to
11558 strchr(s1, s2[0]). */
11559 return build_call_expr_loc (loc, fn, 2, s1,
11560 build_int_cst (integer_type_node, p2[0]));
11564 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11565 the call, and TYPE is its return type.
11567 Return NULL_TREE if no simplification was possible, otherwise return the
11568 simplified form of the call as a tree.
11570 The simplified form may be a constant or other expression which
11571 computes the same value, but in a more efficient manner (including
11572 calls to other builtin functions).
11574 The call may contain arguments which need to be evaluated, but
11575 which are not useful to determine the result of the call. In
11576 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11577 COMPOUND_EXPR will be an argument which must be evaluated.
11578 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11579 COMPOUND_EXPR in the chain will contain the tree for the simplified
11580 form of the builtin function call. */
11582 static tree
11583 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11585 if (!validate_arg (s1, POINTER_TYPE)
11586 || !validate_arg (s2, INTEGER_TYPE))
11587 return NULL_TREE;
11588 else
11590 const char *p1;
11592 if (TREE_CODE (s2) != INTEGER_CST)
11593 return NULL_TREE;
11595 p1 = c_getstr (s1);
11596 if (p1 != NULL)
11598 char c;
11599 const char *r;
11600 tree tem;
11602 if (target_char_cast (s2, &c))
11603 return NULL_TREE;
11605 r = strchr (p1, c);
11607 if (r == NULL)
11608 return build_int_cst (TREE_TYPE (s1), 0);
11610 /* Return an offset into the constant string argument. */
11611 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11612 return fold_convert_loc (loc, type, tem);
11614 return NULL_TREE;
11618 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11619 the call, and TYPE is its return type.
11621 Return NULL_TREE if no simplification was possible, otherwise return the
11622 simplified form of the call as a tree.
11624 The simplified form may be a constant or other expression which
11625 computes the same value, but in a more efficient manner (including
11626 calls to other builtin functions).
11628 The call may contain arguments which need to be evaluated, but
11629 which are not useful to determine the result of the call. In
11630 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11631 COMPOUND_EXPR will be an argument which must be evaluated.
11632 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11633 COMPOUND_EXPR in the chain will contain the tree for the simplified
11634 form of the builtin function call. */
11636 static tree
11637 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11639 if (!validate_arg (s1, POINTER_TYPE)
11640 || !validate_arg (s2, INTEGER_TYPE))
11641 return NULL_TREE;
11642 else
11644 tree fn;
11645 const char *p1;
11647 if (TREE_CODE (s2) != INTEGER_CST)
11648 return NULL_TREE;
11650 p1 = c_getstr (s1);
11651 if (p1 != NULL)
11653 char c;
11654 const char *r;
11655 tree tem;
11657 if (target_char_cast (s2, &c))
11658 return NULL_TREE;
11660 r = strrchr (p1, c);
11662 if (r == NULL)
11663 return build_int_cst (TREE_TYPE (s1), 0);
11665 /* Return an offset into the constant string argument. */
11666 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11667 return fold_convert_loc (loc, type, tem);
11670 if (! integer_zerop (s2))
11671 return NULL_TREE;
11673 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11674 if (!fn)
11675 return NULL_TREE;
11677 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11678 return build_call_expr_loc (loc, fn, 2, s1, s2);
11682 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11683 to the call, and TYPE is its return type.
11685 Return NULL_TREE if no simplification was possible, otherwise return the
11686 simplified form of the call as a tree.
11688 The simplified form may be a constant or other expression which
11689 computes the same value, but in a more efficient manner (including
11690 calls to other builtin functions).
11692 The call may contain arguments which need to be evaluated, but
11693 which are not useful to determine the result of the call. In
11694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11695 COMPOUND_EXPR will be an argument which must be evaluated.
11696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11697 COMPOUND_EXPR in the chain will contain the tree for the simplified
11698 form of the builtin function call. */
11700 static tree
11701 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11703 if (!validate_arg (s1, POINTER_TYPE)
11704 || !validate_arg (s2, POINTER_TYPE))
11705 return NULL_TREE;
11706 else
11708 tree fn;
11709 const char *p1, *p2;
11711 p2 = c_getstr (s2);
11712 if (p2 == NULL)
11713 return NULL_TREE;
11715 p1 = c_getstr (s1);
11716 if (p1 != NULL)
11718 const char *r = strpbrk (p1, p2);
11719 tree tem;
11721 if (r == NULL)
11722 return build_int_cst (TREE_TYPE (s1), 0);
11724 /* Return an offset into the constant string argument. */
11725 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11726 return fold_convert_loc (loc, type, tem);
11729 if (p2[0] == '\0')
11730 /* strpbrk(x, "") == NULL.
11731 Evaluate and ignore s1 in case it had side-effects. */
11732 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11734 if (p2[1] != '\0')
11735 return NULL_TREE; /* Really call strpbrk. */
11737 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11738 if (!fn)
11739 return NULL_TREE;
11741 /* New argument list transforming strpbrk(s1, s2) to
11742 strchr(s1, s2[0]). */
11743 return build_call_expr_loc (loc, fn, 2, s1,
11744 build_int_cst (integer_type_node, p2[0]));
11748 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11749 to the call.
11751 Return NULL_TREE if no simplification was possible, otherwise return the
11752 simplified form of the call as a tree.
11754 The simplified form may be a constant or other expression which
11755 computes the same value, but in a more efficient manner (including
11756 calls to other builtin functions).
11758 The call may contain arguments which need to be evaluated, but
11759 which are not useful to determine the result of the call. In
11760 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11761 COMPOUND_EXPR will be an argument which must be evaluated.
11762 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11763 COMPOUND_EXPR in the chain will contain the tree for the simplified
11764 form of the builtin function call. */
11766 static tree
11767 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11769 if (!validate_arg (dst, POINTER_TYPE)
11770 || !validate_arg (src, POINTER_TYPE))
11771 return NULL_TREE;
11772 else
11774 const char *p = c_getstr (src);
11776 /* If the string length is zero, return the dst parameter. */
11777 if (p && *p == '\0')
11778 return dst;
11780 if (optimize_insn_for_speed_p ())
11782 /* See if we can store by pieces into (dst + strlen(dst)). */
11783 tree newdst, call;
11784 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11785 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11787 if (!strlen_fn || !strcpy_fn)
11788 return NULL_TREE;
11790 /* If we don't have a movstr we don't want to emit an strcpy
11791 call. We have to do that if the length of the source string
11792 isn't computable (in that case we can use memcpy probably
11793 later expanding to a sequence of mov instructions). If we
11794 have movstr instructions we can emit strcpy calls. */
11795 if (!HAVE_movstr)
11797 tree len = c_strlen (src, 1);
11798 if (! len || TREE_SIDE_EFFECTS (len))
11799 return NULL_TREE;
11802 /* Stabilize the argument list. */
11803 dst = builtin_save_expr (dst);
11805 /* Create strlen (dst). */
11806 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11807 /* Create (dst p+ strlen (dst)). */
11809 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11810 newdst = builtin_save_expr (newdst);
11812 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11813 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11815 return NULL_TREE;
11819 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11820 arguments to the call.
11822 Return NULL_TREE if no simplification was possible, otherwise return the
11823 simplified form of the call as a tree.
11825 The simplified form may be a constant or other expression which
11826 computes the same value, but in a more efficient manner (including
11827 calls to other builtin functions).
11829 The call may contain arguments which need to be evaluated, but
11830 which are not useful to determine the result of the call. In
11831 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11832 COMPOUND_EXPR will be an argument which must be evaluated.
11833 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11834 COMPOUND_EXPR in the chain will contain the tree for the simplified
11835 form of the builtin function call. */
11837 static tree
11838 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11840 if (!validate_arg (dst, POINTER_TYPE)
11841 || !validate_arg (src, POINTER_TYPE)
11842 || !validate_arg (len, INTEGER_TYPE))
11843 return NULL_TREE;
11844 else
11846 const char *p = c_getstr (src);
11848 /* If the requested length is zero, or the src parameter string
11849 length is zero, return the dst parameter. */
11850 if (integer_zerop (len) || (p && *p == '\0'))
11851 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11853 /* If the requested len is greater than or equal to the string
11854 length, call strcat. */
11855 if (TREE_CODE (len) == INTEGER_CST && p
11856 && compare_tree_int (len, strlen (p)) >= 0)
11858 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11860 /* If the replacement _DECL isn't initialized, don't do the
11861 transformation. */
11862 if (!fn)
11863 return NULL_TREE;
11865 return build_call_expr_loc (loc, fn, 2, dst, src);
11867 return NULL_TREE;
11871 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11872 to the call.
11874 Return NULL_TREE if no simplification was possible, otherwise return the
11875 simplified form of the call as a tree.
11877 The simplified form may be a constant or other expression which
11878 computes the same value, but in a more efficient manner (including
11879 calls to other builtin functions).
11881 The call may contain arguments which need to be evaluated, but
11882 which are not useful to determine the result of the call. In
11883 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11884 COMPOUND_EXPR will be an argument which must be evaluated.
11885 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11886 COMPOUND_EXPR in the chain will contain the tree for the simplified
11887 form of the builtin function call. */
11889 static tree
11890 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11892 if (!validate_arg (s1, POINTER_TYPE)
11893 || !validate_arg (s2, POINTER_TYPE))
11894 return NULL_TREE;
11895 else
11897 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11899 /* If both arguments are constants, evaluate at compile-time. */
11900 if (p1 && p2)
11902 const size_t r = strspn (p1, p2);
11903 return build_int_cst (size_type_node, r);
11906 /* If either argument is "", return NULL_TREE. */
11907 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11908 /* Evaluate and ignore both arguments in case either one has
11909 side-effects. */
11910 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11911 s1, s2);
11912 return NULL_TREE;
11916 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11917 to the call.
11919 Return NULL_TREE if no simplification was possible, otherwise return the
11920 simplified form of the call as a tree.
11922 The simplified form may be a constant or other expression which
11923 computes the same value, but in a more efficient manner (including
11924 calls to other builtin functions).
11926 The call may contain arguments which need to be evaluated, but
11927 which are not useful to determine the result of the call. In
11928 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11929 COMPOUND_EXPR will be an argument which must be evaluated.
11930 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11931 COMPOUND_EXPR in the chain will contain the tree for the simplified
11932 form of the builtin function call. */
11934 static tree
11935 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11937 if (!validate_arg (s1, POINTER_TYPE)
11938 || !validate_arg (s2, POINTER_TYPE))
11939 return NULL_TREE;
11940 else
11942 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11944 /* If both arguments are constants, evaluate at compile-time. */
11945 if (p1 && p2)
11947 const size_t r = strcspn (p1, p2);
11948 return build_int_cst (size_type_node, r);
11951 /* If the first argument is "", return NULL_TREE. */
11952 if (p1 && *p1 == '\0')
11954 /* Evaluate and ignore argument s2 in case it has
11955 side-effects. */
11956 return omit_one_operand_loc (loc, size_type_node,
11957 size_zero_node, s2);
11960 /* If the second argument is "", return __builtin_strlen(s1). */
11961 if (p2 && *p2 == '\0')
11963 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11965 /* If the replacement _DECL isn't initialized, don't do the
11966 transformation. */
11967 if (!fn)
11968 return NULL_TREE;
11970 return build_call_expr_loc (loc, fn, 1, s1);
11972 return NULL_TREE;
11976 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11977 to the call. IGNORE is true if the value returned
11978 by the builtin will be ignored. UNLOCKED is true is true if this
11979 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11980 the known length of the string. Return NULL_TREE if no simplification
11981 was possible. */
11983 tree
11984 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11985 bool ignore, bool unlocked, tree len)
11987 /* If we're using an unlocked function, assume the other unlocked
11988 functions exist explicitly. */
11989 tree const fn_fputc = (unlocked
11990 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11991 : builtin_decl_implicit (BUILT_IN_FPUTC));
11992 tree const fn_fwrite = (unlocked
11993 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11994 : builtin_decl_implicit (BUILT_IN_FWRITE));
11996 /* If the return value is used, don't do the transformation. */
11997 if (!ignore)
11998 return NULL_TREE;
12000 /* Verify the arguments in the original call. */
12001 if (!validate_arg (arg0, POINTER_TYPE)
12002 || !validate_arg (arg1, POINTER_TYPE))
12003 return NULL_TREE;
12005 if (! len)
12006 len = c_strlen (arg0, 0);
12008 /* Get the length of the string passed to fputs. If the length
12009 can't be determined, punt. */
12010 if (!len
12011 || TREE_CODE (len) != INTEGER_CST)
12012 return NULL_TREE;
12014 switch (compare_tree_int (len, 1))
12016 case -1: /* length is 0, delete the call entirely . */
12017 return omit_one_operand_loc (loc, integer_type_node,
12018 integer_zero_node, arg1);;
12020 case 0: /* length is 1, call fputc. */
12022 const char *p = c_getstr (arg0);
12024 if (p != NULL)
12026 if (fn_fputc)
12027 return build_call_expr_loc (loc, fn_fputc, 2,
12028 build_int_cst
12029 (integer_type_node, p[0]), arg1);
12030 else
12031 return NULL_TREE;
12034 /* FALLTHROUGH */
12035 case 1: /* length is greater than 1, call fwrite. */
12037 /* If optimizing for size keep fputs. */
12038 if (optimize_function_for_size_p (cfun))
12039 return NULL_TREE;
12040 /* New argument list transforming fputs(string, stream) to
12041 fwrite(string, 1, len, stream). */
12042 if (fn_fwrite)
12043 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12044 size_one_node, len, arg1);
12045 else
12046 return NULL_TREE;
12048 default:
12049 gcc_unreachable ();
12051 return NULL_TREE;
12054 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12055 produced. False otherwise. This is done so that we don't output the error
12056 or warning twice or three times. */
12058 bool
12059 fold_builtin_next_arg (tree exp, bool va_start_p)
12061 tree fntype = TREE_TYPE (current_function_decl);
12062 int nargs = call_expr_nargs (exp);
12063 tree arg;
12064 /* There is good chance the current input_location points inside the
12065 definition of the va_start macro (perhaps on the token for
12066 builtin) in a system header, so warnings will not be emitted.
12067 Use the location in real source code. */
12068 source_location current_location =
12069 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12070 NULL);
12072 if (!stdarg_p (fntype))
12074 error ("%<va_start%> used in function with fixed args");
12075 return true;
12078 if (va_start_p)
12080 if (va_start_p && (nargs != 2))
12082 error ("wrong number of arguments to function %<va_start%>");
12083 return true;
12085 arg = CALL_EXPR_ARG (exp, 1);
12087 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12088 when we checked the arguments and if needed issued a warning. */
12089 else
12091 if (nargs == 0)
12093 /* Evidently an out of date version of <stdarg.h>; can't validate
12094 va_start's second argument, but can still work as intended. */
12095 warning_at (current_location,
12096 OPT_Wvarargs,
12097 "%<__builtin_next_arg%> called without an argument");
12098 return true;
12100 else if (nargs > 1)
12102 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12103 return true;
12105 arg = CALL_EXPR_ARG (exp, 0);
12108 if (TREE_CODE (arg) == SSA_NAME)
12109 arg = SSA_NAME_VAR (arg);
12111 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12112 or __builtin_next_arg (0) the first time we see it, after checking
12113 the arguments and if needed issuing a warning. */
12114 if (!integer_zerop (arg))
12116 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12118 /* Strip off all nops for the sake of the comparison. This
12119 is not quite the same as STRIP_NOPS. It does more.
12120 We must also strip off INDIRECT_EXPR for C++ reference
12121 parameters. */
12122 while (CONVERT_EXPR_P (arg)
12123 || TREE_CODE (arg) == INDIRECT_REF)
12124 arg = TREE_OPERAND (arg, 0);
12125 if (arg != last_parm)
12127 /* FIXME: Sometimes with the tree optimizers we can get the
12128 not the last argument even though the user used the last
12129 argument. We just warn and set the arg to be the last
12130 argument so that we will get wrong-code because of
12131 it. */
12132 warning_at (current_location,
12133 OPT_Wvarargs,
12134 "second parameter of %<va_start%> not last named argument");
12137 /* Undefined by C99 7.15.1.4p4 (va_start):
12138 "If the parameter parmN is declared with the register storage
12139 class, with a function or array type, or with a type that is
12140 not compatible with the type that results after application of
12141 the default argument promotions, the behavior is undefined."
12143 else if (DECL_REGISTER (arg))
12145 warning_at (current_location,
12146 OPT_Wvarargs,
12147 "undefined behaviour when second parameter of "
12148 "%<va_start%> is declared with %<register%> storage");
12151 /* We want to verify the second parameter just once before the tree
12152 optimizers are run and then avoid keeping it in the tree,
12153 as otherwise we could warn even for correct code like:
12154 void foo (int i, ...)
12155 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12156 if (va_start_p)
12157 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12158 else
12159 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12161 return false;
12165 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12166 ORIG may be null if this is a 2-argument call. We don't attempt to
12167 simplify calls with more than 3 arguments.
12169 Return NULL_TREE if no simplification was possible, otherwise return the
12170 simplified form of the call as a tree. If IGNORED is true, it means that
12171 the caller does not use the returned value of the function. */
12173 static tree
12174 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12175 tree orig, int ignored)
12177 tree call, retval;
12178 const char *fmt_str = NULL;
12180 /* Verify the required arguments in the original call. We deal with two
12181 types of sprintf() calls: 'sprintf (str, fmt)' and
12182 'sprintf (dest, "%s", orig)'. */
12183 if (!validate_arg (dest, POINTER_TYPE)
12184 || !validate_arg (fmt, POINTER_TYPE))
12185 return NULL_TREE;
12186 if (orig && !validate_arg (orig, POINTER_TYPE))
12187 return NULL_TREE;
12189 /* Check whether the format is a literal string constant. */
12190 fmt_str = c_getstr (fmt);
12191 if (fmt_str == NULL)
12192 return NULL_TREE;
12194 call = NULL_TREE;
12195 retval = NULL_TREE;
12197 if (!init_target_chars ())
12198 return NULL_TREE;
12200 /* If the format doesn't contain % args or %%, use strcpy. */
12201 if (strchr (fmt_str, target_percent) == NULL)
12203 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12205 if (!fn)
12206 return NULL_TREE;
12208 /* Don't optimize sprintf (buf, "abc", ptr++). */
12209 if (orig)
12210 return NULL_TREE;
12212 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12213 'format' is known to contain no % formats. */
12214 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12215 if (!ignored)
12216 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12219 /* If the format is "%s", use strcpy if the result isn't used. */
12220 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12222 tree fn;
12223 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12225 if (!fn)
12226 return NULL_TREE;
12228 /* Don't crash on sprintf (str1, "%s"). */
12229 if (!orig)
12230 return NULL_TREE;
12232 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12233 if (!ignored)
12235 retval = c_strlen (orig, 1);
12236 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12237 return NULL_TREE;
12239 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12242 if (call && retval)
12244 retval = fold_convert_loc
12245 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12246 retval);
12247 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12249 else
12250 return call;
12253 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12254 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12255 attempt to simplify calls with more than 4 arguments.
12257 Return NULL_TREE if no simplification was possible, otherwise return the
12258 simplified form of the call as a tree. If IGNORED is true, it means that
12259 the caller does not use the returned value of the function. */
12261 static tree
12262 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12263 tree orig, int ignored)
12265 tree call, retval;
12266 const char *fmt_str = NULL;
12267 unsigned HOST_WIDE_INT destlen;
12269 /* Verify the required arguments in the original call. We deal with two
12270 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12271 'snprintf (dest, cst, "%s", orig)'. */
12272 if (!validate_arg (dest, POINTER_TYPE)
12273 || !validate_arg (destsize, INTEGER_TYPE)
12274 || !validate_arg (fmt, POINTER_TYPE))
12275 return NULL_TREE;
12276 if (orig && !validate_arg (orig, POINTER_TYPE))
12277 return NULL_TREE;
12279 if (!tree_fits_uhwi_p (destsize))
12280 return NULL_TREE;
12282 /* Check whether the format is a literal string constant. */
12283 fmt_str = c_getstr (fmt);
12284 if (fmt_str == NULL)
12285 return NULL_TREE;
12287 call = NULL_TREE;
12288 retval = NULL_TREE;
12290 if (!init_target_chars ())
12291 return NULL_TREE;
12293 destlen = tree_to_uhwi (destsize);
12295 /* If the format doesn't contain % args or %%, use strcpy. */
12296 if (strchr (fmt_str, target_percent) == NULL)
12298 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12299 size_t len = strlen (fmt_str);
12301 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12302 if (orig)
12303 return NULL_TREE;
12305 /* We could expand this as
12306 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12307 or to
12308 memcpy (str, fmt_with_nul_at_cstm1, cst);
12309 but in the former case that might increase code size
12310 and in the latter case grow .rodata section too much.
12311 So punt for now. */
12312 if (len >= destlen)
12313 return NULL_TREE;
12315 if (!fn)
12316 return NULL_TREE;
12318 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12319 'format' is known to contain no % formats and
12320 strlen (fmt) < cst. */
12321 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12323 if (!ignored)
12324 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12327 /* If the format is "%s", use strcpy if the result isn't used. */
12328 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12330 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12331 unsigned HOST_WIDE_INT origlen;
12333 /* Don't crash on snprintf (str1, cst, "%s"). */
12334 if (!orig)
12335 return NULL_TREE;
12337 retval = c_strlen (orig, 1);
12338 if (!retval || !tree_fits_uhwi_p (retval))
12339 return NULL_TREE;
12341 origlen = tree_to_uhwi (retval);
12342 /* We could expand this as
12343 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12344 or to
12345 memcpy (str1, str2_with_nul_at_cstm1, cst);
12346 but in the former case that might increase code size
12347 and in the latter case grow .rodata section too much.
12348 So punt for now. */
12349 if (origlen >= destlen)
12350 return NULL_TREE;
12352 /* Convert snprintf (str1, cst, "%s", str2) into
12353 strcpy (str1, str2) if strlen (str2) < cst. */
12354 if (!fn)
12355 return NULL_TREE;
12357 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12359 if (ignored)
12360 retval = NULL_TREE;
12363 if (call && retval)
12365 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12366 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12367 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12369 else
12370 return call;
12373 /* Expand a call EXP to __builtin_object_size. */
12376 expand_builtin_object_size (tree exp)
12378 tree ost;
12379 int object_size_type;
12380 tree fndecl = get_callee_fndecl (exp);
12382 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12384 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12385 exp, fndecl);
12386 expand_builtin_trap ();
12387 return const0_rtx;
12390 ost = CALL_EXPR_ARG (exp, 1);
12391 STRIP_NOPS (ost);
12393 if (TREE_CODE (ost) != INTEGER_CST
12394 || tree_int_cst_sgn (ost) < 0
12395 || compare_tree_int (ost, 3) > 0)
12397 error ("%Klast argument of %D is not integer constant between 0 and 3",
12398 exp, fndecl);
12399 expand_builtin_trap ();
12400 return const0_rtx;
12403 object_size_type = tree_to_shwi (ost);
12405 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12408 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12409 FCODE is the BUILT_IN_* to use.
12410 Return NULL_RTX if we failed; the caller should emit a normal call,
12411 otherwise try to get the result in TARGET, if convenient (and in
12412 mode MODE if that's convenient). */
12414 static rtx
12415 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12416 enum built_in_function fcode)
12418 tree dest, src, len, size;
12420 if (!validate_arglist (exp,
12421 POINTER_TYPE,
12422 fcode == BUILT_IN_MEMSET_CHK
12423 ? INTEGER_TYPE : POINTER_TYPE,
12424 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12425 return NULL_RTX;
12427 dest = CALL_EXPR_ARG (exp, 0);
12428 src = CALL_EXPR_ARG (exp, 1);
12429 len = CALL_EXPR_ARG (exp, 2);
12430 size = CALL_EXPR_ARG (exp, 3);
12432 if (! tree_fits_uhwi_p (size))
12433 return NULL_RTX;
12435 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12437 tree fn;
12439 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12441 warning_at (tree_nonartificial_location (exp),
12442 0, "%Kcall to %D will always overflow destination buffer",
12443 exp, get_callee_fndecl (exp));
12444 return NULL_RTX;
12447 fn = NULL_TREE;
12448 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12449 mem{cpy,pcpy,move,set} is available. */
12450 switch (fcode)
12452 case BUILT_IN_MEMCPY_CHK:
12453 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12454 break;
12455 case BUILT_IN_MEMPCPY_CHK:
12456 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12457 break;
12458 case BUILT_IN_MEMMOVE_CHK:
12459 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12460 break;
12461 case BUILT_IN_MEMSET_CHK:
12462 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12463 break;
12464 default:
12465 break;
12468 if (! fn)
12469 return NULL_RTX;
12471 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12472 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12473 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12474 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12476 else if (fcode == BUILT_IN_MEMSET_CHK)
12477 return NULL_RTX;
12478 else
12480 unsigned int dest_align = get_pointer_alignment (dest);
12482 /* If DEST is not a pointer type, call the normal function. */
12483 if (dest_align == 0)
12484 return NULL_RTX;
12486 /* If SRC and DEST are the same (and not volatile), do nothing. */
12487 if (operand_equal_p (src, dest, 0))
12489 tree expr;
12491 if (fcode != BUILT_IN_MEMPCPY_CHK)
12493 /* Evaluate and ignore LEN in case it has side-effects. */
12494 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12495 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12498 expr = fold_build_pointer_plus (dest, len);
12499 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12502 /* __memmove_chk special case. */
12503 if (fcode == BUILT_IN_MEMMOVE_CHK)
12505 unsigned int src_align = get_pointer_alignment (src);
12507 if (src_align == 0)
12508 return NULL_RTX;
12510 /* If src is categorized for a readonly section we can use
12511 normal __memcpy_chk. */
12512 if (readonly_data_expr (src))
12514 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12515 if (!fn)
12516 return NULL_RTX;
12517 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12518 dest, src, len, size);
12519 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12520 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12521 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12524 return NULL_RTX;
12528 /* Emit warning if a buffer overflow is detected at compile time. */
12530 static void
12531 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12533 int is_strlen = 0;
12534 tree len, size;
12535 location_t loc = tree_nonartificial_location (exp);
12537 switch (fcode)
12539 case BUILT_IN_STRCPY_CHK:
12540 case BUILT_IN_STPCPY_CHK:
12541 /* For __strcat_chk the warning will be emitted only if overflowing
12542 by at least strlen (dest) + 1 bytes. */
12543 case BUILT_IN_STRCAT_CHK:
12544 len = CALL_EXPR_ARG (exp, 1);
12545 size = CALL_EXPR_ARG (exp, 2);
12546 is_strlen = 1;
12547 break;
12548 case BUILT_IN_STRNCAT_CHK:
12549 case BUILT_IN_STRNCPY_CHK:
12550 case BUILT_IN_STPNCPY_CHK:
12551 len = CALL_EXPR_ARG (exp, 2);
12552 size = CALL_EXPR_ARG (exp, 3);
12553 break;
12554 case BUILT_IN_SNPRINTF_CHK:
12555 case BUILT_IN_VSNPRINTF_CHK:
12556 len = CALL_EXPR_ARG (exp, 1);
12557 size = CALL_EXPR_ARG (exp, 3);
12558 break;
12559 default:
12560 gcc_unreachable ();
12563 if (!len || !size)
12564 return;
12566 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12567 return;
12569 if (is_strlen)
12571 len = c_strlen (len, 1);
12572 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12573 return;
12575 else if (fcode == BUILT_IN_STRNCAT_CHK)
12577 tree src = CALL_EXPR_ARG (exp, 1);
12578 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12579 return;
12580 src = c_strlen (src, 1);
12581 if (! src || ! tree_fits_uhwi_p (src))
12583 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12584 exp, get_callee_fndecl (exp));
12585 return;
12587 else if (tree_int_cst_lt (src, size))
12588 return;
12590 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12591 return;
12593 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12594 exp, get_callee_fndecl (exp));
12597 /* Emit warning if a buffer overflow is detected at compile time
12598 in __sprintf_chk/__vsprintf_chk calls. */
12600 static void
12601 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12603 tree size, len, fmt;
12604 const char *fmt_str;
12605 int nargs = call_expr_nargs (exp);
12607 /* Verify the required arguments in the original call. */
12609 if (nargs < 4)
12610 return;
12611 size = CALL_EXPR_ARG (exp, 2);
12612 fmt = CALL_EXPR_ARG (exp, 3);
12614 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12615 return;
12617 /* Check whether the format is a literal string constant. */
12618 fmt_str = c_getstr (fmt);
12619 if (fmt_str == NULL)
12620 return;
12622 if (!init_target_chars ())
12623 return;
12625 /* If the format doesn't contain % args or %%, we know its size. */
12626 if (strchr (fmt_str, target_percent) == 0)
12627 len = build_int_cstu (size_type_node, strlen (fmt_str));
12628 /* If the format is "%s" and first ... argument is a string literal,
12629 we know it too. */
12630 else if (fcode == BUILT_IN_SPRINTF_CHK
12631 && strcmp (fmt_str, target_percent_s) == 0)
12633 tree arg;
12635 if (nargs < 5)
12636 return;
12637 arg = CALL_EXPR_ARG (exp, 4);
12638 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12639 return;
12641 len = c_strlen (arg, 1);
12642 if (!len || ! tree_fits_uhwi_p (len))
12643 return;
12645 else
12646 return;
12648 if (! tree_int_cst_lt (len, size))
12649 warning_at (tree_nonartificial_location (exp),
12650 0, "%Kcall to %D will always overflow destination buffer",
12651 exp, get_callee_fndecl (exp));
12654 /* Emit warning if a free is called with address of a variable. */
12656 static void
12657 maybe_emit_free_warning (tree exp)
12659 tree arg = CALL_EXPR_ARG (exp, 0);
12661 STRIP_NOPS (arg);
12662 if (TREE_CODE (arg) != ADDR_EXPR)
12663 return;
12665 arg = get_base_address (TREE_OPERAND (arg, 0));
12666 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12667 return;
12669 if (SSA_VAR_P (arg))
12670 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12671 "%Kattempt to free a non-heap object %qD", exp, arg);
12672 else
12673 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12674 "%Kattempt to free a non-heap object", exp);
12677 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12678 if possible. */
12680 tree
12681 fold_builtin_object_size (tree ptr, tree ost)
12683 unsigned HOST_WIDE_INT bytes;
12684 int object_size_type;
12686 if (!validate_arg (ptr, POINTER_TYPE)
12687 || !validate_arg (ost, INTEGER_TYPE))
12688 return NULL_TREE;
12690 STRIP_NOPS (ost);
12692 if (TREE_CODE (ost) != INTEGER_CST
12693 || tree_int_cst_sgn (ost) < 0
12694 || compare_tree_int (ost, 3) > 0)
12695 return NULL_TREE;
12697 object_size_type = tree_to_shwi (ost);
12699 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12700 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12701 and (size_t) 0 for types 2 and 3. */
12702 if (TREE_SIDE_EFFECTS (ptr))
12703 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12705 if (TREE_CODE (ptr) == ADDR_EXPR)
12707 bytes = compute_builtin_object_size (ptr, object_size_type);
12708 if (double_int_fits_to_tree_p (size_type_node,
12709 double_int::from_uhwi (bytes)))
12710 return build_int_cstu (size_type_node, bytes);
12712 else if (TREE_CODE (ptr) == SSA_NAME)
12714 /* If object size is not known yet, delay folding until
12715 later. Maybe subsequent passes will help determining
12716 it. */
12717 bytes = compute_builtin_object_size (ptr, object_size_type);
12718 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12719 && double_int_fits_to_tree_p (size_type_node,
12720 double_int::from_uhwi (bytes)))
12721 return build_int_cstu (size_type_node, bytes);
12724 return NULL_TREE;
12727 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12728 DEST, SRC, LEN, and SIZE are the arguments to the call.
12729 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12730 code of the builtin. If MAXLEN is not NULL, it is maximum length
12731 passed as third argument. */
12733 tree
12734 fold_builtin_memory_chk (location_t loc, tree fndecl,
12735 tree dest, tree src, tree len, tree size,
12736 tree maxlen, bool ignore,
12737 enum built_in_function fcode)
12739 tree fn;
12741 if (!validate_arg (dest, POINTER_TYPE)
12742 || !validate_arg (src,
12743 (fcode == BUILT_IN_MEMSET_CHK
12744 ? INTEGER_TYPE : POINTER_TYPE))
12745 || !validate_arg (len, INTEGER_TYPE)
12746 || !validate_arg (size, INTEGER_TYPE))
12747 return NULL_TREE;
12749 /* If SRC and DEST are the same (and not volatile), return DEST
12750 (resp. DEST+LEN for __mempcpy_chk). */
12751 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12753 if (fcode != BUILT_IN_MEMPCPY_CHK)
12754 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12755 dest, len);
12756 else
12758 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12759 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12763 if (! tree_fits_uhwi_p (size))
12764 return NULL_TREE;
12766 if (! integer_all_onesp (size))
12768 if (! tree_fits_uhwi_p (len))
12770 /* If LEN is not constant, try MAXLEN too.
12771 For MAXLEN only allow optimizing into non-_ocs function
12772 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12773 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12775 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12777 /* (void) __mempcpy_chk () can be optimized into
12778 (void) __memcpy_chk (). */
12779 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12780 if (!fn)
12781 return NULL_TREE;
12783 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12785 return NULL_TREE;
12788 else
12789 maxlen = len;
12791 if (tree_int_cst_lt (size, maxlen))
12792 return NULL_TREE;
12795 fn = NULL_TREE;
12796 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12797 mem{cpy,pcpy,move,set} is available. */
12798 switch (fcode)
12800 case BUILT_IN_MEMCPY_CHK:
12801 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12802 break;
12803 case BUILT_IN_MEMPCPY_CHK:
12804 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12805 break;
12806 case BUILT_IN_MEMMOVE_CHK:
12807 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12808 break;
12809 case BUILT_IN_MEMSET_CHK:
12810 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12811 break;
12812 default:
12813 break;
12816 if (!fn)
12817 return NULL_TREE;
12819 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12822 /* Fold a call to the __st[rp]cpy_chk builtin.
12823 DEST, SRC, and SIZE are the arguments to the call.
12824 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12825 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12826 strings passed as second argument. */
12828 tree
12829 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12830 tree src, tree size,
12831 tree maxlen, bool ignore,
12832 enum built_in_function fcode)
12834 tree len, fn;
12836 if (!validate_arg (dest, POINTER_TYPE)
12837 || !validate_arg (src, POINTER_TYPE)
12838 || !validate_arg (size, INTEGER_TYPE))
12839 return NULL_TREE;
12841 /* If SRC and DEST are the same (and not volatile), return DEST. */
12842 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12843 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12845 if (! tree_fits_uhwi_p (size))
12846 return NULL_TREE;
12848 if (! integer_all_onesp (size))
12850 len = c_strlen (src, 1);
12851 if (! len || ! tree_fits_uhwi_p (len))
12853 /* If LEN is not constant, try MAXLEN too.
12854 For MAXLEN only allow optimizing into non-_ocs function
12855 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12856 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12858 if (fcode == BUILT_IN_STPCPY_CHK)
12860 if (! ignore)
12861 return NULL_TREE;
12863 /* If return value of __stpcpy_chk is ignored,
12864 optimize into __strcpy_chk. */
12865 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12866 if (!fn)
12867 return NULL_TREE;
12869 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12872 if (! len || TREE_SIDE_EFFECTS (len))
12873 return NULL_TREE;
12875 /* If c_strlen returned something, but not a constant,
12876 transform __strcpy_chk into __memcpy_chk. */
12877 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12878 if (!fn)
12879 return NULL_TREE;
12881 len = fold_convert_loc (loc, size_type_node, len);
12882 len = size_binop_loc (loc, PLUS_EXPR, len,
12883 build_int_cst (size_type_node, 1));
12884 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12885 build_call_expr_loc (loc, fn, 4,
12886 dest, src, len, size));
12889 else
12890 maxlen = len;
12892 if (! tree_int_cst_lt (maxlen, size))
12893 return NULL_TREE;
12896 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12897 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12898 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12899 if (!fn)
12900 return NULL_TREE;
12902 return build_call_expr_loc (loc, fn, 2, dest, src);
12905 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12906 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12907 length passed as third argument. IGNORE is true if return value can be
12908 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12910 tree
12911 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12912 tree len, tree size, tree maxlen, bool ignore,
12913 enum built_in_function fcode)
12915 tree fn;
12917 if (!validate_arg (dest, POINTER_TYPE)
12918 || !validate_arg (src, POINTER_TYPE)
12919 || !validate_arg (len, INTEGER_TYPE)
12920 || !validate_arg (size, INTEGER_TYPE))
12921 return NULL_TREE;
12923 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12925 /* If return value of __stpncpy_chk is ignored,
12926 optimize into __strncpy_chk. */
12927 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12928 if (fn)
12929 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12932 if (! tree_fits_uhwi_p (size))
12933 return NULL_TREE;
12935 if (! integer_all_onesp (size))
12937 if (! tree_fits_uhwi_p (len))
12939 /* If LEN is not constant, try MAXLEN too.
12940 For MAXLEN only allow optimizing into non-_ocs function
12941 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12942 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12943 return NULL_TREE;
12945 else
12946 maxlen = len;
12948 if (tree_int_cst_lt (size, maxlen))
12949 return NULL_TREE;
12952 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12953 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12954 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12955 if (!fn)
12956 return NULL_TREE;
12958 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12961 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12962 are the arguments to the call. */
12964 static tree
12965 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12966 tree src, tree size)
12968 tree fn;
12969 const char *p;
12971 if (!validate_arg (dest, POINTER_TYPE)
12972 || !validate_arg (src, POINTER_TYPE)
12973 || !validate_arg (size, INTEGER_TYPE))
12974 return NULL_TREE;
12976 p = c_getstr (src);
12977 /* If the SRC parameter is "", return DEST. */
12978 if (p && *p == '\0')
12979 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12981 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12982 return NULL_TREE;
12984 /* If __builtin_strcat_chk is used, assume strcat is available. */
12985 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12986 if (!fn)
12987 return NULL_TREE;
12989 return build_call_expr_loc (loc, fn, 2, dest, src);
12992 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12993 LEN, and SIZE. */
12995 static tree
12996 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12997 tree dest, tree src, tree len, tree size)
12999 tree fn;
13000 const char *p;
13002 if (!validate_arg (dest, POINTER_TYPE)
13003 || !validate_arg (src, POINTER_TYPE)
13004 || !validate_arg (size, INTEGER_TYPE)
13005 || !validate_arg (size, INTEGER_TYPE))
13006 return NULL_TREE;
13008 p = c_getstr (src);
13009 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13010 if (p && *p == '\0')
13011 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13012 else if (integer_zerop (len))
13013 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13015 if (! tree_fits_uhwi_p (size))
13016 return NULL_TREE;
13018 if (! integer_all_onesp (size))
13020 tree src_len = c_strlen (src, 1);
13021 if (src_len
13022 && tree_fits_uhwi_p (src_len)
13023 && tree_fits_uhwi_p (len)
13024 && ! tree_int_cst_lt (len, src_len))
13026 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13027 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13028 if (!fn)
13029 return NULL_TREE;
13031 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13033 return NULL_TREE;
13036 /* If __builtin_strncat_chk is used, assume strncat is available. */
13037 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13038 if (!fn)
13039 return NULL_TREE;
13041 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13044 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13045 Return NULL_TREE if a normal call should be emitted rather than
13046 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13047 or BUILT_IN_VSPRINTF_CHK. */
13049 static tree
13050 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13051 enum built_in_function fcode)
13053 tree dest, size, len, fn, fmt, flag;
13054 const char *fmt_str;
13056 /* Verify the required arguments in the original call. */
13057 if (nargs < 4)
13058 return NULL_TREE;
13059 dest = args[0];
13060 if (!validate_arg (dest, POINTER_TYPE))
13061 return NULL_TREE;
13062 flag = args[1];
13063 if (!validate_arg (flag, INTEGER_TYPE))
13064 return NULL_TREE;
13065 size = args[2];
13066 if (!validate_arg (size, INTEGER_TYPE))
13067 return NULL_TREE;
13068 fmt = args[3];
13069 if (!validate_arg (fmt, POINTER_TYPE))
13070 return NULL_TREE;
13072 if (! tree_fits_uhwi_p (size))
13073 return NULL_TREE;
13075 len = NULL_TREE;
13077 if (!init_target_chars ())
13078 return NULL_TREE;
13080 /* Check whether the format is a literal string constant. */
13081 fmt_str = c_getstr (fmt);
13082 if (fmt_str != NULL)
13084 /* If the format doesn't contain % args or %%, we know the size. */
13085 if (strchr (fmt_str, target_percent) == 0)
13087 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13088 len = build_int_cstu (size_type_node, strlen (fmt_str));
13090 /* If the format is "%s" and first ... argument is a string literal,
13091 we know the size too. */
13092 else if (fcode == BUILT_IN_SPRINTF_CHK
13093 && strcmp (fmt_str, target_percent_s) == 0)
13095 tree arg;
13097 if (nargs == 5)
13099 arg = args[4];
13100 if (validate_arg (arg, POINTER_TYPE))
13102 len = c_strlen (arg, 1);
13103 if (! len || ! tree_fits_uhwi_p (len))
13104 len = NULL_TREE;
13110 if (! integer_all_onesp (size))
13112 if (! len || ! tree_int_cst_lt (len, size))
13113 return NULL_TREE;
13116 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13117 or if format doesn't contain % chars or is "%s". */
13118 if (! integer_zerop (flag))
13120 if (fmt_str == NULL)
13121 return NULL_TREE;
13122 if (strchr (fmt_str, target_percent) != NULL
13123 && strcmp (fmt_str, target_percent_s))
13124 return NULL_TREE;
13127 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13128 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13129 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13130 if (!fn)
13131 return NULL_TREE;
13133 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13136 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13137 a normal call should be emitted rather than expanding the function
13138 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13140 static tree
13141 fold_builtin_sprintf_chk (location_t loc, tree exp,
13142 enum built_in_function fcode)
13144 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13145 CALL_EXPR_ARGP (exp), fcode);
13148 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13149 NULL_TREE if a normal call should be emitted rather than expanding
13150 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13151 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13152 passed as second argument. */
13154 static tree
13155 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13156 tree maxlen, enum built_in_function fcode)
13158 tree dest, size, len, fn, fmt, flag;
13159 const char *fmt_str;
13161 /* Verify the required arguments in the original call. */
13162 if (nargs < 5)
13163 return NULL_TREE;
13164 dest = args[0];
13165 if (!validate_arg (dest, POINTER_TYPE))
13166 return NULL_TREE;
13167 len = args[1];
13168 if (!validate_arg (len, INTEGER_TYPE))
13169 return NULL_TREE;
13170 flag = args[2];
13171 if (!validate_arg (flag, INTEGER_TYPE))
13172 return NULL_TREE;
13173 size = args[3];
13174 if (!validate_arg (size, INTEGER_TYPE))
13175 return NULL_TREE;
13176 fmt = args[4];
13177 if (!validate_arg (fmt, POINTER_TYPE))
13178 return NULL_TREE;
13180 if (! tree_fits_uhwi_p (size))
13181 return NULL_TREE;
13183 if (! integer_all_onesp (size))
13185 if (! tree_fits_uhwi_p (len))
13187 /* If LEN is not constant, try MAXLEN too.
13188 For MAXLEN only allow optimizing into non-_ocs function
13189 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13190 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13191 return NULL_TREE;
13193 else
13194 maxlen = len;
13196 if (tree_int_cst_lt (size, maxlen))
13197 return NULL_TREE;
13200 if (!init_target_chars ())
13201 return NULL_TREE;
13203 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13204 or if format doesn't contain % chars or is "%s". */
13205 if (! integer_zerop (flag))
13207 fmt_str = c_getstr (fmt);
13208 if (fmt_str == NULL)
13209 return NULL_TREE;
13210 if (strchr (fmt_str, target_percent) != NULL
13211 && strcmp (fmt_str, target_percent_s))
13212 return NULL_TREE;
13215 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13216 available. */
13217 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13218 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13219 if (!fn)
13220 return NULL_TREE;
13222 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13225 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13226 a normal call should be emitted rather than expanding the function
13227 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13228 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13229 passed as second argument. */
13231 static tree
13232 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13233 enum built_in_function fcode)
13235 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13236 CALL_EXPR_ARGP (exp), maxlen, fcode);
13239 /* Builtins with folding operations that operate on "..." arguments
13240 need special handling; we need to store the arguments in a convenient
13241 data structure before attempting any folding. Fortunately there are
13242 only a few builtins that fall into this category. FNDECL is the
13243 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13244 result of the function call is ignored. */
13246 static tree
13247 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13248 bool ignore ATTRIBUTE_UNUSED)
13250 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13251 tree ret = NULL_TREE;
13253 switch (fcode)
13255 case BUILT_IN_SPRINTF_CHK:
13256 case BUILT_IN_VSPRINTF_CHK:
13257 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13258 break;
13260 case BUILT_IN_SNPRINTF_CHK:
13261 case BUILT_IN_VSNPRINTF_CHK:
13262 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13263 break;
13265 case BUILT_IN_FPCLASSIFY:
13266 ret = fold_builtin_fpclassify (loc, exp);
13267 break;
13269 default:
13270 break;
13272 if (ret)
13274 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13275 SET_EXPR_LOCATION (ret, loc);
13276 TREE_NO_WARNING (ret) = 1;
13277 return ret;
13279 return NULL_TREE;
13282 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13283 FMT and ARG are the arguments to the call; we don't fold cases with
13284 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13286 Return NULL_TREE if no simplification was possible, otherwise return the
13287 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13288 code of the function to be simplified. */
13290 static tree
13291 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13292 tree arg, bool ignore,
13293 enum built_in_function fcode)
13295 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13296 const char *fmt_str = NULL;
13298 /* If the return value is used, don't do the transformation. */
13299 if (! ignore)
13300 return NULL_TREE;
13302 /* Verify the required arguments in the original call. */
13303 if (!validate_arg (fmt, POINTER_TYPE))
13304 return NULL_TREE;
13306 /* Check whether the format is a literal string constant. */
13307 fmt_str = c_getstr (fmt);
13308 if (fmt_str == NULL)
13309 return NULL_TREE;
13311 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13313 /* If we're using an unlocked function, assume the other
13314 unlocked functions exist explicitly. */
13315 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13316 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13318 else
13320 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13321 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13324 if (!init_target_chars ())
13325 return NULL_TREE;
13327 if (strcmp (fmt_str, target_percent_s) == 0
13328 || strchr (fmt_str, target_percent) == NULL)
13330 const char *str;
13332 if (strcmp (fmt_str, target_percent_s) == 0)
13334 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13335 return NULL_TREE;
13337 if (!arg || !validate_arg (arg, POINTER_TYPE))
13338 return NULL_TREE;
13340 str = c_getstr (arg);
13341 if (str == NULL)
13342 return NULL_TREE;
13344 else
13346 /* The format specifier doesn't contain any '%' characters. */
13347 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13348 && arg)
13349 return NULL_TREE;
13350 str = fmt_str;
13353 /* If the string was "", printf does nothing. */
13354 if (str[0] == '\0')
13355 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13357 /* If the string has length of 1, call putchar. */
13358 if (str[1] == '\0')
13360 /* Given printf("c"), (where c is any one character,)
13361 convert "c"[0] to an int and pass that to the replacement
13362 function. */
13363 newarg = build_int_cst (integer_type_node, str[0]);
13364 if (fn_putchar)
13365 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13367 else
13369 /* If the string was "string\n", call puts("string"). */
13370 size_t len = strlen (str);
13371 if ((unsigned char)str[len - 1] == target_newline
13372 && (size_t) (int) len == len
13373 && (int) len > 0)
13375 char *newstr;
13376 tree offset_node, string_cst;
13378 /* Create a NUL-terminated string that's one char shorter
13379 than the original, stripping off the trailing '\n'. */
13380 newarg = build_string_literal (len, str);
13381 string_cst = string_constant (newarg, &offset_node);
13382 gcc_checking_assert (string_cst
13383 && (TREE_STRING_LENGTH (string_cst)
13384 == (int) len)
13385 && integer_zerop (offset_node)
13386 && (unsigned char)
13387 TREE_STRING_POINTER (string_cst)[len - 1]
13388 == target_newline);
13389 /* build_string_literal creates a new STRING_CST,
13390 modify it in place to avoid double copying. */
13391 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13392 newstr[len - 1] = '\0';
13393 if (fn_puts)
13394 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13396 else
13397 /* We'd like to arrange to call fputs(string,stdout) here,
13398 but we need stdout and don't have a way to get it yet. */
13399 return NULL_TREE;
13403 /* The other optimizations can be done only on the non-va_list variants. */
13404 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13405 return NULL_TREE;
13407 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13408 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13410 if (!arg || !validate_arg (arg, POINTER_TYPE))
13411 return NULL_TREE;
13412 if (fn_puts)
13413 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13416 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13417 else if (strcmp (fmt_str, target_percent_c) == 0)
13419 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13420 return NULL_TREE;
13421 if (fn_putchar)
13422 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13425 if (!call)
13426 return NULL_TREE;
13428 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13431 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13432 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13433 more than 3 arguments, and ARG may be null in the 2-argument case.
13435 Return NULL_TREE if no simplification was possible, otherwise return the
13436 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13437 code of the function to be simplified. */
13439 static tree
13440 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13441 tree fmt, tree arg, bool ignore,
13442 enum built_in_function fcode)
13444 tree fn_fputc, fn_fputs, call = NULL_TREE;
13445 const char *fmt_str = NULL;
13447 /* If the return value is used, don't do the transformation. */
13448 if (! ignore)
13449 return NULL_TREE;
13451 /* Verify the required arguments in the original call. */
13452 if (!validate_arg (fp, POINTER_TYPE))
13453 return NULL_TREE;
13454 if (!validate_arg (fmt, POINTER_TYPE))
13455 return NULL_TREE;
13457 /* Check whether the format is a literal string constant. */
13458 fmt_str = c_getstr (fmt);
13459 if (fmt_str == NULL)
13460 return NULL_TREE;
13462 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13464 /* If we're using an unlocked function, assume the other
13465 unlocked functions exist explicitly. */
13466 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13467 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13469 else
13471 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13472 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13475 if (!init_target_chars ())
13476 return NULL_TREE;
13478 /* If the format doesn't contain % args or %%, use strcpy. */
13479 if (strchr (fmt_str, target_percent) == NULL)
13481 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13482 && arg)
13483 return NULL_TREE;
13485 /* If the format specifier was "", fprintf does nothing. */
13486 if (fmt_str[0] == '\0')
13488 /* If FP has side-effects, just wait until gimplification is
13489 done. */
13490 if (TREE_SIDE_EFFECTS (fp))
13491 return NULL_TREE;
13493 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13496 /* When "string" doesn't contain %, replace all cases of
13497 fprintf (fp, string) with fputs (string, fp). The fputs
13498 builtin will take care of special cases like length == 1. */
13499 if (fn_fputs)
13500 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13503 /* The other optimizations can be done only on the non-va_list variants. */
13504 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13505 return NULL_TREE;
13507 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13508 else if (strcmp (fmt_str, target_percent_s) == 0)
13510 if (!arg || !validate_arg (arg, POINTER_TYPE))
13511 return NULL_TREE;
13512 if (fn_fputs)
13513 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13516 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13517 else if (strcmp (fmt_str, target_percent_c) == 0)
13519 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13520 return NULL_TREE;
13521 if (fn_fputc)
13522 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13525 if (!call)
13526 return NULL_TREE;
13527 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13530 /* Initialize format string characters in the target charset. */
13532 static bool
13533 init_target_chars (void)
13535 static bool init;
13536 if (!init)
13538 target_newline = lang_hooks.to_target_charset ('\n');
13539 target_percent = lang_hooks.to_target_charset ('%');
13540 target_c = lang_hooks.to_target_charset ('c');
13541 target_s = lang_hooks.to_target_charset ('s');
13542 if (target_newline == 0 || target_percent == 0 || target_c == 0
13543 || target_s == 0)
13544 return false;
13546 target_percent_c[0] = target_percent;
13547 target_percent_c[1] = target_c;
13548 target_percent_c[2] = '\0';
13550 target_percent_s[0] = target_percent;
13551 target_percent_s[1] = target_s;
13552 target_percent_s[2] = '\0';
13554 target_percent_s_newline[0] = target_percent;
13555 target_percent_s_newline[1] = target_s;
13556 target_percent_s_newline[2] = target_newline;
13557 target_percent_s_newline[3] = '\0';
13559 init = true;
13561 return true;
13564 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13565 and no overflow/underflow occurred. INEXACT is true if M was not
13566 exactly calculated. TYPE is the tree type for the result. This
13567 function assumes that you cleared the MPFR flags and then
13568 calculated M to see if anything subsequently set a flag prior to
13569 entering this function. Return NULL_TREE if any checks fail. */
13571 static tree
13572 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13574 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13575 overflow/underflow occurred. If -frounding-math, proceed iff the
13576 result of calling FUNC was exact. */
13577 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13578 && (!flag_rounding_math || !inexact))
13580 REAL_VALUE_TYPE rr;
13582 real_from_mpfr (&rr, m, type, GMP_RNDN);
13583 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13584 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13585 but the mpft_t is not, then we underflowed in the
13586 conversion. */
13587 if (real_isfinite (&rr)
13588 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13590 REAL_VALUE_TYPE rmode;
13592 real_convert (&rmode, TYPE_MODE (type), &rr);
13593 /* Proceed iff the specified mode can hold the value. */
13594 if (real_identical (&rmode, &rr))
13595 return build_real (type, rmode);
13598 return NULL_TREE;
13601 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13602 number and no overflow/underflow occurred. INEXACT is true if M
13603 was not exactly calculated. TYPE is the tree type for the result.
13604 This function assumes that you cleared the MPFR flags and then
13605 calculated M to see if anything subsequently set a flag prior to
13606 entering this function. Return NULL_TREE if any checks fail, if
13607 FORCE_CONVERT is true, then bypass the checks. */
13609 static tree
13610 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13612 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13613 overflow/underflow occurred. If -frounding-math, proceed iff the
13614 result of calling FUNC was exact. */
13615 if (force_convert
13616 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13617 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13618 && (!flag_rounding_math || !inexact)))
13620 REAL_VALUE_TYPE re, im;
13622 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13623 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13624 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13625 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13626 but the mpft_t is not, then we underflowed in the
13627 conversion. */
13628 if (force_convert
13629 || (real_isfinite (&re) && real_isfinite (&im)
13630 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13631 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13633 REAL_VALUE_TYPE re_mode, im_mode;
13635 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13636 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13637 /* Proceed iff the specified mode can hold the value. */
13638 if (force_convert
13639 || (real_identical (&re_mode, &re)
13640 && real_identical (&im_mode, &im)))
13641 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13642 build_real (TREE_TYPE (type), im_mode));
13645 return NULL_TREE;
13648 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13649 FUNC on it and return the resulting value as a tree with type TYPE.
13650 If MIN and/or MAX are not NULL, then the supplied ARG must be
13651 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13652 acceptable values, otherwise they are not. The mpfr precision is
13653 set to the precision of TYPE. We assume that function FUNC returns
13654 zero if the result could be calculated exactly within the requested
13655 precision. */
13657 static tree
13658 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13659 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13660 bool inclusive)
13662 tree result = NULL_TREE;
13664 STRIP_NOPS (arg);
13666 /* To proceed, MPFR must exactly represent the target floating point
13667 format, which only happens when the target base equals two. */
13668 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13669 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13671 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13673 if (real_isfinite (ra)
13674 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13675 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13677 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13678 const int prec = fmt->p;
13679 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13680 int inexact;
13681 mpfr_t m;
13683 mpfr_init2 (m, prec);
13684 mpfr_from_real (m, ra, GMP_RNDN);
13685 mpfr_clear_flags ();
13686 inexact = func (m, m, rnd);
13687 result = do_mpfr_ckconv (m, type, inexact);
13688 mpfr_clear (m);
13692 return result;
13695 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13696 FUNC on it and return the resulting value as a tree with type TYPE.
13697 The mpfr precision is set to the precision of TYPE. We assume that
13698 function FUNC returns zero if the result could be calculated
13699 exactly within the requested precision. */
13701 static tree
13702 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13703 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13705 tree result = NULL_TREE;
13707 STRIP_NOPS (arg1);
13708 STRIP_NOPS (arg2);
13710 /* To proceed, MPFR must exactly represent the target floating point
13711 format, which only happens when the target base equals two. */
13712 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13713 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13714 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13716 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13717 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13719 if (real_isfinite (ra1) && real_isfinite (ra2))
13721 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13722 const int prec = fmt->p;
13723 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13724 int inexact;
13725 mpfr_t m1, m2;
13727 mpfr_inits2 (prec, m1, m2, NULL);
13728 mpfr_from_real (m1, ra1, GMP_RNDN);
13729 mpfr_from_real (m2, ra2, GMP_RNDN);
13730 mpfr_clear_flags ();
13731 inexact = func (m1, m1, m2, rnd);
13732 result = do_mpfr_ckconv (m1, type, inexact);
13733 mpfr_clears (m1, m2, NULL);
13737 return result;
13740 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13741 FUNC on it and return the resulting value as a tree with type TYPE.
13742 The mpfr precision is set to the precision of TYPE. We assume that
13743 function FUNC returns zero if the result could be calculated
13744 exactly within the requested precision. */
13746 static tree
13747 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13748 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13750 tree result = NULL_TREE;
13752 STRIP_NOPS (arg1);
13753 STRIP_NOPS (arg2);
13754 STRIP_NOPS (arg3);
13756 /* To proceed, MPFR must exactly represent the target floating point
13757 format, which only happens when the target base equals two. */
13758 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13759 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13760 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13761 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13763 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13764 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13765 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13767 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13769 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13770 const int prec = fmt->p;
13771 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13772 int inexact;
13773 mpfr_t m1, m2, m3;
13775 mpfr_inits2 (prec, m1, m2, m3, NULL);
13776 mpfr_from_real (m1, ra1, GMP_RNDN);
13777 mpfr_from_real (m2, ra2, GMP_RNDN);
13778 mpfr_from_real (m3, ra3, GMP_RNDN);
13779 mpfr_clear_flags ();
13780 inexact = func (m1, m1, m2, m3, rnd);
13781 result = do_mpfr_ckconv (m1, type, inexact);
13782 mpfr_clears (m1, m2, m3, NULL);
13786 return result;
13789 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13790 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13791 If ARG_SINP and ARG_COSP are NULL then the result is returned
13792 as a complex value.
13793 The type is taken from the type of ARG and is used for setting the
13794 precision of the calculation and results. */
13796 static tree
13797 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13799 tree const type = TREE_TYPE (arg);
13800 tree result = NULL_TREE;
13802 STRIP_NOPS (arg);
13804 /* To proceed, MPFR must exactly represent the target floating point
13805 format, which only happens when the target base equals two. */
13806 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13807 && TREE_CODE (arg) == REAL_CST
13808 && !TREE_OVERFLOW (arg))
13810 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13812 if (real_isfinite (ra))
13814 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13815 const int prec = fmt->p;
13816 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13817 tree result_s, result_c;
13818 int inexact;
13819 mpfr_t m, ms, mc;
13821 mpfr_inits2 (prec, m, ms, mc, NULL);
13822 mpfr_from_real (m, ra, GMP_RNDN);
13823 mpfr_clear_flags ();
13824 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13825 result_s = do_mpfr_ckconv (ms, type, inexact);
13826 result_c = do_mpfr_ckconv (mc, type, inexact);
13827 mpfr_clears (m, ms, mc, NULL);
13828 if (result_s && result_c)
13830 /* If we are to return in a complex value do so. */
13831 if (!arg_sinp && !arg_cosp)
13832 return build_complex (build_complex_type (type),
13833 result_c, result_s);
13835 /* Dereference the sin/cos pointer arguments. */
13836 arg_sinp = build_fold_indirect_ref (arg_sinp);
13837 arg_cosp = build_fold_indirect_ref (arg_cosp);
13838 /* Proceed if valid pointer type were passed in. */
13839 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13840 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13842 /* Set the values. */
13843 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13844 result_s);
13845 TREE_SIDE_EFFECTS (result_s) = 1;
13846 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13847 result_c);
13848 TREE_SIDE_EFFECTS (result_c) = 1;
13849 /* Combine the assignments into a compound expr. */
13850 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13851 result_s, result_c));
13856 return result;
13859 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13860 two-argument mpfr order N Bessel function FUNC on them and return
13861 the resulting value as a tree with type TYPE. The mpfr precision
13862 is set to the precision of TYPE. We assume that function FUNC
13863 returns zero if the result could be calculated exactly within the
13864 requested precision. */
13865 static tree
13866 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13867 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13868 const REAL_VALUE_TYPE *min, bool inclusive)
13870 tree result = NULL_TREE;
13872 STRIP_NOPS (arg1);
13873 STRIP_NOPS (arg2);
13875 /* To proceed, MPFR must exactly represent the target floating point
13876 format, which only happens when the target base equals two. */
13877 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13878 && tree_fits_shwi_p (arg1)
13879 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13881 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13882 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13884 if (n == (long)n
13885 && real_isfinite (ra)
13886 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13888 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13889 const int prec = fmt->p;
13890 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13891 int inexact;
13892 mpfr_t m;
13894 mpfr_init2 (m, prec);
13895 mpfr_from_real (m, ra, GMP_RNDN);
13896 mpfr_clear_flags ();
13897 inexact = func (m, n, m, rnd);
13898 result = do_mpfr_ckconv (m, type, inexact);
13899 mpfr_clear (m);
13903 return result;
13906 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13907 the pointer *(ARG_QUO) and return the result. The type is taken
13908 from the type of ARG0 and is used for setting the precision of the
13909 calculation and results. */
13911 static tree
13912 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13914 tree const type = TREE_TYPE (arg0);
13915 tree result = NULL_TREE;
13917 STRIP_NOPS (arg0);
13918 STRIP_NOPS (arg1);
13920 /* To proceed, MPFR must exactly represent the target floating point
13921 format, which only happens when the target base equals two. */
13922 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13923 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13924 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13926 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13927 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13929 if (real_isfinite (ra0) && real_isfinite (ra1))
13931 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13932 const int prec = fmt->p;
13933 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13934 tree result_rem;
13935 long integer_quo;
13936 mpfr_t m0, m1;
13938 mpfr_inits2 (prec, m0, m1, NULL);
13939 mpfr_from_real (m0, ra0, GMP_RNDN);
13940 mpfr_from_real (m1, ra1, GMP_RNDN);
13941 mpfr_clear_flags ();
13942 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13943 /* Remquo is independent of the rounding mode, so pass
13944 inexact=0 to do_mpfr_ckconv(). */
13945 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13946 mpfr_clears (m0, m1, NULL);
13947 if (result_rem)
13949 /* MPFR calculates quo in the host's long so it may
13950 return more bits in quo than the target int can hold
13951 if sizeof(host long) > sizeof(target int). This can
13952 happen even for native compilers in LP64 mode. In
13953 these cases, modulo the quo value with the largest
13954 number that the target int can hold while leaving one
13955 bit for the sign. */
13956 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13957 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13959 /* Dereference the quo pointer argument. */
13960 arg_quo = build_fold_indirect_ref (arg_quo);
13961 /* Proceed iff a valid pointer type was passed in. */
13962 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13964 /* Set the value. */
13965 tree result_quo
13966 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13967 build_int_cst (TREE_TYPE (arg_quo),
13968 integer_quo));
13969 TREE_SIDE_EFFECTS (result_quo) = 1;
13970 /* Combine the quo assignment with the rem. */
13971 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13972 result_quo, result_rem));
13977 return result;
13980 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13981 resulting value as a tree with type TYPE. The mpfr precision is
13982 set to the precision of TYPE. We assume that this mpfr function
13983 returns zero if the result could be calculated exactly within the
13984 requested precision. In addition, the integer pointer represented
13985 by ARG_SG will be dereferenced and set to the appropriate signgam
13986 (-1,1) value. */
13988 static tree
13989 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13991 tree result = NULL_TREE;
13993 STRIP_NOPS (arg);
13995 /* To proceed, MPFR must exactly represent the target floating point
13996 format, which only happens when the target base equals two. Also
13997 verify ARG is a constant and that ARG_SG is an int pointer. */
13998 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13999 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14000 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14001 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14003 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14005 /* In addition to NaN and Inf, the argument cannot be zero or a
14006 negative integer. */
14007 if (real_isfinite (ra)
14008 && ra->cl != rvc_zero
14009 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14011 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14012 const int prec = fmt->p;
14013 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14014 int inexact, sg;
14015 mpfr_t m;
14016 tree result_lg;
14018 mpfr_init2 (m, prec);
14019 mpfr_from_real (m, ra, GMP_RNDN);
14020 mpfr_clear_flags ();
14021 inexact = mpfr_lgamma (m, &sg, m, rnd);
14022 result_lg = do_mpfr_ckconv (m, type, inexact);
14023 mpfr_clear (m);
14024 if (result_lg)
14026 tree result_sg;
14028 /* Dereference the arg_sg pointer argument. */
14029 arg_sg = build_fold_indirect_ref (arg_sg);
14030 /* Assign the signgam value into *arg_sg. */
14031 result_sg = fold_build2 (MODIFY_EXPR,
14032 TREE_TYPE (arg_sg), arg_sg,
14033 build_int_cst (TREE_TYPE (arg_sg), sg));
14034 TREE_SIDE_EFFECTS (result_sg) = 1;
14035 /* Combine the signgam assignment with the lgamma result. */
14036 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14037 result_sg, result_lg));
14042 return result;
14045 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14046 function FUNC on it and return the resulting value as a tree with
14047 type TYPE. The mpfr precision is set to the precision of TYPE. We
14048 assume that function FUNC returns zero if the result could be
14049 calculated exactly within the requested precision. */
14051 static tree
14052 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14054 tree result = NULL_TREE;
14056 STRIP_NOPS (arg);
14058 /* To proceed, MPFR must exactly represent the target floating point
14059 format, which only happens when the target base equals two. */
14060 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14061 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14062 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14064 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14065 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14067 if (real_isfinite (re) && real_isfinite (im))
14069 const struct real_format *const fmt =
14070 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14071 const int prec = fmt->p;
14072 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14073 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14074 int inexact;
14075 mpc_t m;
14077 mpc_init2 (m, prec);
14078 mpfr_from_real (mpc_realref (m), re, rnd);
14079 mpfr_from_real (mpc_imagref (m), im, rnd);
14080 mpfr_clear_flags ();
14081 inexact = func (m, m, crnd);
14082 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14083 mpc_clear (m);
14087 return result;
14090 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14091 mpc function FUNC on it and return the resulting value as a tree
14092 with type TYPE. The mpfr precision is set to the precision of
14093 TYPE. We assume that function FUNC returns zero if the result
14094 could be calculated exactly within the requested precision. If
14095 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14096 in the arguments and/or results. */
14098 tree
14099 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14100 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14102 tree result = NULL_TREE;
14104 STRIP_NOPS (arg0);
14105 STRIP_NOPS (arg1);
14107 /* To proceed, MPFR must exactly represent the target floating point
14108 format, which only happens when the target base equals two. */
14109 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14110 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14111 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14112 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14113 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14115 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14116 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14117 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14118 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14120 if (do_nonfinite
14121 || (real_isfinite (re0) && real_isfinite (im0)
14122 && real_isfinite (re1) && real_isfinite (im1)))
14124 const struct real_format *const fmt =
14125 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14126 const int prec = fmt->p;
14127 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14128 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14129 int inexact;
14130 mpc_t m0, m1;
14132 mpc_init2 (m0, prec);
14133 mpc_init2 (m1, prec);
14134 mpfr_from_real (mpc_realref (m0), re0, rnd);
14135 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14136 mpfr_from_real (mpc_realref (m1), re1, rnd);
14137 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14138 mpfr_clear_flags ();
14139 inexact = func (m0, m0, m1, crnd);
14140 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14141 mpc_clear (m0);
14142 mpc_clear (m1);
14146 return result;
14149 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14150 a normal call should be emitted rather than expanding the function
14151 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14153 static tree
14154 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14156 int nargs = gimple_call_num_args (stmt);
14158 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14159 (nargs > 0
14160 ? gimple_call_arg_ptr (stmt, 0)
14161 : &error_mark_node), fcode);
14164 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14165 a normal call should be emitted rather than expanding the function
14166 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14167 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14168 passed as second argument. */
14170 tree
14171 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14172 enum built_in_function fcode)
14174 int nargs = gimple_call_num_args (stmt);
14176 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14177 (nargs > 0
14178 ? gimple_call_arg_ptr (stmt, 0)
14179 : &error_mark_node), maxlen, fcode);
14182 /* Builtins with folding operations that operate on "..." arguments
14183 need special handling; we need to store the arguments in a convenient
14184 data structure before attempting any folding. Fortunately there are
14185 only a few builtins that fall into this category. FNDECL is the
14186 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14187 result of the function call is ignored. */
14189 static tree
14190 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14191 bool ignore ATTRIBUTE_UNUSED)
14193 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14194 tree ret = NULL_TREE;
14196 switch (fcode)
14198 case BUILT_IN_SPRINTF_CHK:
14199 case BUILT_IN_VSPRINTF_CHK:
14200 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14201 break;
14203 case BUILT_IN_SNPRINTF_CHK:
14204 case BUILT_IN_VSNPRINTF_CHK:
14205 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14207 default:
14208 break;
14210 if (ret)
14212 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14213 TREE_NO_WARNING (ret) = 1;
14214 return ret;
14216 return NULL_TREE;
14219 /* A wrapper function for builtin folding that prevents warnings for
14220 "statement without effect" and the like, caused by removing the
14221 call node earlier than the warning is generated. */
14223 tree
14224 fold_call_stmt (gimple stmt, bool ignore)
14226 tree ret = NULL_TREE;
14227 tree fndecl = gimple_call_fndecl (stmt);
14228 location_t loc = gimple_location (stmt);
14229 if (fndecl
14230 && TREE_CODE (fndecl) == FUNCTION_DECL
14231 && DECL_BUILT_IN (fndecl)
14232 && !gimple_call_va_arg_pack_p (stmt))
14234 int nargs = gimple_call_num_args (stmt);
14235 tree *args = (nargs > 0
14236 ? gimple_call_arg_ptr (stmt, 0)
14237 : &error_mark_node);
14239 if (avoid_folding_inline_builtin (fndecl))
14240 return NULL_TREE;
14241 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14243 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14245 else
14247 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14248 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14249 if (!ret)
14250 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14251 if (ret)
14253 /* Propagate location information from original call to
14254 expansion of builtin. Otherwise things like
14255 maybe_emit_chk_warning, that operate on the expansion
14256 of a builtin, will use the wrong location information. */
14257 if (gimple_has_location (stmt))
14259 tree realret = ret;
14260 if (TREE_CODE (ret) == NOP_EXPR)
14261 realret = TREE_OPERAND (ret, 0);
14262 if (CAN_HAVE_LOCATION_P (realret)
14263 && !EXPR_HAS_LOCATION (realret))
14264 SET_EXPR_LOCATION (realret, loc);
14265 return realret;
14267 return ret;
14271 return NULL_TREE;
14274 /* Look up the function in builtin_decl that corresponds to DECL
14275 and set ASMSPEC as its user assembler name. DECL must be a
14276 function decl that declares a builtin. */
14278 void
14279 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14281 tree builtin;
14282 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14283 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14284 && asmspec != 0);
14286 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14287 set_user_assembler_name (builtin, asmspec);
14288 switch (DECL_FUNCTION_CODE (decl))
14290 case BUILT_IN_MEMCPY:
14291 init_block_move_fn (asmspec);
14292 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14293 break;
14294 case BUILT_IN_MEMSET:
14295 init_block_clear_fn (asmspec);
14296 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14297 break;
14298 case BUILT_IN_MEMMOVE:
14299 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14300 break;
14301 case BUILT_IN_MEMCMP:
14302 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14303 break;
14304 case BUILT_IN_ABORT:
14305 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14306 break;
14307 case BUILT_IN_FFS:
14308 if (INT_TYPE_SIZE < BITS_PER_WORD)
14310 set_user_assembler_libfunc ("ffs", asmspec);
14311 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14312 MODE_INT, 0), "ffs");
14314 break;
14315 default:
14316 break;
14320 /* Return true if DECL is a builtin that expands to a constant or similarly
14321 simple code. */
14322 bool
14323 is_simple_builtin (tree decl)
14325 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14326 switch (DECL_FUNCTION_CODE (decl))
14328 /* Builtins that expand to constants. */
14329 case BUILT_IN_CONSTANT_P:
14330 case BUILT_IN_EXPECT:
14331 case BUILT_IN_OBJECT_SIZE:
14332 case BUILT_IN_UNREACHABLE:
14333 /* Simple register moves or loads from stack. */
14334 case BUILT_IN_ASSUME_ALIGNED:
14335 case BUILT_IN_RETURN_ADDRESS:
14336 case BUILT_IN_EXTRACT_RETURN_ADDR:
14337 case BUILT_IN_FROB_RETURN_ADDR:
14338 case BUILT_IN_RETURN:
14339 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14340 case BUILT_IN_FRAME_ADDRESS:
14341 case BUILT_IN_VA_END:
14342 case BUILT_IN_STACK_SAVE:
14343 case BUILT_IN_STACK_RESTORE:
14344 /* Exception state returns or moves registers around. */
14345 case BUILT_IN_EH_FILTER:
14346 case BUILT_IN_EH_POINTER:
14347 case BUILT_IN_EH_COPY_VALUES:
14348 return true;
14350 default:
14351 return false;
14354 return false;
14357 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14358 most probably expanded inline into reasonably simple code. This is a
14359 superset of is_simple_builtin. */
14360 bool
14361 is_inexpensive_builtin (tree decl)
14363 if (!decl)
14364 return false;
14365 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14366 return true;
14367 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14368 switch (DECL_FUNCTION_CODE (decl))
14370 case BUILT_IN_ABS:
14371 case BUILT_IN_ALLOCA:
14372 case BUILT_IN_ALLOCA_WITH_ALIGN:
14373 case BUILT_IN_BSWAP16:
14374 case BUILT_IN_BSWAP32:
14375 case BUILT_IN_BSWAP64:
14376 case BUILT_IN_CLZ:
14377 case BUILT_IN_CLZIMAX:
14378 case BUILT_IN_CLZL:
14379 case BUILT_IN_CLZLL:
14380 case BUILT_IN_CTZ:
14381 case BUILT_IN_CTZIMAX:
14382 case BUILT_IN_CTZL:
14383 case BUILT_IN_CTZLL:
14384 case BUILT_IN_FFS:
14385 case BUILT_IN_FFSIMAX:
14386 case BUILT_IN_FFSL:
14387 case BUILT_IN_FFSLL:
14388 case BUILT_IN_IMAXABS:
14389 case BUILT_IN_FINITE:
14390 case BUILT_IN_FINITEF:
14391 case BUILT_IN_FINITEL:
14392 case BUILT_IN_FINITED32:
14393 case BUILT_IN_FINITED64:
14394 case BUILT_IN_FINITED128:
14395 case BUILT_IN_FPCLASSIFY:
14396 case BUILT_IN_ISFINITE:
14397 case BUILT_IN_ISINF_SIGN:
14398 case BUILT_IN_ISINF:
14399 case BUILT_IN_ISINFF:
14400 case BUILT_IN_ISINFL:
14401 case BUILT_IN_ISINFD32:
14402 case BUILT_IN_ISINFD64:
14403 case BUILT_IN_ISINFD128:
14404 case BUILT_IN_ISNAN:
14405 case BUILT_IN_ISNANF:
14406 case BUILT_IN_ISNANL:
14407 case BUILT_IN_ISNAND32:
14408 case BUILT_IN_ISNAND64:
14409 case BUILT_IN_ISNAND128:
14410 case BUILT_IN_ISNORMAL:
14411 case BUILT_IN_ISGREATER:
14412 case BUILT_IN_ISGREATEREQUAL:
14413 case BUILT_IN_ISLESS:
14414 case BUILT_IN_ISLESSEQUAL:
14415 case BUILT_IN_ISLESSGREATER:
14416 case BUILT_IN_ISUNORDERED:
14417 case BUILT_IN_VA_ARG_PACK:
14418 case BUILT_IN_VA_ARG_PACK_LEN:
14419 case BUILT_IN_VA_COPY:
14420 case BUILT_IN_TRAP:
14421 case BUILT_IN_SAVEREGS:
14422 case BUILT_IN_POPCOUNTL:
14423 case BUILT_IN_POPCOUNTLL:
14424 case BUILT_IN_POPCOUNTIMAX:
14425 case BUILT_IN_POPCOUNT:
14426 case BUILT_IN_PARITYL:
14427 case BUILT_IN_PARITYLL:
14428 case BUILT_IN_PARITYIMAX:
14429 case BUILT_IN_PARITY:
14430 case BUILT_IN_LABS:
14431 case BUILT_IN_LLABS:
14432 case BUILT_IN_PREFETCH:
14433 return true;
14435 default:
14436 return is_simple_builtin (decl);
14439 return false;