Merge aosp-toolchain/gcc/gcc-4_9 changes.
[official-gcc.git] / gcc-4_9-mobile / gcc / builtins.c
blobd6642d0d9e307df4a383892502b778005c7dbac5
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
62 #include "input.h"
65 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
67 struct target_builtins default_target_builtins;
68 #if SWITCHABLE_TARGET
69 struct target_builtins *this_target_builtins = &default_target_builtins;
70 #endif
72 /* Define the names of the builtin function types and codes. */
73 const char *const built_in_class_names[BUILT_IN_LAST]
74 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
76 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
77 const char * built_in_names[(int) END_BUILTINS] =
79 #include "builtins.def"
81 #undef DEF_BUILTIN
83 /* Setup an array of _DECL trees, make sure each element is
84 initialized to NULL_TREE. */
85 builtin_info_type builtin_info;
87 /* Non-zero if __builtin_constant_p should be folded right away. */
88 bool force_folding_builtin_constant_p;
90 static const char *c_getstr (tree);
91 static rtx c_readstr (const char *, enum machine_mode);
92 static int target_char_cast (tree, char *);
93 static rtx get_memory_rtx (tree, tree);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
97 static rtx result_vector (int, rtx);
98 #endif
99 static void expand_builtin_update_setjmp_buf (rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static void expand_errno_check (tree, rtx);
108 static rtx expand_builtin_mathfn (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
111 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
112 static rtx expand_builtin_interclass_mathfn (tree, rtx);
113 static rtx expand_builtin_sincos (tree);
114 static rtx expand_builtin_cexpi (tree, rtx);
115 static rtx expand_builtin_int_roundingfn (tree, rtx);
116 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
117 static rtx expand_builtin_next_arg (void);
118 static rtx expand_builtin_va_start (tree);
119 static rtx expand_builtin_va_end (tree);
120 static rtx expand_builtin_va_copy (tree);
121 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcmp (tree, rtx);
123 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
124 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
125 static rtx expand_builtin_memcpy (tree, rtx);
126 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
128 enum machine_mode, int);
129 static rtx expand_builtin_strcpy (tree, rtx);
130 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree fold_builtin_nan (tree, tree, int);
148 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
149 static bool validate_arg (const_tree, enum tree_code code);
150 static bool integer_valued_real_p (tree);
151 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
152 static bool readonly_data_expr (tree);
153 static rtx expand_builtin_fabs (tree, rtx, rtx);
154 static rtx expand_builtin_signbit (tree, rtx);
155 static tree fold_builtin_sqrt (location_t, tree, tree);
156 static tree fold_builtin_cbrt (location_t, tree, tree);
157 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
159 static tree fold_builtin_cos (location_t, tree, tree, tree);
160 static tree fold_builtin_cosh (location_t, tree, tree, tree);
161 static tree fold_builtin_tan (tree, tree);
162 static tree fold_builtin_trunc (location_t, tree, tree);
163 static tree fold_builtin_floor (location_t, tree, tree);
164 static tree fold_builtin_ceil (location_t, tree, tree);
165 static tree fold_builtin_round (location_t, tree, tree);
166 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
167 static tree fold_builtin_bitop (tree, tree);
168 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
169 static tree fold_builtin_strchr (location_t, tree, tree, tree);
170 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
172 static tree fold_builtin_strcmp (location_t, tree, tree);
173 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
174 static tree fold_builtin_signbit (location_t, tree, tree);
175 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_isascii (location_t, tree);
177 static tree fold_builtin_toascii (location_t, tree);
178 static tree fold_builtin_isdigit (location_t, tree);
179 static tree fold_builtin_fabs (location_t, tree, tree);
180 static tree fold_builtin_abs (location_t, tree, tree);
181 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
182 enum tree_code);
183 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
184 static tree fold_builtin_0 (location_t, tree, bool);
185 static tree fold_builtin_1 (location_t, tree, tree, bool);
186 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
187 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
188 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
189 static tree fold_builtin_varargs (location_t, tree, tree, bool);
191 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
192 static tree fold_builtin_strstr (location_t, tree, tree, tree);
193 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
194 static tree fold_builtin_strncat (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
200 static rtx expand_builtin_object_size (tree);
201 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
202 enum built_in_function);
203 static void maybe_emit_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
205 static void maybe_emit_free_warning (tree);
206 static tree fold_builtin_object_size (tree, tree);
207 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
208 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
209 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
210 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
211 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
212 enum built_in_function);
213 static bool init_target_chars (void);
215 static unsigned HOST_WIDE_INT target_newline;
216 static unsigned HOST_WIDE_INT target_percent;
217 static unsigned HOST_WIDE_INT target_c;
218 static unsigned HOST_WIDE_INT target_s;
219 static char target_percent_c[3];
220 static char target_percent_s[3];
221 static char target_percent_s_newline[4];
222 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_arg2 (tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_arg3 (tree, tree, tree, tree,
227 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
228 static tree do_mpfr_sincos (tree, tree, tree);
229 static tree do_mpfr_bessel_n (tree, tree, tree,
230 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
231 const REAL_VALUE_TYPE *, bool);
232 static tree do_mpfr_remquo (tree, tree, tree);
233 static tree do_mpfr_lgamma_r (tree, tree, tree);
234 static void expand_builtin_sync_synchronize (void);
236 /* Return true if NAME starts with __builtin_ or __sync_. */
238 static bool
239 is_builtin_name (const char *name)
241 if (strncmp (name, "__builtin_", 10) == 0)
242 return true;
243 if (strncmp (name, "__sync_", 7) == 0)
244 return true;
245 if (strncmp (name, "__atomic_", 9) == 0)
246 return true;
247 if (flag_cilkplus
248 && (!strcmp (name, "__cilkrts_detach")
249 || !strcmp (name, "__cilkrts_pop_frame")))
250 return true;
251 return false;
255 /* Return true if DECL is a function symbol representing a built-in. */
257 bool
258 is_builtin_fn (tree decl)
260 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
263 /* By default we assume that c99 functions are present at the runtime,
264 but sincos is not. */
265 bool
266 default_libc_has_function (enum function_class fn_class)
268 if (fn_class == function_c94
269 || fn_class == function_c99_misc
270 || fn_class == function_c99_math_complex)
271 return true;
273 return false;
/* glibc-style runtimes: every function class queried is assumed to be
   available at run time.  */
bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}
/* Pre-C99 runtimes: no queried function class is assumed to be
   available at run time.  */
bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
288 /* Return true if NODE should be considered for inline expansion regardless
289 of the optimization level. This means whenever a function is invoked with
290 its "internal" name, which normally contains the prefix "__builtin". */
292 static bool
293 called_as_built_in (tree node)
295 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
296 we want the name used to call the function, not the name it
297 will have. */
298 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
299 return is_builtin_name (name);
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  /* Conservative default: only byte alignment is known.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels carry no usable alignment information; keep the default.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* An address of the form (addr & -C) guarantees C-byte alignment;
	 peel the mask off and remember what it proves.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  /* An unscaled second index makes the offset effectively
	     arbitrary, so nothing beyond byte alignment is provable.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false: EXP is an actual access, not just an address-of.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
462 /* Return the alignment in bits of EXP, an object. */
464 unsigned int
465 get_object_alignment (tree exp)
467 unsigned HOST_WIDE_INT bitpos = 0;
468 unsigned int align;
470 get_object_alignment_1 (exp, &align, &bitpos);
472 /* align and bitpos now specify known low bits of the pointer.
473 ptr & (align - 1) == bitpos. */
475 if (bitpos != 0)
476 align = (bitpos & -bitpos);
477 return align;
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &obj: defer to the object's own alignment (ADDR_P true since the
       access need not actually happen).  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  /* ptr_info records alignment in bytes; convert to bits.  */
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address has exactly known low bits.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: conservative byte alignment.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
529 /* Return the alignment in bits of EXP, a pointer valued expression.
530 The alignment returned is, by default, the alignment of the thing that
531 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
533 Otherwise, look at the expression to see if we can do better, i.e., if the
534 expression is actually pointing at an object whose alignment is tighter. */
536 unsigned int
537 get_pointer_alignment (tree exp)
539 unsigned HOST_WIDE_INT bitpos = 0;
540 unsigned int align;
542 get_pointer_alignment_1 (exp, &align, &bitpos);
544 /* align and bitpos now specify known low bits of the pointer.
545 ptr & (align - 1) == bitpos. */
547 if (bitpos != 0)
548 align = (bitpos & -bitpos);
550 return align;
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* cond ? "a" : "bb": usable only when both arms have the same length
     (and the condition's side effects may be dropped, see ONLY_VALUE).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, "str"): the length of the second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
655 /* Return a char pointer for a C string if it is a string constant
656 or sum of string constant and integer constant. */
658 static const char *
659 c_getstr (tree src)
661 tree offset_node;
663 src = string_constant (src, &offset_node);
664 if (src == 0)
665 return 0;
667 if (offset_node == 0)
668 return TREE_STRING_POINTER (src);
669 else if (!tree_fits_uhwi_p (offset_node)
670 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
671 return 0;
673 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* Two host words are enough for a double-int constant.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position of byte I as the target sees it,
	 honoring both word order and byte-within-word order.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      /* Once a NUL byte has been seen, CH stays 0 so the remaining
	 bytes of the constant are zero padding.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
709 /* Cast a target constant CST to target CHAR and if that value fits into
710 host char type, return zero and put that value into variable pointed to by
711 P. */
713 static int
714 target_char_cast (tree cst, char *p)
716 unsigned HOST_WIDE_INT val, hostval;
718 if (TREE_CODE (cst) != INTEGER_CST
719 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
720 return 1;
722 val = TREE_INT_CST_LOW (cst);
723 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
724 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
726 hostval = val;
727 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
728 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
730 if (val != hostval)
731 return 1;
733 *p = hostval;
734 return 0;
737 /* Similar to save_expr, but assumes that arbitrary code is not executed
738 in between the multiple evaluations. In particular, we assume that a
739 non-addressable local variable will not be modified. */
741 static tree
742 builtin_save_expr (tree exp)
744 if (TREE_CODE (exp) == SSA_NAME
745 || (TREE_ADDRESSABLE (exp) == 0
746 && (TREE_CODE (exp) == PARM_DECL
747 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
748 return exp;
750 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  Each iteration
     dereferences the saved dynamic-chain word of the current frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
837 /* Alias set used for setjmp buffer. */
838 static alias_set_type setjmp_alias_set = -1;
840 /* Construct the leading half of a __builtin_setjmp call. Control will
841 return to RECEIVER_LABEL. This is also called directly by the SJLJ
842 exception handling code. */
844 void
845 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
847 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
848 rtx stack_save;
849 rtx mem;
851 if (setjmp_alias_set == -1)
852 setjmp_alias_set = new_alias_set ();
854 buf_addr = convert_memory_address (Pmode, buf_addr);
856 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
858 /* We store the frame pointer and the address of receiver_label in
859 the buffer and use the rest of it for the stack save area, which
860 is machine-dependent. */
862 mem = gen_rtx_MEM (Pmode, buf_addr);
863 set_mem_alias_set (mem, setjmp_alias_set);
864 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
866 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
867 GET_MODE_SIZE (Pmode))),
868 set_mem_alias_set (mem, setjmp_alias_set);
870 emit_move_insn (validize_mem (mem),
871 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
873 stack_save = gen_rtx_MEM (sa_mode,
874 plus_constant (Pmode, buf_addr,
875 2 * GET_MODE_SIZE (Pmode)));
876 set_mem_alias_set (stack_save, setjmp_alias_set);
877 emit_stack_save (SAVE_NONLOCAL, &stack_save);
879 /* If there is further processing to do, do it. */
880 #ifdef HAVE_builtin_setjmp_setup
881 if (HAVE_builtin_setjmp_setup)
882 emit_insn (gen_builtin_setjmp_setup (buf_addr));
883 #endif
885 /* We have a nonlocal label. */
886 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's dedicated receiver patterns when they exist;
     only one of the two gen_* calls below is emitted.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout matches expand_builtin_setjmp_setup: word 0 is
	 the saved frame pointer, word 1 the receiver label, and the rest
	 the saved stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Clobber all memory so prior stores are not moved past the
	     frame-pointer switch.  */
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1069 static inline bool
1070 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1072 return (iter->i < iter->n);
1075 /* This function validates the types of a function call argument list
1076 against a specified list of tree_codes. If the last specifier is a 0,
1077 that represents an ellipses, otherwise the last specifier must be a
1078 VOID_TYPE. */
1080 static bool
1081 validate_arglist (const_tree callexpr, ...)
1083 enum tree_code code;
1084 bool res = 0;
1085 va_list ap;
1086 const_call_expr_arg_iterator iter;
1087 const_tree arg;
1089 va_start (ap, callexpr);
1090 init_const_call_expr_arg_iterator (callexpr, &iter);
1094 code = (enum tree_code) va_arg (ap, int);
1095 switch (code)
1097 case 0:
1098 /* This signifies an ellipses, any further arguments are all ok. */
1099 res = true;
1100 goto end;
1101 case VOID_TYPE:
1102 /* This signifies an endlink, if no arguments remain, return
1103 true, otherwise return false. */
1104 res = !more_const_call_expr_args_p (&iter);
1105 goto end;
1106 default:
1107 /* If no parameters remain or the parameter's code does not
1108 match the specified code, return false. Otherwise continue
1109 checking any remaining arguments. */
1110 arg = next_const_call_expr_arg (&iter);
1111 if (!validate_arg (arg, code))
1112 goto end;
1113 break;
1116 while (1);
1118 /* We need gotos here since we can only have one VA_CLOSE in a
1119 function. */
1120 end: ;
1121 va_end (ap);
1123 return res;
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success, or
   NULL_RTX if the argument list does not match (pointer, pointer).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 is the frame pointer, word 1 the saved
     stack pointer (in the nonlocal save-area mode).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all memory so prior stores cannot be moved past the
	 frame-pointer switch below.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1208 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1209 (not all will be used on all machines) that was passed to __builtin_setjmp.
1210 It updates the stack pointer in that block to correspond to the current
1211 stack pointer. */
1213 static void
1214 expand_builtin_update_setjmp_buf (rtx buf_addr)
1216 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1217 rtx stack_save
1218 = gen_rtx_MEM (sa_mode,
1219 memory_address
1220 (sa_mode,
1221 plus_constant (Pmode, buf_addr,
1222 2 * GET_MODE_SIZE (Pmode))));
1224 emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  Emits diagnostics (error/warning) for non-constant or
   out-of-range rw/locality arguments and falls back to safe defaults.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM is BLKmode with alias set 0 (it
   may alias anything).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (possibly SAVE_EXPR-wrapped) address.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* Per-register mode tables, stored in per-target storage.  Each entry
   holds the machine mode used to save/restore that hard register in an
   apply_args / apply_result block, or VOIDmode if the register is not
   part of the block (see apply_args_size / apply_result_size).  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The layout is: incoming arg-pointer,
   optional structure value address, then every possible argument register
   (each aligned to its mode's alignment).  The result is computed once
   and cached in a function-local static.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this mode's alignment before placing it.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Analogous to apply_args_size, but
   for the registers that can hold a function's return value.  Cached in
   a function-local static after the first call.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this mode's alignment before placing it.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live result
   register, between the register and its slot in RESULT.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Slot offsets must match apply_result_size's layout exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the value comes back in the INCOMING register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a register
   holding the address of the stack block where the state was saved.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Offsets must match apply_args_size's layout exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Capture the save code in its own sequence so it can be moved.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee's address, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of stack
   arguments to copy.  Returns the address of the block holding the
   callee's return registers (in ptr_mode).  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Offsets must match apply_args_size's layout exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  RESULT is the address of the block (built
   by __builtin_apply) holding the saved return registers; reload them
   and jump to the function epilogue.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Make sure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Offsets must match apply_result_size's layout exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns for all restored registers in a sequence.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.
   Map a tree type code to the corresponding __builtin_classify_type
   type_class value; unrecognized codes map to no_type_class.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    /* Arrays flagged as strings classify as strings, not arrays.  */
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1834 /* Expand a call EXP to __builtin_classify_type. */
1836 static rtx
1837 expand_builtin_classify_type (tree exp)
1839 if (call_expr_nargs (exp))
1840 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1841 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It sets the local
   variables fcode/fcodef/fcodel to the double/float/long-double
   variants of the builtin.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  /* FCODE/FCODEF/FCODEL are filled in by the CASE_MATHFN macros with
     the double/float/long double variants of FN; FCODE2 is the one
     selected by TYPE below.  */
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  /* Pick the variant whose argument type matches TYPE's main variant;
     any other floating type has no direct builtin equivalent.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1977 /* Like mathfn_built_in_1(), but always use the implicit array. */
1979 tree
1980 mathfn_built_in (tree type, enum built_in_function fn)
1982 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-compare with EQ is
     true for every value except NaN, so the jump to LAB (which skips
     the errno store) is taken exactly when the result is not NaN.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fallback: assume errno is an ordinary word-sized global.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab and note whether it can set errno.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt sets EDOM only for a negative argument, so a provably
	 nonnegative argument needs no errno handling.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno only matters when -fno-errno-math is not in effect and the
     mode can actually represent a NaN result.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* ldexp/scalbn/scalbln take an integer second argument; everything
     else takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when FLT_RADIX is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The errno check adds code; skip inline expansion when optimizing
     for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list, so a fallback library call
     does not evaluate side effects twice.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2263 /* Expand a call to the builtin trinary math functions (fma).
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2268 operands. */
2270 static rtx
2271 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2273 optab builtin_optab;
2274 rtx op0, op1, op2, insns, result;
2275 tree fndecl = get_callee_fndecl (exp);
2276 tree arg0, arg1, arg2;
2277 enum machine_mode mode;
2279 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2282 arg0 = CALL_EXPR_ARG (exp, 0);
2283 arg1 = CALL_EXPR_ARG (exp, 1);
2284 arg2 = CALL_EXPR_ARG (exp, 2);
2286 switch (DECL_FUNCTION_CODE (fndecl))
2288 CASE_FLT_FN (BUILT_IN_FMA):
2289 builtin_optab = fma_optab; break;
2290 default:
2291 gcc_unreachable ();
2294 /* Make a suitable register to place result in. */
2295 mode = TYPE_MODE (TREE_TYPE (exp));
2297 /* Before working hard, check whether the instruction is available. */
2298 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2299 return NULL_RTX;
2301 result = gen_reg_rtx (mode);
2303 /* Always stabilize the argument list. */
2304 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2305 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2306 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2308 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2309 op1 = expand_normal (arg1);
2310 op2 = expand_normal (arg2);
2312 start_sequence ();
2314 /* Compute into RESULT.
2315 Set RESULT to wherever the result comes back. */
2316 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2317 result, 0);
2319 /* If we were unable to expand via the builtin, stop the sequence
2320 (without outputting the insns) and call to the library function
2321 with the stabilized argument list. */
2322 if (result == 0)
2324 end_sequence ();
2325 return expand_call (exp, target, target == const0_rtx);
2328 /* Output the entire sequence. */
2329 insns = get_insns ();
2330 end_sequence ();
2331 emit_insn (insns);
2333 return result;
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos first try the combined sincos insn.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2436 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2437 return an RTL instruction code that implements the functionality.
2438 If that isn't possible or available return CODE_FOR_nothing. */
2440 static enum insn_code
2441 interclass_mathfn_icode (tree arg, tree fndecl)
2443 bool errno_set = false;
2444 optab builtin_optab = unknown_optab;
2445 enum machine_mode mode;
2447 switch (DECL_FUNCTION_CODE (fndecl))
2449 CASE_FLT_FN (BUILT_IN_ILOGB):
2450 errno_set = true; builtin_optab = ilogb_optab; break;
2451 CASE_FLT_FN (BUILT_IN_ISINF):
2452 builtin_optab = isinf_optab; break;
2453 case BUILT_IN_ISNORMAL:
2454 case BUILT_IN_ISFINITE:
2455 CASE_FLT_FN (BUILT_IN_FINITE):
2456 case BUILT_IN_FINITED32:
2457 case BUILT_IN_FINITED64:
2458 case BUILT_IN_FINITED128:
2459 case BUILT_IN_ISINFD32:
2460 case BUILT_IN_ISINFD64:
2461 case BUILT_IN_ISINFD128:
2462 /* These builtins have no optabs (yet). */
2463 break;
2464 default:
2465 gcc_unreachable ();
2468 /* There's no easy way to detect the case we need to set EDOM. */
2469 if (flag_errno_math && errno_set)
2470 return CODE_FOR_nothing;
2472 /* Optab mode depends on the mode of the input argument. */
2473 mode = TYPE_MODE (TREE_TYPE (arg));
2475 if (builtin_optab)
2476 return optab_handler (builtin_optab, mode);
2477 return CODE_FOR_nothing;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember the insn stream position so a failed attempt can be
	 rolled back with delete_insns_since.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Could not emit the insn: undo everything emitted so far and
	 restore the original argument for the normal call path.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, *sinp, *cosp).  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointers so the stores get the
     right alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      /* Emit sincos (arg, &op1, &op2) through temporaries whose
	 addresses we pass explicitly.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* No sincos available at all: emit cexp (0 + arg*i) instead.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2694 /* Conveniently construct a function call expression. FNDECL names the
2695 function to be called, N is the number of arguments, and the "..."
2696 parameters are the argument expressions. Unlike build_call_exr
2697 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2699 static tree
2700 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2702 va_list ap;
2703 tree fntype = TREE_TYPE (fndecl);
2704 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2706 va_start (ap, n);
2707 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2708 va_end (ap);
2709 SET_EXPR_LOCATION (fn, loc);
2710 return fn;
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Select the library name matching the builtin's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* lround returns a long; convert to the integer mode callers of
	 iround expect.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2951 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2952 a normal call should be emitted rather than expanding the function
2953 in-line. EXP is the expression that is a call to the builtin
2954 function; if convenient, the result should be placed in TARGET. */
static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  /* MODE is the floating-point mode of both the base and the result.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* __powi* is a pure function, so LCT_CONST is appropriate here.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
2993 /* Expand expression EXP which is a call to the strlen builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
2995 try to get the result in TARGET, if convenient. */
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider integer modes until the target has a
	 strlen pattern for one of them.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Place the source-address computation before the strlen insns
	 emitted above; if nothing preceded them, at the very start.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3097 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3098 bytes from constant string DATA + OFFSET and return it as target
3099 constant. */
3101 static rtx
3102 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3103 enum machine_mode mode)
3105 const char *str = (const char *) data;
3107 gcc_assert (offset >= 0
3108 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3109 <= strlen (str) + 1));
3111 return c_readstr (str + offset, mode);
3114 /* LEN specify length of the block of memcpy/memset operation.
3115 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3116 In some cases we can make very likely guess on max size, then we
3117 set it into PROBABLE_MAX_SIZE. */
static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length pins all three outputs exactly.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      double_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA value-range info.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (min.fits_uhwi () && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (max.fits_uhwi () && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size to N+1.  */
	  if (min.is_zero ())
	    {
	      if ((max + double_int_one).fits_uhwi ())
		*min_size = (max + double_int_one).to_uhwi ();
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!max.ule (double_int_one.lshift (30))
		   && min.fits_uhwi ())
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3182 /* Expand a call EXP to the memcpy builtin.
3183 Return NULL_RTX if we failed, the caller should emit a normal call,
3184 otherwise try to get the result in TARGET, if convenient (and in
3185 mode MODE if that's convenient). */
static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      unsigned HOST_WIDE_INT min_size;
      unsigned HOST_WIDE_INT max_size;
      unsigned HOST_WIDE_INT probable_max_size;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Use profile feedback, when available, to guide the expansion.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      determine_block_size (len, len_rtx, &min_size, &max_size,
			    &probable_max_size);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size,
					 min_size, max_size, probable_max_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3268 /* Expand a call EXP to the mempcpy builtin.
3269 Return NULL_RTX if we failed; the caller should emit a normal call,
3270 otherwise try to get the result in TARGET, if convenient (and in
3271 mode MODE if that's convenient). If ENDP is 0 return the
3272 destination pointer, if ENDP is 1 return the end pointer ala
3273 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3274 stpcpy. */
3276 static rtx
3277 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3279 if (!validate_arglist (exp,
3280 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3281 return NULL_RTX;
3282 else
3284 tree dest = CALL_EXPR_ARG (exp, 0);
3285 tree src = CALL_EXPR_ARG (exp, 1);
3286 tree len = CALL_EXPR_ARG (exp, 2);
3287 return expand_builtin_mempcpy_args (dest, src, len,
3288 target, mode, /*endp=*/ 1);
3292 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3293 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3294 so that this can also be called without constructing an actual CALL_EXPR.
3295 The other arguments and return value are the same as for
3296 expand_builtin_mempcpy. */
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects whether store_by_pieces returns the start or
	     the (possibly adjusted) end address.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3369 #ifndef HAVE_movstr
3370 # define HAVE_movstr 0
3371 # define CODE_FOR_movstr CODE_FOR_nothing
3372 #endif
3374 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3375 we failed, the caller should emit a normal call, otherwise try to
3376 get the result in TARGET, if convenient. If ENDP is 0 return the
3377 destination pointer, if ENDP is 1 return the end pointer ala
3378 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3379 stpcpy. */
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Only usable when the target provides a movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy semantics: the return value is the original DEST, so
	 capture its address before the pattern clobbers anything.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3421 /* Expand expression EXP, which is a call to the strcpy builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3423 try to get the result in TARGET, if convenient (and in mode MODE if that's
3424 convenient). */
3426 static rtx
3427 expand_builtin_strcpy (tree exp, rtx target)
3429 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3431 tree dest = CALL_EXPR_ARG (exp, 0);
3432 tree src = CALL_EXPR_ARG (exp, 1);
3433 return expand_builtin_strcpy_args (dest, src, target);
3435 return NULL_RTX;
3438 /* Helper function to do the actual work for expand_builtin_strcpy. The
3439 arguments to the builtin_strcpy call DEST and SRC are broken out
3440 so that this can also be called without constructing an actual CALL_EXPR.
3441 The other arguments and return value are the same as for
3442 expand_builtin_strcpy. */
static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* endp == 0 requests strcpy semantics: return the DEST pointer.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3450 /* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
3452 otherwise try to get the result in TARGET, if convenient (and in
3453 mode MODE if that's convenient). */
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1,
	 which endp == 2 requests.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  /* strcpy returned DEST; add the constant length to
		     produce the stpcpy end pointer.  */
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);
		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3526 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3527 bytes from constant string DATA + OFFSET and return it as target
3528 constant. */
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  enum machine_mode mode)
{
  const char *str = (const char *) data;

  /* Reads past the end of the string yield all-zero chunks, matching
     strncpy's required zero padding.  */
  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
3542 /* Expand expression EXP, which is a call to the strncpy builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes the copy
	 actually takes from SRC (including the NUL).  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3592 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3593 bytes from constant string DATA + OFFSET and return it as target
3594 constant. */
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 enum machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  /* DATA points at the single fill byte; replicate it to fill MODE.
     OFFSET is irrelevant because every chunk looks the same.  */
  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
3608 /* Callback routine for store_by_pieces. Return the RTL of a register
3609 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3610 char value given in the RTL register data. For example, if mode is
3611 4 bytes wide, return the RTL for 0x01010101*data. */
3613 static rtx
3614 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3615 enum machine_mode mode)
3617 rtx target, coeff;
3618 size_t size;
3619 char *p;
3621 size = GET_MODE_SIZE (mode);
3622 if (size == 1)
3623 return (rtx) data;
3625 p = XALLOCAVEC (char, size);
3626 memset (p, 1, size);
3627 coeff = c_readstr (p, mode);
3629 target = convert_to_mode (mode, (rtx) data, 1);
3630 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3631 return force_reg (mode, target);
3634 /* Expand expression EXP, which is a call to the memset builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3636 try to get the result in TARGET, if convenient (and in mode MODE if that's
3637 convenient). */
3639 static rtx
3640 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3642 if (!validate_arglist (exp,
3643 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3644 return NULL_RTX;
3645 else
3647 tree dest = CALL_EXPR_ARG (exp, 0);
3648 tree val = CALL_EXPR_ARG (exp, 1);
3649 tree len = CALL_EXPR_ARG (exp, 2);
3650 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3654 /* Helper function to do the actual work for expand_builtin_memset. The
3655 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3656 so that this can also be called without constructing an actual CALL_EXPR.
3657 The other arguments and return value are the same as for
3658 expand_builtin_memset. */
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to guide the expansion.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Constant non-zero fill byte.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: this is a block clear.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Inline expansion failed: emit a call to the original builtin
     (memset or bzero) with the stabilized arguments.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3795 /* Expand expression EXP, which is a call to the bzero builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3798 static rtx
3799 expand_builtin_bzero (tree exp)
3801 tree dest, size;
3802 location_t loc = EXPR_LOCATION (exp);
3804 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3805 return NULL_RTX;
3807 dest = CALL_EXPR_ARG (exp, 0);
3808 size = CALL_EXPR_ARG (exp, 1);
3810 /* New argument list transforming bzero(ptr x, int y) to
3811 memset(ptr x, int 0, size_t y). This is done this way
3812 so that if it isn't expanded inline, we fallback to
3813 calling bzero instead of memset. */
3815 return expand_builtin_memset_args (dest, integer_zero_node,
3816 fold_convert_loc (loc,
3817 size_type_node, size),
3818 const0_rtx, VOIDmode, exp);
3821 /* Expand expression EXP, which is a call to the memcmp built-in function.
3822 Return NULL_RTX if we failed and the caller should emit a normal call,
3823 otherwise try to get the result in TARGET, if convenient (and in mode
3824 MODE, if that's convenient). */
static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* Either emit the pattern's insns or fall back to a libcall
       if the pattern declined to produce one.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3913 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
3914 NULL_RTX if we failed; in that case the caller should emit a normal call.
3915 Otherwise try to get the result in TARGET, if convenient. */
3917 static rtx
3918 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
/* Strategy: first try the target's cmpstrsi insn (needs no length);
   failing that, try cmpstrnsi with a length bound derived from
   c_strlen; failing both, emit the library call ourselves from the
   stabilized argument list so arguments are not evaluated twice.  */
3920 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3921 return NULL_RTX;
3923 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3924 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3925 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3927 rtx arg1_rtx, arg2_rtx;
3928 rtx result, insn = NULL_RTX;
3929 tree fndecl, fn;
3930 tree arg1 = CALL_EXPR_ARG (exp, 0);
3931 tree arg2 = CALL_EXPR_ARG (exp, 1);
3933 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3934 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3936 /* If we don't have POINTER_TYPE, call the function. */
3937 if (arg1_align == 0 || arg2_align == 0)
3938 return NULL_RTX;
3940 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3941 arg1 = builtin_save_expr (arg1);
3942 arg2 = builtin_save_expr (arg2);
3944 arg1_rtx = get_memory_rtx (arg1, NULL);
3945 arg2_rtx = get_memory_rtx (arg2, NULL);
3947 #ifdef HAVE_cmpstrsi
3948 /* Try to call cmpstrsi. */
3949 if (HAVE_cmpstrsi)
3951 enum machine_mode insn_mode
3952 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3954 /* Make a place to write the result of the instruction. */
3955 result = target;
3956 if (! (result != 0
3957 && REG_P (result) && GET_MODE (result) == insn_mode
3958 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3959 result = gen_reg_rtx (insn_mode);
3961 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3962 GEN_INT (MIN (arg1_align, arg2_align)));
3964 #endif
3965 #ifdef HAVE_cmpstrnsi
3966 /* Try to determine at least one length and call cmpstrnsi. */
3967 if (!insn && HAVE_cmpstrnsi)
3969 tree len;
3970 rtx arg3_rtx;
3972 enum machine_mode insn_mode
3973 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3974 tree len1 = c_strlen (arg1, 1);
3975 tree len2 = c_strlen (arg2, 1);
/* Add one to each known string length to cover the terminating NUL.  */
3977 if (len1)
3978 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3979 if (len2)
3980 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3982 /* If we don't have a constant length for the first, use the length
3983 of the second, if we know it. We don't require a constant for
3984 this case; some cost analysis could be done if both are available
3985 but neither is constant. For now, assume they're equally cheap,
3986 unless one has side effects. If both strings have constant lengths,
3987 use the smaller. */
3989 if (!len1)
3990 len = len2;
3991 else if (!len2)
3992 len = len1;
3993 else if (TREE_SIDE_EFFECTS (len1))
3994 len = len2;
3995 else if (TREE_SIDE_EFFECTS (len2))
3996 len = len1;
3997 else if (TREE_CODE (len1) != INTEGER_CST)
3998 len = len2;
3999 else if (TREE_CODE (len2) != INTEGER_CST)
4000 len = len1;
4001 else if (tree_int_cst_lt (len1, len2))
4002 len = len1;
4003 else
4004 len = len2;
4006 /* If both arguments have side effects, we cannot optimize. */
4007 if (!len || TREE_SIDE_EFFECTS (len))
4008 goto do_libcall;
4010 arg3_rtx = expand_normal (len);
4012 /* Make a place to write the result of the instruction. */
4013 result = target;
4014 if (! (result != 0
4015 && REG_P (result) && GET_MODE (result) == insn_mode
4016 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4017 result = gen_reg_rtx (insn_mode);
4019 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4020 GEN_INT (MIN (arg1_align, arg2_align)));
4022 #endif
/* If either insn pattern produced something, emit it and convert the
   result to the call expression's mode; otherwise fall through to the
   explicit library call below.  */
4024 if (insn)
4026 enum machine_mode mode;
4027 emit_insn (insn);
4029 /* Return the value in the proper mode for this function. */
4030 mode = TYPE_MODE (TREE_TYPE (exp));
4031 if (GET_MODE (result) == mode)
4032 return result;
4033 if (target == 0)
4034 return convert_to_mode (mode, result, 0);
4035 convert_move (target, result, 0);
4036 return target;
4039 /* Expand the library call ourselves using a stabilized argument
4040 list to avoid re-evaluating the function's arguments twice. */
4041 #ifdef HAVE_cmpstrnsi
4042 do_libcall:
4043 #endif
4044 fndecl = get_callee_fndecl (exp);
4045 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4046 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4047 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4048 return expand_call (fn, target, target == const0_rtx);
4050 #endif
4051 return NULL_RTX;
4054 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4055 NULL_RTX if we failed; in that case the caller should emit a normal call.
4056 Otherwise try to get the result in TARGET, if convenient. */
4058 static rtx
4059 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4060 ATTRIBUTE_UNUSED enum machine_mode mode)
4062 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4064 if (!validate_arglist (exp,
4065 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4066 return NULL_RTX;
4068 /* If c_strlen can determine an expression for one of the string
4069 lengths, and it doesn't have side effects, then emit cmpstrnsi
4070 using length MIN(strlen(string)+1, arg3). */
4071 #ifdef HAVE_cmpstrnsi
4072 if (HAVE_cmpstrnsi)
4074 tree len, len1, len2;
4075 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4076 rtx result, insn;
4077 tree fndecl, fn;
4078 tree arg1 = CALL_EXPR_ARG (exp, 0);
4079 tree arg2 = CALL_EXPR_ARG (exp, 1);
4080 tree arg3 = CALL_EXPR_ARG (exp, 2);
4082 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4083 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4084 enum machine_mode insn_mode
4085 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4087 len1 = c_strlen (arg1, 1);
4088 len2 = c_strlen (arg2, 1);
/* Add one to each known string length to cover the terminating NUL.  */
4090 if (len1)
4091 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4092 if (len2)
4093 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4095 /* If we don't have a constant length for the first, use the length
4096 of the second, if we know it. We don't require a constant for
4097 this case; some cost analysis could be done if both are available
4098 but neither is constant. For now, assume they're equally cheap,
4099 unless one has side effects. If both strings have constant lengths,
4100 use the smaller. */
4102 if (!len1)
4103 len = len2;
4104 else if (!len2)
4105 len = len1;
4106 else if (TREE_SIDE_EFFECTS (len1))
4107 len = len2;
4108 else if (TREE_SIDE_EFFECTS (len2))
4109 len = len1;
4110 else if (TREE_CODE (len1) != INTEGER_CST)
4111 len = len2;
4112 else if (TREE_CODE (len2) != INTEGER_CST)
4113 len = len1;
4114 else if (tree_int_cst_lt (len1, len2))
4115 len = len1;
4116 else
4117 len = len2;
4119 /* If both arguments have side effects, we cannot optimize. */
4120 if (!len || TREE_SIDE_EFFECTS (len))
4121 return NULL_RTX;
4123 /* The actual new length parameter is MIN(len,arg3). */
4124 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4125 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4127 /* If we don't have POINTER_TYPE, call the function. */
4128 if (arg1_align == 0 || arg2_align == 0)
4129 return NULL_RTX;
4131 /* Make a place to write the result of the instruction. */
4132 result = target;
4133 if (! (result != 0
4134 && REG_P (result) && GET_MODE (result) == insn_mode
4135 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4136 result = gen_reg_rtx (insn_mode);
4138 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4139 arg1 = builtin_save_expr (arg1);
4140 arg2 = builtin_save_expr (arg2);
4141 len = builtin_save_expr (len);
4143 arg1_rtx = get_memory_rtx (arg1, len);
4144 arg2_rtx = get_memory_rtx (arg2, len);
4145 arg3_rtx = expand_normal (len);
4146 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4147 GEN_INT (MIN (arg1_align, arg2_align)));
4148 if (insn)
4150 emit_insn (insn);
4152 /* Return the value in the proper mode for this function. */
4153 mode = TYPE_MODE (TREE_TYPE (exp));
4154 if (GET_MODE (result) == mode)
4155 return result;
4156 if (target == 0)
4157 return convert_to_mode (mode, result, 0);
4158 convert_move (target, result, 0);
4159 return target;
4162 /* Expand the library call ourselves using a stabilized argument
4163 list to avoid re-evaluating the function's arguments twice. */
4164 fndecl = get_callee_fndecl (exp);
4165 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4166 arg1, arg2, len);
4167 gcc_assert (TREE_CODE (fn) == CALL_EXPR)
4168 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4169 return expand_call (fn, target, target == const0_rtx);
4171 #endif
4172 return NULL_RTX;
4175 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4176 if that's convenient. */
4179 expand_builtin_saveregs (void)
4181 rtx val, seq;
4183 /* Don't do __builtin_saveregs more than once in a function.
4184 Save the result of the first call and reuse it. */
4185 if (saveregs_value != 0)
4186 return saveregs_value;
4188 /* When this function is called, it means that registers must be
4189 saved on entry to this function. So we migrate the call to the
4190 first insn of this function. */
/* Capture the target's register-save insns in a detached sequence so
   they can be re-emitted at function entry below.  */
4192 start_sequence ();
4194 /* Do whatever the machine needs done in this case. */
4195 val = targetm.calls.expand_builtin_saveregs ();
4197 seq = get_insns ();
4198 end_sequence ();
/* Cache the result so any later __builtin_saveregs in this function
   reuses it (see the early-return above).  */
4200 saveregs_value = val;
4202 /* Put the insns after the NOTE that starts the function. If this
4203 is inside a start_sequence, make the outer-level insn chain current, so
4204 the code is placed at the start of the function. */
4205 push_topmost_sequence ();
4206 emit_insn_after (seq, entry_of_function ());
4207 pop_topmost_sequence ();
4209 return val;
4212 /* Expand a call to __builtin_next_arg. */
4214 static rtx
4215 expand_builtin_next_arg (void)
4217 /* Checking arguments is already done in fold_builtin_next_arg
4218 that must be called before this function. */
4219 return expand_binop (ptr_mode, add_optab,
4220 crtl->args.internal_arg_pointer,
4221 crtl->args.arg_offset_rtx,
4222 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4225 /* Make it easier for the backends by protecting the valist argument
4226 from multiple evaluations. */
4228 static tree
4229 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
/* Wrap VALIST so backends can evaluate it more than once without
   re-running its side effects.  NEEDS_LVALUE nonzero means the caller
   will write through the result (e.g. va_start/va_copy destination).  */
4231 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4233 /* The current way of determining the type of valist is completely
4234 bogus. We should have the information on the va builtin instead. */
4235 if (!vatype)
4236 vatype = targetm.fn_abi_va_list (cfun->decl);
4238 if (TREE_CODE (vatype) == ARRAY_TYPE)
4240 if (TREE_SIDE_EFFECTS (valist))
4241 valist = save_expr (valist);
4243 /* For this case, the backends will be expecting a pointer to
4244 vatype, but it's possible we've actually been given an array
4245 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4246 So fix it. */
4247 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4249 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4250 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4253 else
/* Non-array va_list: take the address, stabilize that, then deref via
   MEM_REF so repeated uses share one evaluation of VALIST.  */
4255 tree pt = build_pointer_type (vatype);
4257 if (! needs_lvalue)
4259 if (! TREE_SIDE_EFFECTS (valist))
4260 return valist;
4262 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4263 TREE_SIDE_EFFECTS (valist) = 1;
4266 if (TREE_SIDE_EFFECTS (valist))
4267 valist = save_expr (valist);
4268 valist = fold_build2_loc (loc, MEM_REF,
4269 vatype, valist, build_int_cst (pt, 0));
4272 return valist;
4275 /* The "standard" definition of va_list is void*. */
4277 tree
4278 std_build_builtin_va_list (void)
/* Default TARGET_BUILD_BUILTIN_VA_LIST hook: va_list is plain 'void *'.  */
4280 return ptr_type_node;
4283 /* The "standard" abi va_list is va_list_type_node. */
4285 tree
4286 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
/* Default TARGET_FN_ABI_VA_LIST hook: same va_list type for every
   function, regardless of FNDECL's ABI.  */
4288 return va_list_type_node;
4291 /* The "standard" type of va_list is va_list_type_node. */
4293 tree
4294 std_canonical_va_list_type (tree type)
/* Return va_list_type_node if TYPE denotes (possibly via a pointer or
   array decay) the canonical va_list type; otherwise NULL_TREE.  */
4296 tree wtype, htype;
/* Strip one level of indirection so a 'va_list *' argument compares
   against the underlying va_list type.  */
4298 if (INDIRECT_REF_P (type))
4299 type = TREE_TYPE (type);
4300 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4301 type = TREE_TYPE (type);
4302 wtype = va_list_type_node;
4303 htype = type;
4304 /* Treat structure va_list types. */
4305 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4306 htype = TREE_TYPE (htype);
4307 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4309 /* If va_list is an array type, the argument may have decayed
4310 to a pointer type, e.g. by being passed to another function.
4311 In that case, unwrap both types so that we can compare the
4312 underlying records. */
4313 if (TREE_CODE (htype) == ARRAY_TYPE
4314 || POINTER_TYPE_P (htype))
4316 wtype = TREE_TYPE (wtype);
4317 htype = TREE_TYPE (htype);
4320 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4321 return va_list_type_node;
4323 return NULL_TREE;
4326 /* The "standard" implementation of va_start: just assign `nextarg' to
4327 the variable. */
4329 void
4330 std_expand_builtin_va_start (tree valist, rtx nextarg)
4332 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4333 convert_move (va_r, nextarg, 0);
4336 /* Expand EXP, a call to __builtin_va_start. */
4338 static rtx
4339 expand_builtin_va_start (tree exp)
4341 rtx nextarg;
4342 tree valist;
4343 location_t loc = EXPR_LOCATION (exp);
/* va_start needs both the va_list and the last named parameter.  */
4345 if (call_expr_nargs (exp) < 2)
4347 error_at (loc, "too few arguments to function %<va_start%>");
4348 return const0_rtx;
/* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
   return means an error was reported, so expand to nothing.  */
4351 if (fold_builtin_next_arg (exp, true))
4352 return const0_rtx;
4354 nextarg = expand_builtin_next_arg ();
4355 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target's va_start expander when one is provided.  */
4357 if (targetm.expand_builtin_va_start)
4358 targetm.expand_builtin_va_start (valist, nextarg);
4359 else
4360 std_expand_builtin_va_start (valist, nextarg);
4362 return const0_rtx;
4365 /* Expand EXP, a call to __builtin_va_end. */
4367 static rtx
4368 expand_builtin_va_end (tree exp)
4370 tree valist = CALL_EXPR_ARG (exp, 0);
4372 /* Evaluate for side effects, if needed. I hate macros that don't
4373 do that. */
4374 if (TREE_SIDE_EFFECTS (valist))
4375 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4377 return const0_rtx;
4380 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4381 builtin rather than just as an assignment in stdarg.h because of the
4382 nastiness of array-type va_list types. */
4384 static rtx
4385 expand_builtin_va_copy (tree exp)
4387 tree dst, src, t;
4388 location_t loc = EXPR_LOCATION (exp);
4390 dst = CALL_EXPR_ARG (exp, 0);
4391 src = CALL_EXPR_ARG (exp, 1);
/* Destination is written (lvalue); source is only read.  */
4393 dst = stabilize_va_list_loc (loc, dst, 1);
4394 src = stabilize_va_list_loc (loc, src, 0);
4396 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar va_list: a simple assignment suffices.  Array va_list (below)
   needs a block copy of the whole structure.  */
4398 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4400 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4401 TREE_SIDE_EFFECTS (t) = 1;
4402 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4404 else
4406 rtx dstb, srcb, size;
4408 /* Evaluate to pointers. */
4409 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4410 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4411 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4412 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4414 dstb = convert_memory_address (Pmode, dstb);
4415 srcb = convert_memory_address (Pmode, srcb);
4417 /* "Dereference" to BLKmode memories. */
4418 dstb = gen_rtx_MEM (BLKmode, dstb);
4419 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4420 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4421 srcb = gen_rtx_MEM (BLKmode, srcb);
4422 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4423 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4425 /* Copy. */
4426 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4429 return const0_rtx;
4432 /* Expand a call to one of the builtin functions __builtin_frame_address or
4433 __builtin_return_address. */
4435 static rtx
4436 expand_builtin_frame_address (tree fndecl, tree exp)
4438 /* The argument must be a nonnegative integer constant.
4439 It counts the number of frames to scan up the stack.
4440 The value is the return address saved in that frame. */
4441 if (call_expr_nargs (exp) == 0)
4442 /* Warning about missing arg was already issued. */
4443 return const0_rtx;
4444 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4447 error ("invalid argument to %<__builtin_frame_address%>");
4448 else
4449 error ("invalid argument to %<__builtin_return_address%>");
4450 return const0_rtx;
4452 else
4454 rtx tem
4455 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4456 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4458 /* Some ports cannot access arbitrary stack frames. */
4459 if (tem == NULL)
4461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4462 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4463 else
4464 warning (0, "unsupported argument to %<__builtin_return_address%>");
4465 return const0_rtx;
4468 /* For __builtin_frame_address, return what we've got. */
4469 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4470 return tem;
/* For __builtin_return_address, copy a non-register, non-constant
   result into a register before returning it.  */
4472 if (!REG_P (tem)
4473 && ! CONSTANT_P (tem))
4474 tem = copy_addr_to_reg (tem);
4475 return tem;
4479 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4480 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4481 is the same as for allocate_dynamic_stack_space. */
4483 static rtx
4484 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4486 rtx op0;
4487 rtx result;
4488 bool valid_arglist;
4489 unsigned int align;
/* __builtin_alloca_with_align carries a second, alignment argument;
   plain alloca does not.  */
4490 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4491 == BUILT_IN_ALLOCA_WITH_ALIGN);
4493 valid_arglist
4494 = (alloca_with_align
4495 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4496 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4498 if (!valid_arglist)
4499 return NULL_RTX;
4501 /* Compute the argument. */
4502 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4504 /* Compute the alignment. */
4505 align = (alloca_with_align
4506 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4507 : BIGGEST_ALIGNMENT);
4509 /* Allocate the desired space. */
4510 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate)
4511 result = convert_memory_address (ptr_mode, result);
4513 return result;
4516 /* Expand a call to bswap builtin in EXP.
4517 Return NULL_RTX if a normal call should be emitted rather than expanding the
4518 function in-line. If convenient, the result should be placed in TARGET.
4519 SUBTARGET may be used as the target for computing one of EXP's operands. */
4521 static rtx
4522 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4523 rtx subtarget)
4525 tree arg;
4526 rtx op0;
4528 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4529 return NULL_RTX;
4531 arg = CALL_EXPR_ARG (exp, 0);
/* Reuse SUBTARGET for the operand only if its mode already matches.  */
4532 op0 = expand_expr (arg,
4533 subtarget && GET_MODE (subtarget) == target_mode
4534 ? subtarget : NULL_RTX,
4535 target_mode, EXPAND_NORMAL);
4536 if (GET_MODE (op0) != target_mode)
4537 op0 = convert_to_mode (target_mode, op0, 1);
4539 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4541 gcc_assert (target);
4543 return convert_to_mode (target_mode, target, 1);
4546 /* Expand a call to a unary builtin in EXP.
4547 Return NULL_RTX if a normal call should be emitted rather than expanding the
4548 function in-line. If convenient, the result should be placed in TARGET.
4549 SUBTARGET may be used as the target for computing one of EXP's operands. */
4551 static rtx
4552 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4553 rtx subtarget, optab op_optab)
4555 rtx op0;
4557 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4558 return NULL_RTX;
4560 /* Compute the argument. */
4561 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4562 (subtarget
4563 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4564 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4565 VOIDmode, EXPAND_NORMAL);
4566 /* Compute op, into TARGET if possible.
4567 Set TARGET to wherever the result comes back. */
/* The final argument of expand_unop is the "unsignedp" flag; clrsb is
   the one op here expanded as signed.  */
4568 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4569 op_optab, op0, target, op_optab != clrsb_optab);
4570 gcc_assert (target);
4572 return convert_to_mode (target_mode, target, 0);
4575 /* Expand a call to __builtin_expect. We just return our argument
4576 as the builtin_expect semantic should've been already executed by
4577 tree branch prediction pass. */
4579 static rtx
4580 expand_builtin_expect (tree exp, rtx target)
4582 tree arg;
4584 if (call_expr_nargs (exp) < 2)
4585 return const0_rtx;
4586 arg = CALL_EXPR_ARG (exp, 0);
4588 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4589 /* When guessing was done, the hints should be already stripped away. */
4590 gcc_assert (!flag_guess_branch_prob
4591 || optimize == 0 || seen_error ());
4592 return target;
4595 /* Expand a call to __builtin_assume_aligned. We just return our first
4596 argument as the builtin_assume_aligned semantic should've been already
4597 executed by CCP. */
4599 static rtx
4600 expand_builtin_assume_aligned (tree exp, rtx target)
4602 if (call_expr_nargs (exp) < 2)
4603 return const0_rtx;
4604 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4605 EXPAND_NORMAL);
/* The alignment (and optional misalignment) operands must be free of
   side effects by this point; CCP should have folded the builtin away
   otherwise.  */
4606 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4607 && (call_expr_nargs (exp) < 3
4608 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4609 return target;
4612 void
4613 expand_builtin_trap (void)
/* Emit code that stops execution: the target's trap insn when one
   exists, otherwise a call to abort via the library.  Always follow
   with a barrier, since control does not continue past a trap.  */
4615 #ifdef HAVE_trap
4616 if (HAVE_trap)
4618 rtx insn = emit_insn (gen_trap ());
4619 /* For trap insns when not accumulating outgoing args force
4620 REG_ARGS_SIZE note to prevent crossjumping of calls with
4621 different args sizes. */
4622 if (!ACCUMULATE_OUTGOING_ARGS)
4623 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4625 else
4626 #endif
4627 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4628 emit_barrier ();
4631 /* Expand a call to __builtin_unreachable. We do nothing except emit
4632 a barrier saying that control flow will not pass here.
4634 It is the responsibility of the program being compiled to ensure
4635 that control flow does never reach __builtin_unreachable. */
4636 static void
4637 expand_builtin_unreachable (void)
/* A barrier marks the spot as one control flow never passes.  */
4639 emit_barrier ();
4642 /* Expand EXP, a call to fabs, fabsf or fabsl.
4643 Return NULL_RTX if a normal call should be emitted rather than expanding
4644 the function inline. If convenient, the result should be placed
4645 in TARGET. SUBTARGET may be used as the target for computing
4646 the operand. */
4648 static rtx
4649 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4651 enum machine_mode mode;
4652 tree arg;
4653 rtx op0;
4655 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4656 return NULL_RTX;
/* Stabilize the argument and store the stabilized form back into the
   call so any later re-expansion sees the same saved expression.  */
4658 arg = CALL_EXPR_ARG (exp, 0);
4659 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4660 mode = TYPE_MODE (TREE_TYPE (arg));
4661 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4662 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4665 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4666 Return NULL if a normal call should be emitted rather than expanding the
4667 function inline. If convenient, the result should be placed in TARGET.
4668 SUBTARGET may be used as the target for computing the operand. */
4670 static rtx
4671 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4673 rtx op0, op1;
4674 tree arg;
4676 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4677 return NULL_RTX;
4679 arg = CALL_EXPR_ARG (exp, 0);
4680 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4682 arg = CALL_EXPR_ARG (exp, 1);
4683 op1 = expand_normal (arg);
4685 return expand_copysign (op0, op1, target);
4688 /* Create a new constant string literal and return a char* pointer to it.
4689 The STRING_CST value is the LEN characters at STR. */
4690 tree
4691 build_string_literal (int len, const char *str)
4693 tree t, elem, index, type;
/* Build the STRING_CST and give it a const char[LEN] array type.  */
4695 t = build_string (len, str);
4696 elem = build_type_variant (char_type_node, 1, 0);
4697 index = build_index_type (size_int (len - 1));
4698 type = build_array_type (elem, index);
4699 TREE_TYPE (t) = type;
4700 TREE_CONSTANT (t) = 1;
4701 TREE_READONLY (t) = 1;
4702 TREE_STATIC (t) = 1;
/* Return &array[0] as a 'const char *' pointer to the literal.  */
4704 type = build_pointer_type (elem);
4705 t = build1 (ADDR_EXPR, type,
4706 build4 (ARRAY_REF, elem,
4707 t, integer_zero_node, NULL_TREE, NULL_TREE));
4708 return t;
4711 /* Expand a call to __builtin___clear_cache. */
4713 static rtx
4714 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
/* Three cases: no clear_cache insn but a libgcc implementation exists
   (emit the normal call); no insn and libgcc's version is a no-op
   (emit nothing); or the target has a clear_cache insn (use it).  */
4716 #ifndef HAVE_clear_cache
4717 #ifdef CLEAR_INSN_CACHE
4718 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4719 does something. Just do the default expansion to a call to
4720 __clear_cache(). */
4721 return NULL_RTX;
4722 #else
4723 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4724 does nothing. There is no need to call it. Do nothing. */
4725 return const0_rtx;
4726 #endif /* CLEAR_INSN_CACHE */
4727 #else
4728 /* We have a "clear_cache" insn, and it will handle everything. */
4729 tree begin, end;
4730 rtx begin_rtx, end_rtx;
4732 /* We must not expand to a library call. If we did, any
4733 fallback library function in libgcc that might contain a call to
4734 __builtin___clear_cache() would recurse infinitely. */
4735 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4737 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4738 return const0_rtx;
4741 if (HAVE_clear_cache)
4743 struct expand_operand ops[2];
4745 begin = CALL_EXPR_ARG (exp, 0);
4746 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4748 end = CALL_EXPR_ARG (exp, 1);
4749 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4751 create_address_operand (&ops[0], begin_rtx);
4752 create_address_operand (&ops[1], end_rtx);
4753 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4754 return const0_rtx;
4756 return const0_rtx;
4757 #endif /* HAVE_clear_cache */
4760 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4762 static rtx
4763 round_trampoline_addr (rtx tramp)
4765 rtx temp, addend, mask;
4767 /* If we don't need too much alignment, we'll have been guaranteed
4768 proper alignment by get_trampoline_type. */
4769 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4770 return tramp;
4772 /* Round address up to desired boundary. */
4773 temp = gen_reg_rtx (Pmode);
4774 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4775 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4777 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4778 temp, 0, OPTAB_LIB_WIDEN);
4779 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4780 temp, 0, OPTAB_LIB_WIDEN);
4782 return tramp;
4785 static rtx
4786 expand_builtin_init_trampoline (tree exp, bool onstack)
/* Expand __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false): fill the memory at
   the first argument with trampoline code jumping to the nested
   function (second argument) with static chain (third argument).  */
4788 tree t_tramp, t_func, t_chain;
4789 rtx m_tramp, r_tramp, r_chain, tmp;
4791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4792 POINTER_TYPE, VOID_TYPE))
4793 return NULL_RTX;
4795 t_tramp = CALL_EXPR_ARG (exp, 0);
4796 t_func = CALL_EXPR_ARG (exp, 1);
4797 t_chain = CALL_EXPR_ARG (exp, 2);
4799 r_tramp = expand_normal (t_tramp);
4800 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4801 MEM_NOTRAP_P (m_tramp) = 1;
4803 /* If ONSTACK, the TRAMP argument should be the address of a field
4804 within the local function's FRAME decl. Either way, let's see if
4805 we can fill in the MEM_ATTRs for this memory. */
4806 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4807 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4809 /* Creator of a heap trampoline is responsible for making sure the
4810 address is aligned to at least STACK_BOUNDARY. Normally malloc
4811 will ensure this anyhow. */
4812 tmp = round_trampoline_addr (r_tramp);
4813 if (tmp != r_tramp)
4815 m_tramp = change_address (m_tramp, BLKmode, tmp);
4816 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4817 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4820 /* The FUNC argument should be the address of the nested function.
4821 Extract the actual function decl to pass to the hook. */
4822 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4823 t_func = TREE_OPERAND (t_func, 0);
4824 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4826 r_chain = expand_normal (t_chain);
4828 /* Generate insns to initialize the trampoline. */
4829 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Only stack trampolines are recorded/warned about; heap trampolines
   are the caller's responsibility.  */
4831 if (onstack)
4833 trampolines_created = 1;
4835 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4836 "trampoline generated for nested function %qD", t_func);
4839 return const0_rtx;
4842 static rtx
4843 expand_builtin_adjust_trampoline (tree exp)
4845 rtx tramp;
4847 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4848 return NULL_RTX;
4850 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4851 tramp = round_trampoline_addr (tramp);
4852 if (targetm.calls.trampoline_adjust_address)
4853 tramp = targetm.calls.trampoline_adjust_address (tramp);
4855 return tramp;
4858 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4859 function. The function first checks whether the back end provides
4860 an insn to implement signbit for the respective mode. If not, it
4861 checks whether the floating point format of the value is such that
4862 the sign bit can be extracted. If that is not the case, the
4863 function returns NULL_RTX to indicate that a normal call should be
4864 emitted rather than expanding the function in-line. EXP is the
4865 expression that is a call to the builtin function; if convenient,
4866 the result should be placed in TARGET. */
4867 static rtx
4868 expand_builtin_signbit (tree exp, rtx target)
4870 const struct real_format *fmt;
4871 enum machine_mode fmode, imode, rmode;
4872 tree arg;
4873 int word, bitpos;
4874 enum insn_code icode;
4875 rtx temp;
4876 location_t loc = EXPR_LOCATION (exp);
4878 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4879 return NULL_RTX;
4881 arg = CALL_EXPR_ARG (exp, 0);
4882 fmode = TYPE_MODE (TREE_TYPE (arg));
4883 rmode = TYPE_MODE (TREE_TYPE (exp));
4884 fmt = REAL_MODE_FORMAT (fmode);
/* Stabilize ARG: the "ARG < 0.0" fallback below re-expands it.  */
4886 arg = builtin_save_expr (arg);
4888 /* Expand the argument yielding a RTX expression. */
4889 temp = expand_normal (arg);
4891 /* Check if the back end provides an insn that handles signbit for the
4892 argument's mode. */
4893 icode = optab_handler (signbit_optab, fmode);
4894 if (icode != CODE_FOR_nothing)
4896 rtx last = get_last_insn ();
4897 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4898 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4899 return target;
/* The insn did not match after all; discard any partial emission.  */
4900 delete_insns_since (last);
4903 /* For floating point formats without a sign bit, implement signbit
4904 as "ARG < 0.0". */
4905 bitpos = fmt->signbit_ro;
4906 if (bitpos < 0)
4908 /* But we can't do this if the format supports signed zero. */
4909 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4910 return NULL_RTX;
4912 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4913 build_real (TREE_TYPE (arg), dconst0));
4914 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Otherwise extract the sign bit manually by viewing the value as an
   integer: either the whole value in an integer mode of equal size, or
   the word that contains the sign bit for multi-word FP modes.  */
4917 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4919 imode = int_mode_for_mode (fmode);
4920 if (imode == BLKmode)
4921 return NULL_RTX;
4922 temp = gen_lowpart (imode, temp);
4924 else
4926 imode = word_mode;
4927 /* Handle targets with different FP word orders. */
4928 if (FLOAT_WORDS_BIG_ENDIAN)
4929 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4930 else
4931 word = bitpos / BITS_PER_WORD;
4932 temp = operand_subword_force (temp, word, fmode);
4933 bitpos = bitpos % BITS_PER_WORD;
4936 /* Force the intermediate word_mode (or narrower) result into a
4937 register. This avoids attempting to create paradoxical SUBREGs
4938 of floating point modes below. */
4939 temp = force_reg (imode, temp);
4941 /* If the bitpos is within the "result mode" lowpart, the operation
4942 can be implement with a single bitwise AND. Otherwise, we need
4943 a right shift and an AND. */
4945 if (bitpos < GET_MODE_BITSIZE (rmode))
4947 double_int mask = double_int_zero.set_bit (bitpos);
4949 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4950 temp = gen_lowpart (rmode, temp);
4951 temp = expand_binop (rmode, and_optab, temp,
4952 immed_double_int_const (mask, rmode),
4953 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4955 else
4957 /* Perform a logical right shift to place the signbit in the least
4958 significant bit, then truncate the result to the desired mode
4959 and mask just this bit. */
4960 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4961 temp = gen_lowpart (rmode, temp);
4962 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4963 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4966 return temp;
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  /* Map the builtin to its libgcov wrapper name.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern FUNCTION_DECL for the wrapper with the same type as
     the original builtin; default visibility so it binds to libgcov.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  /* Rebuild the call expression around the wrapper decl, keeping all
     original arguments, and expand it as an ordinary call.  */
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
5036 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5037 the pointer in these functions is void*, the tree optimizers may remove
5038 casts. The mode computed in expand_builtin isn't reliable either, due
5039 to __sync_bool_compare_and_swap.
5041 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5042 group of builtins. This gives us log2 of the mode size. */
5044 static inline enum machine_mode
5045 get_builtin_sync_mode (int fcode_diff)
5047 /* The size is not negotiable, so ask not to get BLKmode in return
5048 if the target indicates that a smaller size would be better. */
5049 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5052 /* Expand the memory expression LOC and return the appropriate memory operand
5053 for the builtin_sync operations. */
5055 static rtx
5056 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5058 rtx addr, mem;
5060 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5061 addr = convert_memory_address (Pmode, addr);
5063 /* Note that we explicitly do not want any alias information for this
5064 memory, so that we kill all other live memories. Otherwise we don't
5065 satisfy the full barrier semantics of the intrinsic. */
5066 mem = validize_mem (gen_rtx_MEM (mode, addr));
5068 /* The alignment needs to be at least according to that of the mode. */
5069 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5070 get_pointer_alignment (loc)));
5071 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5072 MEM_VOLATILE_P (mem) = 1;
5074 return mem;
5077 /* Make sure an argument is in the right mode.
5078 EXP is the tree argument.
5079 MODE is the mode it should be in. */
5081 static rtx
5082 expand_expr_force_mode (tree exp, enum machine_mode mode)
5084 rtx val;
5085 enum machine_mode old_mode;
5087 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5089 of CONST_INTs, where we know the old_mode only from the call argument. */
5091 old_mode = GET_MODE (val);
5092 if (old_mode == VOIDmode)
5093 old_mode = TYPE_MODE (TREE_TYPE (exp));
5094 val = convert_modes (mode, old_mode, val, 1);
5095 return val;
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Each of the two NAND families is warned about at most once per
	 compilation, hence the function-static flags.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* All __sync operations have sequentially-consistent semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
5162 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5163 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5164 true if this is the boolean form. TARGET is a place for us to store the
5165 results; this is NOT optional if IS_BOOL is true. */
5167 static rtx
5168 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5169 bool is_bool, rtx target)
5171 rtx old_val, new_val, mem;
5172 rtx *pbool, *poval;
5174 /* Expand the operands. */
5175 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5176 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5177 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5179 pbool = poval = NULL;
5180 if (target != const0_rtx)
5182 if (is_bool)
5183 pbool = &target;
5184 else
5185 poval = &target;
5187 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5188 false, MEMMODEL_SEQ_CST,
5189 MEMMODEL_SEQ_CST))
5190 return NULL_RTX;
5192 return target;
5195 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5196 general form is actually an atomic exchange, and some targets only
5197 support a reduced form with the second argument being a constant 1.
5198 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5199 the results. */
5201 static rtx
5202 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5203 rtx target)
5205 rtx val, mem;
5207 /* Expand the operands. */
5208 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5209 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5211 return expand_sync_lock_test_and_set (target, mem, val);
5214 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5216 static void
5217 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5219 rtx mem;
5221 /* Expand the operands. */
5222 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5224 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5227 /* Given an integer representing an ``enum memmodel'', verify its
5228 correctness and return the memory model enum. */
5230 static enum memmodel
5231 get_memmodel (tree exp)
5233 rtx op;
5234 unsigned HOST_WIDE_INT val;
5236 /* If the parameter is not a constant, it's a run time value so we'll just
5237 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5238 if (TREE_CODE (exp) != INTEGER_CST)
5239 return MEMMODEL_SEQ_CST;
5241 op = expand_normal (exp);
5243 val = INTVAL (op);
5244 if (targetm.memmodel_check)
5245 val = targetm.memmodel_check (val);
5246 else if (val & ~MEMMODEL_MASK)
5248 warning (OPT_Winvalid_memory_model,
5249 "Unknown architecture specifier in memory model to builtin.");
5250 return MEMMODEL_SEQ_CST;
5253 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5255 warning (OPT_Winvalid_memory_model,
5256 "invalid memory model argument to builtin");
5257 return MEMMODEL_SEQ_CST;
5260 return (enum memmodel) val;
5263 /* Expand the __atomic_exchange intrinsic:
5264 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5268 static rtx
5269 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5271 rtx val, mem;
5272 enum memmodel model;
5274 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5275 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5277 error ("invalid memory model for %<__atomic_exchange%>");
5278 return NULL_RTX;
5281 if (!flag_inline_atomics)
5282 return NULL_RTX;
5284 /* Expand the operands. */
5285 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5286 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5288 return expand_atomic_exchange (target, mem, val, model);
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval, label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering applies to the load-only path, so it may not
     contain a store component.  */
  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; materialize a MEM through it so we can both
     read the expected value and conditionally write the old value back.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
5365 /* Expand the __atomic_load intrinsic:
5366 TYPE __atomic_load (TYPE *object, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5370 static rtx
5371 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5373 rtx mem;
5374 enum memmodel model;
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5377 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5378 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5380 error ("invalid memory model for %<__atomic_load%>");
5381 return NULL_RTX;
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5387 /* Expand the operand. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5390 return expand_atomic_load (target, mem, model);
5394 /* Expand the __atomic_store intrinsic:
5395 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results. */
5399 static rtx
5400 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5402 rtx mem, val;
5403 enum memmodel model;
5405 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5406 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5407 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5408 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5410 error ("invalid memory model for %<__atomic_store%>");
5411 return NULL_RTX;
5414 if (!flag_inline_atomics)
5415 return NULL_RTX;
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5419 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5421 return expand_atomic_store (mem, val, model, false);
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect the
     CALL_EXPR's function operand to EXT_CALL's decl so expand_call emits
     a call to the library routine instead.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     The library routine returns the pre-operation value, so when the
     post-operation value is wanted, redo the operation on its result;
     for NOT (i.e. NAND) that is ~(ret & val).  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
/* Fallbacks so the code below compiles on targets without an
   atomic_clear insn; gen_atomic_clear is then never reached.  */
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire and acq_rel orderings are invalid.
     NOTE(review): the diagnostic names __atomic_store rather than
     __atomic_clear -- possibly deliberate since clear expands to a
     store, but worth confirming.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
5538 /* Expand an atomic test_and_set operation.
5539 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5540 EXP is the call expression. */
5542 static rtx
5543 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5545 rtx mem;
5546 enum memmodel model;
5547 enum machine_mode mode;
5549 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5550 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5551 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5553 return expand_atomic_test_and_set (target, mem, model);
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  /* Without a compile-time constant size there is no fold-time answer.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* Map the byte size to an integer mode and take that mode's natural
     alignment as the requirement for the lock-free routines.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
5610 /* Return true if the parameters to call EXP represent an object which will
5611 always generate lock free instructions. The first argument represents the
5612 size of the object, and the second parameter is a pointer to the object
5613 itself. If NULL is passed for the object, then the result is based on
5614 typical alignment for an object of the specified size. Otherwise return
5615 false. */
5617 static rtx
5618 expand_builtin_atomic_always_lock_free (tree exp)
5620 tree size;
5621 tree arg0 = CALL_EXPR_ARG (exp, 0);
5622 tree arg1 = CALL_EXPR_ARG (exp, 1);
5624 if (TREE_CODE (arg0) != INTEGER_CST)
5626 error ("non-constant argument 1 to __atomic_always_lock_free");
5627 return const0_rtx;
5630 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5631 if (size == boolean_true_node)
5632 return const1_rtx;
5633 return const0_rtx;
5636 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5637 is lock free on this architecture. */
5639 static tree
5640 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5642 if (!flag_inline_atomics)
5643 return NULL_TREE;
5645 /* If it isn't always lock free, don't generate a result. */
5646 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5647 return boolean_true_node;
5649 return NULL_TREE;
5652 /* Return true if the parameters to call EXP represent an object which will
5653 always generate lock free instructions. The first argument represents the
5654 size of the object, and the second parameter is a pointer to the object
5655 itself. If NULL is passed for the object, then the result is based on
5656 typical alignment for an object of the specified size. Otherwise return
5657 NULL*/
5659 static rtx
5660 expand_builtin_atomic_is_lock_free (tree exp)
5662 tree size;
5663 tree arg0 = CALL_EXPR_ARG (exp, 0);
5664 tree arg1 = CALL_EXPR_ARG (exp, 1);
5666 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5668 error ("non-integer argument 1 to __atomic_is_lock_free");
5669 return NULL_RTX;
5672 if (!flag_inline_atomics)
5673 return NULL_RTX;
5675 /* If the value is known at compile time, return the RTX for it. */
5676 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5677 if (size == boolean_true_node)
5678 return const1_rtx;
5680 return NULL_RTX;
5683 /* Expand the __atomic_thread_fence intrinsic:
5684 void __atomic_thread_fence (enum memmodel)
5685 EXP is the CALL_EXPR. */
5687 static void
5688 expand_builtin_atomic_thread_fence (tree exp)
5690 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5691 expand_mem_thread_fence (model);
5694 /* Expand the __atomic_signal_fence intrinsic:
5695 void __atomic_signal_fence (enum memmodel)
5696 EXP is the CALL_EXPR. */
5698 static void
5699 expand_builtin_atomic_signal_fence (tree exp)
5701 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5702 expand_mem_signal_fence (model);
5705 /* Expand the __sync_synchronize intrinsic. */
5707 static void
5708 expand_builtin_sync_synchronize (void)
5710 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5713 static rtx
5714 expand_builtin_thread_pointer (tree exp, rtx target)
5716 enum insn_code icode;
5717 if (!validate_arglist (exp, VOID_TYPE))
5718 return const0_rtx;
5719 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5720 if (icode != CODE_FOR_nothing)
5722 struct expand_operand op;
5723 /* If the target is not sutitable then create a new target. */
5724 if (target == NULL_RTX
5725 || !REG_P (target)
5726 || GET_MODE (target) != Pmode)
5727 target = gen_reg_rtx (Pmode);
5728 create_output_operand (&op, target, Pmode);
5729 expand_insn (icode, 1, &op);
5730 return target;
5732 error ("__builtin_thread_pointer is not supported on this target");
5733 return const0_rtx;
5736 static void
5737 expand_builtin_set_thread_pointer (tree exp)
5739 enum insn_code icode;
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5741 return;
5742 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5743 if (icode != CODE_FOR_nothing)
5745 struct expand_operand op;
5746 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5747 Pmode, EXPAND_NORMAL);
5748 create_input_operand (&op, val, Pmode);
5749 expand_insn (icode, 1, &op);
5750 return;
5752 error ("__builtin_set_thread_pointer is not supported on this target");
5756 /* Emit code to restore the current value of stack. */
5758 static void
5759 expand_stack_restore (tree var)
5761 rtx prev, sa = expand_normal (var);
5763 sa = convert_memory_address (Pmode, sa);
5765 prev = get_last_insn ();
5766 emit_stack_restore (SAVE_BLOCK, sa);
5767 fixup_args_size_notes (prev, get_last_insn (), 0);
5771 /* Emit code to save the current value of stack. */
5773 static rtx
5774 expand_stack_save (void)
5776 rtx ret = NULL_RTX;
5778 do_pending_stack_adjust ();
5779 emit_stack_save (SAVE_BLOCK, &ret);
5780 return ret;
5783 /* Expand an expression EXP that calls a built-in function,
5784 with result going to TARGET if that's convenient
5785 (and in mode MODE if that's convenient).
5786 SUBTARGET may be used as the target for computing one of EXP's operands.
5787 IGNORE is nonzero if the value is to be ignored. */
5790 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5791 int ignore)
5793 tree fndecl = get_callee_fndecl (exp);
5794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5795 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5796 int flags;
5798 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5799 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5801 /* When not optimizing, generate calls to library functions for a certain
5802 set of builtins. */
5803 if (!optimize
5804 && !called_as_built_in (fndecl)
5805 && fcode != BUILT_IN_FORK
5806 && fcode != BUILT_IN_EXECL
5807 && fcode != BUILT_IN_EXECV
5808 && fcode != BUILT_IN_EXECLP
5809 && fcode != BUILT_IN_EXECLE
5810 && fcode != BUILT_IN_EXECVP
5811 && fcode != BUILT_IN_EXECVE
5812 && fcode != BUILT_IN_ALLOCA
5813 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5814 && fcode != BUILT_IN_FREE)
5815 return expand_call (exp, target, ignore);
5817 /* The built-in function expanders test for target == const0_rtx
5818 to determine whether the function's result will be ignored. */
5819 if (ignore)
5820 target = const0_rtx;
5822 /* If the result of a pure or const built-in function is ignored, and
5823 none of its arguments are volatile, we can avoid expanding the
5824 built-in call and just evaluate the arguments for side-effects. */
5825 if (target == const0_rtx
5826 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5827 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5829 bool volatilep = false;
5830 tree arg;
5831 call_expr_arg_iterator iter;
5833 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5834 if (TREE_THIS_VOLATILE (arg))
5836 volatilep = true;
5837 break;
5840 if (! volatilep)
5842 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5843 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5844 return const0_rtx;
5848 switch (fcode)
5850 CASE_FLT_FN (BUILT_IN_FABS):
5851 case BUILT_IN_FABSD32:
5852 case BUILT_IN_FABSD64:
5853 case BUILT_IN_FABSD128:
5854 target = expand_builtin_fabs (exp, target, subtarget);
5855 if (target)
5856 return target;
5857 break;
5859 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5860 target = expand_builtin_copysign (exp, target, subtarget);
5861 if (target)
5862 return target;
5863 break;
5865 /* Just do a normal library call if we were unable to fold
5866 the values. */
5867 CASE_FLT_FN (BUILT_IN_CABS):
5868 break;
5870 CASE_FLT_FN (BUILT_IN_EXP):
5871 CASE_FLT_FN (BUILT_IN_EXP10):
5872 CASE_FLT_FN (BUILT_IN_POW10):
5873 CASE_FLT_FN (BUILT_IN_EXP2):
5874 CASE_FLT_FN (BUILT_IN_EXPM1):
5875 CASE_FLT_FN (BUILT_IN_LOGB):
5876 CASE_FLT_FN (BUILT_IN_LOG):
5877 CASE_FLT_FN (BUILT_IN_LOG10):
5878 CASE_FLT_FN (BUILT_IN_LOG2):
5879 CASE_FLT_FN (BUILT_IN_LOG1P):
5880 CASE_FLT_FN (BUILT_IN_TAN):
5881 CASE_FLT_FN (BUILT_IN_ASIN):
5882 CASE_FLT_FN (BUILT_IN_ACOS):
5883 CASE_FLT_FN (BUILT_IN_ATAN):
5884 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5885 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5886 because of possible accuracy problems. */
5887 if (! flag_unsafe_math_optimizations)
5888 break;
5889 CASE_FLT_FN (BUILT_IN_SQRT):
5890 CASE_FLT_FN (BUILT_IN_FLOOR):
5891 CASE_FLT_FN (BUILT_IN_CEIL):
5892 CASE_FLT_FN (BUILT_IN_TRUNC):
5893 CASE_FLT_FN (BUILT_IN_ROUND):
5894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5895 CASE_FLT_FN (BUILT_IN_RINT):
5896 target = expand_builtin_mathfn (exp, target, subtarget);
5897 if (target)
5898 return target;
5899 break;
5901 CASE_FLT_FN (BUILT_IN_FMA):
5902 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5903 if (target)
5904 return target;
5905 break;
5907 CASE_FLT_FN (BUILT_IN_ILOGB):
5908 if (! flag_unsafe_math_optimizations)
5909 break;
5910 CASE_FLT_FN (BUILT_IN_ISINF):
5911 CASE_FLT_FN (BUILT_IN_FINITE):
5912 case BUILT_IN_ISFINITE:
5913 case BUILT_IN_ISNORMAL:
5914 target = expand_builtin_interclass_mathfn (exp, target);
5915 if (target)
5916 return target;
5917 break;
5919 CASE_FLT_FN (BUILT_IN_ICEIL):
5920 CASE_FLT_FN (BUILT_IN_LCEIL):
5921 CASE_FLT_FN (BUILT_IN_LLCEIL):
5922 CASE_FLT_FN (BUILT_IN_LFLOOR):
5923 CASE_FLT_FN (BUILT_IN_IFLOOR):
5924 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5925 target = expand_builtin_int_roundingfn (exp, target);
5926 if (target)
5927 return target;
5928 break;
5930 CASE_FLT_FN (BUILT_IN_IRINT):
5931 CASE_FLT_FN (BUILT_IN_LRINT):
5932 CASE_FLT_FN (BUILT_IN_LLRINT):
5933 CASE_FLT_FN (BUILT_IN_IROUND):
5934 CASE_FLT_FN (BUILT_IN_LROUND):
5935 CASE_FLT_FN (BUILT_IN_LLROUND):
5936 target = expand_builtin_int_roundingfn_2 (exp, target);
5937 if (target)
5938 return target;
5939 break;
5941 CASE_FLT_FN (BUILT_IN_POWI):
5942 target = expand_builtin_powi (exp, target);
5943 if (target)
5944 return target;
5945 break;
5947 CASE_FLT_FN (BUILT_IN_ATAN2):
5948 CASE_FLT_FN (BUILT_IN_LDEXP):
5949 CASE_FLT_FN (BUILT_IN_SCALB):
5950 CASE_FLT_FN (BUILT_IN_SCALBN):
5951 CASE_FLT_FN (BUILT_IN_SCALBLN):
5952 if (! flag_unsafe_math_optimizations)
5953 break;
5955 CASE_FLT_FN (BUILT_IN_FMOD):
5956 CASE_FLT_FN (BUILT_IN_REMAINDER):
5957 CASE_FLT_FN (BUILT_IN_DREM):
5958 CASE_FLT_FN (BUILT_IN_POW):
5959 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5960 if (target)
5961 return target;
5962 break;
5964 CASE_FLT_FN (BUILT_IN_CEXPI):
5965 target = expand_builtin_cexpi (exp, target);
5966 gcc_assert (target);
5967 return target;
5969 CASE_FLT_FN (BUILT_IN_SIN):
5970 CASE_FLT_FN (BUILT_IN_COS):
5971 if (! flag_unsafe_math_optimizations)
5972 break;
5973 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5974 if (target)
5975 return target;
5976 break;
5978 CASE_FLT_FN (BUILT_IN_SINCOS):
5979 if (! flag_unsafe_math_optimizations)
5980 break;
5981 target = expand_builtin_sincos (exp);
5982 if (target)
5983 return target;
5984 break;
5986 case BUILT_IN_APPLY_ARGS:
5987 return expand_builtin_apply_args ();
5989 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5990 FUNCTION with a copy of the parameters described by
5991 ARGUMENTS, and ARGSIZE. It returns a block of memory
5992 allocated on the stack into which is stored all the registers
5993 that might possibly be used for returning the result of a
5994 function. ARGUMENTS is the value returned by
5995 __builtin_apply_args. ARGSIZE is the number of bytes of
5996 arguments that must be copied. ??? How should this value be
5997 computed? We'll also need a safe worst case value for varargs
5998 functions. */
5999 case BUILT_IN_APPLY:
6000 if (!validate_arglist (exp, POINTER_TYPE,
6001 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6002 && !validate_arglist (exp, REFERENCE_TYPE,
6003 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6004 return const0_rtx;
6005 else
6007 rtx ops[3];
6009 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6010 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6011 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6013 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6016 /* __builtin_return (RESULT) causes the function to return the
6017 value described by RESULT. RESULT is address of the block of
6018 memory returned by __builtin_apply. */
6019 case BUILT_IN_RETURN:
6020 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6021 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6022 return const0_rtx;
6024 case BUILT_IN_SAVEREGS:
6025 return expand_builtin_saveregs ();
6027 case BUILT_IN_VA_ARG_PACK:
6028 /* All valid uses of __builtin_va_arg_pack () are removed during
6029 inlining. */
6030 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6031 return const0_rtx;
6033 case BUILT_IN_VA_ARG_PACK_LEN:
6034 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6035 inlining. */
6036 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6037 return const0_rtx;
6039 /* Return the address of the first anonymous stack arg. */
6040 case BUILT_IN_NEXT_ARG:
6041 if (fold_builtin_next_arg (exp, false))
6042 return const0_rtx;
6043 return expand_builtin_next_arg ();
6045 case BUILT_IN_CLEAR_CACHE:
6046 target = expand_builtin___clear_cache (exp);
6047 if (target)
6048 return target;
6049 break;
6051 case BUILT_IN_CLASSIFY_TYPE:
6052 return expand_builtin_classify_type (exp);
6054 case BUILT_IN_CONSTANT_P:
6055 return const0_rtx;
6057 case BUILT_IN_FRAME_ADDRESS:
6058 case BUILT_IN_RETURN_ADDRESS:
6059 return expand_builtin_frame_address (fndecl, exp);
6061 /* Returns the address of the area where the structure is returned.
6062 0 otherwise. */
6063 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6064 if (call_expr_nargs (exp) != 0
6065 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6066 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6067 return const0_rtx;
6068 else
6069 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6071 case BUILT_IN_ALLOCA:
6072 case BUILT_IN_ALLOCA_WITH_ALIGN:
6073 /* If the allocation stems from the declaration of a variable-sized
6074 object, it cannot accumulate. */
6075 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6076 if (target)
6077 return target;
6078 break;
6080 case BUILT_IN_STACK_SAVE:
6081 return expand_stack_save ();
6083 case BUILT_IN_STACK_RESTORE:
6084 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6085 return const0_rtx;
6087 case BUILT_IN_BSWAP16:
6088 case BUILT_IN_BSWAP32:
6089 case BUILT_IN_BSWAP64:
6090 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_INT_FN (BUILT_IN_FFS):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, ffs_optab);
6098 if (target)
6099 return target;
6100 break;
6102 CASE_INT_FN (BUILT_IN_CLZ):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, clz_optab);
6105 if (target)
6106 return target;
6107 break;
6109 CASE_INT_FN (BUILT_IN_CTZ):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, ctz_optab);
6112 if (target)
6113 return target;
6114 break;
6116 CASE_INT_FN (BUILT_IN_CLRSB):
6117 target = expand_builtin_unop (target_mode, exp, target,
6118 subtarget, clrsb_optab);
6119 if (target)
6120 return target;
6121 break;
6123 CASE_INT_FN (BUILT_IN_POPCOUNT):
6124 target = expand_builtin_unop (target_mode, exp, target,
6125 subtarget, popcount_optab);
6126 if (target)
6127 return target;
6128 break;
6130 CASE_INT_FN (BUILT_IN_PARITY):
6131 target = expand_builtin_unop (target_mode, exp, target,
6132 subtarget, parity_optab);
6133 if (target)
6134 return target;
6135 break;
6137 case BUILT_IN_STRLEN:
6138 target = expand_builtin_strlen (exp, target, target_mode);
6139 if (target)
6140 return target;
6141 break;
6143 case BUILT_IN_STRCPY:
6144 target = expand_builtin_strcpy (exp, target);
6145 if (target)
6146 return target;
6147 break;
6149 case BUILT_IN_STRNCPY:
6150 target = expand_builtin_strncpy (exp, target);
6151 if (target)
6152 return target;
6153 break;
6155 case BUILT_IN_STPCPY:
6156 target = expand_builtin_stpcpy (exp, target, mode);
6157 if (target)
6158 return target;
6159 break;
6161 case BUILT_IN_MEMCPY:
6162 target = expand_builtin_memcpy (exp, target);
6163 if (target)
6164 return target;
6165 break;
6167 case BUILT_IN_MEMPCPY:
6168 target = expand_builtin_mempcpy (exp, target, mode);
6169 if (target)
6170 return target;
6171 break;
6173 case BUILT_IN_MEMSET:
6174 target = expand_builtin_memset (exp, target, mode);
6175 if (target)
6176 return target;
6177 break;
6179 case BUILT_IN_BZERO:
6180 target = expand_builtin_bzero (exp);
6181 if (target)
6182 return target;
6183 break;
6185 case BUILT_IN_STRCMP:
6186 target = expand_builtin_strcmp (exp, target);
6187 if (target)
6188 return target;
6189 break;
6191 case BUILT_IN_STRNCMP:
6192 target = expand_builtin_strncmp (exp, target, mode);
6193 if (target)
6194 return target;
6195 break;
6197 case BUILT_IN_BCMP:
6198 case BUILT_IN_MEMCMP:
6199 target = expand_builtin_memcmp (exp, target, mode);
6200 if (target)
6201 return target;
6202 break;
6204 case BUILT_IN_SETJMP:
6205 /* This should have been lowered to the builtins below. */
6206 gcc_unreachable ();
6208 case BUILT_IN_SETJMP_SETUP:
6209 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6210 and the receiver label. */
6211 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6213 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6214 VOIDmode, EXPAND_NORMAL);
6215 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6216 rtx label_r = label_rtx (label);
6218 /* This is copied from the handling of non-local gotos. */
6219 expand_builtin_setjmp_setup (buf_addr, label_r);
6220 nonlocal_goto_handler_labels
6221 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6222 nonlocal_goto_handler_labels);
6223 /* ??? Do not let expand_label treat us as such since we would
6224 not want to be both on the list of non-local labels and on
6225 the list of forced labels. */
6226 FORCED_LABEL (label) = 0;
6227 return const0_rtx;
6229 break;
6231 case BUILT_IN_SETJMP_RECEIVER:
6232 /* __builtin_setjmp_receiver is passed the receiver label. */
6233 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6235 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6236 rtx label_r = label_rtx (label);
6238 expand_builtin_setjmp_receiver (label_r);
6239 return const0_rtx;
6241 break;
6243 /* __builtin_longjmp is passed a pointer to an array of five words.
6244 It's similar to the C library longjmp function but works with
6245 __builtin_setjmp above. */
6246 case BUILT_IN_LONGJMP:
6247 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6249 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6250 VOIDmode, EXPAND_NORMAL);
6251 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6253 if (value != const1_rtx)
6255 error ("%<__builtin_longjmp%> second argument must be 1");
6256 return const0_rtx;
6259 expand_builtin_longjmp (buf_addr, value);
6260 return const0_rtx;
6262 break;
6264 case BUILT_IN_NONLOCAL_GOTO:
6265 target = expand_builtin_nonlocal_goto (exp);
6266 if (target)
6267 return target;
6268 break;
6270 /* This updates the setjmp buffer that is its argument with the value
6271 of the current stack pointer. */
6272 case BUILT_IN_UPDATE_SETJMP_BUF:
6273 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6275 rtx buf_addr
6276 = expand_normal (CALL_EXPR_ARG (exp, 0));
6278 expand_builtin_update_setjmp_buf (buf_addr);
6279 return const0_rtx;
6281 break;
6283 case BUILT_IN_TRAP:
6284 expand_builtin_trap ();
6285 return const0_rtx;
6287 case BUILT_IN_UNREACHABLE:
6288 expand_builtin_unreachable ();
6289 return const0_rtx;
6291 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6292 case BUILT_IN_SIGNBITD32:
6293 case BUILT_IN_SIGNBITD64:
6294 case BUILT_IN_SIGNBITD128:
6295 target = expand_builtin_signbit (exp, target);
6296 if (target)
6297 return target;
6298 break;
6300 /* Various hooks for the DWARF 2 __throw routine. */
6301 case BUILT_IN_UNWIND_INIT:
6302 expand_builtin_unwind_init ();
6303 return const0_rtx;
6304 case BUILT_IN_DWARF_CFA:
6305 return virtual_cfa_rtx;
6306 #ifdef DWARF2_UNWIND_INFO
6307 case BUILT_IN_DWARF_SP_COLUMN:
6308 return expand_builtin_dwarf_sp_column ();
6309 case BUILT_IN_INIT_DWARF_REG_SIZES:
6310 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6311 return const0_rtx;
6312 #endif
6313 case BUILT_IN_FROB_RETURN_ADDR:
6314 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6315 case BUILT_IN_EXTRACT_RETURN_ADDR:
6316 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6317 case BUILT_IN_EH_RETURN:
6318 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6319 CALL_EXPR_ARG (exp, 1));
6320 return const0_rtx;
6321 #ifdef EH_RETURN_DATA_REGNO
6322 case BUILT_IN_EH_RETURN_DATA_REGNO:
6323 return expand_builtin_eh_return_data_regno (exp);
6324 #endif
6325 case BUILT_IN_EXTEND_POINTER:
6326 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6327 case BUILT_IN_EH_POINTER:
6328 return expand_builtin_eh_pointer (exp);
6329 case BUILT_IN_EH_FILTER:
6330 return expand_builtin_eh_filter (exp);
6331 case BUILT_IN_EH_COPY_VALUES:
6332 return expand_builtin_eh_copy_values (exp);
6334 case BUILT_IN_VA_START:
6335 return expand_builtin_va_start (exp);
6336 case BUILT_IN_VA_END:
6337 return expand_builtin_va_end (exp);
6338 case BUILT_IN_VA_COPY:
6339 return expand_builtin_va_copy (exp);
6340 case BUILT_IN_EXPECT:
6341 return expand_builtin_expect (exp, target);
6342 case BUILT_IN_ASSUME_ALIGNED:
6343 return expand_builtin_assume_aligned (exp, target);
6344 case BUILT_IN_PREFETCH:
6345 expand_builtin_prefetch (exp);
6346 return const0_rtx;
6348 case BUILT_IN_INIT_TRAMPOLINE:
6349 return expand_builtin_init_trampoline (exp, true);
6350 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6351 return expand_builtin_init_trampoline (exp, false);
6352 case BUILT_IN_ADJUST_TRAMPOLINE:
6353 return expand_builtin_adjust_trampoline (exp);
6355 case BUILT_IN_FORK:
6356 case BUILT_IN_EXECL:
6357 case BUILT_IN_EXECV:
6358 case BUILT_IN_EXECLP:
6359 case BUILT_IN_EXECLE:
6360 case BUILT_IN_EXECVP:
6361 case BUILT_IN_EXECVE:
6362 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6368 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6369 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6370 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6371 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6373 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6374 if (target)
6375 return target;
6376 break;
6378 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6379 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6380 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6381 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6382 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6383 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6384 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6385 if (target)
6386 return target;
6387 break;
6389 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6390 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6391 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6392 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6393 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6395 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6401 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6402 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6403 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6404 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6406 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6407 if (target)
6408 return target;
6409 break;
6411 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6412 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6413 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6414 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6415 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6417 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6423 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6424 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6425 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6426 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6428 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6429 if (target)
6430 return target;
6431 break;
6433 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6434 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6435 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6436 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6437 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6439 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6440 if (target)
6441 return target;
6442 break;
6444 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6445 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6446 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6447 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6448 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6450 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6456 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6457 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6458 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6459 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6461 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6467 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6468 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6469 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6470 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6472 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6473 if (target)
6474 return target;
6475 break;
6477 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6478 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6479 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6480 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6481 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6483 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6490 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6494 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6501 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6503 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6504 if (mode == VOIDmode)
6505 mode = TYPE_MODE (boolean_type_node);
6506 if (!target || !register_operand (target, mode))
6507 target = gen_reg_rtx (mode);
6509 mode = get_builtin_sync_mode
6510 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6511 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6512 if (target)
6513 return target;
6514 break;
6516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6518 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6520 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6521 mode = get_builtin_sync_mode
6522 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6523 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6530 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6532 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6533 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6534 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6535 if (target)
6536 return target;
6537 break;
6539 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6540 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6541 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6542 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6543 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6545 expand_builtin_sync_lock_release (mode, exp);
6546 return const0_rtx;
6548 case BUILT_IN_SYNC_SYNCHRONIZE:
6549 expand_builtin_sync_synchronize ();
6550 return const0_rtx;
6552 case BUILT_IN_ATOMIC_EXCHANGE_1:
6553 case BUILT_IN_ATOMIC_EXCHANGE_2:
6554 case BUILT_IN_ATOMIC_EXCHANGE_4:
6555 case BUILT_IN_ATOMIC_EXCHANGE_8:
6556 case BUILT_IN_ATOMIC_EXCHANGE_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6558 target = expand_builtin_atomic_exchange (mode, exp, target);
6559 if (target)
6560 return target;
6561 break;
6563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6567 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6569 unsigned int nargs, z;
6570 vec<tree, va_gc> *vec;
6572 mode =
6573 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6574 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6575 if (target)
6576 return target;
6578 /* If this is turned into an external library call, the weak parameter
6579 must be dropped to match the expected parameter list. */
6580 nargs = call_expr_nargs (exp);
6581 vec_alloc (vec, nargs - 1);
6582 for (z = 0; z < 3; z++)
6583 vec->quick_push (CALL_EXPR_ARG (exp, z));
6584 /* Skip the boolean weak parameter. */
6585 for (z = 4; z < 6; z++)
6586 vec->quick_push (CALL_EXPR_ARG (exp, z));
6587 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6588 break;
6591 case BUILT_IN_ATOMIC_LOAD_1:
6592 case BUILT_IN_ATOMIC_LOAD_2:
6593 case BUILT_IN_ATOMIC_LOAD_4:
6594 case BUILT_IN_ATOMIC_LOAD_8:
6595 case BUILT_IN_ATOMIC_LOAD_16:
6596 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6597 target = expand_builtin_atomic_load (mode, exp, target);
6598 if (target)
6599 return target;
6600 break;
6602 case BUILT_IN_ATOMIC_STORE_1:
6603 case BUILT_IN_ATOMIC_STORE_2:
6604 case BUILT_IN_ATOMIC_STORE_4:
6605 case BUILT_IN_ATOMIC_STORE_8:
6606 case BUILT_IN_ATOMIC_STORE_16:
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6608 target = expand_builtin_atomic_store (mode, exp);
6609 if (target)
6610 return const0_rtx;
6611 break;
6613 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6614 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6615 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6616 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6617 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6619 enum built_in_function lib;
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6621 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6622 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6624 ignore, lib);
6625 if (target)
6626 return target;
6627 break;
6629 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6630 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6631 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6632 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6633 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6635 enum built_in_function lib;
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6637 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6638 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6639 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6640 ignore, lib);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_ATOMIC_AND_FETCH_1:
6646 case BUILT_IN_ATOMIC_AND_FETCH_2:
6647 case BUILT_IN_ATOMIC_AND_FETCH_4:
6648 case BUILT_IN_ATOMIC_AND_FETCH_8:
6649 case BUILT_IN_ATOMIC_AND_FETCH_16:
6651 enum built_in_function lib;
6652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6653 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6654 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6655 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6656 ignore, lib);
6657 if (target)
6658 return target;
6659 break;
6661 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6662 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6663 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6664 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6665 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6667 enum built_in_function lib;
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6669 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6670 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6671 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6672 ignore, lib);
6673 if (target)
6674 return target;
6675 break;
6677 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6678 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6679 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6680 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6681 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6683 enum built_in_function lib;
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6685 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6686 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6687 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6688 ignore, lib);
6689 if (target)
6690 return target;
6691 break;
6693 case BUILT_IN_ATOMIC_OR_FETCH_1:
6694 case BUILT_IN_ATOMIC_OR_FETCH_2:
6695 case BUILT_IN_ATOMIC_OR_FETCH_4:
6696 case BUILT_IN_ATOMIC_OR_FETCH_8:
6697 case BUILT_IN_ATOMIC_OR_FETCH_16:
6699 enum built_in_function lib;
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6701 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6702 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6703 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6704 ignore, lib);
6705 if (target)
6706 return target;
6707 break;
6709 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6710 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6711 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6712 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6713 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6716 ignore, BUILT_IN_NONE);
6717 if (target)
6718 return target;
6719 break;
6721 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6722 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6723 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6724 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6725 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6728 ignore, BUILT_IN_NONE);
6729 if (target)
6730 return target;
6731 break;
6733 case BUILT_IN_ATOMIC_FETCH_AND_1:
6734 case BUILT_IN_ATOMIC_FETCH_AND_2:
6735 case BUILT_IN_ATOMIC_FETCH_AND_4:
6736 case BUILT_IN_ATOMIC_FETCH_AND_8:
6737 case BUILT_IN_ATOMIC_FETCH_AND_16:
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6739 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6740 ignore, BUILT_IN_NONE);
6741 if (target)
6742 return target;
6743 break;
6745 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6746 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6747 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6748 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6749 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6751 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6752 ignore, BUILT_IN_NONE);
6753 if (target)
6754 return target;
6755 break;
6757 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6758 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6759 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6760 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6761 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6764 ignore, BUILT_IN_NONE);
6765 if (target)
6766 return target;
6767 break;
6769 case BUILT_IN_ATOMIC_FETCH_OR_1:
6770 case BUILT_IN_ATOMIC_FETCH_OR_2:
6771 case BUILT_IN_ATOMIC_FETCH_OR_4:
6772 case BUILT_IN_ATOMIC_FETCH_OR_8:
6773 case BUILT_IN_ATOMIC_FETCH_OR_16:
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6776 ignore, BUILT_IN_NONE);
6777 if (target)
6778 return target;
6779 break;
6781 case BUILT_IN_ATOMIC_TEST_AND_SET:
6782 return expand_builtin_atomic_test_and_set (exp, target);
6784 case BUILT_IN_ATOMIC_CLEAR:
6785 return expand_builtin_atomic_clear (exp);
6787 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6788 return expand_builtin_atomic_always_lock_free (exp);
6790 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6791 target = expand_builtin_atomic_is_lock_free (exp);
6792 if (target)
6793 return target;
6794 break;
6796 case BUILT_IN_ATOMIC_THREAD_FENCE:
6797 expand_builtin_atomic_thread_fence (exp);
6798 return const0_rtx;
6800 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6801 expand_builtin_atomic_signal_fence (exp);
6802 return const0_rtx;
6804 case BUILT_IN_OBJECT_SIZE:
6805 return expand_builtin_object_size (exp);
6807 case BUILT_IN_MEMCPY_CHK:
6808 case BUILT_IN_MEMPCPY_CHK:
6809 case BUILT_IN_MEMMOVE_CHK:
6810 case BUILT_IN_MEMSET_CHK:
6811 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_STRCPY_CHK:
6817 case BUILT_IN_STPCPY_CHK:
6818 case BUILT_IN_STRNCPY_CHK:
6819 case BUILT_IN_STPNCPY_CHK:
6820 case BUILT_IN_STRCAT_CHK:
6821 case BUILT_IN_STRNCAT_CHK:
6822 case BUILT_IN_SNPRINTF_CHK:
6823 case BUILT_IN_VSNPRINTF_CHK:
6824 maybe_emit_chk_warning (exp, fcode);
6825 break;
6827 case BUILT_IN_SPRINTF_CHK:
6828 case BUILT_IN_VSPRINTF_CHK:
6829 maybe_emit_sprintf_chk_warning (exp, fcode);
6830 break;
6832 case BUILT_IN_FREE:
6833 if (warn_free_nonheap_object)
6834 maybe_emit_free_warning (exp);
6835 break;
6837 case BUILT_IN_THREAD_POINTER:
6838 return expand_builtin_thread_pointer (exp, target);
6840 case BUILT_IN_SET_THREAD_POINTER:
6841 expand_builtin_set_thread_pointer (exp);
6842 return const0_rtx;
6844 case BUILT_IN_CILK_DETACH:
6845 expand_builtin_cilk_detach (exp);
6846 return const0_rtx;
6848 case BUILT_IN_CILK_POP_FRAME:
6849 expand_builtin_cilk_pop_frame (exp);
6850 return const0_rtx;
6852 default: /* just do library call, if unknown builtin */
6853 break;
6856 /* The switch statement above can drop through to cause the function
6857 to be called normally. */
6858 return expand_call (exp, target, ignore);
6861 /* Determine whether a tree node represents a call to a built-in
6862 function. If the tree T is a call to a built-in function with
6863 the right number of arguments of the appropriate types, return
6864 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6865 Otherwise the return value is END_BUILTINS. */
6867 enum built_in_function
6868 builtin_mathfn_code (const_tree t)
6870 const_tree fndecl, arg, parmlist;
6871 const_tree argtype, parmtype;
6872 const_call_expr_arg_iterator iter;
6874 if (TREE_CODE (t) != CALL_EXPR
6875 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6876 return END_BUILTINS;
6878 fndecl = get_callee_fndecl (t);
6879 if (fndecl == NULL_TREE
6880 || TREE_CODE (fndecl) != FUNCTION_DECL
6881 || ! DECL_BUILT_IN (fndecl)
6882 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6883 return END_BUILTINS;
6885 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6886 init_const_call_expr_arg_iterator (t, &iter);
6887 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6889 /* If a function doesn't take a variable number of arguments,
6890 the last element in the list will have type `void'. */
6891 parmtype = TREE_VALUE (parmlist);
6892 if (VOID_TYPE_P (parmtype))
6894 if (more_const_call_expr_args_p (&iter))
6895 return END_BUILTINS;
6896 return DECL_FUNCTION_CODE (fndecl);
6899 if (! more_const_call_expr_args_p (&iter))
6900 return END_BUILTINS;
6902 arg = next_const_call_expr_arg (&iter);
6903 argtype = TREE_TYPE (arg);
6905 if (SCALAR_FLOAT_TYPE_P (parmtype))
6907 if (! SCALAR_FLOAT_TYPE_P (argtype))
6908 return END_BUILTINS;
6910 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6912 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6913 return END_BUILTINS;
6915 else if (POINTER_TYPE_P (parmtype))
6917 if (! POINTER_TYPE_P (argtype))
6918 return END_BUILTINS;
6920 else if (INTEGRAL_TYPE_P (parmtype))
6922 if (! INTEGRAL_TYPE_P (argtype))
6923 return END_BUILTINS;
6925 else
6926 return END_BUILTINS;
6929 /* Variable-length argument list. */
6930 return DECL_FUNCTION_CODE (fndecl);
6933 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6934 evaluate to a constant. */
6936 static tree
6937 fold_builtin_constant_p (tree arg)
6939 /* We return 1 for a numeric type that's known to be a constant
6940 value at compile-time or for an aggregate type that's a
6941 literal constant. */
6942 STRIP_NOPS (arg);
6944 /* If we know this is a constant, emit the constant of one. */
6945 if (CONSTANT_CLASS_P (arg)
6946 || (TREE_CODE (arg) == CONSTRUCTOR
6947 && TREE_CONSTANT (arg)))
6948 return integer_one_node;
6949 if (TREE_CODE (arg) == ADDR_EXPR)
6951 tree op = TREE_OPERAND (arg, 0);
6952 if (TREE_CODE (op) == STRING_CST
6953 || (TREE_CODE (op) == ARRAY_REF
6954 && integer_zerop (TREE_OPERAND (op, 1))
6955 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6956 return integer_one_node;
6959 /* If this expression has side effects, show we don't know it to be a
6960 constant. Likewise if it's a pointer or aggregate type since in
6961 those case we only want literals, since those are only optimized
6962 when generating RTL, not later.
6963 And finally, if we are compiling an initializer, not code, we
6964 need to return a definite result now; there's not going to be any
6965 more optimization done. */
6966 if (TREE_SIDE_EFFECTS (arg)
6967 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6968 || POINTER_TYPE_P (TREE_TYPE (arg))
6969 || cfun == 0
6970 || folding_initializer
6971 || force_folding_builtin_constant_p)
6972 return integer_zero_node;
6974 return NULL_TREE;
6977 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6978 return it as a truthvalue. */
6980 static tree
6981 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6982 tree predictor)
6984 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6986 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6987 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6988 ret_type = TREE_TYPE (TREE_TYPE (fn));
6989 pred_type = TREE_VALUE (arg_types);
6990 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6992 pred = fold_convert_loc (loc, pred_type, pred);
6993 expected = fold_convert_loc (loc, expected_type, expected);
6994 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6995 predictor);
6997 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6998 build_int_cst (ret_type, 0));
7001 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7002 NULL_TREE if no simplification is possible. */
/* NOTE(review): ARG2, when non-NULL, is the optional "predictor"
   argument forwarded to build_builtin_expect_predicate.  */
7004 tree
7005 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7007 tree inner, fndecl, inner_arg0;
7008 enum tree_code code;
7010 /* Distribute the expected value over short-circuiting operators.
7011 See through the cast from truthvalue_type_node to long. */
7012 inner_arg0 = arg0;
7013 while (TREE_CODE (inner_arg0) == NOP_EXPR
7014 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7015 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7016 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7018 /* If this is a builtin_expect within a builtin_expect keep the
7019 inner one. See through a comparison against a constant. It
7020 might have been added to create a thruthvalue. */
7021 inner = inner_arg0;
7023 if (COMPARISON_CLASS_P (inner)
7024 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7025 inner = TREE_OPERAND (inner, 0);
7027 if (TREE_CODE (inner) == CALL_EXPR
7028 && (fndecl = get_callee_fndecl (inner))
7029 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7030 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7031 return arg0;
/* For a && / || condition, push the expectation down onto each
   operand and rebuild the short-circuit expression.  */
7033 inner = inner_arg0;
7034 code = TREE_CODE (inner);
7035 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7037 tree op0 = TREE_OPERAND (inner, 0);
7038 tree op1 = TREE_OPERAND (inner, 1);
7040 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7041 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7042 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7044 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7047 /* If the argument isn't invariant then there's nothing else we can do. */
7048 if (!TREE_CONSTANT (inner_arg0))
7049 return NULL_TREE;
7051 /* If we expect that a comparison against the argument will fold to
7052 a constant return the constant. In practice, this means a true
7053 constant or the address of a non-weak symbol. */
7054 inner = inner_arg0;
7055 STRIP_NOPS (inner);
7056 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF wrappers down to the base decl; the
   enclosing do/while loop header was lost in this extraction.  */
7060 inner = TREE_OPERAND (inner, 0);
7062 while (TREE_CODE (inner) == COMPONENT_REF
7063 || TREE_CODE (inner) == ARRAY_REF);
/* The address of a weak symbol is not a compile-time constant.  */
7064 if ((TREE_CODE (inner) == VAR_DECL
7065 || TREE_CODE (inner) == FUNCTION_DECL)
7066 && DECL_WEAK (inner))
7067 return NULL_TREE;
7070 /* Otherwise, ARG0 already has the proper type for the return value. */
7071 return arg0;
7074 /* Fold a call to __builtin_classify_type with argument ARG. */
7076 static tree
7077 fold_builtin_classify_type (tree arg)
7079 if (arg == 0)
7080 return build_int_cst (integer_type_node, no_type_class);
7082 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7085 /* Fold a call to __builtin_strlen with argument ARG. */
7087 static tree
7088 fold_builtin_strlen (location_t loc, tree type, tree arg)
7090 if (!validate_arg (arg, POINTER_TYPE))
7091 return NULL_TREE;
7092 else
7094 tree len = c_strlen (arg, 0);
7096 if (len)
7097 return fold_convert_loc (loc, type, len);
7099 return NULL_TREE;
7103 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7105 static tree
7106 fold_builtin_inf (location_t loc, tree type, int warn)
7108 REAL_VALUE_TYPE real;
7110 /* __builtin_inff is intended to be usable to define INFINITY on all
7111 targets. If an infinity is not available, INFINITY expands "to a
7112 positive constant of type float that overflows at translation
7113 time", footnote "In this case, using INFINITY will violate the
7114 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7115 Thus we pedwarn to ensure this constraint violation is
7116 diagnosed. */
7117 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7118 pedwarn (loc, 0, "target format does not support infinity");
7120 real_inf (&real);
7121 return build_real (type, real);
7124 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7126 static tree
7127 fold_builtin_nan (tree arg, tree type, int quiet)
7129 REAL_VALUE_TYPE real;
7130 const char *str;
7132 if (!validate_arg (arg, POINTER_TYPE))
7133 return NULL_TREE;
7134 str = c_getstr (arg);
7135 if (!str)
7136 return NULL_TREE;
7138 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7139 return NULL_TREE;
7141 return build_real (type, real);
7144 /* Return true if the floating point expression T has an integer value.
7145 We also allow +Inf, -Inf and NaN to be considered integer values. */
7147 static bool
7148 integer_valued_real_p (tree t)
/* Dispatch on the tree code; each arm answers whether the expression
   can be proven to hold an integral real value.  */
7150 switch (TREE_CODE (t))
7152 case FLOAT_EXPR:
/* An integer-to-float conversion is integral by construction.  */
7153 return true;
7155 case ABS_EXPR:
7156 case SAVE_EXPR:
7157 return integer_valued_real_p (TREE_OPERAND (t, 0));
7159 case COMPOUND_EXPR:
7160 case MODIFY_EXPR:
7161 case BIND_EXPR:
/* Only the value operand (operand 1) matters for these forms.  */
7162 return integer_valued_real_p (TREE_OPERAND (t, 1));
7164 case PLUS_EXPR:
7165 case MINUS_EXPR:
7166 case MULT_EXPR:
7167 case MIN_EXPR:
7168 case MAX_EXPR:
/* Integers are closed under these operations.  */
7169 return integer_valued_real_p (TREE_OPERAND (t, 0))
7170 && integer_valued_real_p (TREE_OPERAND (t, 1));
7172 case COND_EXPR:
/* Both selectable values must be integral.  */
7173 return integer_valued_real_p (TREE_OPERAND (t, 1))
7174 && integer_valued_real_p (TREE_OPERAND (t, 2));
7176 case REAL_CST:
7177 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7179 case NOP_EXPR:
7181 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7182 if (TREE_CODE (type) == INTEGER_TYPE)
7183 return true;
7184 if (TREE_CODE (type) == REAL_TYPE)
7185 return integer_valued_real_p (TREE_OPERAND (t, 0));
7186 break;
7189 case CALL_EXPR:
7190 switch (builtin_mathfn_code (t))
/* The integer-rounding math builtins always yield integral values.  */
7192 CASE_FLT_FN (BUILT_IN_CEIL):
7193 CASE_FLT_FN (BUILT_IN_FLOOR):
7194 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7195 CASE_FLT_FN (BUILT_IN_RINT):
7196 CASE_FLT_FN (BUILT_IN_ROUND):
7197 CASE_FLT_FN (BUILT_IN_TRUNC):
7198 return true;
7200 CASE_FLT_FN (BUILT_IN_FMIN):
7201 CASE_FLT_FN (BUILT_IN_FMAX):
7202 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7203 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7205 default:
7206 break;
7208 break;
7210 default:
7211 break;
/* Anything unrecognized is conservatively "not known integral".  */
7213 return false;
7216 /* FNDECL is assumed to be a builtin where truncation can be propagated
7217 across (for instance floor((double)f) == (double)floorf (f).
7218 Do the transformation for a call with argument ARG. */
7220 static tree
7221 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7223 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7225 if (!validate_arg (arg, REAL_TYPE))
7226 return NULL_TREE;
7228 /* Integer rounding functions are idempotent. */
7229 if (fcode == builtin_mathfn_code (arg))
7230 return arg;
7232 /* If argument is already integer valued, and we don't need to worry
7233 about setting errno, there's no need to perform rounding. */
7234 if (! flag_errno_math && integer_valued_real_p (arg))
7235 return arg;
7237 if (optimize)
7239 tree arg0 = strip_float_extensions (arg);
7240 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7241 tree newtype = TREE_TYPE (arg0);
7242 tree decl;
7244 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7245 && (decl = mathfn_built_in (newtype, fcode)))
7246 return fold_convert_loc (loc, ftype,
7247 build_call_expr_loc (loc, decl, 1,
7248 fold_convert_loc (loc,
7249 newtype,
7250 arg0)));
7252 return NULL_TREE;
7255 /* FNDECL is assumed to be builtin which can narrow the FP type of
7256 the argument, for instance lround((double)f) -> lroundf (f).
7257 Do the transformation for a call with argument ARG. */
7259 static tree
7260 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7262 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7264 if (!validate_arg (arg, REAL_TYPE))
7265 return NULL_TREE;
7267 /* If argument is already integer valued, and we don't need to worry
7268 about setting errno, there's no need to perform rounding. */
7269 if (! flag_errno_math && integer_valued_real_p (arg))
7270 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7271 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7273 if (optimize)
/* Narrow the operation when the argument was widened from a smaller
   float type and a matching narrower builtin exists.  */
7275 tree ftype = TREE_TYPE (arg);
7276 tree arg0 = strip_float_extensions (arg);
7277 tree newtype = TREE_TYPE (arg0);
7278 tree decl;
7280 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7281 && (decl = mathfn_built_in (newtype, fcode)))
7282 return build_call_expr_loc (loc, decl, 1,
7283 fold_convert_loc (loc, newtype, arg0));
7286 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7287 sizeof (int) == sizeof (long). */
7288 if (TYPE_PRECISION (integer_type_node)
7289 == TYPE_PRECISION (long_integer_type_node))
7291 tree newfn = NULL_TREE;
7292 switch (fcode)
7294 CASE_FLT_FN (BUILT_IN_ICEIL):
7295 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7296 break;
7298 CASE_FLT_FN (BUILT_IN_IFLOOR):
7299 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7300 break;
7302 CASE_FLT_FN (BUILT_IN_IROUND):
7303 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7304 break;
7306 CASE_FLT_FN (BUILT_IN_IRINT):
7307 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7308 break;
7310 default:
7311 break;
/* Wrap the long-returning call in a conversion back to the original
   (int) result type.  */
7314 if (newfn)
7316 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7317 return fold_convert_loc (loc,
7318 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7322 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7323 sizeof (long long) == sizeof (long). */
7324 if (TYPE_PRECISION (long_long_integer_type_node)
7325 == TYPE_PRECISION (long_integer_type_node))
7327 tree newfn = NULL_TREE;
7328 switch (fcode)
7330 CASE_FLT_FN (BUILT_IN_LLCEIL):
7331 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7332 break;
7334 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7335 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7336 break;
7338 CASE_FLT_FN (BUILT_IN_LLROUND):
7339 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7340 break;
7342 CASE_FLT_FN (BUILT_IN_LLRINT):
7343 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7344 break;
7346 default:
7347 break;
7350 if (newfn)
7352 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7353 return fold_convert_loc (loc,
7354 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7358 return NULL_TREE;
7361 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7362 return type. Return NULL_TREE if no simplification can be made. */
7364 static tree
7365 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7367 tree res;
7369 if (!validate_arg (arg, COMPLEX_TYPE)
7370 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7371 return NULL_TREE;
7373 /* Calculate the result when the argument is a constant. */
/* cabs(a+bi) == hypot(a,b), so evaluate with mpfr_hypot.  */
7374 if (TREE_CODE (arg) == COMPLEX_CST
7375 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7376 type, mpfr_hypot)))
7377 return res;
7379 if (TREE_CODE (arg) == COMPLEX_EXPR)
7381 tree real = TREE_OPERAND (arg, 0);
7382 tree imag = TREE_OPERAND (arg, 1);
7384 /* If either part is zero, cabs is fabs of the other. */
7385 if (real_zerop (real))
7386 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7387 if (real_zerop (imag))
7388 return fold_build1_loc (loc, ABS_EXPR, type, real);
7390 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7391 if (flag_unsafe_math_optimizations
7392 && operand_equal_p (real, imag, OEP_PURE_SAME))
7394 const REAL_VALUE_TYPE sqrt2_trunc
7395 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7396 STRIP_NOPS (real);
7397 return fold_build2_loc (loc, MULT_EXPR, type,
7398 fold_build1_loc (loc, ABS_EXPR, type, real),
7399 build_real (type, sqrt2_trunc));
7403 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7404 if (TREE_CODE (arg) == NEGATE_EXPR
7405 || TREE_CODE (arg) == CONJ_EXPR)
7406 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7408 /* Don't do this when optimizing for size. */
/* Expand cabs(z) to sqrt(re*re + im*im) under unsafe-math, but only
   when optimizing for speed; the expansion is larger than the call.  */
7409 if (flag_unsafe_math_optimizations
7410 && optimize && optimize_function_for_speed_p (cfun))
7412 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7414 if (sqrtfn != NULL_TREE)
7416 tree rpart, ipart, result;
/* save_expr the argument and both parts so each is evaluated once.  */
7418 arg = builtin_save_expr (arg);
7420 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7421 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7423 rpart = builtin_save_expr (rpart);
7424 ipart = builtin_save_expr (ipart);
7426 result = fold_build2_loc (loc, PLUS_EXPR, type,
7427 fold_build2_loc (loc, MULT_EXPR, type,
7428 rpart, rpart),
7429 fold_build2_loc (loc, MULT_EXPR, type,
7430 ipart, ipart));
7432 return build_call_expr_loc (loc, sqrtfn, 1, result);
7436 return NULL_TREE;
7439 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7440 complex tree type of the result. If NEG is true, the imaginary
7441 zero is negative. */
7443 static tree
7444 build_complex_cproj (tree type, bool neg)
7446 REAL_VALUE_TYPE rinf, rzero = dconst0;
7448 real_inf (&rinf);
7449 rzero.sign = neg;
7450 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7451 build_real (TREE_TYPE (type), rzero));
7454 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7455 return type. Return NULL_TREE if no simplification can be made. */
7457 static tree
7458 fold_builtin_cproj (location_t loc, tree arg, tree type)
7460 if (!validate_arg (arg, COMPLEX_TYPE)
7461 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7462 return NULL_TREE;
7464 /* If there are no infinities, return arg. */
/* cproj is the identity on formats without infinities.  */
7465 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7466 return non_lvalue_loc (loc, arg);
7468 /* Calculate the result when the argument is a constant. */
7469 if (TREE_CODE (arg) == COMPLEX_CST)
7471 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7472 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* Any infinite part projects to (inf, copysign(0, imag)).  */
7474 if (real_isinf (real) || real_isinf (imag))
7475 return build_complex_cproj (type, imag->sign)
7476 else
7477 return arg;
7479 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7481 tree real = TREE_OPERAND (arg, 0);
7482 tree imag = TREE_OPERAND (arg, 1);
7484 STRIP_NOPS (real);
7485 STRIP_NOPS (imag);
7487 /* If the real part is inf and the imag part is known to be
7488 nonnegative, return (inf + 0i). Remember side-effects are
7489 possible in the imag part. */
7490 if (TREE_CODE (real) == REAL_CST
7491 && real_isinf (TREE_REAL_CST_PTR (real))
7492 && tree_expr_nonnegative_p (imag))
7493 return omit_one_operand_loc (loc, type,
7494 build_complex_cproj (type, false),
7495 arg);
7497 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7498 Remember side-effects are possible in the real part. */
7499 if (TREE_CODE (imag) == REAL_CST
7500 && real_isinf (TREE_REAL_CST_PTR (imag)))
7501 return
7502 omit_one_operand_loc (loc, type,
7503 build_complex_cproj (type, TREE_REAL_CST_PTR
7504 (imag)->sign), arg);
7507 return NULL_TREE;
7510 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7511 Return NULL_TREE if no simplification can be made. */
7513 static tree
7514 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7517 enum built_in_function fcode;
7518 tree res;
7520 if (!validate_arg (arg, REAL_TYPE))
7521 return NULL_TREE;
7523 /* Calculate the result when the argument is a constant. */
/* The &dconst0 lower bound makes MPFR reject negative arguments.  */
7524 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7525 return res;
7527 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7528 fcode = builtin_mathfn_code (arg);
7529 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7531 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7532 arg = fold_build2_loc (loc, MULT_EXPR, type,
7533 CALL_EXPR_ARG (arg, 0),
7534 build_real (type, dconsthalf));
7535 return build_call_expr_loc (loc, expfn, 1, arg);
7538 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7539 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7541 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7543 if (powfn)
7545 tree arg0 = CALL_EXPR_ARG (arg, 0);
7546 tree tree_root;
7547 /* The inner root was either sqrt or cbrt. */
7548 /* This was a conditional expression but it triggered a bug
7549 in Sun C 5.5. */
7550 REAL_VALUE_TYPE dconstroot;
7551 if (BUILTIN_SQRT_P (fcode))
7552 dconstroot = dconsthalf;
7553 else
7554 dconstroot = dconst_third ();
7556 /* Adjust for the outer root. */
/* Decrementing the binary exponent halves the value: 1/N -> 1/(2N).  */
7557 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7558 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7559 tree_root = build_real (type, dconstroot);
7560 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7564 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7565 if (flag_unsafe_math_optimizations
7566 && (fcode == BUILT_IN_POW
7567 || fcode == BUILT_IN_POWF
7568 || fcode == BUILT_IN_POWL))
7570 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7571 tree arg0 = CALL_EXPR_ARG (arg, 0);
7572 tree arg1 = CALL_EXPR_ARG (arg, 1);
7573 tree narg1;
/* Force the base nonnegative so the transformed pow is defined.  */
7574 if (!tree_expr_nonnegative_p (arg0))
7575 arg0 = build1 (ABS_EXPR, type, arg0);
7576 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7577 build_real (type, dconsthalf));
7578 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7581 return NULL_TREE;
7584 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7585 Return NULL_TREE if no simplification can be made. */
7587 static tree
7588 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7590 const enum built_in_function fcode = builtin_mathfn_code (arg);
7591 tree res;
7593 if (!validate_arg (arg, REAL_TYPE))
7594 return NULL_TREE;
7596 /* Calculate the result when the argument is a constant. */
7597 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7598 return res;
/* The remaining rewrites all change rounding/overflow behavior, so
   they are gated on -funsafe-math-optimizations.  */
7600 if (flag_unsafe_math_optimizations)
7602 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7603 if (BUILTIN_EXPONENT_P (fcode))
7605 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7606 const REAL_VALUE_TYPE third_trunc =
7607 real_value_truncate (TYPE_MODE (type), dconst_third ());
7608 arg = fold_build2_loc (loc, MULT_EXPR, type,
7609 CALL_EXPR_ARG (arg, 0),
7610 build_real (type, third_trunc));
7611 return build_call_expr_loc (loc, expfn, 1, arg);
7614 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7615 if (BUILTIN_SQRT_P (fcode))
7617 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7619 if (powfn)
7621 tree arg0 = CALL_EXPR_ARG (arg, 0);
7622 tree tree_root;
7623 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 to get 1/6 by decrementing the binary exponent.  */
7625 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7626 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7627 tree_root = build_real (type, dconstroot);
7628 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7632 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7633 if (BUILTIN_CBRT_P (fcode))
7635 tree arg0 = CALL_EXPR_ARG (arg, 0);
7636 if (tree_expr_nonnegative_p (arg0))
7638 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7640 if (powfn)
7642 tree tree_root;
7643 REAL_VALUE_TYPE dconstroot;
/* 1/9 == (1/3) * (1/3), computed in extended precision then
   truncated to the operation's mode.  */
7645 real_arithmetic (&dconstroot, MULT_EXPR,
7646 dconst_third_ptr (), dconst_third_ptr ());
7647 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7648 tree_root = build_real (type, dconstroot);
7649 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7654 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7655 if (fcode == BUILT_IN_POW
7656 || fcode == BUILT_IN_POWF
7657 || fcode == BUILT_IN_POWL)
7659 tree arg00 = CALL_EXPR_ARG (arg, 0);
7660 tree arg01 = CALL_EXPR_ARG (arg, 1);
7661 if (tree_expr_nonnegative_p (arg00))
7663 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7664 const REAL_VALUE_TYPE dconstroot
7665 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7666 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7667 build_real (type, dconstroot));
7668 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7672 return NULL_TREE;
7675 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7676 TYPE is the type of the return value. Return NULL_TREE if no
7677 simplification can be made. */
7679 static tree
7680 fold_builtin_cos (location_t loc,
7681 tree arg, tree type, tree fndecl)
7683 tree res, narg;
7685 if (!validate_arg (arg, REAL_TYPE))
7686 return NULL_TREE;
7688 /* Calculate the result when the argument is a constant. */
7689 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7690 return res;
7692 /* Optimize cos(-x) into cos (x). */
7693 if ((narg = fold_strip_sign_ops (arg)))
7694 return build_call_expr_loc (loc, fndecl, 1, narg);
7696 return NULL_TREE;
7699 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7700 Return NULL_TREE if no simplification can be made. */
7702 static tree
7703 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7705 if (validate_arg (arg, REAL_TYPE))
7707 tree res, narg;
7709 /* Calculate the result when the argument is a constant. */
7710 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7711 return res;
7713 /* Optimize cosh(-x) into cosh (x). */
7714 if ((narg = fold_strip_sign_ops (arg)))
7715 return build_call_expr_loc (loc, fndecl, 1, narg);
7718 return NULL_TREE;
7721 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7722 argument ARG. TYPE is the type of the return value. Return
7723 NULL_TREE if no simplification can be made. */
7725 static tree
7726 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7727 bool hyper)
7729 if (validate_arg (arg, COMPLEX_TYPE)
7730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7732 tree tmp;
7734 /* Calculate the result when the argument is a constant. */
7735 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7736 return tmp;
7738 /* Optimize fn(-x) into fn(x). */
7739 if ((tmp = fold_strip_sign_ops (arg)))
7740 return build_call_expr_loc (loc, fndecl, 1, tmp);
7743 return NULL_TREE;
7746 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7747 Return NULL_TREE if no simplification can be made. */
7749 static tree
7750 fold_builtin_tan (tree arg, tree type)
7752 enum built_in_function fcode;
7753 tree res;
7755 if (!validate_arg (arg, REAL_TYPE))
7756 return NULL_TREE;
7758 /* Calculate the result when the argument is a constant. */
7759 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7760 return res;
7762 /* Optimize tan(atan(x)) = x. */
7763 fcode = builtin_mathfn_code (arg);
7764 if (flag_unsafe_math_optimizations
7765 && (fcode == BUILT_IN_ATAN
7766 || fcode == BUILT_IN_ATANF
7767 || fcode == BUILT_IN_ATANL))
7768 return CALL_EXPR_ARG (arg, 0);
7770 return NULL_TREE;
7773 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7774 NULL_TREE if no simplification can be made. */
/* ARG0 is the angle; ARG1 and ARG2 are the sin and cos output
   pointers respectively.  */
7776 static tree
7777 fold_builtin_sincos (location_t loc,
7778 tree arg0, tree arg1, tree arg2)
7780 tree type;
7781 tree res, fn, call;
7783 if (!validate_arg (arg0, REAL_TYPE)
7784 || !validate_arg (arg1, POINTER_TYPE)
7785 || !validate_arg (arg2, POINTER_TYPE))
7786 return NULL_TREE;
7788 type = TREE_TYPE (arg0);
7790 /* Calculate the result when the argument is a constant. */
7791 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7792 return res;
7794 /* Canonicalize sincos to cexpi. */
/* Requires a C99 complex-math libc and a cexpi builtin for TYPE.  */
7795 if (!targetm.libc_has_function (function_c99_math_complex))
7796 return NULL_TREE;
7797 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7798 if (!fn)
7799 return NULL_TREE;
/* save_expr the call so its value feeds both stores but the call
   itself is evaluated only once.  */
7801 call = build_call_expr_loc (loc, fn, 1, arg0);
7802 call = builtin_save_expr (call);
/* Emit *arg1 = imag(cexpi(x)) (= sin x) then *arg2 = real(...) (= cos x),
   sequenced by a COMPOUND_EXPR.  */
7804 return build2 (COMPOUND_EXPR, void_type_node,
7805 build2 (MODIFY_EXPR, void_type_node,
7806 build_fold_indirect_ref_loc (loc, arg1),
7807 build1 (IMAGPART_EXPR, type, call)),
7808 build2 (MODIFY_EXPR, void_type_node,
7809 build_fold_indirect_ref_loc (loc, arg2),
7810 build1 (REALPART_EXPR, type, call)));
7813 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7814 NULL_TREE if no simplification can be made. */
7816 static tree
7817 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7819 tree rtype;
7820 tree realp, imagp, ifn;
7821 tree res;
7823 if (!validate_arg (arg0, COMPLEX_TYPE)
7824 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7825 return NULL_TREE;
7827 /* Calculate the result when the argument is a constant. */
7828 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7829 return res;
/* RTYPE is the scalar (real) component type of the complex type.  */
7831 rtype = TREE_TYPE (TREE_TYPE (arg0));
7833 /* In case we can figure out the real part of arg0 and it is constant zero
7834 fold to cexpi. */
7835 if (!targetm.libc_has_function (function_c99_math_complex))
7836 return NULL_TREE;
7837 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7838 if (!ifn)
7839 return NULL_TREE;
/* cexp(0 + yi) == cexpi(y).  */
7841 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7842 && real_zerop (realp))
7844 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7845 return build_call_expr_loc (loc, ifn, 1, narg);
7848 /* In case we can easily decompose real and imaginary parts split cexp
7849 to exp (r) * cexpi (i). */
7850 if (flag_unsafe_math_optimizations
7851 && realp)
7853 tree rfn, rcall, icall;
7855 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7856 if (!rfn)
7857 return NULL_TREE;
7859 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7860 if (!imagp)
7861 return NULL_TREE;
/* save_expr both calls so each is evaluated exactly once even though
   their values are used twice below.  */
7863 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7864 icall = builtin_save_expr (icall);
7865 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7866 rcall = builtin_save_expr (rcall);
/* Result is exp(r)*real(cexpi(i)) + exp(r)*imag(cexpi(i)) * I.  */
7867 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7868 fold_build2_loc (loc, MULT_EXPR, rtype,
7869 rcall,
7870 fold_build1_loc (loc, REALPART_EXPR,
7871 rtype, icall)),
7872 fold_build2_loc (loc, MULT_EXPR, rtype,
7873 rcall,
7874 fold_build1_loc (loc, IMAGPART_EXPR,
7875 rtype, icall)));
7878 return NULL_TREE;
7881 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7882 Return NULL_TREE if no simplification can be made. */
7884 static tree
7885 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7887 if (!validate_arg (arg, REAL_TYPE))
7888 return NULL_TREE;
7890 /* Optimize trunc of constant value. */
7891 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7893 REAL_VALUE_TYPE r, x;
7894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7896 x = TREE_REAL_CST (arg);
7897 real_trunc (&r, TYPE_MODE (type), &x);
7898 return build_real (type, r);
7901 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7904 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7905 Return NULL_TREE if no simplification can be made. */
7907 static tree
7908 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7910 if (!validate_arg (arg, REAL_TYPE))
7911 return NULL_TREE;
7913 /* Optimize floor of constant value. */
7914 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7916 REAL_VALUE_TYPE x;
7918 x = TREE_REAL_CST (arg);
7919 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7921 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7922 REAL_VALUE_TYPE r;
7924 real_floor (&r, TYPE_MODE (type), &x);
7925 return build_real (type, r);
7929 /* Fold floor (x) where x is nonnegative to trunc (x). */
7930 if (tree_expr_nonnegative_p (arg))
7932 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7933 if (truncfn)
7934 return build_call_expr_loc (loc, truncfn, 1, arg);
7937 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7940 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7941 Return NULL_TREE if no simplification can be made. */
7943 static tree
7944 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7946 if (!validate_arg (arg, REAL_TYPE))
7947 return NULL_TREE;
7949 /* Optimize ceil of constant value. */
7950 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7952 REAL_VALUE_TYPE x;
7954 x = TREE_REAL_CST (arg);
7955 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7957 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7958 REAL_VALUE_TYPE r;
7960 real_ceil (&r, TYPE_MODE (type), &x);
7961 return build_real (type, r);
7965 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7968 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7969 Return NULL_TREE if no simplification can be made. */
7971 static tree
7972 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7974 if (!validate_arg (arg, REAL_TYPE))
7975 return NULL_TREE;
7977 /* Optimize round of constant value. */
7978 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7980 REAL_VALUE_TYPE x;
7982 x = TREE_REAL_CST (arg);
7983 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7985 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7986 REAL_VALUE_TYPE r;
7988 real_round (&r, TYPE_MODE (type), &x);
7989 return build_real (type, r);
7993 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7996 /* Fold function call to builtin lround, lroundf or lroundl (or the
7997 corresponding long long versions) and other rounding functions. ARG
7998 is the argument to the call. Return NULL_TREE if no simplification
7999 can be made. */
8001 static tree
8002 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8004 if (!validate_arg (arg, REAL_TYPE))
8005 return NULL_TREE;
8007 /* Optimize lround of constant value. */
8008 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8010 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Infinities and NaNs have no integer result; leave them to the
   library call so it can raise the required exceptions.  */
8012 if (real_isfinite (&x))
8014 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8015 tree ftype = TREE_TYPE (arg);
8016 double_int val;
8017 REAL_VALUE_TYPE r;
/* Pick the rounding direction matching the builtin being folded.  */
8019 switch (DECL_FUNCTION_CODE (fndecl))
8021 CASE_FLT_FN (BUILT_IN_IFLOOR):
8022 CASE_FLT_FN (BUILT_IN_LFLOOR):
8023 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8024 real_floor (&r, TYPE_MODE (ftype), &x);
8025 break;
8027 CASE_FLT_FN (BUILT_IN_ICEIL):
8028 CASE_FLT_FN (BUILT_IN_LCEIL):
8029 CASE_FLT_FN (BUILT_IN_LLCEIL):
8030 real_ceil (&r, TYPE_MODE (ftype), &x);
8031 break;
8033 CASE_FLT_FN (BUILT_IN_IROUND):
8034 CASE_FLT_FN (BUILT_IN_LROUND):
8035 CASE_FLT_FN (BUILT_IN_LLROUND):
8036 real_round (&r, TYPE_MODE (ftype), &x);
8037 break;
8039 default:
8040 gcc_unreachable ();
/* Only fold when the integral value fits the result type; otherwise
   the behavior is target/library defined and we keep the call.  */
8043 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8044 if (double_int_fits_to_tree_p (itype, val))
8045 return double_int_to_tree (itype, val);
8049 switch (DECL_FUNCTION_CODE (fndecl))
8051 CASE_FLT_FN (BUILT_IN_LFLOOR):
8052 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8053 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8054 if (tree_expr_nonnegative_p (arg))
8055 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8056 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8057 break;
8058 default:;
/* Fall back to type-narrowing and iround/llround canonicalization.  */
8061 return fold_fixed_mathfn (loc, fndecl, arg);
8064 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8065 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8066 the argument to the call. Return NULL_TREE if no simplification can
8067 be made. */
8069 static tree
8070 fold_builtin_bitop (tree fndecl, tree arg)
8072 if (!validate_arg (arg, INTEGER_TYPE))
8073 return NULL_TREE;
8075 /* Optimize for constant argument. */
8076 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
/* The constant is held as a two-word (hi/lo) pair; WIDTH is the
   precision of the argument's type in bits.  */
8078 HOST_WIDE_INT hi, width, result;
8079 unsigned HOST_WIDE_INT lo;
8080 tree type;
8082 type = TREE_TYPE (arg);
8083 width = TYPE_PRECISION (type);
8084 lo = TREE_INT_CST_LOW (arg);
8086 /* Clear all the bits that are beyond the type's precision. */
8087 if (width > HOST_BITS_PER_WIDE_INT)
8089 hi = TREE_INT_CST_HIGH (arg);
8090 if (width < HOST_BITS_PER_DOUBLE_INT)
8091 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8093 else
8095 hi = 0;
8096 if (width < HOST_BITS_PER_WIDE_INT)
8097 lo &= ~(HOST_WIDE_INT_M1U << width);
8100 switch (DECL_FUNCTION_CODE (fndecl))
8102 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: 1-based index of the least significant set bit, 0 if none.  */
8103 if (lo != 0)
8104 result = ffs_hwi (lo);
8105 else if (hi != 0)
8106 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8107 else
8108 result = 0;
8109 break;
8111 CASE_INT_FN (BUILT_IN_CLZ):
/* clz: leading zero count; for a zero argument use the target's
   defined value if any, else the full width.  */
8112 if (hi != 0)
8113 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8114 else if (lo != 0)
8115 result = width - floor_log2 (lo) - 1;
8116 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8117 result = width;
8118 break;
8120 CASE_INT_FN (BUILT_IN_CTZ):
8121 if (lo != 0)
8122 result = ctz_hwi (lo);
8123 else if (hi != 0)
8124 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8125 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8126 result = width;
8127 break;
8129 CASE_INT_FN (BUILT_IN_CLRSB):
/* clrsb: redundant leading sign bits; complement a negative value
   first so the count can be done on a nonnegative pattern.  */
8130 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8131 return NULL_TREE;
8132 if (width > HOST_BITS_PER_WIDE_INT
8133 && (hi & ((unsigned HOST_WIDE_INT) 1
8134 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8136 hi = ~hi & ~(HOST_WIDE_INT_M1U
8137 << (width - HOST_BITS_PER_WIDE_INT - 1));
8138 lo = ~lo;
8140 else if (width <= HOST_BITS_PER_WIDE_INT
8141 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8142 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8143 if (hi != 0)
8144 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8145 else if (lo != 0)
8146 result = width - floor_log2 (lo) - 2;
8147 else
8148 result = width - 1;
8149 break;
8151 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each x &= x-1 clears the lowest set bit.  */
8152 result = 0;
8153 while (lo)
8154 result++, lo &= lo - 1;
8155 while (hi)
8156 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8157 break;
8159 CASE_INT_FN (BUILT_IN_PARITY):
/* Parity is the low bit of the population count.  */
8160 result = 0;
8161 while (lo)
8162 result++, lo &= lo - 1;
8163 while (hi)
8164 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8165 result &= 1;
8166 break;
8168 default:
8169 gcc_unreachable ();
8172 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8175 return NULL_TREE;
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant and the result are each kept as a lo/hi pair of
	 host words; R_LO/R_HI accumulate the byte-reversed value.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP16:
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move the byte at bit position S to the mirrored
		 position D, picking the source and target host word
		 depending on which side of the lo/hi split each
		 position falls.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function and also serves
   to identify which log variant (log/log2/log10) is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0 lower bound restricts folding to arguments where the
	 logarithm is well defined.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8330 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8331 NULL_TREE if no simplification can be made. */
8333 static tree
8334 fold_builtin_hypot (location_t loc, tree fndecl,
8335 tree arg0, tree arg1, tree type)
8337 tree res, narg0, narg1;
8339 if (!validate_arg (arg0, REAL_TYPE)
8340 || !validate_arg (arg1, REAL_TYPE))
8341 return NULL_TREE;
8343 /* Calculate the result when the argument is a constant. */
8344 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8345 return res;
8347 /* If either argument to hypot has a negate or abs, strip that off.
8348 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8349 narg0 = fold_strip_sign_ops (arg0);
8350 narg1 = fold_strip_sign_ops (arg1);
8351 if (narg0 || narg1)
8353 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8354 narg1 ? narg1 : arg1);
8357 /* If either argument is zero, hypot is fabs of the other. */
8358 if (real_zerop (arg0))
8359 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8360 else if (real_zerop (arg1))
8361 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8363 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8364 if (flag_unsafe_math_optimizations
8365 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8367 const REAL_VALUE_TYPE sqrt2_trunc
8368 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8369 return fold_build2_loc (loc, MULT_EXPR, type,
8370 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8371 build_real (type, sqrt2_trunc));
8374 return NULL_TREE;
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare against the original value.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  A negative power of zero may trap or
	     set errno, so only fold that case when those flags are off.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers; the sign of the
	     base cannot affect the result there.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8530 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8531 Return NULL_TREE if no simplification can be made. */
8532 static tree
8533 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8534 tree arg0, tree arg1, tree type)
8536 if (!validate_arg (arg0, REAL_TYPE)
8537 || !validate_arg (arg1, INTEGER_TYPE))
8538 return NULL_TREE;
8540 /* Optimize pow(1.0,y) = 1.0. */
8541 if (real_onep (arg0))
8542 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8544 if (tree_fits_shwi_p (arg1))
8546 HOST_WIDE_INT c = tree_to_shwi (arg1);
8548 /* Evaluate powi at compile-time. */
8549 if (TREE_CODE (arg0) == REAL_CST
8550 && !TREE_OVERFLOW (arg0))
8552 REAL_VALUE_TYPE x;
8553 x = TREE_REAL_CST (arg0);
8554 real_powi (&x, TYPE_MODE (type), &x, c);
8555 return build_real (type, x);
8558 /* Optimize pow(x,0) = 1.0. */
8559 if (c == 0)
8560 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8561 arg0);
8563 /* Optimize pow(x,1) = x. */
8564 if (c == 1)
8565 return arg0;
8567 /* Optimize pow(x,-1) = 1.0/x. */
8568 if (c == -1)
8569 return fold_build2_loc (loc, RDIV_EXPR, type,
8570 build_real (type, dconst1), arg0);
8573 return NULL_TREE;
8576 /* A subroutine of fold_builtin to fold the various exponent
8577 functions. Return NULL_TREE if no simplification can be made.
8578 FUNC is the corresponding MPFR exponent function. */
8580 static tree
8581 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8582 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8584 if (validate_arg (arg, REAL_TYPE))
8586 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8587 tree res;
8589 /* Calculate the result when the argument is a constant. */
8590 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8591 return res;
8593 /* Optimize expN(logN(x)) = x. */
8594 if (flag_unsafe_math_optimizations)
8596 const enum built_in_function fcode = builtin_mathfn_code (arg);
8598 if ((func == mpfr_exp
8599 && (fcode == BUILT_IN_LOG
8600 || fcode == BUILT_IN_LOGF
8601 || fcode == BUILT_IN_LOGL))
8602 || (func == mpfr_exp2
8603 && (fcode == BUILT_IN_LOG2
8604 || fcode == BUILT_IN_LOG2F
8605 || fcode == BUILT_IN_LOG2L))
8606 || (func == mpfr_exp10
8607 && (fcode == BUILT_IN_LOG10
8608 || fcode == BUILT_IN_LOG10F
8609 || fcode == BUILT_IN_LOG10L)))
8610 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8614 return NULL_TREE;
8617 /* Return true if VAR is a VAR_DECL or a component thereof. */
8619 static bool
8620 var_decl_component_p (tree var)
8622 tree inner = var;
8623 while (handled_component_p (inner))
8624 inner = TREE_OPERAND (inner, 0);
8625 return SSA_VAR_P (inner);
/* Fold function call to builtin memset.  Folds a memset whose length
   exactly matches the size of the addressed object's (integral or
   pointer) type into a single scalar store.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! tree_fits_uhwi_p (len))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral or pointer element types are replaced by a plain
     store; other types may have padding or trap representations.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the element exactly and DEST must be at least
     as aligned as the store requires.  */
  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C into every byte of CVAL.  The last
	 doubling is written as two shifts, presumably so the shift
	 count stays below the width even on narrow hosts -- TODO
	 confirm.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *)dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The result of memset is DEST; keep the store as a side effect.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8707 /* Fold function call to builtin memset. Return
8708 NULL_TREE if no simplification can be made. */
8710 static tree
8711 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8713 if (! validate_arg (dest, POINTER_TYPE)
8714 || ! validate_arg (size, INTEGER_TYPE))
8715 return NULL_TREE;
8717 if (!ignore)
8718 return NULL_TREE;
8720 /* New argument list transforming bzero(ptr x, int y) to
8721 memset(ptr x, int 0, size_t y). This is done this way
8722 so that if it isn't expanded inline, we fallback to
8723 calling bzero instead of memset. */
8725 return fold_builtin_memset (loc, dest, integer_zero_node,
8726 fold_convert_loc (loc, size_type_node, size),
8727 void_type_node, ignore);
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* memmove: try to prove the regions cannot overlap so the call
	 can be turned into memcpy; otherwise give up.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src);
	  dest_align = get_pointer_alignment (dest);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Use the copy length, when known, as the access extent.  */
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls cannot overlap; the same decl may,
		     depending on the offsets.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  /* Same base pointer: fold both MEM_REF offsets into
		     the byte offsets and compare ranges.  */
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = mem_ref_offset (src_base) +
					double_int::from_shwi (src_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  src_offset = off.low;
		  off = mem_ref_offset (dest_base) +
					double_int::from_shwi (dest_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      if (!tree_fits_shwi_p (len))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return NULL_TREE;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* If a pointer points to an array whose size differs from LEN,
	 use the element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return NULL_TREE;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	{
	  /* A more suitable int_mode_for_mode would return a vector
	     integer mode for a vector float mode or a integer complex
	     mode for a float complex mode if there isn't a regular
	     integer mode covering the mode of desttype.  */
	  enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype));
	  if (mode == BLKmode)
	    desttype = NULL_TREE;
	  else
	    desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
						       1);
	}
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	{
	  enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype));
	  if (mode == BLKmode)
	    srctype = NULL_TREE;
	  else
	    srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
						      1);
	}
      /* Fall back to the other side's type when one was rejected.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return NULL_TREE;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is used again to build the return value below.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  /* Prefer the destination's type for the load when possible so
	     both sides of the assignment agree.  */
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* One side is still missing; materialize it with the other
	 side's type, relaxing the alignment when allowed.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* The whole copy collapses into one scalar assignment.  */
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  /* EXPR == LEN marks the src == dest case; there is no store to keep.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style result points at the last byte copied, not one past.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
9054 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9055 If LEN is not NULL, it represents the length of the string to be
9056 copied. Return NULL_TREE if no simplification can be made. */
9058 tree
9059 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9061 tree fn;
9063 if (!validate_arg (dest, POINTER_TYPE)
9064 || !validate_arg (src, POINTER_TYPE))
9065 return NULL_TREE;
9067 /* If SRC and DEST are the same (and not volatile), return DEST. */
9068 if (operand_equal_p (src, dest, 0))
9069 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9071 if (optimize_function_for_size_p (cfun))
9072 return NULL_TREE;
9074 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9075 if (!fn)
9076 return NULL_TREE;
9078 if (!len)
9080 len = c_strlen (src, 1);
9081 if (! len || TREE_SIDE_EFFECTS (len))
9082 return NULL_TREE;
9085 len = fold_convert_loc (loc, size_type_node, len);
9086 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9087 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9088 build_call_expr_loc (loc, fn, 3, dest, src, len));
9091 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9092 Return NULL_TREE if no simplification can be made. */
9094 static tree
9095 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9097 tree fn, len, lenp1, call, type;
9099 if (!validate_arg (dest, POINTER_TYPE)
9100 || !validate_arg (src, POINTER_TYPE))
9101 return NULL_TREE;
9103 len = c_strlen (src, 1);
9104 if (!len
9105 || TREE_CODE (len) != INTEGER_CST)
9106 return NULL_TREE;
9108 if (optimize_function_for_size_p (cfun)
9109 /* If length is zero it's small enough. */
9110 && !integer_zerop (len))
9111 return NULL_TREE;
9113 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9114 if (!fn)
9115 return NULL_TREE;
9117 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9118 fold_convert_loc (loc, size_type_node, len),
9119 build_int_cst (size_type_node, 1));
9120 /* We use dest twice in building our expression. Save it from
9121 multiple expansions. */
9122 dest = builtin_save_expr (dest);
9123 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9125 type = TREE_TYPE (TREE_TYPE (fndecl));
9126 dest = fold_build_pointer_plus_loc (loc, dest, len);
9127 dest = fold_convert_loc (loc, type, dest);
9128 dest = omit_one_operand_loc (loc, type, dest, call);
9129 return dest;
9132 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9133 If SLEN is not NULL, it represents the length of the source string.
9134 Return NULL_TREE if no simplification can be made. */
9136 tree
9137 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9138 tree src, tree len, tree slen)
9140 tree fn;
9142 if (!validate_arg (dest, POINTER_TYPE)
9143 || !validate_arg (src, POINTER_TYPE)
9144 || !validate_arg (len, INTEGER_TYPE))
9145 return NULL_TREE;
9147 /* If the LEN parameter is zero, return DEST. */
9148 if (integer_zerop (len))
9149 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9151 /* We can't compare slen with len as constants below if len is not a
9152 constant. */
9153 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9154 return NULL_TREE;
9156 if (!slen)
9157 slen = c_strlen (src, 1);
9159 /* Now, we must be passed a constant src ptr parameter. */
9160 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9161 return NULL_TREE;
9163 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9165 /* We do not support simplification of this case, though we do
9166 support it when expanding trees into RTL. */
9167 /* FIXME: generate a call to __builtin_memset. */
9168 if (tree_int_cst_lt (slen, len))
9169 return NULL_TREE;
9171 /* OK transform into builtin memcpy. */
9172 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9173 if (!fn)
9174 return NULL_TREE;
9176 len = fold_convert_loc (loc, size_type_node, len);
9177 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9178 build_call_expr_loc (loc, fn, 3, dest, src, len));
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* Only fold when both the searched-for character and the length
	 are compile-time constants.  */
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      /* Proceed only if ARG1 is a string constant and LEN does not read
	 past its NUL terminator.  */
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert ARG2 to a host char in the target character set;
	     give up if it does not fit in a single char.  */
	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  /* Do the search at compile time with the host memchr.  */
	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  /* Not found: memchr returns a null pointer.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  /* Found: the result is ARG1 advanced by the match offset.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes to compare.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Fetch both arguments as constant strings, if available.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time with
     the host memcmp, normalizing the result to -1/0/1.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Both bytes are read through a const unsigned char pointer, per
	 the memcmp contract.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* Fetch both arguments as constant strings, if available.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Two constant strings: evaluate with the host strcmp, normalizing
     the result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Fetch both arguments as constant strings, if available.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Constant strings and constant length: evaluate with the host
     strncmp, normalizing the result to -1/0/1.  */
  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9447 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9448 ARG. Return NULL_TREE if no simplification can be made. */
9450 static tree
9451 fold_builtin_signbit (location_t loc, tree arg, tree type)
9453 if (!validate_arg (arg, REAL_TYPE))
9454 return NULL_TREE;
9456 /* If ARG is a compile-time constant, determine the result. */
9457 if (TREE_CODE (arg) == REAL_CST
9458 && !TREE_OVERFLOW (arg))
9460 REAL_VALUE_TYPE c;
9462 c = TREE_REAL_CST (arg);
9463 return (REAL_VALUE_NEGATIVE (c)
9464 ? build_one_cst (type)
9465 : build_zero_cst (type));
9468 /* If ARG is non-negative, the result is always zero. */
9469 if (tree_expr_nonnegative_p (arg))
9470 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9472 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9473 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9474 return fold_convert (type,
9475 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9476 build_real (TREE_TYPE (arg), dconst0)));
9478 return NULL_TREE;
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  FNDECL is the called function, TYPE its
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides its sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9529 /* Fold a call to builtin isascii with argument ARG. */
9531 static tree
9532 fold_builtin_isascii (location_t loc, tree arg)
9534 if (!validate_arg (arg, INTEGER_TYPE))
9535 return NULL_TREE;
9536 else
9538 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9539 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9540 build_int_cst (integer_type_node,
9541 ~ (unsigned HOST_WIDE_INT) 0x7f));
9542 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9543 arg, integer_zero_node);
9547 /* Fold a call to builtin toascii with argument ARG. */
9549 static tree
9550 fold_builtin_toascii (location_t loc, tree arg)
9552 if (!validate_arg (arg, INTEGER_TYPE))
9553 return NULL_TREE;
9555 /* Transform toascii(c) -> (c & 0x7f). */
9556 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9557 build_int_cst (integer_type_node, 0x7f));
9560 /* Fold a call to builtin isdigit with argument ARG. */
9562 static tree
9563 fold_builtin_isdigit (location_t loc, tree arg)
9565 if (!validate_arg (arg, INTEGER_TYPE))
9566 return NULL_TREE;
9567 else
9569 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9570 /* According to the C standard, isdigit is unaffected by locale.
9571 However, it definitely is affected by the target character set. */
9572 unsigned HOST_WIDE_INT target_digit0
9573 = lang_hooks.to_target_charset ('0');
9575 if (target_digit0 == 0)
9576 return NULL_TREE;
9578 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9579 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9580 build_int_cst (unsigned_type_node, target_digit0));
9581 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9582 build_int_cst (unsigned_type_node, 9));
9586 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9588 static tree
9589 fold_builtin_fabs (location_t loc, tree arg, tree type)
9591 if (!validate_arg (arg, REAL_TYPE))
9592 return NULL_TREE;
9594 arg = fold_convert_loc (loc, type, arg);
9595 if (TREE_CODE (arg) == REAL_CST)
9596 return fold_abs_const (arg, type);
9597 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9600 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9602 static tree
9603 fold_builtin_abs (location_t loc, tree arg, tree type)
9605 if (!validate_arg (arg, INTEGER_TYPE))
9606 return NULL_TREE;
9608 arg = fold_convert_loc (loc, type, arg);
9609 if (TREE_CODE (arg) == INTEGER_CST)
9610 return fold_abs_const (arg, type);
9611 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9614 /* Fold a fma operation with arguments ARG[012]. */
9616 tree
9617 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9618 tree type, tree arg0, tree arg1, tree arg2)
9620 if (TREE_CODE (arg0) == REAL_CST
9621 && TREE_CODE (arg1) == REAL_CST
9622 && TREE_CODE (arg2) == REAL_CST)
9623 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9625 return NULL_TREE;
9628 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9630 static tree
9631 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9633 if (validate_arg (arg0, REAL_TYPE)
9634 && validate_arg (arg1, REAL_TYPE)
9635 && validate_arg (arg2, REAL_TYPE))
9637 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9638 if (tem)
9639 return tem;
9641 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9642 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9643 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9645 return NULL_TREE;
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
   TYPE is the return type; MAX selects fmax over fmin.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9693 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9695 static tree
9696 fold_builtin_carg (location_t loc, tree arg, tree type)
9698 if (validate_arg (arg, COMPLEX_TYPE)
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9701 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9703 if (atan2_fn)
9705 tree new_arg = builtin_save_expr (arg);
9706 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9707 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9708 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9712 return NULL_TREE;
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   return type of the call (REAL_TYPE for logb, integral for ilogb).
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant arguments without overflow can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through...  (ilogb of Inf/NaN is not folded.)  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the return type of the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant arguments without overflow can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (the int* exponent out-parameter); we can assume the base is 2.
   RETTYPE is the return type.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing first argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For a non-ldexp function the target format's radix must be 2
	 for the exponent adjustment to be meaningful.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf with arguments ARG0 (the value) and ARG1
   (the pointer out-parameter for the integral part).  RETTYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing first argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If an insn pattern exists for this classification, prefer
     expanding it to RTL over folding here.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* Build the largest finite value of ARG's mode as a string,
	   then parse it back into a REAL_VALUE_TYPE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* rmax = largest finite value, rmin = smallest normal value
	   (0x1p(emin-1)) of ARG's mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated once across both calls.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call, FNDECL the called declaration, and
   BUILTIN_INDEX selects which classification to fold.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Modes without infinities can never satisfy isinf.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* Save ARG so its side effects happen once across both calls.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans before combining.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs or infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Modes without NaNs can never satisfy isnan.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) is equivalent to x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on fabs(arg), saved so its side effects happen only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     Built innermost-first below.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normal value of this mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects the non-NaN chain; unordered -> FP_NAN.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* Bring both operands to the common comparison type.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: without NaNs the answer is
	 always false; keep the operands for their side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes passed in compute the OPPOSITE of the desired result,
     so wrap the chosen comparison in a logical NOT.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
10279 /* Fold a call to built-in function FNDECL with 0 arguments.
10280 IGNORE is true if the result of the function call is ignored. This
10281 function returns NULL_TREE if no simplification was possible. */
10283 static tree
10284 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10286 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10287 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10288 switch (fcode)
10290 CASE_FLT_FN (BUILT_IN_INF):
10291 case BUILT_IN_INFD32:
10292 case BUILT_IN_INFD64:
10293 case BUILT_IN_INFD128:
10294 return fold_builtin_inf (loc, type, true);
10296 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10297 return fold_builtin_inf (loc, type, false);
10299 case BUILT_IN_CLASSIFY_TYPE:
10300 return fold_builtin_classify_type (NULL_TREE);
10302 case BUILT_IN_UNREACHABLE:
10303 if (flag_sanitize & SANITIZE_UNREACHABLE
10304 && (current_function_decl == NULL
10305 || !lookup_attribute ("no_sanitize_undefined",
10306 DECL_ATTRIBUTES (current_function_decl))))
10307 return ubsan_instrument_unreachable (loc);
10308 break;
10310 default:
10311 break;
10313 return NULL_TREE;
10316 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10317 IGNORE is true if the result of the function call is ignored. This
10318 function returns NULL_TREE if no simplification was possible. */
10320 static tree
10321 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10323 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10324 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10325 switch (fcode)
10327 case BUILT_IN_CONSTANT_P:
10329 tree val = fold_builtin_constant_p (arg0);
10331 /* Gimplification will pull the CALL_EXPR for the builtin out of
10332 an if condition. When not optimizing, we'll not CSE it back.
10333 To avoid link error types of regressions, return false now. */
10334 if (!val && !optimize)
10335 val = integer_zero_node;
10337 return val;
10340 case BUILT_IN_CLASSIFY_TYPE:
10341 return fold_builtin_classify_type (arg0);
10343 case BUILT_IN_STRLEN:
10344 return fold_builtin_strlen (loc, type, arg0);
10346 CASE_FLT_FN (BUILT_IN_FABS):
10347 case BUILT_IN_FABSD32:
10348 case BUILT_IN_FABSD64:
10349 case BUILT_IN_FABSD128:
10350 return fold_builtin_fabs (loc, arg0, type);
10352 case BUILT_IN_ABS:
10353 case BUILT_IN_LABS:
10354 case BUILT_IN_LLABS:
10355 case BUILT_IN_IMAXABS:
10356 return fold_builtin_abs (loc, arg0, type);
10358 CASE_FLT_FN (BUILT_IN_CONJ):
10359 if (validate_arg (arg0, COMPLEX_TYPE)
10360 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10361 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10362 break;
10364 CASE_FLT_FN (BUILT_IN_CREAL):
10365 if (validate_arg (arg0, COMPLEX_TYPE)
10366 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10367 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10368 break;
10370 CASE_FLT_FN (BUILT_IN_CIMAG):
10371 if (validate_arg (arg0, COMPLEX_TYPE)
10372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10373 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10374 break;
10376 CASE_FLT_FN (BUILT_IN_CCOS):
10377 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10379 CASE_FLT_FN (BUILT_IN_CCOSH):
10380 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10382 CASE_FLT_FN (BUILT_IN_CPROJ):
10383 return fold_builtin_cproj (loc, arg0, type);
10385 CASE_FLT_FN (BUILT_IN_CSIN):
10386 if (validate_arg (arg0, COMPLEX_TYPE)
10387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10388 return do_mpc_arg1 (arg0, type, mpc_sin);
10389 break;
10391 CASE_FLT_FN (BUILT_IN_CSINH):
10392 if (validate_arg (arg0, COMPLEX_TYPE)
10393 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10394 return do_mpc_arg1 (arg0, type, mpc_sinh);
10395 break;
10397 CASE_FLT_FN (BUILT_IN_CTAN):
10398 if (validate_arg (arg0, COMPLEX_TYPE)
10399 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10400 return do_mpc_arg1 (arg0, type, mpc_tan);
10401 break;
10403 CASE_FLT_FN (BUILT_IN_CTANH):
10404 if (validate_arg (arg0, COMPLEX_TYPE)
10405 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10406 return do_mpc_arg1 (arg0, type, mpc_tanh);
10407 break;
10409 CASE_FLT_FN (BUILT_IN_CLOG):
10410 if (validate_arg (arg0, COMPLEX_TYPE)
10411 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10412 return do_mpc_arg1 (arg0, type, mpc_log);
10413 break;
10415 CASE_FLT_FN (BUILT_IN_CSQRT):
10416 if (validate_arg (arg0, COMPLEX_TYPE)
10417 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10418 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10419 break;
10421 CASE_FLT_FN (BUILT_IN_CASIN):
10422 if (validate_arg (arg0, COMPLEX_TYPE)
10423 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10424 return do_mpc_arg1 (arg0, type, mpc_asin);
10425 break;
10427 CASE_FLT_FN (BUILT_IN_CACOS):
10428 if (validate_arg (arg0, COMPLEX_TYPE)
10429 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10430 return do_mpc_arg1 (arg0, type, mpc_acos);
10431 break;
10433 CASE_FLT_FN (BUILT_IN_CATAN):
10434 if (validate_arg (arg0, COMPLEX_TYPE)
10435 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10436 return do_mpc_arg1 (arg0, type, mpc_atan);
10437 break;
10439 CASE_FLT_FN (BUILT_IN_CASINH):
10440 if (validate_arg (arg0, COMPLEX_TYPE)
10441 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10442 return do_mpc_arg1 (arg0, type, mpc_asinh);
10443 break;
10445 CASE_FLT_FN (BUILT_IN_CACOSH):
10446 if (validate_arg (arg0, COMPLEX_TYPE)
10447 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10448 return do_mpc_arg1 (arg0, type, mpc_acosh);
10449 break;
10451 CASE_FLT_FN (BUILT_IN_CATANH):
10452 if (validate_arg (arg0, COMPLEX_TYPE)
10453 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10454 return do_mpc_arg1 (arg0, type, mpc_atanh);
10455 break;
10457 CASE_FLT_FN (BUILT_IN_CABS):
10458 return fold_builtin_cabs (loc, arg0, type, fndecl);
10460 CASE_FLT_FN (BUILT_IN_CARG):
10461 return fold_builtin_carg (loc, arg0, type);
10463 CASE_FLT_FN (BUILT_IN_SQRT):
10464 return fold_builtin_sqrt (loc, arg0, type);
10466 CASE_FLT_FN (BUILT_IN_CBRT):
10467 return fold_builtin_cbrt (loc, arg0, type);
10469 CASE_FLT_FN (BUILT_IN_ASIN):
10470 if (validate_arg (arg0, REAL_TYPE))
10471 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10472 &dconstm1, &dconst1, true);
10473 break;
10475 CASE_FLT_FN (BUILT_IN_ACOS):
10476 if (validate_arg (arg0, REAL_TYPE))
10477 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10478 &dconstm1, &dconst1, true);
10479 break;
10481 CASE_FLT_FN (BUILT_IN_ATAN):
10482 if (validate_arg (arg0, REAL_TYPE))
10483 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10484 break;
10486 CASE_FLT_FN (BUILT_IN_ASINH):
10487 if (validate_arg (arg0, REAL_TYPE))
10488 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10489 break;
10491 CASE_FLT_FN (BUILT_IN_ACOSH):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10494 &dconst1, NULL, true);
10495 break;
10497 CASE_FLT_FN (BUILT_IN_ATANH):
10498 if (validate_arg (arg0, REAL_TYPE))
10499 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10500 &dconstm1, &dconst1, false);
10501 break;
10503 CASE_FLT_FN (BUILT_IN_SIN):
10504 if (validate_arg (arg0, REAL_TYPE))
10505 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10506 break;
10508 CASE_FLT_FN (BUILT_IN_COS):
10509 return fold_builtin_cos (loc, arg0, type, fndecl);
10511 CASE_FLT_FN (BUILT_IN_TAN):
10512 return fold_builtin_tan (arg0, type);
10514 CASE_FLT_FN (BUILT_IN_CEXP):
10515 return fold_builtin_cexp (loc, arg0, type);
10517 CASE_FLT_FN (BUILT_IN_CEXPI):
10518 if (validate_arg (arg0, REAL_TYPE))
10519 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10520 break;
10522 CASE_FLT_FN (BUILT_IN_SINH):
10523 if (validate_arg (arg0, REAL_TYPE))
10524 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10525 break;
10527 CASE_FLT_FN (BUILT_IN_COSH):
10528 return fold_builtin_cosh (loc, arg0, type, fndecl);
10530 CASE_FLT_FN (BUILT_IN_TANH):
10531 if (validate_arg (arg0, REAL_TYPE))
10532 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10533 break;
10535 CASE_FLT_FN (BUILT_IN_ERF):
10536 if (validate_arg (arg0, REAL_TYPE))
10537 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10538 break;
10540 CASE_FLT_FN (BUILT_IN_ERFC):
10541 if (validate_arg (arg0, REAL_TYPE))
10542 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10543 break;
10545 CASE_FLT_FN (BUILT_IN_TGAMMA):
10546 if (validate_arg (arg0, REAL_TYPE))
10547 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10548 break;
10550 CASE_FLT_FN (BUILT_IN_EXP):
10551 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10553 CASE_FLT_FN (BUILT_IN_EXP2):
10554 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10556 CASE_FLT_FN (BUILT_IN_EXP10):
10557 CASE_FLT_FN (BUILT_IN_POW10):
10558 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10560 CASE_FLT_FN (BUILT_IN_EXPM1):
10561 if (validate_arg (arg0, REAL_TYPE))
10562 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10563 break;
10565 CASE_FLT_FN (BUILT_IN_LOG):
10566 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10568 CASE_FLT_FN (BUILT_IN_LOG2):
10569 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10571 CASE_FLT_FN (BUILT_IN_LOG10):
10572 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10574 CASE_FLT_FN (BUILT_IN_LOG1P):
10575 if (validate_arg (arg0, REAL_TYPE))
10576 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10577 &dconstm1, NULL, false);
10578 break;
10580 CASE_FLT_FN (BUILT_IN_J0):
10581 if (validate_arg (arg0, REAL_TYPE))
10582 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10583 NULL, NULL, 0);
10584 break;
10586 CASE_FLT_FN (BUILT_IN_J1):
10587 if (validate_arg (arg0, REAL_TYPE))
10588 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10589 NULL, NULL, 0);
10590 break;
10592 CASE_FLT_FN (BUILT_IN_Y0):
10593 if (validate_arg (arg0, REAL_TYPE))
10594 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10595 &dconst0, NULL, false);
10596 break;
10598 CASE_FLT_FN (BUILT_IN_Y1):
10599 if (validate_arg (arg0, REAL_TYPE))
10600 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10601 &dconst0, NULL, false);
10602 break;
10604 CASE_FLT_FN (BUILT_IN_NAN):
10605 case BUILT_IN_NAND32:
10606 case BUILT_IN_NAND64:
10607 case BUILT_IN_NAND128:
10608 return fold_builtin_nan (arg0, type, true);
10610 CASE_FLT_FN (BUILT_IN_NANS):
10611 return fold_builtin_nan (arg0, type, false);
10613 CASE_FLT_FN (BUILT_IN_FLOOR):
10614 return fold_builtin_floor (loc, fndecl, arg0);
10616 CASE_FLT_FN (BUILT_IN_CEIL):
10617 return fold_builtin_ceil (loc, fndecl, arg0);
10619 CASE_FLT_FN (BUILT_IN_TRUNC):
10620 return fold_builtin_trunc (loc, fndecl, arg0);
10622 CASE_FLT_FN (BUILT_IN_ROUND):
10623 return fold_builtin_round (loc, fndecl, arg0);
10625 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10626 CASE_FLT_FN (BUILT_IN_RINT):
10627 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10629 CASE_FLT_FN (BUILT_IN_ICEIL):
10630 CASE_FLT_FN (BUILT_IN_LCEIL):
10631 CASE_FLT_FN (BUILT_IN_LLCEIL):
10632 CASE_FLT_FN (BUILT_IN_LFLOOR):
10633 CASE_FLT_FN (BUILT_IN_IFLOOR):
10634 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10635 CASE_FLT_FN (BUILT_IN_IROUND):
10636 CASE_FLT_FN (BUILT_IN_LROUND):
10637 CASE_FLT_FN (BUILT_IN_LLROUND):
10638 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10640 CASE_FLT_FN (BUILT_IN_IRINT):
10641 CASE_FLT_FN (BUILT_IN_LRINT):
10642 CASE_FLT_FN (BUILT_IN_LLRINT):
10643 return fold_fixed_mathfn (loc, fndecl, arg0);
10645 case BUILT_IN_BSWAP16:
10646 case BUILT_IN_BSWAP32:
10647 case BUILT_IN_BSWAP64:
10648 return fold_builtin_bswap (fndecl, arg0);
10650 CASE_INT_FN (BUILT_IN_FFS):
10651 CASE_INT_FN (BUILT_IN_CLZ):
10652 CASE_INT_FN (BUILT_IN_CTZ):
10653 CASE_INT_FN (BUILT_IN_CLRSB):
10654 CASE_INT_FN (BUILT_IN_POPCOUNT):
10655 CASE_INT_FN (BUILT_IN_PARITY):
10656 return fold_builtin_bitop (fndecl, arg0);
10658 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10659 return fold_builtin_signbit (loc, arg0, type);
10661 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10662 return fold_builtin_significand (loc, arg0, type);
10664 CASE_FLT_FN (BUILT_IN_ILOGB):
10665 CASE_FLT_FN (BUILT_IN_LOGB):
10666 return fold_builtin_logb (loc, arg0, type);
10668 case BUILT_IN_ISASCII:
10669 return fold_builtin_isascii (loc, arg0);
10671 case BUILT_IN_TOASCII:
10672 return fold_builtin_toascii (loc, arg0);
10674 case BUILT_IN_ISDIGIT:
10675 return fold_builtin_isdigit (loc, arg0);
10677 CASE_FLT_FN (BUILT_IN_FINITE):
10678 case BUILT_IN_FINITED32:
10679 case BUILT_IN_FINITED64:
10680 case BUILT_IN_FINITED128:
10681 case BUILT_IN_ISFINITE:
10683 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10684 if (ret)
10685 return ret;
10686 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10689 CASE_FLT_FN (BUILT_IN_ISINF):
10690 case BUILT_IN_ISINFD32:
10691 case BUILT_IN_ISINFD64:
10692 case BUILT_IN_ISINFD128:
10694 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10695 if (ret)
10696 return ret;
10697 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10700 case BUILT_IN_ISNORMAL:
10701 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10703 case BUILT_IN_ISINF_SIGN:
10704 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10706 CASE_FLT_FN (BUILT_IN_ISNAN):
10707 case BUILT_IN_ISNAND32:
10708 case BUILT_IN_ISNAND64:
10709 case BUILT_IN_ISNAND128:
10710 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10712 case BUILT_IN_PRINTF:
10713 case BUILT_IN_PRINTF_UNLOCKED:
10714 case BUILT_IN_VPRINTF:
10715 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10717 case BUILT_IN_FREE:
10718 if (integer_zerop (arg0))
10719 return build_empty_stmt (loc);
10720 break;
10722 default:
10723 break;
10726 return NULL_TREE;
10730 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10731 IGNORE is true if the result of the function call is ignored. This
10732 function returns NULL_TREE if no simplification was possible. */
10734 static tree
10735 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10737 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10738 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10740 switch (fcode)
10742 CASE_FLT_FN (BUILT_IN_JN):
10743 if (validate_arg (arg0, INTEGER_TYPE)
10744 && validate_arg (arg1, REAL_TYPE))
10745 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10746 break;
10748 CASE_FLT_FN (BUILT_IN_YN):
10749 if (validate_arg (arg0, INTEGER_TYPE)
10750 && validate_arg (arg1, REAL_TYPE))
10751 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10752 &dconst0, false);
10753 break;
10755 CASE_FLT_FN (BUILT_IN_DREM):
10756 CASE_FLT_FN (BUILT_IN_REMAINDER):
10757 if (validate_arg (arg0, REAL_TYPE)
10758 && validate_arg (arg1, REAL_TYPE))
10759 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10760 break;
10762 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10763 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10764 if (validate_arg (arg0, REAL_TYPE)
10765 && validate_arg (arg1, POINTER_TYPE))
10766 return do_mpfr_lgamma_r (arg0, arg1, type);
10767 break;
10769 CASE_FLT_FN (BUILT_IN_ATAN2):
10770 if (validate_arg (arg0, REAL_TYPE)
10771 && validate_arg (arg1, REAL_TYPE))
10772 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10773 break;
10775 CASE_FLT_FN (BUILT_IN_FDIM):
10776 if (validate_arg (arg0, REAL_TYPE)
10777 && validate_arg (arg1, REAL_TYPE))
10778 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10779 break;
10781 CASE_FLT_FN (BUILT_IN_HYPOT):
10782 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10784 CASE_FLT_FN (BUILT_IN_CPOW):
10785 if (validate_arg (arg0, COMPLEX_TYPE)
10786 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10787 && validate_arg (arg1, COMPLEX_TYPE)
10788 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10789 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10790 break;
10792 CASE_FLT_FN (BUILT_IN_LDEXP):
10793 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10794 CASE_FLT_FN (BUILT_IN_SCALBN):
10795 CASE_FLT_FN (BUILT_IN_SCALBLN):
10796 return fold_builtin_load_exponent (loc, arg0, arg1,
10797 type, /*ldexp=*/false);
10799 CASE_FLT_FN (BUILT_IN_FREXP):
10800 return fold_builtin_frexp (loc, arg0, arg1, type);
10802 CASE_FLT_FN (BUILT_IN_MODF):
10803 return fold_builtin_modf (loc, arg0, arg1, type);
10805 case BUILT_IN_BZERO:
10806 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10808 case BUILT_IN_FPUTS:
10809 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10811 case BUILT_IN_FPUTS_UNLOCKED:
10812 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10814 case BUILT_IN_STRSTR:
10815 return fold_builtin_strstr (loc, arg0, arg1, type);
10817 case BUILT_IN_STRCAT:
10818 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10820 case BUILT_IN_STRSPN:
10821 return fold_builtin_strspn (loc, arg0, arg1);
10823 case BUILT_IN_STRCSPN:
10824 return fold_builtin_strcspn (loc, arg0, arg1);
10826 case BUILT_IN_STRCHR:
10827 case BUILT_IN_INDEX:
10828 return fold_builtin_strchr (loc, arg0, arg1, type);
10830 case BUILT_IN_STRRCHR:
10831 case BUILT_IN_RINDEX:
10832 return fold_builtin_strrchr (loc, arg0, arg1, type);
10834 case BUILT_IN_STRCPY:
10835 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10837 case BUILT_IN_STPCPY:
10838 if (ignore)
10840 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10841 if (!fn)
10842 break;
10844 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10846 else
10847 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10848 break;
10850 case BUILT_IN_STRCMP:
10851 return fold_builtin_strcmp (loc, arg0, arg1);
10853 case BUILT_IN_STRPBRK:
10854 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10856 case BUILT_IN_EXPECT:
10857 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10859 CASE_FLT_FN (BUILT_IN_POW):
10860 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10862 CASE_FLT_FN (BUILT_IN_POWI):
10863 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10865 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10866 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10868 CASE_FLT_FN (BUILT_IN_FMIN):
10869 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10871 CASE_FLT_FN (BUILT_IN_FMAX):
10872 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10874 case BUILT_IN_ISGREATER:
10875 return fold_builtin_unordered_cmp (loc, fndecl,
10876 arg0, arg1, UNLE_EXPR, LE_EXPR);
10877 case BUILT_IN_ISGREATEREQUAL:
10878 return fold_builtin_unordered_cmp (loc, fndecl,
10879 arg0, arg1, UNLT_EXPR, LT_EXPR);
10880 case BUILT_IN_ISLESS:
10881 return fold_builtin_unordered_cmp (loc, fndecl,
10882 arg0, arg1, UNGE_EXPR, GE_EXPR);
10883 case BUILT_IN_ISLESSEQUAL:
10884 return fold_builtin_unordered_cmp (loc, fndecl,
10885 arg0, arg1, UNGT_EXPR, GT_EXPR);
10886 case BUILT_IN_ISLESSGREATER:
10887 return fold_builtin_unordered_cmp (loc, fndecl,
10888 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10889 case BUILT_IN_ISUNORDERED:
10890 return fold_builtin_unordered_cmp (loc, fndecl,
10891 arg0, arg1, UNORDERED_EXPR,
10892 NOP_EXPR);
10894 /* We do the folding for va_start in the expander. */
10895 case BUILT_IN_VA_START:
10896 break;
10898 case BUILT_IN_SPRINTF:
10899 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10901 case BUILT_IN_OBJECT_SIZE:
10902 return fold_builtin_object_size (arg0, arg1);
10904 case BUILT_IN_PRINTF:
10905 case BUILT_IN_PRINTF_UNLOCKED:
10906 case BUILT_IN_VPRINTF:
10907 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10909 case BUILT_IN_PRINTF_CHK:
10910 case BUILT_IN_VPRINTF_CHK:
10911 if (!validate_arg (arg0, INTEGER_TYPE)
10912 || TREE_SIDE_EFFECTS (arg0))
10913 return NULL_TREE;
10914 else
10915 return fold_builtin_printf (loc, fndecl,
10916 arg1, NULL_TREE, ignore, fcode);
10917 break;
10919 case BUILT_IN_FPRINTF:
10920 case BUILT_IN_FPRINTF_UNLOCKED:
10921 case BUILT_IN_VFPRINTF:
10922 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10923 ignore, fcode);
10925 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10926 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10928 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10929 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10931 default:
10932 break;
10934 return NULL_TREE;
10937 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10938 and ARG2. IGNORE is true if the result of the function call is ignored.
10939 This function returns NULL_TREE if no simplification was possible. */
10941 static tree
10942 fold_builtin_3 (location_t loc, tree fndecl,
10943 tree arg0, tree arg1, tree arg2, bool ignore)
10945 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10947 switch (fcode)
10950 CASE_FLT_FN (BUILT_IN_SINCOS):
10951 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10953 CASE_FLT_FN (BUILT_IN_FMA):
10954 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10955 break;
10957 CASE_FLT_FN (BUILT_IN_REMQUO):
10958 if (validate_arg (arg0, REAL_TYPE)
10959 && validate_arg (arg1, REAL_TYPE)
10960 && validate_arg (arg2, POINTER_TYPE))
10961 return do_mpfr_remquo (arg0, arg1, arg2);
10962 break;
10964 case BUILT_IN_MEMSET:
10965 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10967 case BUILT_IN_BCOPY:
10968 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10969 void_type_node, true, /*endp=*/3);
10971 case BUILT_IN_MEMCPY:
10972 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10973 type, ignore, /*endp=*/0);
10975 case BUILT_IN_MEMPCPY:
10976 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10977 type, ignore, /*endp=*/1);
10979 case BUILT_IN_MEMMOVE:
10980 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10981 type, ignore, /*endp=*/3);
10983 case BUILT_IN_STRNCAT:
10984 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10986 case BUILT_IN_STRNCPY:
10987 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10989 case BUILT_IN_STRNCMP:
10990 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10992 case BUILT_IN_MEMCHR:
10993 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10995 case BUILT_IN_BCMP:
10996 case BUILT_IN_MEMCMP:
10997 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10999 case BUILT_IN_SPRINTF:
11000 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11002 case BUILT_IN_SNPRINTF:
11003 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11005 case BUILT_IN_STRCPY_CHK:
11006 case BUILT_IN_STPCPY_CHK:
11007 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11008 ignore, fcode);
11010 case BUILT_IN_STRCAT_CHK:
11011 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11013 case BUILT_IN_PRINTF_CHK:
11014 case BUILT_IN_VPRINTF_CHK:
11015 if (!validate_arg (arg0, INTEGER_TYPE)
11016 || TREE_SIDE_EFFECTS (arg0))
11017 return NULL_TREE;
11018 else
11019 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11020 break;
11022 case BUILT_IN_FPRINTF:
11023 case BUILT_IN_FPRINTF_UNLOCKED:
11024 case BUILT_IN_VFPRINTF:
11025 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11026 ignore, fcode);
11028 case BUILT_IN_FPRINTF_CHK:
11029 case BUILT_IN_VFPRINTF_CHK:
11030 if (!validate_arg (arg1, INTEGER_TYPE)
11031 || TREE_SIDE_EFFECTS (arg1))
11032 return NULL_TREE;
11033 else
11034 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11035 ignore, fcode);
11037 case BUILT_IN_EXPECT:
11038 return fold_builtin_expect (loc, arg0, arg1, arg2);
11040 default:
11041 break;
11043 return NULL_TREE;
11046 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11047 ARG2, and ARG3. IGNORE is true if the result of the function call is
11048 ignored. This function returns NULL_TREE if no simplification was
11049 possible. */
11051 static tree
11052 fold_builtin_4 (location_t loc, tree fndecl,
11053 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11055 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11057 switch (fcode)
11059 case BUILT_IN_MEMCPY_CHK:
11060 case BUILT_IN_MEMPCPY_CHK:
11061 case BUILT_IN_MEMMOVE_CHK:
11062 case BUILT_IN_MEMSET_CHK:
11063 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11064 NULL_TREE, ignore,
11065 DECL_FUNCTION_CODE (fndecl));
11067 case BUILT_IN_STRNCPY_CHK:
11068 case BUILT_IN_STPNCPY_CHK:
11069 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11070 ignore, fcode);
11072 case BUILT_IN_STRNCAT_CHK:
11073 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11075 case BUILT_IN_SNPRINTF:
11076 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11078 case BUILT_IN_FPRINTF_CHK:
11079 case BUILT_IN_VFPRINTF_CHK:
11080 if (!validate_arg (arg1, INTEGER_TYPE)
11081 || TREE_SIDE_EFFECTS (arg1))
11082 return NULL_TREE;
11083 else
11084 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11085 ignore, fcode);
11086 break;
11088 default:
11089 break;
11091 return NULL_TREE;
11094 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11095 arguments, where NARGS <= 4. IGNORE is true if the result of the
11096 function call is ignored. This function returns NULL_TREE if no
11097 simplification was possible. Note that this only folds builtins with
11098 fixed argument patterns. Foldings that do varargs-to-varargs
11099 transformations, or that match calls with more than 4 arguments,
11100 need to be handled with fold_builtin_varargs instead. */
11102 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11104 static tree
11105 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11107 tree ret = NULL_TREE;
11109 switch (nargs)
11111 case 0:
11112 ret = fold_builtin_0 (loc, fndecl, ignore);
11113 break;
11114 case 1:
11115 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11116 break;
11117 case 2:
11118 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11119 break;
11120 case 3:
11121 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11122 break;
11123 case 4:
11124 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11125 ignore);
11126 break;
11127 default:
11128 break;
11130 if (ret)
11132 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11133 SET_EXPR_LOCATION (ret, loc);
11134 TREE_NO_WARNING (ret) = 1;
11135 return ret;
11137 return NULL_TREE;
11140 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11141 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11142 of arguments in ARGS to be omitted. OLDNARGS is the number of
11143 elements in ARGS. */
11145 static tree
11146 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11147 int skip, tree fndecl, int n, va_list newargs)
11149 int nargs = oldnargs - skip + n;
11150 tree *buffer;
11152 if (n > 0)
11154 int i, j;
11156 buffer = XALLOCAVEC (tree, nargs);
11157 for (i = 0; i < n; i++)
11158 buffer[i] = va_arg (newargs, tree);
11159 for (j = skip; j < oldnargs; j++, i++)
11160 buffer[i] = args[j];
11162 else
11163 buffer = args + skip;
11165 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11168 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11169 list ARGS along with N new arguments specified as the "..."
11170 parameters. SKIP is the number of arguments in ARGS to be omitted.
11171 OLDNARGS is the number of elements in ARGS. */
11173 static tree
11174 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11175 int skip, tree fndecl, int n, ...)
11177 va_list ap;
11178 tree t;
11180 va_start (ap, n);
11181 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11182 va_end (ap);
11184 return t;
11187 /* Return true if FNDECL shouldn't be folded right now.
11188 If a built-in function has an inline attribute always_inline
11189 wrapper, defer folding it after always_inline functions have
11190 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11191 might not be performed. */
11193 bool
11194 avoid_folding_inline_builtin (tree fndecl)
11196 return (DECL_DECLARED_INLINE_P (fndecl)
11197 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11198 && cfun
11199 && !cfun->always_inline_functions_inlined
11200 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11203 /* A wrapper function for builtin folding that prevents warnings for
11204 "statement without effect" and the like, caused by removing the
11205 call node earlier than the warning is generated. */
11207 tree
11208 fold_call_expr (location_t loc, tree exp, bool ignore)
11210 tree ret = NULL_TREE;
11211 tree fndecl = get_callee_fndecl (exp);
11212 if (fndecl
11213 && TREE_CODE (fndecl) == FUNCTION_DECL
11214 && DECL_BUILT_IN (fndecl)
11215 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11216 yet. Defer folding until we see all the arguments
11217 (after inlining). */
11218 && !CALL_EXPR_VA_ARG_PACK (exp))
11220 int nargs = call_expr_nargs (exp);
11222 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11223 instead last argument is __builtin_va_arg_pack (). Defer folding
11224 even in that case, until arguments are finalized. */
11225 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11227 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11228 if (fndecl2
11229 && TREE_CODE (fndecl2) == FUNCTION_DECL
11230 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11231 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11232 return NULL_TREE;
11235 if (avoid_folding_inline_builtin (fndecl))
11236 return NULL_TREE;
11238 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11239 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11240 CALL_EXPR_ARGP (exp), ignore);
11241 else
11243 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11245 tree *args = CALL_EXPR_ARGP (exp);
11246 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11248 if (!ret)
11249 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11250 if (ret)
11251 return ret;
11254 return NULL_TREE;
11257 /* Conveniently construct a function call expression. FNDECL names the
11258 function to be called and N arguments are passed in the array
11259 ARGARRAY. */
11261 tree
11262 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11264 tree fntype = TREE_TYPE (fndecl);
11265 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11267 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11270 /* Conveniently construct a function call expression. FNDECL names the
11271 function to be called and the arguments are passed in the vector
11272 VEC. */
11274 tree
11275 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11277 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11278 vec_safe_address (vec));
11282 /* Conveniently construct a function call expression. FNDECL names the
11283 function to be called, N is the number of arguments, and the "..."
11284 parameters are the argument expressions. */
11286 tree
11287 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11289 va_list ap;
11290 tree *argarray = XALLOCAVEC (tree, n);
11291 int i;
11293 va_start (ap, n);
11294 for (i = 0; i < n; i++)
11295 argarray[i] = va_arg (ap, tree);
11296 va_end (ap);
11297 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11300 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11301 varargs macros aren't supported by all bootstrap compilers. */
11303 tree
11304 build_call_expr (tree fndecl, int n, ...)
11306 va_list ap;
11307 tree *argarray = XALLOCAVEC (tree, n);
11308 int i;
11310 va_start (ap, n);
11311 for (i = 0; i < n; i++)
11312 argarray[i] = va_arg (ap, tree);
11313 va_end (ap);
11314 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11317 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11318 N arguments are passed in the array ARGARRAY. */
11320 tree
11321 fold_builtin_call_array (location_t loc, tree type,
11322 tree fn,
11323 int n,
11324 tree *argarray)
11326 tree ret = NULL_TREE;
11327 tree exp;
11329 if (TREE_CODE (fn) == ADDR_EXPR)
11331 tree fndecl = TREE_OPERAND (fn, 0);
11332 if (TREE_CODE (fndecl) == FUNCTION_DECL
11333 && DECL_BUILT_IN (fndecl))
11335 /* If last argument is __builtin_va_arg_pack (), arguments to this
11336 function are not finalized yet. Defer folding until they are. */
11337 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11339 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11340 if (fndecl2
11341 && TREE_CODE (fndecl2) == FUNCTION_DECL
11342 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11343 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11344 return build_call_array_loc (loc, type, fn, n, argarray);
11346 if (avoid_folding_inline_builtin (fndecl))
11347 return build_call_array_loc (loc, type, fn, n, argarray);
11348 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11350 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11351 if (ret)
11352 return ret;
11354 return build_call_array_loc (loc, type, fn, n, argarray);
11356 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11358 /* First try the transformations that don't require consing up
11359 an exp. */
11360 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11361 if (ret)
11362 return ret;
11365 /* If we got this far, we need to build an exp. */
11366 exp = build_call_array_loc (loc, type, fn, n, argarray);
11367 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11368 return ret ? ret : exp;
11372 return build_call_array_loc (loc, type, fn, n, argarray);
11375 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11376 along with N new arguments specified as the "..." parameters. SKIP
11377 is the number of arguments in EXP to be omitted. This function is used
11378 to do varargs-to-varargs transformations. */
11380 static tree
11381 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11383 va_list ap;
11384 tree t;
11386 va_start (ap, n);
11387 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11388 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11389 va_end (ap);
11391 return t;
11394 /* Validate a single argument ARG against a tree code CODE representing
11395 a type. */
11397 static bool
11398 validate_arg (const_tree arg, enum tree_code code)
11400 if (!arg)
11401 return false;
11402 else if (code == POINTER_TYPE)
11403 return POINTER_TYPE_P (TREE_TYPE (arg));
11404 else if (code == INTEGER_TYPE)
11405 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11406 return code == TREE_CODE (TREE_TYPE (arg));
11409 /* This function validates the types of a function call argument list
11410 against a specified list of tree_codes. If the last specifier is a 0,
11411 that represents an ellipses, otherwise the last specifier must be a
11412 VOID_TYPE.
11414 This is the GIMPLE version of validate_arglist. Eventually we want to
11415 completely convert builtins.c to work from GIMPLEs and the tree based
11416 validate_arglist will then be removed. */
11418 bool
11419 validate_gimple_arglist (const_gimple call, ...)
11421 enum tree_code code;
11422 bool res = 0;
11423 va_list ap;
11424 const_tree arg;
11425 size_t i;
11427 va_start (ap, call);
11428 i = 0;
11432 code = (enum tree_code) va_arg (ap, int);
11433 switch (code)
11435 case 0:
11436 /* This signifies an ellipses, any further arguments are all ok. */
11437 res = true;
11438 goto end;
11439 case VOID_TYPE:
11440 /* This signifies an endlink, if no arguments remain, return
11441 true, otherwise return false. */
11442 res = (i == gimple_call_num_args (call));
11443 goto end;
11444 default:
11445 /* If no parameters remain or the parameter's code does not
11446 match the specified code, return false. Otherwise continue
11447 checking any remaining arguments. */
11448 arg = gimple_call_arg (call, i++);
11449 if (!validate_arg (arg, code))
11450 goto end;
11451 break;
11454 while (1);
11456 /* We need gotos here since we can only have one VA_CLOSE in a
11457 function. */
11458 end: ;
11459 va_end (ap);
11461 return res;
11464 /* Default target-specific builtin expander that does nothing. */
11467 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11468 rtx target ATTRIBUTE_UNUSED,
11469 rtx subtarget ATTRIBUTE_UNUSED,
11470 enum machine_mode mode ATTRIBUTE_UNUSED,
11471 int ignore ATTRIBUTE_UNUSED)
11473 return NULL_RTX;
11476 /* Returns true is EXP represents data that would potentially reside
11477 in a readonly section. */
11479 static bool
11480 readonly_data_expr (tree exp)
11482 STRIP_NOPS (exp);
11484 if (TREE_CODE (exp) != ADDR_EXPR)
11485 return false;
11487 exp = get_base_address (TREE_OPERAND (exp, 0));
11488 if (!exp)
11489 return false;
11491 /* Make sure we call decl_readonly_section only for trees it
11492 can handle (since it returns true for everything it doesn't
11493 understand). */
11494 if (TREE_CODE (exp) == STRING_CST
11495 || TREE_CODE (exp) == CONSTRUCTOR
11496 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11497 return decl_readonly_section (exp, 0);
11498 else
11499 return false;
11502 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11503 to the call, and TYPE is its return type.
11505 Return NULL_TREE if no simplification was possible, otherwise return the
11506 simplified form of the call as a tree.
11508 The simplified form may be a constant or other expression which
11509 computes the same value, but in a more efficient manner (including
11510 calls to other builtin functions).
11512 The call may contain arguments which need to be evaluated, but
11513 which are not useful to determine the result of the call. In
11514 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11515 COMPOUND_EXPR will be an argument which must be evaluated.
11516 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11517 COMPOUND_EXPR in the chain will contain the tree for the simplified
11518 form of the builtin function call. */
11520 static tree
11521 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11523 if (!validate_arg (s1, POINTER_TYPE)
11524 || !validate_arg (s2, POINTER_TYPE))
11525 return NULL_TREE;
11526 else
11528 tree fn;
11529 const char *p1, *p2;
11531 p2 = c_getstr (s2);
11532 if (p2 == NULL)
11533 return NULL_TREE;
11535 p1 = c_getstr (s1);
11536 if (p1 != NULL)
11538 const char *r = strstr (p1, p2);
11539 tree tem;
11541 if (r == NULL)
11542 return build_int_cst (TREE_TYPE (s1), 0);
11544 /* Return an offset into the constant string argument. */
11545 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11546 return fold_convert_loc (loc, type, tem);
11549 /* The argument is const char *, and the result is char *, so we need
11550 a type conversion here to avoid a warning. */
11551 if (p2[0] == '\0')
11552 return fold_convert_loc (loc, type, s1);
11554 if (p2[1] != '\0')
11555 return NULL_TREE;
11557 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11558 if (!fn)
11559 return NULL_TREE;
11561 /* New argument list transforming strstr(s1, s2) to
11562 strchr(s1, s2[0]). */
11563 return build_call_expr_loc (loc, fn, 2, s1,
11564 build_int_cst (integer_type_node, p2[0]));
11568 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11569 the call, and TYPE is its return type.
11571 Return NULL_TREE if no simplification was possible, otherwise return the
11572 simplified form of the call as a tree.
11574 The simplified form may be a constant or other expression which
11575 computes the same value, but in a more efficient manner (including
11576 calls to other builtin functions).
11578 The call may contain arguments which need to be evaluated, but
11579 which are not useful to determine the result of the call. In
11580 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11581 COMPOUND_EXPR will be an argument which must be evaluated.
11582 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11583 COMPOUND_EXPR in the chain will contain the tree for the simplified
11584 form of the builtin function call. */
11586 static tree
11587 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11589 if (!validate_arg (s1, POINTER_TYPE)
11590 || !validate_arg (s2, INTEGER_TYPE))
11591 return NULL_TREE;
11592 else
11594 const char *p1;
11596 if (TREE_CODE (s2) != INTEGER_CST)
11597 return NULL_TREE;
11599 p1 = c_getstr (s1);
11600 if (p1 != NULL)
11602 char c;
11603 const char *r;
11604 tree tem;
11606 if (target_char_cast (s2, &c))
11607 return NULL_TREE;
11609 r = strchr (p1, c);
11611 if (r == NULL)
11612 return build_int_cst (TREE_TYPE (s1), 0);
11614 /* Return an offset into the constant string argument. */
11615 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11616 return fold_convert_loc (loc, type, tem);
11618 return NULL_TREE;
11622 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11623 the call, and TYPE is its return type.
11625 Return NULL_TREE if no simplification was possible, otherwise return the
11626 simplified form of the call as a tree.
11628 The simplified form may be a constant or other expression which
11629 computes the same value, but in a more efficient manner (including
11630 calls to other builtin functions).
11632 The call may contain arguments which need to be evaluated, but
11633 which are not useful to determine the result of the call. In
11634 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11635 COMPOUND_EXPR will be an argument which must be evaluated.
11636 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11637 COMPOUND_EXPR in the chain will contain the tree for the simplified
11638 form of the builtin function call. */
11640 static tree
11641 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11643 if (!validate_arg (s1, POINTER_TYPE)
11644 || !validate_arg (s2, INTEGER_TYPE))
11645 return NULL_TREE;
11646 else
11648 tree fn;
11649 const char *p1;
11651 if (TREE_CODE (s2) != INTEGER_CST)
11652 return NULL_TREE;
11654 p1 = c_getstr (s1);
11655 if (p1 != NULL)
11657 char c;
11658 const char *r;
11659 tree tem;
11661 if (target_char_cast (s2, &c))
11662 return NULL_TREE;
11664 r = strrchr (p1, c);
11666 if (r == NULL)
11667 return build_int_cst (TREE_TYPE (s1), 0);
11669 /* Return an offset into the constant string argument. */
11670 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11671 return fold_convert_loc (loc, type, tem);
11674 if (! integer_zerop (s2))
11675 return NULL_TREE;
11677 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11678 if (!fn)
11679 return NULL_TREE;
11681 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11682 return build_call_expr_loc (loc, fn, 2, s1, s2);
11686 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11687 to the call, and TYPE is its return type.
11689 Return NULL_TREE if no simplification was possible, otherwise return the
11690 simplified form of the call as a tree.
11692 The simplified form may be a constant or other expression which
11693 computes the same value, but in a more efficient manner (including
11694 calls to other builtin functions).
11696 The call may contain arguments which need to be evaluated, but
11697 which are not useful to determine the result of the call. In
11698 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11699 COMPOUND_EXPR will be an argument which must be evaluated.
11700 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11701 COMPOUND_EXPR in the chain will contain the tree for the simplified
11702 form of the builtin function call. */
11704 static tree
11705 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11707 if (!validate_arg (s1, POINTER_TYPE)
11708 || !validate_arg (s2, POINTER_TYPE))
11709 return NULL_TREE;
11710 else
11712 tree fn;
11713 const char *p1, *p2;
11715 p2 = c_getstr (s2);
11716 if (p2 == NULL)
11717 return NULL_TREE;
11719 p1 = c_getstr (s1);
11720 if (p1 != NULL)
11722 const char *r = strpbrk (p1, p2);
11723 tree tem;
11725 if (r == NULL)
11726 return build_int_cst (TREE_TYPE (s1), 0);
11728 /* Return an offset into the constant string argument. */
11729 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11730 return fold_convert_loc (loc, type, tem);
11733 if (p2[0] == '\0')
11734 /* strpbrk(x, "") == NULL.
11735 Evaluate and ignore s1 in case it had side-effects. */
11736 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11738 if (p2[1] != '\0')
11739 return NULL_TREE; /* Really call strpbrk. */
11741 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11742 if (!fn)
11743 return NULL_TREE;
11745 /* New argument list transforming strpbrk(s1, s2) to
11746 strchr(s1, s2[0]). */
11747 return build_call_expr_loc (loc, fn, 2, s1,
11748 build_int_cst (integer_type_node, p2[0]));
11752 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11753 to the call.
11755 Return NULL_TREE if no simplification was possible, otherwise return the
11756 simplified form of the call as a tree.
11758 The simplified form may be a constant or other expression which
11759 computes the same value, but in a more efficient manner (including
11760 calls to other builtin functions).
11762 The call may contain arguments which need to be evaluated, but
11763 which are not useful to determine the result of the call. In
11764 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11765 COMPOUND_EXPR will be an argument which must be evaluated.
11766 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11767 COMPOUND_EXPR in the chain will contain the tree for the simplified
11768 form of the builtin function call. */
11770 tree
11771 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11772 tree len)
11774 if (!validate_arg (dst, POINTER_TYPE)
11775 || !validate_arg (src, POINTER_TYPE))
11776 return NULL_TREE;
11777 else
11779 const char *p = c_getstr (src);
11781 /* If the string length is zero, return the dst parameter. */
11782 if (p && *p == '\0')
11783 return dst;
11785 if (optimize_insn_for_speed_p ())
11787 /* See if we can store by pieces into (dst + strlen(dst)). */
11788 tree newdst, call;
11789 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11790 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11792 if (!strlen_fn || !memcpy_fn)
11793 return NULL_TREE;
11795 /* If the length of the source string isn't computable don't
11796 split strcat into strlen and memcpy. */
11797 if (! len)
11798 len = c_strlen (src, 1);
11799 if (! len || TREE_SIDE_EFFECTS (len))
11800 return NULL_TREE;
11802 /* Stabilize the argument list. */
11803 dst = builtin_save_expr (dst);
11805 /* Create strlen (dst). */
11806 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11807 /* Create (dst p+ strlen (dst)). */
11809 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11810 newdst = builtin_save_expr (newdst);
11812 len = fold_convert_loc (loc, size_type_node, len);
11813 len = size_binop_loc (loc, PLUS_EXPR, len,
11814 build_int_cst (size_type_node, 1));
11816 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11817 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11819 return NULL_TREE;
11823 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11824 arguments to the call.
11826 Return NULL_TREE if no simplification was possible, otherwise return the
11827 simplified form of the call as a tree.
11829 The simplified form may be a constant or other expression which
11830 computes the same value, but in a more efficient manner (including
11831 calls to other builtin functions).
11833 The call may contain arguments which need to be evaluated, but
11834 which are not useful to determine the result of the call. In
11835 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11836 COMPOUND_EXPR will be an argument which must be evaluated.
11837 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11838 COMPOUND_EXPR in the chain will contain the tree for the simplified
11839 form of the builtin function call. */
11841 static tree
11842 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11844 if (!validate_arg (dst, POINTER_TYPE)
11845 || !validate_arg (src, POINTER_TYPE)
11846 || !validate_arg (len, INTEGER_TYPE))
11847 return NULL_TREE;
11848 else
11850 const char *p = c_getstr (src);
11852 /* If the requested length is zero, or the src parameter string
11853 length is zero, return the dst parameter. */
11854 if (integer_zerop (len) || (p && *p == '\0'))
11855 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11857 /* If the requested len is greater than or equal to the string
11858 length, call strcat. */
11859 if (TREE_CODE (len) == INTEGER_CST && p
11860 && compare_tree_int (len, strlen (p)) >= 0)
11862 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11864 /* If the replacement _DECL isn't initialized, don't do the
11865 transformation. */
11866 if (!fn)
11867 return NULL_TREE;
11869 return build_call_expr_loc (loc, fn, 2, dst, src);
11871 return NULL_TREE;
11875 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11876 to the call.
11878 Return NULL_TREE if no simplification was possible, otherwise return the
11879 simplified form of the call as a tree.
11881 The simplified form may be a constant or other expression which
11882 computes the same value, but in a more efficient manner (including
11883 calls to other builtin functions).
11885 The call may contain arguments which need to be evaluated, but
11886 which are not useful to determine the result of the call. In
11887 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11888 COMPOUND_EXPR will be an argument which must be evaluated.
11889 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11890 COMPOUND_EXPR in the chain will contain the tree for the simplified
11891 form of the builtin function call. */
11893 static tree
11894 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11896 if (!validate_arg (s1, POINTER_TYPE)
11897 || !validate_arg (s2, POINTER_TYPE))
11898 return NULL_TREE;
11899 else
11901 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11903 /* If both arguments are constants, evaluate at compile-time. */
11904 if (p1 && p2)
11906 const size_t r = strspn (p1, p2);
11907 return build_int_cst (size_type_node, r);
11910 /* If either argument is "", return NULL_TREE. */
11911 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11912 /* Evaluate and ignore both arguments in case either one has
11913 side-effects. */
11914 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11915 s1, s2);
11916 return NULL_TREE;
11920 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11921 to the call.
11923 Return NULL_TREE if no simplification was possible, otherwise return the
11924 simplified form of the call as a tree.
11926 The simplified form may be a constant or other expression which
11927 computes the same value, but in a more efficient manner (including
11928 calls to other builtin functions).
11930 The call may contain arguments which need to be evaluated, but
11931 which are not useful to determine the result of the call. In
11932 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11933 COMPOUND_EXPR will be an argument which must be evaluated.
11934 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11935 COMPOUND_EXPR in the chain will contain the tree for the simplified
11936 form of the builtin function call. */
11938 static tree
11939 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11941 if (!validate_arg (s1, POINTER_TYPE)
11942 || !validate_arg (s2, POINTER_TYPE))
11943 return NULL_TREE;
11944 else
11946 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11948 /* If both arguments are constants, evaluate at compile-time. */
11949 if (p1 && p2)
11951 const size_t r = strcspn (p1, p2);
11952 return build_int_cst (size_type_node, r);
11955 /* If the first argument is "", return NULL_TREE. */
11956 if (p1 && *p1 == '\0')
11958 /* Evaluate and ignore argument s2 in case it has
11959 side-effects. */
11960 return omit_one_operand_loc (loc, size_type_node,
11961 size_zero_node, s2);
11964 /* If the second argument is "", return __builtin_strlen(s1). */
11965 if (p2 && *p2 == '\0')
11967 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11969 /* If the replacement _DECL isn't initialized, don't do the
11970 transformation. */
11971 if (!fn)
11972 return NULL_TREE;
11974 return build_call_expr_loc (loc, fn, 1, s1);
11976 return NULL_TREE;
11980 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11981 to the call. IGNORE is true if the value returned
11982 by the builtin will be ignored. UNLOCKED is true is true if this
11983 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11984 the known length of the string. Return NULL_TREE if no simplification
11985 was possible. */
11987 tree
11988 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11989 bool ignore, bool unlocked, tree len)
11991 /* If we're using an unlocked function, assume the other unlocked
11992 functions exist explicitly. */
11993 tree const fn_fputc = (unlocked
11994 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11995 : builtin_decl_implicit (BUILT_IN_FPUTC));
11996 tree const fn_fwrite = (unlocked
11997 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11998 : builtin_decl_implicit (BUILT_IN_FWRITE));
12000 /* If the return value is used, don't do the transformation. */
12001 if (!ignore)
12002 return NULL_TREE;
12004 /* Verify the arguments in the original call. */
12005 if (!validate_arg (arg0, POINTER_TYPE)
12006 || !validate_arg (arg1, POINTER_TYPE))
12007 return NULL_TREE;
12009 if (! len)
12010 len = c_strlen (arg0, 0);
12012 /* Get the length of the string passed to fputs. If the length
12013 can't be determined, punt. */
12014 if (!len
12015 || TREE_CODE (len) != INTEGER_CST)
12016 return NULL_TREE;
12018 switch (compare_tree_int (len, 1))
12020 case -1: /* length is 0, delete the call entirely . */
12021 return omit_one_operand_loc (loc, integer_type_node,
12022 integer_zero_node, arg1);;
12024 case 0: /* length is 1, call fputc. */
12026 const char *p = c_getstr (arg0);
12028 if (p != NULL)
12030 if (fn_fputc)
12031 return build_call_expr_loc (loc, fn_fputc, 2,
12032 build_int_cst
12033 (integer_type_node, p[0]), arg1);
12034 else
12035 return NULL_TREE;
12038 /* FALLTHROUGH */
12039 case 1: /* length is greater than 1, call fwrite. */
12041 /* If optimizing for size keep fputs. */
12042 if (optimize_function_for_size_p (cfun))
12043 return NULL_TREE;
12044 /* New argument list transforming fputs(string, stream) to
12045 fwrite(string, 1, len, stream). */
12046 if (fn_fwrite)
12047 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12048 size_one_node, len, arg1);
12049 else
12050 return NULL_TREE;
12052 default:
12053 gcc_unreachable ();
12055 return NULL_TREE;
12058 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12059 produced. False otherwise. This is done so that we don't output the error
12060 or warning twice or three times. */
12062 bool
12063 fold_builtin_next_arg (tree exp, bool va_start_p)
12065 tree fntype = TREE_TYPE (current_function_decl);
12066 int nargs = call_expr_nargs (exp);
12067 tree arg;
12068 location_t loc = LOCATION_LOCUS (input_location);
12069 if (has_discriminator (loc))
12070 loc = map_discriminator_location (loc);
12072 /* There is good chance the current input_location points inside the
12073 definition of the va_start macro (perhaps on the token for
12074 builtin) in a system header, so warnings will not be emitted.
12075 Use the location in real source code. */
12076 source_location current_location =
12077 linemap_unwind_to_first_non_reserved_loc (line_table, loc, NULL);
12079 if (!stdarg_p (fntype))
12081 error ("%<va_start%> used in function with fixed args");
12082 return true;
12085 if (va_start_p)
12087 if (va_start_p && (nargs != 2))
12089 error ("wrong number of arguments to function %<va_start%>");
12090 return true;
12092 arg = CALL_EXPR_ARG (exp, 1);
12094 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12095 when we checked the arguments and if needed issued a warning. */
12096 else
12098 if (nargs == 0)
12100 /* Evidently an out of date version of <stdarg.h>; can't validate
12101 va_start's second argument, but can still work as intended. */
12102 warning_at (current_location,
12103 OPT_Wvarargs,
12104 "%<__builtin_next_arg%> called without an argument");
12105 return true;
12107 else if (nargs > 1)
12109 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12110 return true;
12112 arg = CALL_EXPR_ARG (exp, 0);
12115 if (TREE_CODE (arg) == SSA_NAME)
12116 arg = SSA_NAME_VAR (arg);
12118 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12119 or __builtin_next_arg (0) the first time we see it, after checking
12120 the arguments and if needed issuing a warning. */
12121 if (!integer_zerop (arg))
12123 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12125 /* Strip off all nops for the sake of the comparison. This
12126 is not quite the same as STRIP_NOPS. It does more.
12127 We must also strip off INDIRECT_EXPR for C++ reference
12128 parameters. */
12129 while (CONVERT_EXPR_P (arg)
12130 || TREE_CODE (arg) == INDIRECT_REF)
12131 arg = TREE_OPERAND (arg, 0);
12132 if (arg != last_parm)
12134 /* FIXME: Sometimes with the tree optimizers we can get the
12135 not the last argument even though the user used the last
12136 argument. We just warn and set the arg to be the last
12137 argument so that we will get wrong-code because of
12138 it. */
12139 warning_at (current_location,
12140 OPT_Wvarargs,
12141 "second parameter of %<va_start%> not last named argument");
12144 /* Undefined by C99 7.15.1.4p4 (va_start):
12145 "If the parameter parmN is declared with the register storage
12146 class, with a function or array type, or with a type that is
12147 not compatible with the type that results after application of
12148 the default argument promotions, the behavior is undefined."
12150 else if (DECL_REGISTER (arg))
12152 warning_at (current_location,
12153 OPT_Wvarargs,
12154 "undefined behaviour when second parameter of "
12155 "%<va_start%> is declared with %<register%> storage");
12158 /* We want to verify the second parameter just once before the tree
12159 optimizers are run and then avoid keeping it in the tree,
12160 as otherwise we could warn even for correct code like:
12161 void foo (int i, ...)
12162 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12163 if (va_start_p)
12164 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12165 else
12166 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12168 return false;
12172 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12173 ORIG may be null if this is a 2-argument call. We don't attempt to
12174 simplify calls with more than 3 arguments.
12176 Return NULL_TREE if no simplification was possible, otherwise return the
12177 simplified form of the call as a tree. If IGNORED is true, it means that
12178 the caller does not use the returned value of the function. */
12180 static tree
12181 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12182 tree orig, int ignored)
12184 tree call, retval;
12185 const char *fmt_str = NULL;
12187 /* Verify the required arguments in the original call. We deal with two
12188 types of sprintf() calls: 'sprintf (str, fmt)' and
12189 'sprintf (dest, "%s", orig)'. */
12190 if (!validate_arg (dest, POINTER_TYPE)
12191 || !validate_arg (fmt, POINTER_TYPE))
12192 return NULL_TREE;
12193 if (orig && !validate_arg (orig, POINTER_TYPE))
12194 return NULL_TREE;
12196 /* Check whether the format is a literal string constant. */
12197 fmt_str = c_getstr (fmt);
12198 if (fmt_str == NULL)
12199 return NULL_TREE;
12201 call = NULL_TREE;
12202 retval = NULL_TREE;
12204 if (!init_target_chars ())
12205 return NULL_TREE;
12207 /* If the format doesn't contain % args or %%, use strcpy. */
12208 if (strchr (fmt_str, target_percent) == NULL)
12210 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12212 if (!fn)
12213 return NULL_TREE;
12215 /* Don't optimize sprintf (buf, "abc", ptr++). */
12216 if (orig)
12217 return NULL_TREE;
12219 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12220 'format' is known to contain no % formats. */
12221 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12222 if (!ignored)
12223 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12226 /* If the format is "%s", use strcpy if the result isn't used. */
12227 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12229 tree fn;
12230 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12232 if (!fn)
12233 return NULL_TREE;
12235 /* Don't crash on sprintf (str1, "%s"). */
12236 if (!orig)
12237 return NULL_TREE;
12239 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12240 if (!ignored)
12242 retval = c_strlen (orig, 1);
12243 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12244 return NULL_TREE;
12246 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12249 if (call && retval)
12251 retval = fold_convert_loc
12252 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12253 retval);
12254 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12256 else
12257 return call;
12260 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12261 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12262 attempt to simplify calls with more than 4 arguments.
12264 Return NULL_TREE if no simplification was possible, otherwise return the
12265 simplified form of the call as a tree. If IGNORED is true, it means that
12266 the caller does not use the returned value of the function. */
12268 static tree
12269 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12270 tree orig, int ignored)
12272 tree call, retval;
12273 const char *fmt_str = NULL;
12274 unsigned HOST_WIDE_INT destlen;
12276 /* Verify the required arguments in the original call. We deal with two
12277 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12278 'snprintf (dest, cst, "%s", orig)'. */
12279 if (!validate_arg (dest, POINTER_TYPE)
12280 || !validate_arg (destsize, INTEGER_TYPE)
12281 || !validate_arg (fmt, POINTER_TYPE))
12282 return NULL_TREE;
12283 if (orig && !validate_arg (orig, POINTER_TYPE))
12284 return NULL_TREE;
12286 if (!tree_fits_uhwi_p (destsize))
12287 return NULL_TREE;
12289 /* Check whether the format is a literal string constant. */
12290 fmt_str = c_getstr (fmt);
12291 if (fmt_str == NULL)
12292 return NULL_TREE;
12294 call = NULL_TREE;
12295 retval = NULL_TREE;
12297 if (!init_target_chars ())
12298 return NULL_TREE;
12300 destlen = tree_to_uhwi (destsize);
12302 /* If the format doesn't contain % args or %%, use strcpy. */
12303 if (strchr (fmt_str, target_percent) == NULL)
12305 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12306 size_t len = strlen (fmt_str);
12308 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12309 if (orig)
12310 return NULL_TREE;
12312 /* We could expand this as
12313 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12314 or to
12315 memcpy (str, fmt_with_nul_at_cstm1, cst);
12316 but in the former case that might increase code size
12317 and in the latter case grow .rodata section too much.
12318 So punt for now. */
12319 if (len >= destlen)
12320 return NULL_TREE;
12322 if (!fn)
12323 return NULL_TREE;
12325 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12326 'format' is known to contain no % formats and
12327 strlen (fmt) < cst. */
12328 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12330 if (!ignored)
12331 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12334 /* If the format is "%s", use strcpy if the result isn't used. */
12335 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12337 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12338 unsigned HOST_WIDE_INT origlen;
12340 /* Don't crash on snprintf (str1, cst, "%s"). */
12341 if (!orig)
12342 return NULL_TREE;
12344 retval = c_strlen (orig, 1);
12345 if (!retval || !tree_fits_uhwi_p (retval))
12346 return NULL_TREE;
12348 origlen = tree_to_uhwi (retval);
12349 /* We could expand this as
12350 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12351 or to
12352 memcpy (str1, str2_with_nul_at_cstm1, cst);
12353 but in the former case that might increase code size
12354 and in the latter case grow .rodata section too much.
12355 So punt for now. */
12356 if (origlen >= destlen)
12357 return NULL_TREE;
12359 /* Convert snprintf (str1, cst, "%s", str2) into
12360 strcpy (str1, str2) if strlen (str2) < cst. */
12361 if (!fn)
12362 return NULL_TREE;
12364 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12366 if (ignored)
12367 retval = NULL_TREE;
12370 if (call && retval)
12372 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12373 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12374 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12376 else
12377 return call;
12380 /* Expand a call EXP to __builtin_object_size. */
12383 expand_builtin_object_size (tree exp)
12385 tree ost;
12386 int object_size_type;
12387 tree fndecl = get_callee_fndecl (exp);
12389 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12391 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12392 exp, fndecl);
12393 expand_builtin_trap ();
12394 return const0_rtx;
12397 ost = CALL_EXPR_ARG (exp, 1);
12398 STRIP_NOPS (ost);
12400 if (TREE_CODE (ost) != INTEGER_CST
12401 || tree_int_cst_sgn (ost) < 0
12402 || compare_tree_int (ost, 3) > 0)
12404 error ("%Klast argument of %D is not integer constant between 0 and 3",
12405 exp, fndecl);
12406 expand_builtin_trap ();
12407 return const0_rtx;
12410 object_size_type = tree_to_shwi (ost);
12412 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12415 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12416 FCODE is the BUILT_IN_* to use.
12417 Return NULL_RTX if we failed; the caller should emit a normal call,
12418 otherwise try to get the result in TARGET, if convenient (and in
12419 mode MODE if that's convenient). */
12421 static rtx
12422 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12423 enum built_in_function fcode)
12425 tree dest, src, len, size;
12427 if (!validate_arglist (exp,
12428 POINTER_TYPE,
12429 fcode == BUILT_IN_MEMSET_CHK
12430 ? INTEGER_TYPE : POINTER_TYPE,
12431 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12432 return NULL_RTX;
12434 dest = CALL_EXPR_ARG (exp, 0);
12435 src = CALL_EXPR_ARG (exp, 1);
12436 len = CALL_EXPR_ARG (exp, 2);
12437 size = CALL_EXPR_ARG (exp, 3);
12439 if (! tree_fits_uhwi_p (size))
12440 return NULL_RTX;
12442 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12444 tree fn;
12446 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12448 warning_at (tree_nonartificial_location (exp),
12449 0, "%Kcall to %D will always overflow destination buffer",
12450 exp, get_callee_fndecl (exp));
12451 return NULL_RTX;
12454 fn = NULL_TREE;
12455 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12456 mem{cpy,pcpy,move,set} is available. */
12457 switch (fcode)
12459 case BUILT_IN_MEMCPY_CHK:
12460 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12461 break;
12462 case BUILT_IN_MEMPCPY_CHK:
12463 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12464 break;
12465 case BUILT_IN_MEMMOVE_CHK:
12466 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12467 break;
12468 case BUILT_IN_MEMSET_CHK:
12469 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12470 break;
12471 default:
12472 break;
12475 if (! fn)
12476 return NULL_RTX;
12478 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12479 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12480 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12481 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12483 else if (fcode == BUILT_IN_MEMSET_CHK)
12484 return NULL_RTX;
12485 else
12487 unsigned int dest_align = get_pointer_alignment (dest);
12489 /* If DEST is not a pointer type, call the normal function. */
12490 if (dest_align == 0)
12491 return NULL_RTX;
12493 /* If SRC and DEST are the same (and not volatile), do nothing. */
12494 if (operand_equal_p (src, dest, 0))
12496 tree expr;
12498 if (fcode != BUILT_IN_MEMPCPY_CHK)
12500 /* Evaluate and ignore LEN in case it has side-effects. */
12501 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12502 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12505 expr = fold_build_pointer_plus (dest, len);
12506 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12509 /* __memmove_chk special case. */
12510 if (fcode == BUILT_IN_MEMMOVE_CHK)
12512 unsigned int src_align = get_pointer_alignment (src);
12514 if (src_align == 0)
12515 return NULL_RTX;
12517 /* If src is categorized for a readonly section we can use
12518 normal __memcpy_chk. */
12519 if (readonly_data_expr (src))
12521 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12522 if (!fn)
12523 return NULL_RTX;
12524 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12525 dest, src, len, size);
12526 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12527 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12528 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12531 return NULL_RTX;
12535 /* Emit warning if a buffer overflow is detected at compile time. */
12537 static void
12538 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12540 int is_strlen = 0;
12541 tree len, size;
12542 location_t loc = tree_nonartificial_location (exp);
12544 switch (fcode)
12546 case BUILT_IN_STRCPY_CHK:
12547 case BUILT_IN_STPCPY_CHK:
12548 /* For __strcat_chk the warning will be emitted only if overflowing
12549 by at least strlen (dest) + 1 bytes. */
12550 case BUILT_IN_STRCAT_CHK:
12551 len = CALL_EXPR_ARG (exp, 1);
12552 size = CALL_EXPR_ARG (exp, 2);
12553 is_strlen = 1;
12554 break;
12555 case BUILT_IN_STRNCAT_CHK:
12556 case BUILT_IN_STRNCPY_CHK:
12557 case BUILT_IN_STPNCPY_CHK:
12558 len = CALL_EXPR_ARG (exp, 2);
12559 size = CALL_EXPR_ARG (exp, 3);
12560 break;
12561 case BUILT_IN_SNPRINTF_CHK:
12562 case BUILT_IN_VSNPRINTF_CHK:
12563 len = CALL_EXPR_ARG (exp, 1);
12564 size = CALL_EXPR_ARG (exp, 3);
12565 break;
12566 default:
12567 gcc_unreachable ();
12570 if (!len || !size)
12571 return;
12573 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12574 return;
12576 if (is_strlen)
12578 len = c_strlen (len, 1);
12579 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12580 return;
12582 else if (fcode == BUILT_IN_STRNCAT_CHK)
12584 tree src = CALL_EXPR_ARG (exp, 1);
12585 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12586 return;
12587 src = c_strlen (src, 1);
12588 if (! src || ! tree_fits_uhwi_p (src))
12590 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12591 exp, get_callee_fndecl (exp));
12592 return;
12594 else if (tree_int_cst_lt (src, size))
12595 return;
12597 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12598 return;
12600 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12601 exp, get_callee_fndecl (exp));
12604 /* Emit warning if a buffer overflow is detected at compile time
12605 in __sprintf_chk/__vsprintf_chk calls. */
12607 static void
12608 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12610 tree size, len, fmt;
12611 const char *fmt_str;
12612 int nargs = call_expr_nargs (exp);
12614 /* Verify the required arguments in the original call. */
12616 if (nargs < 4)
12617 return;
12618 size = CALL_EXPR_ARG (exp, 2);
12619 fmt = CALL_EXPR_ARG (exp, 3);
12621 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12622 return;
12624 /* Check whether the format is a literal string constant. */
12625 fmt_str = c_getstr (fmt);
12626 if (fmt_str == NULL)
12627 return;
12629 if (!init_target_chars ())
12630 return;
12632 /* If the format doesn't contain % args or %%, we know its size. */
12633 if (strchr (fmt_str, target_percent) == 0)
12634 len = build_int_cstu (size_type_node, strlen (fmt_str));
12635 /* If the format is "%s" and first ... argument is a string literal,
12636 we know it too. */
12637 else if (fcode == BUILT_IN_SPRINTF_CHK
12638 && strcmp (fmt_str, target_percent_s) == 0)
12640 tree arg;
12642 if (nargs < 5)
12643 return;
12644 arg = CALL_EXPR_ARG (exp, 4);
12645 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12646 return;
12648 len = c_strlen (arg, 1);
12649 if (!len || ! tree_fits_uhwi_p (len))
12650 return;
12652 else
12653 return;
12655 if (! tree_int_cst_lt (len, size))
12656 warning_at (tree_nonartificial_location (exp),
12657 0, "%Kcall to %D will always overflow destination buffer",
12658 exp, get_callee_fndecl (exp));
12661 /* Emit warning if a free is called with address of a variable. */
12663 static void
12664 maybe_emit_free_warning (tree exp)
12666 tree arg = CALL_EXPR_ARG (exp, 0);
12668 STRIP_NOPS (arg);
12669 if (TREE_CODE (arg) != ADDR_EXPR)
12670 return;
12672 arg = get_base_address (TREE_OPERAND (arg, 0));
12673 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12674 return;
12676 if (SSA_VAR_P (arg))
12677 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12678 "%Kattempt to free a non-heap object %qD", exp, arg);
12679 else
12680 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12681 "%Kattempt to free a non-heap object", exp);
12684 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12685 if possible. */
12687 tree
12688 fold_builtin_object_size (tree ptr, tree ost)
12690 unsigned HOST_WIDE_INT bytes;
12691 int object_size_type;
12693 if (!validate_arg (ptr, POINTER_TYPE)
12694 || !validate_arg (ost, INTEGER_TYPE))
12695 return NULL_TREE;
12697 STRIP_NOPS (ost);
12699 if (TREE_CODE (ost) != INTEGER_CST
12700 || tree_int_cst_sgn (ost) < 0
12701 || compare_tree_int (ost, 3) > 0)
12702 return NULL_TREE;
12704 object_size_type = tree_to_shwi (ost);
12706 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12707 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12708 and (size_t) 0 for types 2 and 3. */
12709 if (TREE_SIDE_EFFECTS (ptr))
12710 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12712 if (TREE_CODE (ptr) == ADDR_EXPR)
12714 bytes = compute_builtin_object_size (ptr, object_size_type);
12715 if (double_int_fits_to_tree_p (size_type_node,
12716 double_int::from_uhwi (bytes)))
12717 return build_int_cstu (size_type_node, bytes);
12719 else if (TREE_CODE (ptr) == SSA_NAME)
12721 /* If object size is not known yet, delay folding until
12722 later. Maybe subsequent passes will help determining
12723 it. */
12724 bytes = compute_builtin_object_size (ptr, object_size_type);
12725 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12726 && double_int_fits_to_tree_p (size_type_node,
12727 double_int::from_uhwi (bytes)))
12728 return build_int_cstu (size_type_node, bytes);
12731 return NULL_TREE;
12734 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12735 DEST, SRC, LEN, and SIZE are the arguments to the call.
12736 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12737 code of the builtin. If MAXLEN is not NULL, it is maximum length
12738 passed as third argument. */
12740 tree
12741 fold_builtin_memory_chk (location_t loc, tree fndecl,
12742 tree dest, tree src, tree len, tree size,
12743 tree maxlen, bool ignore,
12744 enum built_in_function fcode)
12746 tree fn;
12748 if (!validate_arg (dest, POINTER_TYPE)
12749 || !validate_arg (src,
12750 (fcode == BUILT_IN_MEMSET_CHK
12751 ? INTEGER_TYPE : POINTER_TYPE))
12752 || !validate_arg (len, INTEGER_TYPE)
12753 || !validate_arg (size, INTEGER_TYPE))
12754 return NULL_TREE;
12756 /* If SRC and DEST are the same (and not volatile), return DEST
12757 (resp. DEST+LEN for __mempcpy_chk). */
12758 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12760 if (fcode != BUILT_IN_MEMPCPY_CHK)
12761 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12762 dest, len);
12763 else
12765 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12766 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12770 if (! tree_fits_uhwi_p (size))
12771 return NULL_TREE;
12773 if (! integer_all_onesp (size))
12775 if (! tree_fits_uhwi_p (len))
12777 /* If LEN is not constant, try MAXLEN too.
12778 For MAXLEN only allow optimizing into non-_ocs function
12779 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12780 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12782 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12784 /* (void) __mempcpy_chk () can be optimized into
12785 (void) __memcpy_chk (). */
12786 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12787 if (!fn)
12788 return NULL_TREE;
12790 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12792 return NULL_TREE;
12795 else
12796 maxlen = len;
12798 if (tree_int_cst_lt (size, maxlen))
12799 return NULL_TREE;
12802 fn = NULL_TREE;
12803 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12804 mem{cpy,pcpy,move,set} is available. */
12805 switch (fcode)
12807 case BUILT_IN_MEMCPY_CHK:
12808 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12809 break;
12810 case BUILT_IN_MEMPCPY_CHK:
12811 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12812 break;
12813 case BUILT_IN_MEMMOVE_CHK:
12814 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12815 break;
12816 case BUILT_IN_MEMSET_CHK:
12817 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12818 break;
12819 default:
12820 break;
12823 if (!fn)
12824 return NULL_TREE;
12826 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12829 /* Fold a call to the __st[rp]cpy_chk builtin.
12830 DEST, SRC, and SIZE are the arguments to the call.
12831 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12832 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12833 strings passed as second argument. */
12835 tree
12836 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12837 tree src, tree size,
12838 tree maxlen, bool ignore,
12839 enum built_in_function fcode)
12841 tree len, fn;
12843 if (!validate_arg (dest, POINTER_TYPE)
12844 || !validate_arg (src, POINTER_TYPE)
12845 || !validate_arg (size, INTEGER_TYPE))
12846 return NULL_TREE;
12848 /* If SRC and DEST are the same (and not volatile), return DEST. */
12849 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12850 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12852 if (! tree_fits_uhwi_p (size))
12853 return NULL_TREE;
12855 if (! integer_all_onesp (size))
12857 len = c_strlen (src, 1);
12858 if (! len || ! tree_fits_uhwi_p (len))
12860 /* If LEN is not constant, try MAXLEN too.
12861 For MAXLEN only allow optimizing into non-_ocs function
12862 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12863 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12865 if (fcode == BUILT_IN_STPCPY_CHK)
12867 if (! ignore)
12868 return NULL_TREE;
12870 /* If return value of __stpcpy_chk is ignored,
12871 optimize into __strcpy_chk. */
12872 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12873 if (!fn)
12874 return NULL_TREE;
12876 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12879 if (! len || TREE_SIDE_EFFECTS (len))
12880 return NULL_TREE;
12882 /* If c_strlen returned something, but not a constant,
12883 transform __strcpy_chk into __memcpy_chk. */
12884 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12885 if (!fn)
12886 return NULL_TREE;
12888 len = fold_convert_loc (loc, size_type_node, len);
12889 len = size_binop_loc (loc, PLUS_EXPR, len,
12890 build_int_cst (size_type_node, 1));
12891 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12892 build_call_expr_loc (loc, fn, 4,
12893 dest, src, len, size));
12896 else
12897 maxlen = len;
12899 if (! tree_int_cst_lt (maxlen, size))
12900 return NULL_TREE;
12903 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12904 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12905 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12906 if (!fn)
12907 return NULL_TREE;
12909 return build_call_expr_loc (loc, fn, 2, dest, src);
12912 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12913 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12914 length passed as third argument. IGNORE is true if return value can be
12915 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12917 tree
12918 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12919 tree len, tree size, tree maxlen, bool ignore,
12920 enum built_in_function fcode)
12922 tree fn;
12924 if (!validate_arg (dest, POINTER_TYPE)
12925 || !validate_arg (src, POINTER_TYPE)
12926 || !validate_arg (len, INTEGER_TYPE)
12927 || !validate_arg (size, INTEGER_TYPE))
12928 return NULL_TREE;
12930 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12932 /* If return value of __stpncpy_chk is ignored,
12933 optimize into __strncpy_chk. */
12934 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12935 if (fn)
12936 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12939 if (! tree_fits_uhwi_p (size))
12940 return NULL_TREE;
12942 if (! integer_all_onesp (size))
12944 if (! tree_fits_uhwi_p (len))
12946 /* If LEN is not constant, try MAXLEN too.
12947 For MAXLEN only allow optimizing into non-_ocs function
12948 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12949 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12950 return NULL_TREE;
12952 else
12953 maxlen = len;
12955 if (tree_int_cst_lt (size, maxlen))
12956 return NULL_TREE;
12959 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12960 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12961 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12962 if (!fn)
12963 return NULL_TREE;
12965 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12968 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12969 are the arguments to the call. */
12971 static tree
12972 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12973 tree src, tree size)
12975 tree fn;
12976 const char *p;
12978 if (!validate_arg (dest, POINTER_TYPE)
12979 || !validate_arg (src, POINTER_TYPE)
12980 || !validate_arg (size, INTEGER_TYPE))
12981 return NULL_TREE;
12983 p = c_getstr (src);
12984 /* If the SRC parameter is "", return DEST. */
12985 if (p && *p == '\0')
12986 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12988 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12989 return NULL_TREE;
12991 /* If __builtin_strcat_chk is used, assume strcat is available. */
12992 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12993 if (!fn)
12994 return NULL_TREE;
12996 return build_call_expr_loc (loc, fn, 2, dest, src);
12999 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13000 LEN, and SIZE. */
13002 static tree
13003 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13004 tree dest, tree src, tree len, tree size)
13006 tree fn;
13007 const char *p;
13009 if (!validate_arg (dest, POINTER_TYPE)
13010 || !validate_arg (src, POINTER_TYPE)
13011 || !validate_arg (size, INTEGER_TYPE)
13012 || !validate_arg (size, INTEGER_TYPE))
13013 return NULL_TREE;
13015 p = c_getstr (src);
13016 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13017 if (p && *p == '\0')
13018 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13019 else if (integer_zerop (len))
13020 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13022 if (! tree_fits_uhwi_p (size))
13023 return NULL_TREE;
13025 if (! integer_all_onesp (size))
13027 tree src_len = c_strlen (src, 1);
13028 if (src_len
13029 && tree_fits_uhwi_p (src_len)
13030 && tree_fits_uhwi_p (len)
13031 && ! tree_int_cst_lt (len, src_len))
13033 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13034 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13035 if (!fn)
13036 return NULL_TREE;
13038 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13040 return NULL_TREE;
13043 /* If __builtin_strncat_chk is used, assume strncat is available. */
13044 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13045 if (!fn)
13046 return NULL_TREE;
13048 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13051 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13052 Return NULL_TREE if a normal call should be emitted rather than
13053 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13054 or BUILT_IN_VSPRINTF_CHK. */
13056 static tree
13057 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13058 enum built_in_function fcode)
13060 tree dest, size, len, fn, fmt, flag;
13061 const char *fmt_str;
13063 /* Verify the required arguments in the original call. */
13064 if (nargs < 4)
13065 return NULL_TREE;
13066 dest = args[0];
13067 if (!validate_arg (dest, POINTER_TYPE))
13068 return NULL_TREE;
13069 flag = args[1];
13070 if (!validate_arg (flag, INTEGER_TYPE))
13071 return NULL_TREE;
13072 size = args[2];
13073 if (!validate_arg (size, INTEGER_TYPE))
13074 return NULL_TREE;
13075 fmt = args[3];
13076 if (!validate_arg (fmt, POINTER_TYPE))
13077 return NULL_TREE;
13079 if (! tree_fits_uhwi_p (size))
13080 return NULL_TREE;
13082 len = NULL_TREE;
13084 if (!init_target_chars ())
13085 return NULL_TREE;
13087 /* Check whether the format is a literal string constant. */
13088 fmt_str = c_getstr (fmt);
13089 if (fmt_str != NULL)
13091 /* If the format doesn't contain % args or %%, we know the size. */
13092 if (strchr (fmt_str, target_percent) == 0)
13094 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13095 len = build_int_cstu (size_type_node, strlen (fmt_str));
13097 /* If the format is "%s" and first ... argument is a string literal,
13098 we know the size too. */
13099 else if (fcode == BUILT_IN_SPRINTF_CHK
13100 && strcmp (fmt_str, target_percent_s) == 0)
13102 tree arg;
13104 if (nargs == 5)
13106 arg = args[4];
13107 if (validate_arg (arg, POINTER_TYPE))
13109 len = c_strlen (arg, 1);
13110 if (! len || ! tree_fits_uhwi_p (len))
13111 len = NULL_TREE;
13117 if (! integer_all_onesp (size))
13119 if (! len || ! tree_int_cst_lt (len, size))
13120 return NULL_TREE;
13123 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13124 or if format doesn't contain % chars or is "%s". */
13125 if (! integer_zerop (flag))
13127 if (fmt_str == NULL)
13128 return NULL_TREE;
13129 if (strchr (fmt_str, target_percent) != NULL
13130 && strcmp (fmt_str, target_percent_s))
13131 return NULL_TREE;
13134 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13135 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13136 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13137 if (!fn)
13138 return NULL_TREE;
13140 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13143 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13144 a normal call should be emitted rather than expanding the function
13145 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13147 static tree
13148 fold_builtin_sprintf_chk (location_t loc, tree exp,
13149 enum built_in_function fcode)
13151 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13152 CALL_EXPR_ARGP (exp), fcode);
13155 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13156 NULL_TREE if a normal call should be emitted rather than expanding
13157 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13158 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13159 passed as second argument. */
13161 static tree
13162 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13163 tree maxlen, enum built_in_function fcode)
13165 tree dest, size, len, fn, fmt, flag;
13166 const char *fmt_str;
13168 /* Verify the required arguments in the original call. */
13169 if (nargs < 5)
13170 return NULL_TREE;
13171 dest = args[0];
13172 if (!validate_arg (dest, POINTER_TYPE))
13173 return NULL_TREE;
13174 len = args[1];
13175 if (!validate_arg (len, INTEGER_TYPE))
13176 return NULL_TREE;
13177 flag = args[2];
13178 if (!validate_arg (flag, INTEGER_TYPE))
13179 return NULL_TREE;
13180 size = args[3];
13181 if (!validate_arg (size, INTEGER_TYPE))
13182 return NULL_TREE;
13183 fmt = args[4];
13184 if (!validate_arg (fmt, POINTER_TYPE))
13185 return NULL_TREE;
13187 if (! tree_fits_uhwi_p (size))
13188 return NULL_TREE;
13190 if (! integer_all_onesp (size))
13192 if (! tree_fits_uhwi_p (len))
13194 /* If LEN is not constant, try MAXLEN too.
13195 For MAXLEN only allow optimizing into non-_ocs function
13196 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13197 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13198 return NULL_TREE;
13200 else
13201 maxlen = len;
13203 if (tree_int_cst_lt (size, maxlen))
13204 return NULL_TREE;
13207 if (!init_target_chars ())
13208 return NULL_TREE;
13210 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13211 or if format doesn't contain % chars or is "%s". */
13212 if (! integer_zerop (flag))
13214 fmt_str = c_getstr (fmt);
13215 if (fmt_str == NULL)
13216 return NULL_TREE;
13217 if (strchr (fmt_str, target_percent) != NULL
13218 && strcmp (fmt_str, target_percent_s))
13219 return NULL_TREE;
13222 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13223 available. */
13224 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13225 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13226 if (!fn)
13227 return NULL_TREE;
13229 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13232 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13233 a normal call should be emitted rather than expanding the function
13234 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13235 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13236 passed as second argument. */
13238 static tree
13239 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13240 enum built_in_function fcode)
13242 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13243 CALL_EXPR_ARGP (exp), maxlen, fcode);
13246 /* Builtins with folding operations that operate on "..." arguments
13247 need special handling; we need to store the arguments in a convenient
13248 data structure before attempting any folding. Fortunately there are
13249 only a few builtins that fall into this category. FNDECL is the
13250 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13251 result of the function call is ignored. */
13253 static tree
13254 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13255 bool ignore ATTRIBUTE_UNUSED)
13257 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13258 tree ret = NULL_TREE;
13260 switch (fcode)
13262 case BUILT_IN_SPRINTF_CHK:
13263 case BUILT_IN_VSPRINTF_CHK:
13264 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13265 break;
13267 case BUILT_IN_SNPRINTF_CHK:
13268 case BUILT_IN_VSNPRINTF_CHK:
13269 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13270 break;
13272 case BUILT_IN_FPCLASSIFY:
13273 ret = fold_builtin_fpclassify (loc, exp);
13274 break;
13276 default:
13277 break;
13279 if (ret)
13281 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13282 SET_EXPR_LOCATION (ret, loc);
13283 TREE_NO_WARNING (ret) = 1;
13284 return ret;
13286 return NULL_TREE;
13289 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13290 FMT and ARG are the arguments to the call; we don't fold cases with
13291 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13293 Return NULL_TREE if no simplification was possible, otherwise return the
13294 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13295 code of the function to be simplified. */
static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
                     tree arg, bool ignore,
                     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     putchar/puts do not return what printf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle printf("%s", arg) and formats with no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return NULL_TREE;

          if (!arg || !validate_arg (arg, POINTER_TYPE))
            return NULL_TREE;

          str = c_getstr (arg);
          if (str == NULL)
            return NULL_TREE;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return NULL_TREE;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len
              && (int) len > 0)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                call = build_call_expr_loc (loc, fn_puts, 1, newarg);
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return NULL_TREE;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_puts)
        call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_putchar)
        call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* The replacement may return int while printf's result type can
     differ; convert back to the original return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
                      tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     fputc/fputs do not return what fprintf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          /* If FP has side-effects, just wait until gimplification is
             done.  */
          if (TREE_SIDE_EFFECTS (fp))
            return NULL_TREE;

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_fputc)
        call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;

  /* Convert the replacement's result back to fprintf's return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13537 /* Initialize format string characters in the target charset. */
13539 static bool
13540 init_target_chars (void)
13542 static bool init;
13543 if (!init)
13545 target_newline = lang_hooks.to_target_charset ('\n');
13546 target_percent = lang_hooks.to_target_charset ('%');
13547 target_c = lang_hooks.to_target_charset ('c');
13548 target_s = lang_hooks.to_target_charset ('s');
13549 if (target_newline == 0 || target_percent == 0 || target_c == 0
13550 || target_s == 0)
13551 return false;
13553 target_percent_c[0] = target_percent;
13554 target_percent_c[1] = target_c;
13555 target_percent_c[2] = '\0';
13557 target_percent_s[0] = target_percent;
13558 target_percent_s[1] = target_s;
13559 target_percent_s[2] = '\0';
13561 target_percent_s_newline[0] = target_percent;
13562 target_percent_s_newline[1] = target_s;
13563 target_percent_s_newline[2] = target_newline;
13564 target_percent_s_newline[3] = '\0';
13566 init = true;
13568 return true;
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Reject non-finite args and args outside the allowed domain.  */
      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
             produced by FUNC itself.  */
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
             produced by FUNC itself.  */
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
             produced by FUNC itself.  */
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer type were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order N must fit in a long, since that is what FUNC (an
         mpfr Bessel function) takes for its order argument.  */
      if (n == (long)n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
14156 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14157 a normal call should be emitted rather than expanding the function
14158 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14160 static tree
14161 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14163 int nargs = gimple_call_num_args (stmt);
14165 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14166 (nargs > 0
14167 ? gimple_call_arg_ptr (stmt, 0)
14168 : &error_mark_node), fcode);
14171 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14172 a normal call should be emitted rather than expanding the function
14173 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14174 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14175 passed as second argument. */
14177 tree
14178 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14179 enum built_in_function fcode)
14181 int nargs = gimple_call_num_args (stmt);
14183 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14184 (nargs > 0
14185 ? gimple_call_arg_ptr (stmt, 0)
14186 : &error_mark_node), maxlen, fcode);
14189 /* Builtins with folding operations that operate on "..." arguments
14190 need special handling; we need to store the arguments in a convenient
14191 data structure before attempting any folding. Fortunately there are
14192 only a few builtins that fall into this category. FNDECL is the
14193 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14194 result of the function call is ignored. */
14196 static tree
14197 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14198 bool ignore ATTRIBUTE_UNUSED)
14200 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14201 tree ret = NULL_TREE;
14203 switch (fcode)
14205 case BUILT_IN_SPRINTF_CHK:
14206 case BUILT_IN_VSPRINTF_CHK:
14207 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14208 break;
14210 case BUILT_IN_SNPRINTF_CHK:
14211 case BUILT_IN_VSNPRINTF_CHK:
14212 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14214 default:
14215 break;
14217 if (ret)
14219 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14220 TREE_NO_WARNING (ret) = 1;
14221 return ret;
14223 return NULL_TREE;
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          /* Machine-dependent builtins are folded by the target hook.  */
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the canonical builtin decl itself.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  /* Some builtins may additionally be emitted as library calls by the
     RTL expanders; redirect the corresponding libfuncs to the new
     assembler name as well so expanded calls stay consistent.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* ffs only has a libfunc form when int is narrower than a word;
	 otherwise the ffs optab expands inline and no rename is
	 needed.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14327 /* Return true if DECL is a builtin that expands to a constant or similarly
14328 simple code. */
14329 bool
14330 is_simple_builtin (tree decl)
14332 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14333 switch (DECL_FUNCTION_CODE (decl))
14335 /* Builtins that expand to constants. */
14336 case BUILT_IN_CONSTANT_P:
14337 case BUILT_IN_EXPECT:
14338 case BUILT_IN_OBJECT_SIZE:
14339 case BUILT_IN_UNREACHABLE:
14340 /* Simple register moves or loads from stack. */
14341 case BUILT_IN_ASSUME_ALIGNED:
14342 case BUILT_IN_RETURN_ADDRESS:
14343 case BUILT_IN_EXTRACT_RETURN_ADDR:
14344 case BUILT_IN_FROB_RETURN_ADDR:
14345 case BUILT_IN_RETURN:
14346 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14347 case BUILT_IN_FRAME_ADDRESS:
14348 case BUILT_IN_VA_END:
14349 case BUILT_IN_STACK_SAVE:
14350 case BUILT_IN_STACK_RESTORE:
14351 /* Exception state returns or moves registers around. */
14352 case BUILT_IN_EH_FILTER:
14353 case BUILT_IN_EH_POINTER:
14354 case BUILT_IN_EH_COPY_VALUES:
14355 return true;
14357 default:
14358 return false;
14361 return false;
14364 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14365 most probably expanded inline into reasonably simple code. This is a
14366 superset of is_simple_builtin. */
14367 bool
14368 is_inexpensive_builtin (tree decl)
14370 if (!decl)
14371 return false;
14372 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14373 return true;
14374 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14375 switch (DECL_FUNCTION_CODE (decl))
14377 case BUILT_IN_ABS:
14378 case BUILT_IN_ALLOCA:
14379 case BUILT_IN_ALLOCA_WITH_ALIGN:
14380 case BUILT_IN_BSWAP16:
14381 case BUILT_IN_BSWAP32:
14382 case BUILT_IN_BSWAP64:
14383 case BUILT_IN_CLZ:
14384 case BUILT_IN_CLZIMAX:
14385 case BUILT_IN_CLZL:
14386 case BUILT_IN_CLZLL:
14387 case BUILT_IN_CTZ:
14388 case BUILT_IN_CTZIMAX:
14389 case BUILT_IN_CTZL:
14390 case BUILT_IN_CTZLL:
14391 case BUILT_IN_FFS:
14392 case BUILT_IN_FFSIMAX:
14393 case BUILT_IN_FFSL:
14394 case BUILT_IN_FFSLL:
14395 case BUILT_IN_IMAXABS:
14396 case BUILT_IN_FINITE:
14397 case BUILT_IN_FINITEF:
14398 case BUILT_IN_FINITEL:
14399 case BUILT_IN_FINITED32:
14400 case BUILT_IN_FINITED64:
14401 case BUILT_IN_FINITED128:
14402 case BUILT_IN_FPCLASSIFY:
14403 case BUILT_IN_ISFINITE:
14404 case BUILT_IN_ISINF_SIGN:
14405 case BUILT_IN_ISINF:
14406 case BUILT_IN_ISINFF:
14407 case BUILT_IN_ISINFL:
14408 case BUILT_IN_ISINFD32:
14409 case BUILT_IN_ISINFD64:
14410 case BUILT_IN_ISINFD128:
14411 case BUILT_IN_ISNAN:
14412 case BUILT_IN_ISNANF:
14413 case BUILT_IN_ISNANL:
14414 case BUILT_IN_ISNAND32:
14415 case BUILT_IN_ISNAND64:
14416 case BUILT_IN_ISNAND128:
14417 case BUILT_IN_ISNORMAL:
14418 case BUILT_IN_ISGREATER:
14419 case BUILT_IN_ISGREATEREQUAL:
14420 case BUILT_IN_ISLESS:
14421 case BUILT_IN_ISLESSEQUAL:
14422 case BUILT_IN_ISLESSGREATER:
14423 case BUILT_IN_ISUNORDERED:
14424 case BUILT_IN_VA_ARG_PACK:
14425 case BUILT_IN_VA_ARG_PACK_LEN:
14426 case BUILT_IN_VA_COPY:
14427 case BUILT_IN_TRAP:
14428 case BUILT_IN_SAVEREGS:
14429 case BUILT_IN_POPCOUNTL:
14430 case BUILT_IN_POPCOUNTLL:
14431 case BUILT_IN_POPCOUNTIMAX:
14432 case BUILT_IN_POPCOUNT:
14433 case BUILT_IN_PARITYL:
14434 case BUILT_IN_PARITYLL:
14435 case BUILT_IN_PARITYIMAX:
14436 case BUILT_IN_PARITY:
14437 case BUILT_IN_LABS:
14438 case BUILT_IN_LLABS:
14439 case BUILT_IN_PREFETCH:
14440 return true;
14442 default:
14443 return is_simple_builtin (decl);
14446 return false;