2013-09-04 Teresa Johnson <tejohnson@google.com>
[official-gcc.git] / gcc / builtins.c
blobbb44a7f9b0106e2bd7411de87c1761aa67ce824f
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-mudflap.h"
47 #include "tree-flow.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51 #include "ubsan.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59 struct target_builtins default_target_builtins;
60 #if SWITCHABLE_TARGET
61 struct target_builtins *this_target_builtins = &default_target_builtins;
62 #endif
64 /* Define the names of the builtin function types and codes. */
65 const char *const built_in_class_names[BUILT_IN_LAST]
66 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
69 const char * built_in_names[(int) END_BUILTINS] =
71 #include "builtins.def"
73 #undef DEF_BUILTIN
75 /* Setup an array of _DECL trees, make sure each element is
76 initialized to NULL_TREE. */
77 builtin_info_type builtin_info;
79 /* Non-zero if __builtin_constant_p should be folded right away. */
80 bool force_folding_builtin_constant_p;
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
104 static rtx expand_builtin_interclass_mathfn (tree, rtx);
105 static rtx expand_builtin_sincos (tree);
106 static rtx expand_builtin_cexpi (tree, rtx);
107 static rtx expand_builtin_int_roundingfn (tree, rtx);
108 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, bool);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
194 static rtx expand_builtin_object_size (tree);
195 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
196 enum built_in_function);
197 static void maybe_emit_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_free_warning (tree);
200 static tree fold_builtin_object_size (tree, tree);
201 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
203 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
204 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
205 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
206 enum built_in_function);
207 static bool init_target_chars (void);
209 static unsigned HOST_WIDE_INT target_newline;
210 static unsigned HOST_WIDE_INT target_percent;
211 static unsigned HOST_WIDE_INT target_c;
212 static unsigned HOST_WIDE_INT target_s;
213 static char target_percent_c[3];
214 static char target_percent_s[3];
215 static char target_percent_s_newline[4];
216 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
217 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
218 static tree do_mpfr_arg2 (tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_arg3 (tree, tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_sincos (tree, tree, tree);
223 static tree do_mpfr_bessel_n (tree, tree, tree,
224 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
225 const REAL_VALUE_TYPE *, bool);
226 static tree do_mpfr_remquo (tree, tree, tree);
227 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 static void expand_builtin_sync_synchronize (void);
230 /* Return true if NAME starts with __builtin_ or __sync_. */
232 static bool
233 is_builtin_name (const char *name)
235 if (strncmp (name, "__builtin_", 10) == 0)
236 return true;
237 if (strncmp (name, "__sync_", 7) == 0)
238 return true;
239 if (strncmp (name, "__atomic_", 9) == 0)
240 return true;
241 return false;
245 /* Return true if DECL is a function symbol representing a built-in. */
247 bool
248 is_builtin_fn (tree decl)
250 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* By default we assume that c99 functions are present at the runtime,
254 but sincos is not. */
255 bool
256 default_libc_has_function (enum function_class fn_class)
258 if (fn_class == function_c94
259 || fn_class == function_c99_misc
260 || fn_class == function_c99_math_complex)
261 return true;
263 return false;
266 bool
267 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
269 return true;
272 bool
273 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
275 return false;
278 /* Return true if NODE should be considered for inline expansion regardless
279 of the optimization level. This means whenever a function is invoked with
280 its "internal" name, which normally contains the prefix "__builtin". */
282 static bool
283 called_as_built_in (tree node)
285 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
286 we want the name used to call the function, not the name it
287 will have. */
288 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
289 return is_builtin_name (name);
292 /* Compute values M and N such that M divides (address of EXP - N) and such
293 that N < M. If these numbers can be determined, store M in alignp and N in
294 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
295 *alignp and any bit-offset to *bitposp.
297 Note that the address (and thus the alignment) computed here is based
298 on the address to which a symbol resolves, whereas DECL_ALIGN is based
299 on the address at which an object is actually located. These two
300 addresses are not always the same. For example, on ARM targets,
301 the address &foo of a Thumb function foo() has the lowest bit set,
302 whereas foo() itself starts on an even address.
304 If ADDR_P is true we are taking the address of the memory reference EXP
305 and thus cannot rely on the access taking place. */
307 static bool
308 get_object_alignment_2 (tree exp, unsigned int *alignp,
309 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
311 HOST_WIDE_INT bitsize, bitpos;
312 tree offset;
313 enum machine_mode mode;
314 int unsignedp, volatilep;
315 unsigned int inner, align = BITS_PER_UNIT;
316 bool known_alignment = false;
318 /* Get the innermost object and the constant (bitpos) and possibly
319 variable (offset) offset of the access. */
320 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
321 &mode, &unsignedp, &volatilep, true);
323 /* Extract alignment information from the innermost object and
324 possibly adjust bitpos and offset. */
325 if (TREE_CODE (exp) == FUNCTION_DECL)
327 /* Function addresses can encode extra information besides their
328 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
329 allows the low bit to be used as a virtual bit, we know
330 that the address itself must be at least 2-byte aligned. */
331 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
332 align = 2 * BITS_PER_UNIT;
334 else if (TREE_CODE (exp) == LABEL_DECL)
336 else if (TREE_CODE (exp) == CONST_DECL)
338 /* The alignment of a CONST_DECL is determined by its initializer. */
339 exp = DECL_INITIAL (exp);
340 align = TYPE_ALIGN (TREE_TYPE (exp));
341 #ifdef CONSTANT_ALIGNMENT
342 if (CONSTANT_CLASS_P (exp))
343 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
344 #endif
345 known_alignment = true;
347 else if (DECL_P (exp))
349 align = DECL_ALIGN (exp);
350 known_alignment = true;
352 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
354 align = TYPE_ALIGN (TREE_TYPE (exp));
356 else if (TREE_CODE (exp) == INDIRECT_REF
357 || TREE_CODE (exp) == MEM_REF
358 || TREE_CODE (exp) == TARGET_MEM_REF)
360 tree addr = TREE_OPERAND (exp, 0);
361 unsigned ptr_align;
362 unsigned HOST_WIDE_INT ptr_bitpos;
364 if (TREE_CODE (addr) == BIT_AND_EXPR
365 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
367 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
368 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
369 align *= BITS_PER_UNIT;
370 addr = TREE_OPERAND (addr, 0);
373 known_alignment
374 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
375 align = MAX (ptr_align, align);
377 /* The alignment of the pointer operand in a TARGET_MEM_REF
378 has to take the variable offset parts into account. */
379 if (TREE_CODE (exp) == TARGET_MEM_REF)
381 if (TMR_INDEX (exp))
383 unsigned HOST_WIDE_INT step = 1;
384 if (TMR_STEP (exp))
385 step = TREE_INT_CST_LOW (TMR_STEP (exp));
386 align = MIN (align, (step & -step) * BITS_PER_UNIT);
388 if (TMR_INDEX2 (exp))
389 align = BITS_PER_UNIT;
390 known_alignment = false;
393 /* When EXP is an actual memory reference then we can use
394 TYPE_ALIGN of a pointer indirection to derive alignment.
395 Do so only if get_pointer_alignment_1 did not reveal absolute
396 alignment knowledge and if using that alignment would
397 improve the situation. */
398 if (!addr_p && !known_alignment
399 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
400 align = TYPE_ALIGN (TREE_TYPE (exp));
401 else
403 /* Else adjust bitpos accordingly. */
404 bitpos += ptr_bitpos;
405 if (TREE_CODE (exp) == MEM_REF
406 || TREE_CODE (exp) == TARGET_MEM_REF)
407 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
410 else if (TREE_CODE (exp) == STRING_CST)
412 /* STRING_CST are the only constant objects we allow to be not
413 wrapped inside a CONST_DECL. */
414 align = TYPE_ALIGN (TREE_TYPE (exp));
415 #ifdef CONSTANT_ALIGNMENT
416 if (CONSTANT_CLASS_P (exp))
417 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
418 #endif
419 known_alignment = true;
422 /* If there is a non-constant offset part extract the maximum
423 alignment that can prevail. */
424 inner = ~0U;
425 while (offset)
427 tree next_offset;
429 if (TREE_CODE (offset) == PLUS_EXPR)
431 next_offset = TREE_OPERAND (offset, 0);
432 offset = TREE_OPERAND (offset, 1);
434 else
435 next_offset = NULL;
436 if (host_integerp (offset, 1))
438 /* Any overflow in calculating offset_bits won't change
439 the alignment. */
440 unsigned offset_bits
441 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
443 if (offset_bits)
444 inner = MIN (inner, (offset_bits & -offset_bits));
446 else if (TREE_CODE (offset) == MULT_EXPR
447 && host_integerp (TREE_OPERAND (offset, 1), 1))
449 /* Any overflow in calculating offset_factor won't change
450 the alignment. */
451 unsigned offset_factor
452 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
453 * BITS_PER_UNIT);
455 if (offset_factor)
456 inner = MIN (inner, (offset_factor & -offset_factor));
458 else
460 inner = MIN (inner, BITS_PER_UNIT);
461 break;
463 offset = next_offset;
465 /* Alignment is innermost object alignment adjusted by the constant
466 and non-constant offset parts. */
467 align = MIN (align, inner);
469 *alignp = align;
470 *bitposp = bitpos & (*alignp - 1);
471 return known_alignment;
474 /* For a memory reference expression EXP compute values M and N such that M
475 divides (&EXP - N) and such that N < M. If these numbers can be determined,
476 store M in alignp and N in *BITPOSP and return true. Otherwise return false
477 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
479 bool
480 get_object_alignment_1 (tree exp, unsigned int *alignp,
481 unsigned HOST_WIDE_INT *bitposp)
483 return get_object_alignment_2 (exp, alignp, bitposp, false);
486 /* Return the alignment in bits of EXP, an object. */
488 unsigned int
489 get_object_alignment (tree exp)
491 unsigned HOST_WIDE_INT bitpos = 0;
492 unsigned int align;
494 get_object_alignment_1 (exp, &align, &bitpos);
496 /* align and bitpos now specify known low bits of the pointer.
497 ptr & (align - 1) == bitpos. */
499 if (bitpos != 0)
500 align = (bitpos & -bitpos);
501 return align;
504 /* For a pointer valued expression EXP compute values M and N such that M
505 divides (EXP - N) and such that N < M. If these numbers can be determined,
506 store M in alignp and N in *BITPOSP and return true. Return false if
507 the results are just a conservative approximation.
509 If EXP is not a pointer, false is returned too. */
511 bool
512 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
513 unsigned HOST_WIDE_INT *bitposp)
515 STRIP_NOPS (exp);
517 if (TREE_CODE (exp) == ADDR_EXPR)
518 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
519 alignp, bitposp, true);
520 else if (TREE_CODE (exp) == SSA_NAME
521 && POINTER_TYPE_P (TREE_TYPE (exp)))
523 unsigned int ptr_align, ptr_misalign;
524 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
526 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
528 *bitposp = ptr_misalign * BITS_PER_UNIT;
529 *alignp = ptr_align * BITS_PER_UNIT;
530 /* We cannot really tell whether this result is an approximation. */
531 return true;
533 else
535 *bitposp = 0;
536 *alignp = BITS_PER_UNIT;
537 return false;
540 else if (TREE_CODE (exp) == INTEGER_CST)
542 *alignp = BIGGEST_ALIGNMENT;
543 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
544 & (BIGGEST_ALIGNMENT - 1));
545 return true;
548 *bitposp = 0;
549 *alignp = BITS_PER_UNIT;
550 return false;
553 /* Return the alignment in bits of EXP, a pointer valued expression.
554 The alignment returned is, by default, the alignment of the thing that
555 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
557 Otherwise, look at the expression to see if we can do better, i.e., if the
558 expression is actually pointing at an object whose alignment is tighter. */
560 unsigned int
561 get_pointer_alignment (tree exp)
563 unsigned HOST_WIDE_INT bitpos = 0;
564 unsigned int align;
566 get_pointer_alignment_1 (exp, &align, &bitpos);
568 /* align and bitpos now specify known low bits of the pointer.
569 ptr & (align - 1) == bitpos. */
571 if (bitpos != 0)
572 align = (bitpos & -bitpos);
574 return align;
577 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
578 way, because it could contain a zero byte in the middle.
579 TREE_STRING_LENGTH is the size of the character array, not the string.
581 ONLY_VALUE should be nonzero if the result is not going to be emitted
582 into the instruction stream and zero if it is going to be expanded.
583 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
584 is returned, otherwise NULL, since
585 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
586 evaluate the side-effects.
588 The value returned is of type `ssizetype'.
590 Unfortunately, string_constant can't access the values of const char
591 arrays with initializers, so neither can we do so here. */
593 tree
594 c_strlen (tree src, int only_value)
596 tree offset_node;
597 HOST_WIDE_INT offset;
598 int max;
599 const char *ptr;
600 location_t loc;
602 STRIP_NOPS (src);
603 if (TREE_CODE (src) == COND_EXPR
604 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
606 tree len1, len2;
608 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
609 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
610 if (tree_int_cst_equal (len1, len2))
611 return len1;
614 if (TREE_CODE (src) == COMPOUND_EXPR
615 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
616 return c_strlen (TREE_OPERAND (src, 1), only_value);
618 loc = EXPR_LOC_OR_HERE (src);
620 src = string_constant (src, &offset_node);
621 if (src == 0)
622 return NULL_TREE;
624 max = TREE_STRING_LENGTH (src) - 1;
625 ptr = TREE_STRING_POINTER (src);
627 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
629 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
630 compute the offset to the following null if we don't know where to
631 start searching for it. */
632 int i;
634 for (i = 0; i < max; i++)
635 if (ptr[i] == 0)
636 return NULL_TREE;
638 /* We don't know the starting offset, but we do know that the string
639 has no internal zero bytes. We can assume that the offset falls
640 within the bounds of the string; otherwise, the programmer deserves
641 what he gets. Subtract the offset from the length of the string,
642 and return that. This would perhaps not be valid if we were dealing
643 with named arrays in addition to literal string constants. */
645 return size_diffop_loc (loc, size_int (max), offset_node);
648 /* We have a known offset into the string. Start searching there for
649 a null character if we can represent it as a single HOST_WIDE_INT. */
650 if (offset_node == 0)
651 offset = 0;
652 else if (! host_integerp (offset_node, 0))
653 offset = -1;
654 else
655 offset = tree_low_cst (offset_node, 0);
657 /* If the offset is known to be out of bounds, warn, and call strlen at
658 runtime. */
659 if (offset < 0 || offset > max)
661 /* Suppress multiple warnings for propagated constant strings. */
662 if (! TREE_NO_WARNING (src))
664 warning_at (loc, 0, "offset outside bounds of constant string");
665 TREE_NO_WARNING (src) = 1;
667 return NULL_TREE;
670 /* Use strlen to search for the first zero byte. Since any strings
671 constructed with build_string will have nulls appended, we win even
672 if we get handed something like (char[4])"abcd".
674 Since OFFSET is our starting index into the string, no further
675 calculation is needed. */
676 return ssize_int (strlen (ptr + offset));
679 /* Return a char pointer for a C string if it is a string constant
680 or sum of string constant and integer constant. */
682 static const char *
683 c_getstr (tree src)
685 tree offset_node;
687 src = string_constant (src, &offset_node);
688 if (src == 0)
689 return 0;
691 if (offset_node == 0)
692 return TREE_STRING_POINTER (src);
693 else if (!host_integerp (offset_node, 1)
694 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
695 return 0;
697 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
700 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
701 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
703 static rtx
704 c_readstr (const char *str, enum machine_mode mode)
706 HOST_WIDE_INT c[2];
707 HOST_WIDE_INT ch;
708 unsigned int i, j;
710 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
712 c[0] = 0;
713 c[1] = 0;
714 ch = 1;
715 for (i = 0; i < GET_MODE_SIZE (mode); i++)
717 j = i;
718 if (WORDS_BIG_ENDIAN)
719 j = GET_MODE_SIZE (mode) - i - 1;
720 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
721 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
722 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
723 j *= BITS_PER_UNIT;
724 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
726 if (ch)
727 ch = (unsigned char) str[i];
728 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
730 return immed_double_const (c[0], c[1], mode);
733 /* Cast a target constant CST to target CHAR and if that value fits into
734 host char type, return zero and put that value into variable pointed to by
735 P. */
737 static int
738 target_char_cast (tree cst, char *p)
740 unsigned HOST_WIDE_INT val, hostval;
742 if (TREE_CODE (cst) != INTEGER_CST
743 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
744 return 1;
746 val = TREE_INT_CST_LOW (cst);
747 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
748 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
750 hostval = val;
751 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
752 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
754 if (val != hostval)
755 return 1;
757 *p = hostval;
758 return 0;
761 /* Similar to save_expr, but assumes that arbitrary code is not executed
762 in between the multiple evaluations. In particular, we assume that a
763 non-addressable local variable will not be modified. */
765 static tree
766 builtin_save_expr (tree exp)
768 if (TREE_CODE (exp) == SSA_NAME
769 || (TREE_ADDRESSABLE (exp) == 0
770 && (TREE_CODE (exp) == PARM_DECL
771 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
772 return exp;
774 return save_expr (exp);
777 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
778 times to get the address of either a higher stack frame, or a return
779 address located within it (depending on FNDECL_CODE). */
781 static rtx
782 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
784 int i;
786 #ifdef INITIAL_FRAME_ADDRESS_RTX
787 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
788 #else
789 rtx tem;
791 /* For a zero count with __builtin_return_address, we don't care what
792 frame address we return, because target-specific definitions will
793 override us. Therefore frame pointer elimination is OK, and using
794 the soft frame pointer is OK.
796 For a nonzero count, or a zero count with __builtin_frame_address,
797 we require a stable offset from the current frame pointer to the
798 previous one, so we must use the hard frame pointer, and
799 we must disable frame pointer elimination. */
800 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
801 tem = frame_pointer_rtx;
802 else
804 tem = hard_frame_pointer_rtx;
806 /* Tell reload not to eliminate the frame pointer. */
807 crtl->accesses_prior_frames = 1;
809 #endif
811 /* Some machines need special handling before we can access
812 arbitrary frames. For example, on the SPARC, we must first flush
813 all register windows to the stack. */
814 #ifdef SETUP_FRAME_ADDRESSES
815 if (count > 0)
816 SETUP_FRAME_ADDRESSES ();
817 #endif
819 /* On the SPARC, the return address is not in the frame, it is in a
820 register. There is no way to access it off of the current frame
821 pointer, but it can be accessed off the previous frame pointer by
822 reading the value from the register window save area. */
823 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
824 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
825 count--;
826 #endif
828 /* Scan back COUNT frames to the specified frame. */
829 for (i = 0; i < count; i++)
831 /* Assume the dynamic chain pointer is in the word that the
832 frame address points to, unless otherwise specified. */
833 #ifdef DYNAMIC_CHAIN_ADDRESS
834 tem = DYNAMIC_CHAIN_ADDRESS (tem);
835 #endif
836 tem = memory_address (Pmode, tem);
837 tem = gen_frame_mem (Pmode, tem);
838 tem = copy_to_reg (tem);
841 /* For __builtin_frame_address, return what we've got. But, on
842 the SPARC for example, we may have to add a bias. */
843 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
844 #ifdef FRAME_ADDR_RTX
845 return FRAME_ADDR_RTX (tem);
846 #else
847 return tem;
848 #endif
850 /* For __builtin_return_address, get the return address from that frame. */
851 #ifdef RETURN_ADDR_RTX
852 tem = RETURN_ADDR_RTX (count, tem);
853 #else
854 tem = memory_address (Pmode,
855 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
856 tem = gen_frame_mem (Pmode, tem);
857 #endif
858 return tem;
861 /* Alias set used for setjmp buffer. */
862 static alias_set_type setjmp_alias_set = -1;
864 /* Construct the leading half of a __builtin_setjmp call. Control will
865 return to RECEIVER_LABEL. This is also called directly by the SJLJ
866 exception handling code. */
868 void
869 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
871 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
872 rtx stack_save;
873 rtx mem;
875 if (setjmp_alias_set == -1)
876 setjmp_alias_set = new_alias_set ();
878 buf_addr = convert_memory_address (Pmode, buf_addr);
880 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
882 /* We store the frame pointer and the address of receiver_label in
883 the buffer and use the rest of it for the stack save area, which
884 is machine-dependent. */
886 mem = gen_rtx_MEM (Pmode, buf_addr);
887 set_mem_alias_set (mem, setjmp_alias_set);
888 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
890 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
891 GET_MODE_SIZE (Pmode))),
892 set_mem_alias_set (mem, setjmp_alias_set);
894 emit_move_insn (validize_mem (mem),
895 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
897 stack_save = gen_rtx_MEM (sa_mode,
898 plus_constant (Pmode, buf_addr,
899 2 * GET_MODE_SIZE (Pmode)));
900 set_mem_alias_set (stack_save, setjmp_alias_set);
901 emit_stack_save (SAVE_NONLOCAL, &stack_save);
903 /* If there is further processing to do, do it. */
904 #ifdef HAVE_builtin_setjmp_setup
905 if (HAVE_builtin_setjmp_setup)
906 emit_insn (gen_builtin_setjmp_setup (buf_addr));
907 #endif
909 /* We have a nonlocal label. */
910 cfun->has_nonlocal_label = 1;
913 /* Construct the trailing part of a __builtin_setjmp call. This is
914 also called directly by the SJLJ exception handling code.
915 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
917 void
918 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
920 rtx chain;
922 /* Mark the FP as used when we get here, so we have to make sure it's
923 marked as used by this function. */
924 emit_use (hard_frame_pointer_rtx);
926 /* Mark the static chain as clobbered here so life information
927 doesn't get messed up for it. */
928 chain = targetm.calls.static_chain (current_function_decl, true);
929 if (chain && REG_P (chain))
930 emit_clobber (chain);
932 /* Now put in the code to restore the frame pointer, and argument
933 pointer, if needed. */
934 #ifdef HAVE_nonlocal_goto
935 if (! HAVE_nonlocal_goto)
936 #endif
937 /* First adjust our frame pointer to its actual value. It was
938 previously set to the start of the virtual area corresponding to
939 the stacked variables when we branched here and now needs to be
940 adjusted to the actual hardware fp value.
942 Assignments to virtual registers are converted by
943 instantiate_virtual_regs into the corresponding assignment
944 to the underlying register (fp in this case) that makes
945 the original assignment true.
946 So the following insn will actually be decrementing fp by
947 STARTING_FRAME_OFFSET. */
948 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
950 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
951 if (fixed_regs[ARG_POINTER_REGNUM])
953 #ifdef ELIMINABLE_REGS
954 /* If the argument pointer can be eliminated in favor of the
955 frame pointer, we don't need to restore it. We assume here
956 that if such an elimination is present, it can always be used.
957 This is the case on all known machines; if we don't make this
958 assumption, we do unnecessary saving on many machines. */
959 size_t i;
960 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
962 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
963 if (elim_regs[i].from == ARG_POINTER_REGNUM
964 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
965 break;
967 if (i == ARRAY_SIZE (elim_regs))
968 #endif
970 /* Now restore our arg pointer from the address at which it
971 was saved in our stack frame. */
972 emit_move_insn (crtl->args.internal_arg_pointer,
973 copy_to_reg (get_arg_pointer_save_area ()));
976 #endif
978 #ifdef HAVE_builtin_setjmp_receiver
979 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
980 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
981 else
982 #endif
983 #ifdef HAVE_nonlocal_goto_receiver
984 if (HAVE_nonlocal_goto_receiver)
985 emit_insn (gen_nonlocal_goto_receiver ());
986 else
987 #endif
988 { /* Nothing */ }
990 /* We must not allow the code we just generated to be reordered by
991 scheduling. Specifically, the update of the frame pointer must
992 happen immediately, not later. Similarly, we must block
993 (frame-related) register values to be used across this code. */
994 emit_insn (gen_blockage ());
997 /* __builtin_longjmp is passed a pointer to an array of five words (not
998 all will be used on all machines). It operates similarly to the C
999 library function of the same name, but is more efficient. Much of
1000 the code below is copied from the handling of non-local gotos. */
1002 static void
1003 expand_builtin_longjmp (rtx buf_addr, rtx value)
1005 rtx fp, lab, stack, insn, last;
1006 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1008 /* DRAP is needed for stack realign if longjmp is expanded to current
1009 function */
1010 if (SUPPORTS_STACK_ALIGNMENT)
1011 crtl->need_drap = true;
1013 if (setjmp_alias_set == -1)
1014 setjmp_alias_set = new_alias_set ();
1016 buf_addr = convert_memory_address (Pmode, buf_addr);
1018 buf_addr = force_reg (Pmode, buf_addr);
1020 /* We require that the user must pass a second argument of 1, because
1021 that is what builtin_setjmp will return. */
1022 gcc_assert (value == const1_rtx);
1024 last = get_last_insn ();
1025 #ifdef HAVE_builtin_longjmp
1026 if (HAVE_builtin_longjmp)
1027 emit_insn (gen_builtin_longjmp (buf_addr));
1028 else
1029 #endif
1031 fp = gen_rtx_MEM (Pmode, buf_addr);
1032 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1033 GET_MODE_SIZE (Pmode)));
1035 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1036 2 * GET_MODE_SIZE (Pmode)));
1037 set_mem_alias_set (fp, setjmp_alias_set);
1038 set_mem_alias_set (lab, setjmp_alias_set);
1039 set_mem_alias_set (stack, setjmp_alias_set);
1041 /* Pick up FP, label, and SP from the block and jump. This code is
1042 from expand_goto in stmt.c; see there for detailed comments. */
1043 #ifdef HAVE_nonlocal_goto
1044 if (HAVE_nonlocal_goto)
1045 /* We have to pass a value to the nonlocal_goto pattern that will
1046 get copied into the static_chain pointer, but it does not matter
1047 what that value is, because builtin_setjmp does not use it. */
1048 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1049 else
1050 #endif
1052 lab = copy_to_reg (lab);
1054 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1055 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1057 emit_move_insn (hard_frame_pointer_rtx, fp);
1058 emit_stack_restore (SAVE_NONLOCAL, stack);
1060 emit_use (hard_frame_pointer_rtx);
1061 emit_use (stack_pointer_rtx);
1062 emit_indirect_jump (lab);
1066 /* Search backwards and mark the jump insn as a non-local goto.
1067 Note that this precludes the use of __builtin_longjmp to a
1068 __builtin_setjmp target in the same function. However, we've
1069 already cautioned the user that these functions are for
1070 internal exception handling use only. */
1071 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1073 gcc_assert (insn != last);
1075 if (JUMP_P (insn))
1077 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1078 break;
1080 else if (CALL_P (insn))
1081 break;
1085 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1086 and the address of the save area. */
1088 static rtx
1089 expand_builtin_nonlocal_goto (tree exp)
1091 tree t_label, t_save_area;
1092 rtx r_label, r_save_area, r_fp, r_sp, insn;
1094 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1095 return NULL_RTX;
1097 t_label = CALL_EXPR_ARG (exp, 0);
1098 t_save_area = CALL_EXPR_ARG (exp, 1);
1100 r_label = expand_normal (t_label);
1101 r_label = convert_memory_address (Pmode, r_label);
1102 r_save_area = expand_normal (t_save_area);
1103 r_save_area = convert_memory_address (Pmode, r_save_area);
1104 /* Copy the address of the save location to a register just in case it was
1105 based on the frame pointer. */
1106 r_save_area = copy_to_reg (r_save_area);
1107 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1108 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1109 plus_constant (Pmode, r_save_area,
1110 GET_MODE_SIZE (Pmode)));
1112 crtl->has_nonlocal_goto = 1;
1114 #ifdef HAVE_nonlocal_goto
1115 /* ??? We no longer need to pass the static chain value, afaik. */
1116 if (HAVE_nonlocal_goto)
1117 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1118 else
1119 #endif
1121 r_label = copy_to_reg (r_label);
1123 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1124 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1126 /* Restore frame pointer for containing function. */
1127 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1128 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1130 /* USE of hard_frame_pointer_rtx added for consistency;
1131 not clear if really needed. */
1132 emit_use (hard_frame_pointer_rtx);
1133 emit_use (stack_pointer_rtx);
1135 /* If the architecture is using a GP register, we must
1136 conservatively assume that the target function makes use of it.
1137 The prologue of functions with nonlocal gotos must therefore
1138 initialize the GP register to the appropriate value, and we
1139 must then make sure that this value is live at the point
1140 of the jump. (Note that this doesn't necessarily apply
1141 to targets with a nonlocal_goto pattern; they are free
1142 to implement it in their own way. Note also that this is
1143 a no-op if the GP register is a global invariant.) */
1144 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1145 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1146 emit_use (pic_offset_table_rtx);
1148 emit_indirect_jump (r_label);
1151 /* Search backwards to the jump insn and mark it as a
1152 non-local goto. */
1153 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1155 if (JUMP_P (insn))
1157 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1158 break;
1160 else if (CALL_P (insn))
1161 break;
1164 return const0_rtx;
1167 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1168 (not all will be used on all machines) that was passed to __builtin_setjmp.
1169 It updates the stack pointer in that block to correspond to the current
1170 stack pointer. */
1172 static void
1173 expand_builtin_update_setjmp_buf (rtx buf_addr)
1175 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1176 rtx stack_save
1177 = gen_rtx_MEM (sa_mode,
1178 memory_address
1179 (sa_mode,
1180 plus_constant (Pmode, buf_addr,
1181 2 * GET_MODE_SIZE (Pmode))));
1183 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1186 /* Expand a call to __builtin_prefetch. For a target that does not support
1187 data prefetch, evaluate the memory address argument in case it has side
1188 effects. */
1190 static void
1191 expand_builtin_prefetch (tree exp)
1193 tree arg0, arg1, arg2;
1194 int nargs;
1195 rtx op0, op1, op2;
1197 if (!validate_arglist (exp, POINTER_TYPE, 0))
1198 return;
1200 arg0 = CALL_EXPR_ARG (exp, 0);
1202 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1203 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1204 locality). */
1205 nargs = call_expr_nargs (exp);
1206 if (nargs > 1)
1207 arg1 = CALL_EXPR_ARG (exp, 1);
1208 else
1209 arg1 = integer_zero_node;
1210 if (nargs > 2)
1211 arg2 = CALL_EXPR_ARG (exp, 2);
1212 else
1213 arg2 = integer_three_node;
1215 /* Argument 0 is an address. */
1216 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1218 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1219 if (TREE_CODE (arg1) != INTEGER_CST)
1221 error ("second argument to %<__builtin_prefetch%> must be a constant");
1222 arg1 = integer_zero_node;
1224 op1 = expand_normal (arg1);
1225 /* Argument 1 must be either zero or one. */
1226 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1228 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1229 " using zero");
1230 op1 = const0_rtx;
1233 /* Argument 2 (locality) must be a compile-time constant int. */
1234 if (TREE_CODE (arg2) != INTEGER_CST)
1236 error ("third argument to %<__builtin_prefetch%> must be a constant");
1237 arg2 = integer_zero_node;
1239 op2 = expand_normal (arg2);
1240 /* Argument 2 must be 0, 1, 2, or 3. */
1241 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1243 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1244 op2 = const0_rtx;
1247 #ifdef HAVE_prefetch
1248 if (HAVE_prefetch)
1250 struct expand_operand ops[3];
1252 create_address_operand (&ops[0], op0);
1253 create_integer_operand (&ops[1], INTVAL (op1));
1254 create_integer_operand (&ops[2], INTVAL (op2));
1255 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1256 return;
1258 #endif
1260 /* Don't do anything with direct references to volatile memory, but
1261 generate code to handle other side effects. */
1262 if (!MEM_P (op0) && side_effects_p (op0))
1263 emit_insn (op0);
1266 /* Get a MEM rtx for expression EXP which is the address of an operand
1267 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1268 the maximum length of the block of memory that might be accessed or
1269 NULL if unknown. */
1271 static rtx
1272 get_memory_rtx (tree exp, tree len)
1274 tree orig_exp = exp;
1275 rtx addr, mem;
1277 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1278 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1279 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1280 exp = TREE_OPERAND (exp, 0);
1282 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1283 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1285 /* Get an expression we can use to find the attributes to assign to MEM.
1286 First remove any nops. */
1287 while (CONVERT_EXPR_P (exp)
1288 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1289 exp = TREE_OPERAND (exp, 0);
1291 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1292 (as builtin stringops may alias with anything). */
1293 exp = fold_build2 (MEM_REF,
1294 build_array_type (char_type_node,
1295 build_range_type (sizetype,
1296 size_one_node, len)),
1297 exp, build_int_cst (ptr_type_node, 0));
1299 /* If the MEM_REF has no acceptable address, try to get the base object
1300 from the original address we got, and build an all-aliasing
1301 unknown-sized access to that one. */
1302 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1303 set_mem_attributes (mem, exp, 0);
1304 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1305 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1306 0))))
1308 exp = build_fold_addr_expr (exp);
1309 exp = fold_build2 (MEM_REF,
1310 build_array_type (char_type_node,
1311 build_range_type (sizetype,
1312 size_zero_node,
1313 NULL)),
1314 exp, build_int_cst (ptr_type_node, 0));
1315 set_mem_attributes (mem, exp, 0);
1317 set_mem_alias_set (mem, 0);
1318 return mem;
1321 /* Built-in functions to perform an untyped call and return. */
1323 #define apply_args_mode \
1324 (this_target_builtins->x_apply_args_mode)
1325 #define apply_result_mode \
1326 (this_target_builtins->x_apply_result_mode)
1328 /* Return the size required for the block returned by __builtin_apply_args,
1329 and initialize apply_args_mode. */
1331 static int
1332 apply_args_size (void)
1334 static int size = -1;
1335 int align;
1336 unsigned int regno;
1337 enum machine_mode mode;
1339 /* The values computed by this function never change. */
1340 if (size < 0)
1342 /* The first value is the incoming arg-pointer. */
1343 size = GET_MODE_SIZE (Pmode);
1345 /* The second value is the structure value address unless this is
1346 passed as an "invisible" first argument. */
1347 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1348 size += GET_MODE_SIZE (Pmode);
1350 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1351 if (FUNCTION_ARG_REGNO_P (regno))
1353 mode = targetm.calls.get_raw_arg_mode (regno);
1355 gcc_assert (mode != VOIDmode);
1357 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1358 if (size % align != 0)
1359 size = CEIL (size, align) * align;
1360 size += GET_MODE_SIZE (mode);
1361 apply_args_mode[regno] = mode;
1363 else
1365 apply_args_mode[regno] = VOIDmode;
1368 return size;
1371 /* Return the size required for the block returned by __builtin_apply,
1372 and initialize apply_result_mode. */
1374 static int
1375 apply_result_size (void)
1377 static int size = -1;
1378 int align, regno;
1379 enum machine_mode mode;
1381 /* The values computed by this function never change. */
1382 if (size < 0)
1384 size = 0;
1386 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1387 if (targetm.calls.function_value_regno_p (regno))
1389 mode = targetm.calls.get_raw_result_mode (regno);
1391 gcc_assert (mode != VOIDmode);
1393 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1394 if (size % align != 0)
1395 size = CEIL (size, align) * align;
1396 size += GET_MODE_SIZE (mode);
1397 apply_result_mode[regno] = mode;
1399 else
1400 apply_result_mode[regno] = VOIDmode;
1402 /* Allow targets that use untyped_call and untyped_return to override
1403 the size so that machine-specific information can be stored here. */
1404 #ifdef APPLY_RESULT_SIZE
1405 size = APPLY_RESULT_SIZE;
1406 #endif
1408 return size;
1411 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1412 /* Create a vector describing the result block RESULT. If SAVEP is true,
1413 the result block is used to save the values; otherwise it is used to
1414 restore the values. */
1416 static rtx
1417 result_vector (int savep, rtx result)
1419 int regno, size, align, nelts;
1420 enum machine_mode mode;
1421 rtx reg, mem;
1422 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1424 size = nelts = 0;
1425 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1426 if ((mode = apply_result_mode[regno]) != VOIDmode)
1428 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1429 if (size % align != 0)
1430 size = CEIL (size, align) * align;
1431 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1432 mem = adjust_address (result, mode, size);
1433 savevec[nelts++] = (savep
1434 ? gen_rtx_SET (VOIDmode, mem, reg)
1435 : gen_rtx_SET (VOIDmode, reg, mem));
1436 size += GET_MODE_SIZE (mode);
1438 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1440 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1442 /* Save the state required to perform an untyped call with the same
1443 arguments as were passed to the current function. */
1445 static rtx
1446 expand_builtin_apply_args_1 (void)
1448 rtx registers, tem;
1449 int size, align, regno;
1450 enum machine_mode mode;
1451 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1453 /* Create a block where the arg-pointer, structure value address,
1454 and argument registers can be saved. */
1455 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1457 /* Walk past the arg-pointer and structure value address. */
1458 size = GET_MODE_SIZE (Pmode);
1459 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1460 size += GET_MODE_SIZE (Pmode);
1462 /* Save each register used in calling a function to the block. */
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if ((mode = apply_args_mode[regno]) != VOIDmode)
1466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1467 if (size % align != 0)
1468 size = CEIL (size, align) * align;
1470 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1472 emit_move_insn (adjust_address (registers, mode, size), tem);
1473 size += GET_MODE_SIZE (mode);
1476 /* Save the arg pointer to the block. */
1477 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1478 #ifdef STACK_GROWS_DOWNWARD
1479 /* We need the pointer as the caller actually passed them to us, not
1480 as we might have pretended they were passed. Make sure it's a valid
1481 operand, as emit_move_insn isn't expected to handle a PLUS. */
1483 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1484 NULL_RTX);
1485 #endif
1486 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1488 size = GET_MODE_SIZE (Pmode);
1490 /* Save the structure value address unless this is passed as an
1491 "invisible" first argument. */
1492 if (struct_incoming_value)
1494 emit_move_insn (adjust_address (registers, Pmode, size),
1495 copy_to_reg (struct_incoming_value));
1496 size += GET_MODE_SIZE (Pmode);
1499 /* Return the address of the block. */
1500 return copy_addr_to_reg (XEXP (registers, 0));
1503 /* __builtin_apply_args returns block of memory allocated on
1504 the stack into which is stored the arg pointer, structure
1505 value address, static chain, and all the registers that might
1506 possibly be used in performing a function call. The code is
1507 moved to the start of the function so the incoming values are
1508 saved. */
1510 static rtx
1511 expand_builtin_apply_args (void)
1513 /* Don't do __builtin_apply_args more than once in a function.
1514 Save the result of the first call and reuse it. */
1515 if (apply_args_value != 0)
1516 return apply_args_value;
1518 /* When this function is called, it means that registers must be
1519 saved on entry to this function. So we migrate the
1520 call to the first insn of this function. */
1521 rtx temp;
1522 rtx seq;
1524 start_sequence ();
1525 temp = expand_builtin_apply_args_1 ();
1526 seq = get_insns ();
1527 end_sequence ();
1529 apply_args_value = temp;
1531 /* Put the insns after the NOTE that starts the function.
1532 If this is inside a start_sequence, make the outer-level insn
1533 chain current, so the code is placed at the start of the
1534 function. If internal_arg_pointer is a non-virtual pseudo,
1535 it needs to be placed after the function that initializes
1536 that pseudo. */
1537 push_topmost_sequence ();
1538 if (REG_P (crtl->args.internal_arg_pointer)
1539 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1540 emit_insn_before (seq, parm_birth_insn);
1541 else
1542 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1543 pop_topmost_sequence ();
1544 return temp;
1548 /* Perform an untyped call and save the state required to perform an
1549 untyped return of whatever value was returned by the given function. */
1551 static rtx
1552 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1554 int size, align, regno;
1555 enum machine_mode mode;
1556 rtx incoming_args, result, reg, dest, src, call_insn;
1557 rtx old_stack_level = 0;
1558 rtx call_fusage = 0;
1559 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1561 arguments = convert_memory_address (Pmode, arguments);
1563 /* Create a block where the return registers can be saved. */
1564 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1566 /* Fetch the arg pointer from the ARGUMENTS block. */
1567 incoming_args = gen_reg_rtx (Pmode);
1568 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1569 #ifndef STACK_GROWS_DOWNWARD
1570 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1571 incoming_args, 0, OPTAB_LIB_WIDEN);
1572 #endif
1574 /* Push a new argument block and copy the arguments. Do not allow
1575 the (potential) memcpy call below to interfere with our stack
1576 manipulations. */
1577 do_pending_stack_adjust ();
1578 NO_DEFER_POP;
1580 /* Save the stack with nonlocal if available. */
1581 #ifdef HAVE_save_stack_nonlocal
1582 if (HAVE_save_stack_nonlocal)
1583 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1584 else
1585 #endif
1586 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1588 /* Allocate a block of memory onto the stack and copy the memory
1589 arguments to the outgoing arguments address. We can pass TRUE
1590 as the 4th argument because we just saved the stack pointer
1591 and will restore it right after the call. */
1592 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1594 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1595 may have already set current_function_calls_alloca to true.
1596 current_function_calls_alloca won't be set if argsize is zero,
1597 so we have to guarantee need_drap is true here. */
1598 if (SUPPORTS_STACK_ALIGNMENT)
1599 crtl->need_drap = true;
1601 dest = virtual_outgoing_args_rtx;
1602 #ifndef STACK_GROWS_DOWNWARD
1603 if (CONST_INT_P (argsize))
1604 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1605 else
1606 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1607 #endif
1608 dest = gen_rtx_MEM (BLKmode, dest);
1609 set_mem_align (dest, PARM_BOUNDARY);
1610 src = gen_rtx_MEM (BLKmode, incoming_args);
1611 set_mem_align (src, PARM_BOUNDARY);
1612 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1614 /* Refer to the argument block. */
1615 apply_args_size ();
1616 arguments = gen_rtx_MEM (BLKmode, arguments);
1617 set_mem_align (arguments, PARM_BOUNDARY);
1619 /* Walk past the arg-pointer and structure value address. */
1620 size = GET_MODE_SIZE (Pmode);
1621 if (struct_value)
1622 size += GET_MODE_SIZE (Pmode);
1624 /* Restore each of the registers previously saved. Make USE insns
1625 for each of these registers for use in making the call. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_args_mode[regno]) != VOIDmode)
1629 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 if (size % align != 0)
1631 size = CEIL (size, align) * align;
1632 reg = gen_rtx_REG (mode, regno);
1633 emit_move_insn (reg, adjust_address (arguments, mode, size));
1634 use_reg (&call_fusage, reg);
1635 size += GET_MODE_SIZE (mode);
1638 /* Restore the structure value address unless this is passed as an
1639 "invisible" first argument. */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1643 rtx value = gen_reg_rtx (Pmode);
1644 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1645 emit_move_insn (struct_value, value);
1646 if (REG_P (struct_value))
1647 use_reg (&call_fusage, struct_value);
1648 size += GET_MODE_SIZE (Pmode);
1651 /* All arguments and registers used for the call are set up by now! */
1652 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1654 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1655 and we don't want to load it into a register as an optimization,
1656 because prepare_call_address already did it if it should be done. */
1657 if (GET_CODE (function) != SYMBOL_REF)
1658 function = memory_address (FUNCTION_MODE, function);
1660 /* Generate the actual call instruction and save the return value. */
1661 #ifdef HAVE_untyped_call
1662 if (HAVE_untyped_call)
1663 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1664 result, result_vector (1, result)));
1665 else
1666 #endif
1667 #ifdef HAVE_call_value
1668 if (HAVE_call_value)
1670 rtx valreg = 0;
1672 /* Locate the unique return register. It is not possible to
1673 express a call that sets more than one return register using
1674 call_value; use untyped_call for that. In fact, untyped_call
1675 only needs to save the return registers in the given block. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1681 valreg = gen_rtx_REG (mode, regno);
1684 emit_call_insn (GEN_CALL_VALUE (valreg,
1685 gen_rtx_MEM (FUNCTION_MODE, function),
1686 const0_rtx, NULL_RTX, const0_rtx));
1688 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1690 else
1691 #endif
1692 gcc_unreachable ();
1694 /* Find the CALL insn we just emitted, and attach the register usage
1695 information. */
1696 call_insn = last_call_insn ();
1697 add_function_usage_to (call_insn, call_fusage);
1699 /* Restore the stack. */
1700 #ifdef HAVE_save_stack_nonlocal
1701 if (HAVE_save_stack_nonlocal)
1702 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1703 else
1704 #endif
1705 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1706 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1708 OK_DEFER_POP;
1710 /* Return the address of the result block. */
1711 result = copy_addr_to_reg (XEXP (result, 0));
1712 return convert_memory_address (ptr_mode, result);
1715 /* Perform an untyped return. */
1717 static void
1718 expand_builtin_return (rtx result)
1720 int size, align, regno;
1721 enum machine_mode mode;
1722 rtx reg;
1723 rtx call_fusage = 0;
1725 result = convert_memory_address (Pmode, result);
1727 apply_result_size ();
1728 result = gen_rtx_MEM (BLKmode, result);
1730 #ifdef HAVE_untyped_return
1731 if (HAVE_untyped_return)
1733 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1734 emit_barrier ();
1735 return;
1737 #endif
1739 /* Restore the return value and note that each value is used. */
1740 size = 0;
1741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1742 if ((mode = apply_result_mode[regno]) != VOIDmode)
1744 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1745 if (size % align != 0)
1746 size = CEIL (size, align) * align;
1747 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1748 emit_move_insn (reg, adjust_address (result, mode, size));
1750 push_to_sequence (call_fusage);
1751 emit_use (reg);
1752 call_fusage = get_insns ();
1753 end_sequence ();
1754 size += GET_MODE_SIZE (mode);
1757 /* Put the USE insns before the return. */
1758 emit_insn (call_fusage);
1760 /* Return whatever values was restored by jumping directly to the end
1761 of the function. */
1762 expand_naked_return ();
1765 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1767 static enum type_class
1768 type_to_class (tree type)
1770 switch (TREE_CODE (type))
1772 case VOID_TYPE: return void_type_class;
1773 case INTEGER_TYPE: return integer_type_class;
1774 case ENUMERAL_TYPE: return enumeral_type_class;
1775 case BOOLEAN_TYPE: return boolean_type_class;
1776 case POINTER_TYPE: return pointer_type_class;
1777 case REFERENCE_TYPE: return reference_type_class;
1778 case OFFSET_TYPE: return offset_type_class;
1779 case REAL_TYPE: return real_type_class;
1780 case COMPLEX_TYPE: return complex_type_class;
1781 case FUNCTION_TYPE: return function_type_class;
1782 case METHOD_TYPE: return method_type_class;
1783 case RECORD_TYPE: return record_type_class;
1784 case UNION_TYPE:
1785 case QUAL_UNION_TYPE: return union_type_class;
1786 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1787 ? string_type_class : array_type_class);
1788 case LANG_TYPE: return lang_type_class;
1789 default: return no_type_class;
1793 /* Expand a call EXP to __builtin_classify_type. */
1795 static rtx
1796 expand_builtin_classify_type (tree exp)
1798 if (call_expr_nargs (exp))
1799 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1800 return GEN_INT (no_type_class);
1803 /* This helper macro, meant to be used in mathfn_built_in below,
1804 determines which among a set of three builtin math functions is
1805 appropriate for a given type mode. The `F' and `L' cases are
1806 automatically generated from the `double' case. */
1807 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1808 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1809 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1810 fcodel = BUILT_IN_MATHFN##L ; break;
1811 /* Similar to above, but appends _R after any F/L suffix. */
1812 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1813 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1814 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1815 fcodel = BUILT_IN_MATHFN##L_R ; break;
1817 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1818 if available. If IMPLICIT is true use the implicit builtin declaration,
1819 otherwise use the explicit declaration. If we can't do the conversion,
1820 return zero. */
1822 static tree
1823 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1825 enum built_in_function fcode, fcodef, fcodel, fcode2;
1827 switch (fn)
1829 CASE_MATHFN (BUILT_IN_ACOS)
1830 CASE_MATHFN (BUILT_IN_ACOSH)
1831 CASE_MATHFN (BUILT_IN_ASIN)
1832 CASE_MATHFN (BUILT_IN_ASINH)
1833 CASE_MATHFN (BUILT_IN_ATAN)
1834 CASE_MATHFN (BUILT_IN_ATAN2)
1835 CASE_MATHFN (BUILT_IN_ATANH)
1836 CASE_MATHFN (BUILT_IN_CBRT)
1837 CASE_MATHFN (BUILT_IN_CEIL)
1838 CASE_MATHFN (BUILT_IN_CEXPI)
1839 CASE_MATHFN (BUILT_IN_COPYSIGN)
1840 CASE_MATHFN (BUILT_IN_COS)
1841 CASE_MATHFN (BUILT_IN_COSH)
1842 CASE_MATHFN (BUILT_IN_DREM)
1843 CASE_MATHFN (BUILT_IN_ERF)
1844 CASE_MATHFN (BUILT_IN_ERFC)
1845 CASE_MATHFN (BUILT_IN_EXP)
1846 CASE_MATHFN (BUILT_IN_EXP10)
1847 CASE_MATHFN (BUILT_IN_EXP2)
1848 CASE_MATHFN (BUILT_IN_EXPM1)
1849 CASE_MATHFN (BUILT_IN_FABS)
1850 CASE_MATHFN (BUILT_IN_FDIM)
1851 CASE_MATHFN (BUILT_IN_FLOOR)
1852 CASE_MATHFN (BUILT_IN_FMA)
1853 CASE_MATHFN (BUILT_IN_FMAX)
1854 CASE_MATHFN (BUILT_IN_FMIN)
1855 CASE_MATHFN (BUILT_IN_FMOD)
1856 CASE_MATHFN (BUILT_IN_FREXP)
1857 CASE_MATHFN (BUILT_IN_GAMMA)
1858 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1859 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1860 CASE_MATHFN (BUILT_IN_HYPOT)
1861 CASE_MATHFN (BUILT_IN_ILOGB)
1862 CASE_MATHFN (BUILT_IN_ICEIL)
1863 CASE_MATHFN (BUILT_IN_IFLOOR)
1864 CASE_MATHFN (BUILT_IN_INF)
1865 CASE_MATHFN (BUILT_IN_IRINT)
1866 CASE_MATHFN (BUILT_IN_IROUND)
1867 CASE_MATHFN (BUILT_IN_ISINF)
1868 CASE_MATHFN (BUILT_IN_J0)
1869 CASE_MATHFN (BUILT_IN_J1)
1870 CASE_MATHFN (BUILT_IN_JN)
1871 CASE_MATHFN (BUILT_IN_LCEIL)
1872 CASE_MATHFN (BUILT_IN_LDEXP)
1873 CASE_MATHFN (BUILT_IN_LFLOOR)
1874 CASE_MATHFN (BUILT_IN_LGAMMA)
1875 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1876 CASE_MATHFN (BUILT_IN_LLCEIL)
1877 CASE_MATHFN (BUILT_IN_LLFLOOR)
1878 CASE_MATHFN (BUILT_IN_LLRINT)
1879 CASE_MATHFN (BUILT_IN_LLROUND)
1880 CASE_MATHFN (BUILT_IN_LOG)
1881 CASE_MATHFN (BUILT_IN_LOG10)
1882 CASE_MATHFN (BUILT_IN_LOG1P)
1883 CASE_MATHFN (BUILT_IN_LOG2)
1884 CASE_MATHFN (BUILT_IN_LOGB)
1885 CASE_MATHFN (BUILT_IN_LRINT)
1886 CASE_MATHFN (BUILT_IN_LROUND)
1887 CASE_MATHFN (BUILT_IN_MODF)
1888 CASE_MATHFN (BUILT_IN_NAN)
1889 CASE_MATHFN (BUILT_IN_NANS)
1890 CASE_MATHFN (BUILT_IN_NEARBYINT)
1891 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1892 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1893 CASE_MATHFN (BUILT_IN_POW)
1894 CASE_MATHFN (BUILT_IN_POWI)
1895 CASE_MATHFN (BUILT_IN_POW10)
1896 CASE_MATHFN (BUILT_IN_REMAINDER)
1897 CASE_MATHFN (BUILT_IN_REMQUO)
1898 CASE_MATHFN (BUILT_IN_RINT)
1899 CASE_MATHFN (BUILT_IN_ROUND)
1900 CASE_MATHFN (BUILT_IN_SCALB)
1901 CASE_MATHFN (BUILT_IN_SCALBLN)
1902 CASE_MATHFN (BUILT_IN_SCALBN)
1903 CASE_MATHFN (BUILT_IN_SIGNBIT)
1904 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1905 CASE_MATHFN (BUILT_IN_SIN)
1906 CASE_MATHFN (BUILT_IN_SINCOS)
1907 CASE_MATHFN (BUILT_IN_SINH)
1908 CASE_MATHFN (BUILT_IN_SQRT)
1909 CASE_MATHFN (BUILT_IN_TAN)
1910 CASE_MATHFN (BUILT_IN_TANH)
1911 CASE_MATHFN (BUILT_IN_TGAMMA)
1912 CASE_MATHFN (BUILT_IN_TRUNC)
1913 CASE_MATHFN (BUILT_IN_Y0)
1914 CASE_MATHFN (BUILT_IN_Y1)
1915 CASE_MATHFN (BUILT_IN_YN)
1917 default:
1918 return NULL_TREE;
1921 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1922 fcode2 = fcode;
1923 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1924 fcode2 = fcodef;
1925 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1926 fcode2 = fcodel;
1927 else
1928 return NULL_TREE;
1930 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1931 return NULL_TREE;
1933 return builtin_decl_explicit (fcode2);
1936 /* Like mathfn_built_in_1(), but always use the implicit array. */
1938 tree
1939 mathfn_built_in (tree type, enum built_in_function fn)
1941 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1944 /* If errno must be maintained, expand the RTL to check if the result,
1945 TARGET, of a built-in function call, EXP, is NaN, and if so set
1946 errno to EDOM. */
1948 static void
1949 expand_errno_check (tree exp, rtx target)
1951 rtx lab = gen_label_rtx ();
1953 /* Test the result; if it is NaN, set errno=EDOM because
1954 the argument was not in the domain. */
1955 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1956 NULL_RTX, NULL_RTX, lab,
1957 /* The jump is very likely. */
1958 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1960 #ifdef TARGET_EDOM
1961 /* If this built-in doesn't throw an exception, set errno directly. */
1962 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1964 #ifdef GEN_ERRNO_RTX
1965 rtx errno_rtx = GEN_ERRNO_RTX;
1966 #else
1967 rtx errno_rtx
1968 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1969 #endif
1970 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1971 emit_label (lab);
1972 return;
1974 #endif
1976 /* Make sure the library call isn't expanded as a tail call. */
1977 CALL_EXPR_TAILCALL (exp) = 0;
1979 /* We can't set errno=EDOM directly; let the library call do it.
1980 Pop the arguments right away in case the call gets deleted. */
1981 NO_DEFER_POP;
1982 expand_call (exp, target, 0);
1983 OK_DEFER_POP;
1984 emit_label (lab);
1987 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1988 Return NULL_RTX if a normal call should be emitted rather than expanding
1989 the function in-line. EXP is the expression that is a call to the builtin
1990 function; if convenient, the result should be placed in TARGET.
1991 SUBTARGET may be used as the target for computing one of EXP's operands. */
1993 static rtx
1994 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1996 optab builtin_optab;
1997 rtx op0, insns;
1998 tree fndecl = get_callee_fndecl (exp);
1999 enum machine_mode mode;
2000 bool errno_set = false;
2001 bool try_widening = false;
2002 tree arg;
2004 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2005 return NULL_RTX;
2007 arg = CALL_EXPR_ARG (exp, 0);
2009 switch (DECL_FUNCTION_CODE (fndecl))
2011 CASE_FLT_FN (BUILT_IN_SQRT):
2012 errno_set = ! tree_expr_nonnegative_p (arg);
2013 try_widening = true;
2014 builtin_optab = sqrt_optab;
2015 break;
2016 CASE_FLT_FN (BUILT_IN_EXP):
2017 errno_set = true; builtin_optab = exp_optab; break;
2018 CASE_FLT_FN (BUILT_IN_EXP10):
2019 CASE_FLT_FN (BUILT_IN_POW10):
2020 errno_set = true; builtin_optab = exp10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXP2):
2022 errno_set = true; builtin_optab = exp2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_EXPM1):
2024 errno_set = true; builtin_optab = expm1_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOGB):
2026 errno_set = true; builtin_optab = logb_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG):
2028 errno_set = true; builtin_optab = log_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOG10):
2030 errno_set = true; builtin_optab = log10_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG2):
2032 errno_set = true; builtin_optab = log2_optab; break;
2033 CASE_FLT_FN (BUILT_IN_LOG1P):
2034 errno_set = true; builtin_optab = log1p_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ASIN):
2036 builtin_optab = asin_optab; break;
2037 CASE_FLT_FN (BUILT_IN_ACOS):
2038 builtin_optab = acos_optab; break;
2039 CASE_FLT_FN (BUILT_IN_TAN):
2040 builtin_optab = tan_optab; break;
2041 CASE_FLT_FN (BUILT_IN_ATAN):
2042 builtin_optab = atan_optab; break;
2043 CASE_FLT_FN (BUILT_IN_FLOOR):
2044 builtin_optab = floor_optab; break;
2045 CASE_FLT_FN (BUILT_IN_CEIL):
2046 builtin_optab = ceil_optab; break;
2047 CASE_FLT_FN (BUILT_IN_TRUNC):
2048 builtin_optab = btrunc_optab; break;
2049 CASE_FLT_FN (BUILT_IN_ROUND):
2050 builtin_optab = round_optab; break;
2051 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2052 builtin_optab = nearbyint_optab;
2053 if (flag_trapping_math)
2054 break;
2055 /* Else fallthrough and expand as rint. */
2056 CASE_FLT_FN (BUILT_IN_RINT):
2057 builtin_optab = rint_optab; break;
2058 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2059 builtin_optab = significand_optab; break;
2060 default:
2061 gcc_unreachable ();
2064 /* Make a suitable register to place result in. */
2065 mode = TYPE_MODE (TREE_TYPE (exp));
2067 if (! flag_errno_math || ! HONOR_NANS (mode))
2068 errno_set = false;
2070 /* Before working hard, check whether the instruction is available, but try
2071 to widen the mode for specific operations. */
2072 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2073 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2074 && (!errno_set || !optimize_insn_for_size_p ()))
2076 rtx result = gen_reg_rtx (mode);
2078 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2079 need to expand the argument again. This way, we will not perform
2080 side-effects more the once. */
2081 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2083 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2085 start_sequence ();
2087 /* Compute into RESULT.
2088 Set RESULT to wherever the result comes back. */
2089 result = expand_unop (mode, builtin_optab, op0, result, 0);
2091 if (result != 0)
2093 if (errno_set)
2094 expand_errno_check (exp, result);
2096 /* Output the entire sequence. */
2097 insns = get_insns ();
2098 end_sequence ();
2099 emit_insn (insns);
2100 return result;
2103 /* If we were unable to expand via the builtin, stop the sequence
2104 (without outputting the insns) and call to the library function
2105 with the stabilized argument list. */
2106 end_sequence ();
2109 return expand_call (exp, target, target == const0_rtx);
2112 /* Expand a call to the builtin binary math functions (pow and atan2).
2113 Return NULL_RTX if a normal call should be emitted rather than expanding the
2114 function in-line. EXP is the expression that is a call to the builtin
2115 function; if convenient, the result should be placed in TARGET.
2116 SUBTARGET may be used as the target for computing one of EXP's
2117 operands. */
2119 static rtx
2120 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2122 optab builtin_optab;
2123 rtx op0, op1, insns, result;
2124 int op1_type = REAL_TYPE;
2125 tree fndecl = get_callee_fndecl (exp);
2126 tree arg0, arg1;
2127 enum machine_mode mode;
2128 bool errno_set = true;
2130 switch (DECL_FUNCTION_CODE (fndecl))
2132 CASE_FLT_FN (BUILT_IN_SCALBN):
2133 CASE_FLT_FN (BUILT_IN_SCALBLN):
2134 CASE_FLT_FN (BUILT_IN_LDEXP):
2135 op1_type = INTEGER_TYPE;
2136 default:
2137 break;
2140 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2141 return NULL_RTX;
2143 arg0 = CALL_EXPR_ARG (exp, 0);
2144 arg1 = CALL_EXPR_ARG (exp, 1);
2146 switch (DECL_FUNCTION_CODE (fndecl))
2148 CASE_FLT_FN (BUILT_IN_POW):
2149 builtin_optab = pow_optab; break;
2150 CASE_FLT_FN (BUILT_IN_ATAN2):
2151 builtin_optab = atan2_optab; break;
2152 CASE_FLT_FN (BUILT_IN_SCALB):
2153 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2154 return 0;
2155 builtin_optab = scalb_optab; break;
2156 CASE_FLT_FN (BUILT_IN_SCALBN):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN):
2158 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2159 return 0;
2160 /* Fall through... */
2161 CASE_FLT_FN (BUILT_IN_LDEXP):
2162 builtin_optab = ldexp_optab; break;
2163 CASE_FLT_FN (BUILT_IN_FMOD):
2164 builtin_optab = fmod_optab; break;
2165 CASE_FLT_FN (BUILT_IN_REMAINDER):
2166 CASE_FLT_FN (BUILT_IN_DREM):
2167 builtin_optab = remainder_optab; break;
2168 default:
2169 gcc_unreachable ();
2172 /* Make a suitable register to place result in. */
2173 mode = TYPE_MODE (TREE_TYPE (exp));
2175 /* Before working hard, check whether the instruction is available. */
2176 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2177 return NULL_RTX;
2179 result = gen_reg_rtx (mode);
2181 if (! flag_errno_math || ! HONOR_NANS (mode))
2182 errno_set = false;
2184 if (errno_set && optimize_insn_for_size_p ())
2185 return 0;
2187 /* Always stabilize the argument list. */
2188 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2189 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2191 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2192 op1 = expand_normal (arg1);
2194 start_sequence ();
2196 /* Compute into RESULT.
2197 Set RESULT to wherever the result comes back. */
2198 result = expand_binop (mode, builtin_optab, op0, op1,
2199 result, 0, OPTAB_DIRECT);
2201 /* If we were unable to expand via the builtin, stop the sequence
2202 (without outputting the insns) and call to the library function
2203 with the stabilized argument list. */
2204 if (result == 0)
2206 end_sequence ();
2207 return expand_call (exp, target, target == const0_rtx);
2210 if (errno_set)
2211 expand_errno_check (exp, result);
2213 /* Output the entire sequence. */
2214 insns = get_insns ();
2215 end_sequence ();
2216 emit_insn (insns);
2218 return result;
2221 /* Expand a call to the builtin trinary math functions (fma).
2222 Return NULL_RTX if a normal call should be emitted rather than expanding the
2223 function in-line. EXP is the expression that is a call to the builtin
2224 function; if convenient, the result should be placed in TARGET.
2225 SUBTARGET may be used as the target for computing one of EXP's
2226 operands. */
2228 static rtx
2229 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 optab builtin_optab;
2232 rtx op0, op1, op2, insns, result;
2233 tree fndecl = get_callee_fndecl (exp);
2234 tree arg0, arg1, arg2;
2235 enum machine_mode mode;
2237 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2238 return NULL_RTX;
2240 arg0 = CALL_EXPR_ARG (exp, 0);
2241 arg1 = CALL_EXPR_ARG (exp, 1);
2242 arg2 = CALL_EXPR_ARG (exp, 2);
2244 switch (DECL_FUNCTION_CODE (fndecl))
2246 CASE_FLT_FN (BUILT_IN_FMA):
2247 builtin_optab = fma_optab; break;
2248 default:
2249 gcc_unreachable ();
2252 /* Make a suitable register to place result in. */
2253 mode = TYPE_MODE (TREE_TYPE (exp));
2255 /* Before working hard, check whether the instruction is available. */
2256 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2257 return NULL_RTX;
2259 result = gen_reg_rtx (mode);
2261 /* Always stabilize the argument list. */
2262 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2263 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2264 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2266 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2267 op1 = expand_normal (arg1);
2268 op2 = expand_normal (arg2);
2270 start_sequence ();
2272 /* Compute into RESULT.
2273 Set RESULT to wherever the result comes back. */
2274 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2275 result, 0);
2277 /* If we were unable to expand via the builtin, stop the sequence
2278 (without outputting the insns) and call to the library function
2279 with the stabilized argument list. */
2280 if (result == 0)
2282 end_sequence ();
2283 return expand_call (exp, target, target == const0_rtx);
2286 /* Output the entire sequence. */
2287 insns = get_insns ();
2288 end_sequence ();
2289 emit_insn (insns);
2291 return result;
2294 /* Expand a call to the builtin sin and cos math functions.
2295 Return NULL_RTX if a normal call should be emitted rather than expanding the
2296 function in-line. EXP is the expression that is a call to the builtin
2297 function; if convenient, the result should be placed in TARGET.
2298 SUBTARGET may be used as the target for computing one of EXP's
2299 operands. */
2301 static rtx
2302 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2304 optab builtin_optab;
2305 rtx op0, insns;
2306 tree fndecl = get_callee_fndecl (exp);
2307 enum machine_mode mode;
2308 tree arg;
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2313 arg = CALL_EXPR_ARG (exp, 0);
2315 switch (DECL_FUNCTION_CODE (fndecl))
2317 CASE_FLT_FN (BUILT_IN_SIN):
2318 CASE_FLT_FN (BUILT_IN_COS):
2319 builtin_optab = sincos_optab; break;
2320 default:
2321 gcc_unreachable ();
2324 /* Make a suitable register to place result in. */
2325 mode = TYPE_MODE (TREE_TYPE (exp));
2327 /* Check if sincos insn is available, otherwise fallback
2328 to sin or cos insn. */
2329 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2330 switch (DECL_FUNCTION_CODE (fndecl))
2332 CASE_FLT_FN (BUILT_IN_SIN):
2333 builtin_optab = sin_optab; break;
2334 CASE_FLT_FN (BUILT_IN_COS):
2335 builtin_optab = cos_optab; break;
2336 default:
2337 gcc_unreachable ();
2340 /* Before working hard, check whether the instruction is available. */
2341 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2343 rtx result = gen_reg_rtx (mode);
2345 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2346 need to expand the argument again. This way, we will not perform
2347 side-effects more the once. */
2348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2350 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2352 start_sequence ();
2354 /* Compute into RESULT.
2355 Set RESULT to wherever the result comes back. */
2356 if (builtin_optab == sincos_optab)
2358 int ok;
2360 switch (DECL_FUNCTION_CODE (fndecl))
2362 CASE_FLT_FN (BUILT_IN_SIN):
2363 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2364 break;
2365 CASE_FLT_FN (BUILT_IN_COS):
2366 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2367 break;
2368 default:
2369 gcc_unreachable ();
2371 gcc_assert (ok);
2373 else
2374 result = expand_unop (mode, builtin_optab, op0, result, 0);
2376 if (result != 0)
2378 /* Output the entire sequence. */
2379 insns = get_insns ();
2380 end_sequence ();
2381 emit_insn (insns);
2382 return result;
2385 /* If we were unable to expand via the builtin, stop the sequence
2386 (without outputting the insns) and call to the library function
2387 with the stabilized argument list. */
2388 end_sequence ();
2391 return expand_call (exp, target, target == const0_rtx);
2394 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2395 return an RTL instruction code that implements the functionality.
2396 If that isn't possible or available return CODE_FOR_nothing. */
2398 static enum insn_code
2399 interclass_mathfn_icode (tree arg, tree fndecl)
2401 bool errno_set = false;
2402 optab builtin_optab = unknown_optab;
2403 enum machine_mode mode;
2405 switch (DECL_FUNCTION_CODE (fndecl))
2407 CASE_FLT_FN (BUILT_IN_ILOGB):
2408 errno_set = true; builtin_optab = ilogb_optab; break;
2409 CASE_FLT_FN (BUILT_IN_ISINF):
2410 builtin_optab = isinf_optab; break;
2411 case BUILT_IN_ISNORMAL:
2412 case BUILT_IN_ISFINITE:
2413 CASE_FLT_FN (BUILT_IN_FINITE):
2414 case BUILT_IN_FINITED32:
2415 case BUILT_IN_FINITED64:
2416 case BUILT_IN_FINITED128:
2417 case BUILT_IN_ISINFD32:
2418 case BUILT_IN_ISINFD64:
2419 case BUILT_IN_ISINFD128:
2420 /* These builtins have no optabs (yet). */
2421 break;
2422 default:
2423 gcc_unreachable ();
2426 /* There's no easy way to detect the case we need to set EDOM. */
2427 if (flag_errno_math && errno_set)
2428 return CODE_FOR_nothing;
2430 /* Optab mode depends on the mode of the input argument. */
2431 mode = TYPE_MODE (TREE_TYPE (arg));
2433 if (builtin_optab)
2434 return optab_handler (builtin_optab, mode);
2435 return CODE_FOR_nothing;
2438 /* Expand a call to one of the builtin math functions that operate on
2439 floating point argument and output an integer result (ilogb, isinf,
2440 isnan, etc).
2441 Return 0 if a normal call should be emitted rather than expanding the
2442 function in-line. EXP is the expression that is a call to the builtin
2443 function; if convenient, the result should be placed in TARGET. */
2445 static rtx
2446 expand_builtin_interclass_mathfn (tree exp, rtx target)
2448 enum insn_code icode = CODE_FOR_nothing;
2449 rtx op0;
2450 tree fndecl = get_callee_fndecl (exp);
2451 enum machine_mode mode;
2452 tree arg;
2454 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2455 return NULL_RTX;
2457 arg = CALL_EXPR_ARG (exp, 0);
2458 icode = interclass_mathfn_icode (arg, fndecl);
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2461 if (icode != CODE_FOR_nothing)
2463 struct expand_operand ops[1];
2464 rtx last = get_last_insn ();
2465 tree orig_arg = arg;
2467 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2468 need to expand the argument again. This way, we will not perform
2469 side-effects more the once. */
2470 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2472 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2474 if (mode != GET_MODE (op0))
2475 op0 = convert_to_mode (mode, op0, 0);
2477 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2478 if (maybe_legitimize_operands (icode, 0, 1, ops)
2479 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2480 return ops[0].value;
2482 delete_insns_since (last);
2483 CALL_EXPR_ARG (exp, 0) = orig_arg;
2486 return NULL_RTX;
2489 /* Expand a call to the builtin sincos math function.
2490 Return NULL_RTX if a normal call should be emitted rather than expanding the
2491 function in-line. EXP is the expression that is a call to the builtin
2492 function. */
2494 static rtx
2495 expand_builtin_sincos (tree exp)
2497 rtx op0, op1, op2, target1, target2;
2498 enum machine_mode mode;
2499 tree arg, sinp, cosp;
2500 int result;
2501 location_t loc = EXPR_LOCATION (exp);
2502 tree alias_type, alias_off;
2504 if (!validate_arglist (exp, REAL_TYPE,
2505 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2506 return NULL_RTX;
2508 arg = CALL_EXPR_ARG (exp, 0);
2509 sinp = CALL_EXPR_ARG (exp, 1);
2510 cosp = CALL_EXPR_ARG (exp, 2);
2512 /* Make a suitable register to place result in. */
2513 mode = TYPE_MODE (TREE_TYPE (arg));
2515 /* Check if sincos insn is available, otherwise emit the call. */
2516 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2517 return NULL_RTX;
2519 target1 = gen_reg_rtx (mode);
2520 target2 = gen_reg_rtx (mode);
2522 op0 = expand_normal (arg);
2523 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2524 alias_off = build_int_cst (alias_type, 0);
2525 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2526 sinp, alias_off));
2527 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2528 cosp, alias_off));
2530 /* Compute into target1 and target2.
2531 Set TARGET to wherever the result comes back. */
2532 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2533 gcc_assert (result);
2535 /* Move target1 and target2 to the memory locations indicated
2536 by op1 and op2. */
2537 emit_move_insn (op1, target1);
2538 emit_move_insn (op2, target2);
2540 return const0_rtx;
2543 /* Expand a call to the internal cexpi builtin to the sincos math function.
2544 EXP is the expression that is a call to the builtin function; if convenient,
2545 the result should be placed in TARGET. */
2547 static rtx
2548 expand_builtin_cexpi (tree exp, rtx target)
2550 tree fndecl = get_callee_fndecl (exp);
2551 tree arg, type;
2552 enum machine_mode mode;
2553 rtx op0, op1, op2;
2554 location_t loc = EXPR_LOCATION (exp);
2556 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2557 return NULL_RTX;
2559 arg = CALL_EXPR_ARG (exp, 0);
2560 type = TREE_TYPE (arg);
2561 mode = TYPE_MODE (TREE_TYPE (arg));
2563 /* Try expanding via a sincos optab, fall back to emitting a libcall
2564 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2565 is only generated from sincos, cexp or if we have either of them. */
2566 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2568 op1 = gen_reg_rtx (mode);
2569 op2 = gen_reg_rtx (mode);
2571 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2573 /* Compute into op1 and op2. */
2574 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2576 else if (targetm.libc_has_function (function_sincos))
2578 tree call, fn = NULL_TREE;
2579 tree top1, top2;
2580 rtx op1a, op2a;
2582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2585 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2586 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2588 else
2589 gcc_unreachable ();
2591 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2592 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2593 op1a = copy_addr_to_reg (XEXP (op1, 0));
2594 op2a = copy_addr_to_reg (XEXP (op2, 0));
2595 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2596 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2598 /* Make sure not to fold the sincos call again. */
2599 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2600 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2601 call, 3, arg, top1, top2));
2603 else
2605 tree call, fn = NULL_TREE, narg;
2606 tree ctype = build_complex_type (type);
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2614 else
2615 gcc_unreachable ();
2617 /* If we don't have a decl for cexp create one. This is the
2618 friendliest fallback if the user calls __builtin_cexpi
2619 without full target C99 function support. */
2620 if (fn == NULL_TREE)
2622 tree fntype;
2623 const char *name = NULL;
2625 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2626 name = "cexpf";
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2628 name = "cexp";
2629 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2630 name = "cexpl";
2632 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2633 fn = build_fn_decl (name, fntype);
2636 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2637 build_real (type, dconst0), arg);
2639 /* Make sure not to fold the cexp call again. */
2640 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2641 return expand_expr (build_call_nary (ctype, call, 1, narg),
2642 target, VOIDmode, EXPAND_NORMAL);
2645 /* Now build the proper return type. */
2646 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2647 make_tree (TREE_TYPE (arg), op2),
2648 make_tree (TREE_TYPE (arg), op1)),
2649 target, VOIDmode, EXPAND_NORMAL);
2652 /* Conveniently construct a function call expression. FNDECL names the
2653 function to be called, N is the number of arguments, and the "..."
2654 parameters are the argument expressions. Unlike build_call_exr
2655 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2657 static tree
2658 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2660 va_list ap;
2661 tree fntype = TREE_TYPE (fndecl);
2662 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2664 va_start (ap, n);
2665 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2666 va_end (ap);
2667 SET_EXPR_LOCATION (fn, loc);
2668 return fn;
2671 /* Expand a call to one of the builtin rounding functions gcc defines
2672 as an extension (lfloor and lceil). As these are gcc extensions we
2673 do not need to worry about setting errno to EDOM.
2674 If expanding via optab fails, lower expression to (int)(floor(x)).
2675 EXP is the expression that is a call to the builtin function;
2676 if convenient, the result should be placed in TARGET. */
2678 static rtx
2679 expand_builtin_int_roundingfn (tree exp, rtx target)
2681 convert_optab builtin_optab;
2682 rtx op0, insns, tmp;
2683 tree fndecl = get_callee_fndecl (exp);
2684 enum built_in_function fallback_fn;
2685 tree fallback_fndecl;
2686 enum machine_mode mode;
2687 tree arg;
2689 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2690 gcc_unreachable ();
2692 arg = CALL_EXPR_ARG (exp, 0);
2694 switch (DECL_FUNCTION_CODE (fndecl))
2696 CASE_FLT_FN (BUILT_IN_ICEIL):
2697 CASE_FLT_FN (BUILT_IN_LCEIL):
2698 CASE_FLT_FN (BUILT_IN_LLCEIL):
2699 builtin_optab = lceil_optab;
2700 fallback_fn = BUILT_IN_CEIL;
2701 break;
2703 CASE_FLT_FN (BUILT_IN_IFLOOR):
2704 CASE_FLT_FN (BUILT_IN_LFLOOR):
2705 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2706 builtin_optab = lfloor_optab;
2707 fallback_fn = BUILT_IN_FLOOR;
2708 break;
2710 default:
2711 gcc_unreachable ();
2714 /* Make a suitable register to place result in. */
2715 mode = TYPE_MODE (TREE_TYPE (exp));
2717 target = gen_reg_rtx (mode);
2719 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2720 need to expand the argument again. This way, we will not perform
2721 side-effects more the once. */
2722 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2724 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2726 start_sequence ();
2728 /* Compute into TARGET. */
2729 if (expand_sfix_optab (target, op0, builtin_optab))
2731 /* Output the entire sequence. */
2732 insns = get_insns ();
2733 end_sequence ();
2734 emit_insn (insns);
2735 return target;
2738 /* If we were unable to expand via the builtin, stop the sequence
2739 (without outputting the insns). */
2740 end_sequence ();
2742 /* Fall back to floating point rounding optab. */
2743 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2745 /* For non-C99 targets we may end up without a fallback fndecl here
2746 if the user called __builtin_lfloor directly. In this case emit
2747 a call to the floor/ceil variants nevertheless. This should result
2748 in the best user experience for not full C99 targets. */
2749 if (fallback_fndecl == NULL_TREE)
2751 tree fntype;
2752 const char *name = NULL;
2754 switch (DECL_FUNCTION_CODE (fndecl))
2756 case BUILT_IN_ICEIL:
2757 case BUILT_IN_LCEIL:
2758 case BUILT_IN_LLCEIL:
2759 name = "ceil";
2760 break;
2761 case BUILT_IN_ICEILF:
2762 case BUILT_IN_LCEILF:
2763 case BUILT_IN_LLCEILF:
2764 name = "ceilf";
2765 break;
2766 case BUILT_IN_ICEILL:
2767 case BUILT_IN_LCEILL:
2768 case BUILT_IN_LLCEILL:
2769 name = "ceill";
2770 break;
2771 case BUILT_IN_IFLOOR:
2772 case BUILT_IN_LFLOOR:
2773 case BUILT_IN_LLFLOOR:
2774 name = "floor";
2775 break;
2776 case BUILT_IN_IFLOORF:
2777 case BUILT_IN_LFLOORF:
2778 case BUILT_IN_LLFLOORF:
2779 name = "floorf";
2780 break;
2781 case BUILT_IN_IFLOORL:
2782 case BUILT_IN_LFLOORL:
2783 case BUILT_IN_LLFLOORL:
2784 name = "floorl";
2785 break;
2786 default:
2787 gcc_unreachable ();
2790 fntype = build_function_type_list (TREE_TYPE (arg),
2791 TREE_TYPE (arg), NULL_TREE);
2792 fallback_fndecl = build_fn_decl (name, fntype);
2795 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2797 tmp = expand_normal (exp);
2798 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2800 /* Truncate the result of floating point optab to integer
2801 via expand_fix (). */
2802 target = gen_reg_rtx (mode);
2803 expand_fix (target, tmp, 0);
2805 return target;
2808 /* Expand a call to one of the builtin math functions doing integer
2809 conversion (lrint).
2810 Return 0 if a normal call should be emitted rather than expanding the
2811 function in-line. EXP is the expression that is a call to the builtin
2812 function; if convenient, the result should be placed in TARGET. */
2814 static rtx
2815 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2817 convert_optab builtin_optab;
2818 rtx op0, insns;
2819 tree fndecl = get_callee_fndecl (exp);
2820 tree arg;
2821 enum machine_mode mode;
2822 enum built_in_function fallback_fn = BUILT_IN_NONE;
2824 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2825 gcc_unreachable ();
2827 arg = CALL_EXPR_ARG (exp, 0);
2829 switch (DECL_FUNCTION_CODE (fndecl))
2831 CASE_FLT_FN (BUILT_IN_IRINT):
2832 fallback_fn = BUILT_IN_LRINT;
2833 /* FALLTHRU */
2834 CASE_FLT_FN (BUILT_IN_LRINT):
2835 CASE_FLT_FN (BUILT_IN_LLRINT):
2836 builtin_optab = lrint_optab;
2837 break;
2839 CASE_FLT_FN (BUILT_IN_IROUND):
2840 fallback_fn = BUILT_IN_LROUND;
2841 /* FALLTHRU */
2842 CASE_FLT_FN (BUILT_IN_LROUND):
2843 CASE_FLT_FN (BUILT_IN_LLROUND):
2844 builtin_optab = lround_optab;
2845 break;
2847 default:
2848 gcc_unreachable ();
2851 /* There's no easy way to detect the case we need to set EDOM. */
2852 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2853 return NULL_RTX;
2855 /* Make a suitable register to place result in. */
2856 mode = TYPE_MODE (TREE_TYPE (exp));
2858 /* There's no easy way to detect the case we need to set EDOM. */
2859 if (!flag_errno_math)
2861 rtx result = gen_reg_rtx (mode);
2863 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2864 need to expand the argument again. This way, we will not perform
2865 side-effects more the once. */
2866 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2868 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2870 start_sequence ();
2872 if (expand_sfix_optab (result, op0, builtin_optab))
2874 /* Output the entire sequence. */
2875 insns = get_insns ();
2876 end_sequence ();
2877 emit_insn (insns);
2878 return result;
2881 /* If we were unable to expand via the builtin, stop the sequence
2882 (without outputting the insns) and call to the library function
2883 with the stabilized argument list. */
2884 end_sequence ();
2887 if (fallback_fn != BUILT_IN_NONE)
2889 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2890 targets, (int) round (x) should never be transformed into
2891 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2892 a call to lround in the hope that the target provides at least some
2893 C99 functions. This should result in the best user experience for
2894 not full C99 targets. */
2895 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2896 fallback_fn, 0);
2898 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2899 fallback_fndecl, 1, arg);
2901 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2902 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2903 return convert_to_mode (mode, target, 0);
2906 return expand_call (exp, target, target == const0_rtx);
2909 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2910 a normal call should be emitted rather than expanding the function
2911 in-line. EXP is the expression that is a call to the builtin
2912 function; if convenient, the result should be placed in TARGET. */
2914 static rtx
2915 expand_builtin_powi (tree exp, rtx target)
2917 tree arg0, arg1;
2918 rtx op0, op1;
2919 enum machine_mode mode;
2920 enum machine_mode mode2;
2922 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2923 return NULL_RTX;
2925 arg0 = CALL_EXPR_ARG (exp, 0);
2926 arg1 = CALL_EXPR_ARG (exp, 1);
2927 mode = TYPE_MODE (TREE_TYPE (exp));
2929 /* Emit a libcall to libgcc. */
2931 /* Mode of the 2nd argument must match that of an int. */
2932 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2934 if (target == NULL_RTX)
2935 target = gen_reg_rtx (mode);
2937 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2938 if (GET_MODE (op0) != mode)
2939 op0 = convert_to_mode (mode, op0, 0);
2940 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2941 if (GET_MODE (op1) != mode2)
2942 op1 = convert_to_mode (mode2, op1, 0);
2944 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2945 target, LCT_CONST, mode, 2,
2946 op0, mode, op1, mode2);
2948 return target;
2951 /* Expand expression EXP which is a call to the strlen builtin. Return
2952 NULL_RTX if we failed the caller should emit a normal call, otherwise
2953 try to get the result in TARGET, if convenient. */
2955 static rtx
2956 expand_builtin_strlen (tree exp, rtx target,
2957 enum machine_mode target_mode)
2959 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2960 return NULL_RTX;
2961 else
2963 struct expand_operand ops[4];
2964 rtx pat;
2965 tree len;
2966 tree src = CALL_EXPR_ARG (exp, 0);
2967 rtx src_reg, before_strlen;
2968 enum machine_mode insn_mode = target_mode;
2969 enum insn_code icode = CODE_FOR_nothing;
2970 unsigned int align;
2972 /* If the length can be computed at compile-time, return it. */
2973 len = c_strlen (src, 0);
2974 if (len)
2975 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2977 /* If the length can be computed at compile-time and is constant
2978 integer, but there are side-effects in src, evaluate
2979 src for side-effects, then return len.
2980 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2981 can be optimized into: i++; x = 3; */
2982 len = c_strlen (src, 1);
2983 if (len && TREE_CODE (len) == INTEGER_CST)
2985 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2986 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2989 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2991 /* If SRC is not a pointer type, don't do this operation inline. */
2992 if (align == 0)
2993 return NULL_RTX;
2995 /* Bail out if we can't compute strlen in the right mode. */
2996 while (insn_mode != VOIDmode)
2998 icode = optab_handler (strlen_optab, insn_mode);
2999 if (icode != CODE_FOR_nothing)
3000 break;
3002 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3004 if (insn_mode == VOIDmode)
3005 return NULL_RTX;
3007 /* Make a place to hold the source address. We will not expand
3008 the actual source until we are sure that the expansion will
3009 not fail -- there are trees that cannot be expanded twice. */
3010 src_reg = gen_reg_rtx (Pmode);
3012 /* Mark the beginning of the strlen sequence so we can emit the
3013 source operand later. */
3014 before_strlen = get_last_insn ();
3016 create_output_operand (&ops[0], target, insn_mode);
3017 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3018 create_integer_operand (&ops[2], 0);
3019 create_integer_operand (&ops[3], align);
3020 if (!maybe_expand_insn (icode, 4, ops))
3021 return NULL_RTX;
3023 /* Now that we are assured of success, expand the source. */
3024 start_sequence ();
3025 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3026 if (pat != src_reg)
3028 #ifdef POINTERS_EXTEND_UNSIGNED
3029 if (GET_MODE (pat) != Pmode)
3030 pat = convert_to_mode (Pmode, pat,
3031 POINTERS_EXTEND_UNSIGNED);
3032 #endif
3033 emit_move_insn (src_reg, pat);
3035 pat = get_insns ();
3036 end_sequence ();
3038 if (before_strlen)
3039 emit_insn_after (pat, before_strlen);
3040 else
3041 emit_insn_before (pat, get_insns ());
3043 /* Return the value in the proper mode for this function. */
3044 if (GET_MODE (ops[0].value) == target_mode)
3045 target = ops[0].value;
3046 else if (target != 0)
3047 convert_move (target, ops[0].value, 0);
3048 else
3049 target = convert_to_mode (target_mode, ops[0].value, 0);
3051 return target;
3055 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3056 bytes from constant string DATA + OFFSET and return it as target
3057 constant. */
3059 static rtx
3060 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3061 enum machine_mode mode)
3063 const char *str = (const char *) data;
3065 gcc_assert (offset >= 0
3066 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3067 <= strlen (str) + 1));
3069 return c_readstr (str + offset, mode);
3072 /* Expand a call EXP to the memcpy builtin.
3073 Return NULL_RTX if we failed, the caller should emit a normal call,
3074 otherwise try to get the result in TARGET, if convenient (and in
3075 mode MODE if that's convenient). */
3077 static rtx
3078 expand_builtin_memcpy (tree exp, rtx target)
3080 if (!validate_arglist (exp,
3081 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3082 return NULL_RTX;
3083 else
3085 tree dest = CALL_EXPR_ARG (exp, 0);
3086 tree src = CALL_EXPR_ARG (exp, 1);
3087 tree len = CALL_EXPR_ARG (exp, 2);
3088 const char *src_str;
3089 unsigned int src_align = get_pointer_alignment (src);
3090 unsigned int dest_align = get_pointer_alignment (dest);
3091 rtx dest_mem, src_mem, dest_addr, len_rtx;
3092 HOST_WIDE_INT expected_size = -1;
3093 unsigned int expected_align = 0;
3095 /* If DEST is not a pointer type, call the normal function. */
3096 if (dest_align == 0)
3097 return NULL_RTX;
3099 /* If either SRC is not a pointer type, don't do this
3100 operation in-line. */
3101 if (src_align == 0)
3102 return NULL_RTX;
3104 if (currently_expanding_gimple_stmt)
3105 stringop_block_profile (currently_expanding_gimple_stmt,
3106 &expected_align, &expected_size);
3108 if (expected_align < dest_align)
3109 expected_align = dest_align;
3110 dest_mem = get_memory_rtx (dest, len);
3111 set_mem_align (dest_mem, dest_align);
3112 len_rtx = expand_normal (len);
3113 src_str = c_getstr (src);
3115 /* If SRC is a string constant and block move would be done
3116 by pieces, we can avoid loading the string from memory
3117 and only stored the computed constants. */
3118 if (src_str
3119 && CONST_INT_P (len_rtx)
3120 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3121 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3122 CONST_CAST (char *, src_str),
3123 dest_align, false))
3125 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3126 builtin_memcpy_read_str,
3127 CONST_CAST (char *, src_str),
3128 dest_align, false, 0);
3129 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3130 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3131 return dest_mem;
3134 src_mem = get_memory_rtx (src, len);
3135 set_mem_align (src_mem, src_align);
3137 /* Copy word part most expediently. */
3138 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3139 CALL_EXPR_TAILCALL (exp)
3140 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3141 expected_align, expected_size);
3143 if (dest_addr == 0)
3145 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3146 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3148 return dest_addr;
3152 /* Expand a call EXP to the mempcpy builtin.
3153 Return NULL_RTX if we failed; the caller should emit a normal call,
3154 otherwise try to get the result in TARGET, if convenient (and in
3155 mode MODE if that's convenient). If ENDP is 0 return the
3156 destination pointer, if ENDP is 1 return the end pointer ala
3157 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3158 stpcpy. */
3160 static rtx
3161 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3163 if (!validate_arglist (exp,
3164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3165 return NULL_RTX;
3166 else
3168 tree dest = CALL_EXPR_ARG (exp, 0);
3169 tree src = CALL_EXPR_ARG (exp, 1);
3170 tree len = CALL_EXPR_ARG (exp, 2);
3171 return expand_builtin_mempcpy_args (dest, src, len,
3172 target, mode, /*endp=*/ 1);
3176 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3177 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3178 so that this can also be called without constructing an actual CALL_EXPR.
3179 The other arguments and return value are the same as for
3180 expand_builtin_mempcpy. */
3182 static rtx
3183 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3184 rtx target, enum machine_mode mode, int endp)
3186 /* If return value is ignored, transform mempcpy into memcpy. */
3187 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3189 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3190 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3191 dest, src, len);
3192 return expand_expr (result, target, mode, EXPAND_NORMAL);
3194 else
3196 const char *src_str;
3197 unsigned int src_align = get_pointer_alignment (src);
3198 unsigned int dest_align = get_pointer_alignment (dest);
3199 rtx dest_mem, src_mem, len_rtx;
3201 /* If either SRC or DEST is not a pointer type, don't do this
3202 operation in-line. */
3203 if (dest_align == 0 || src_align == 0)
3204 return NULL_RTX;
3206 /* If LEN is not constant, call the normal function. */
3207 if (! host_integerp (len, 1))
3208 return NULL_RTX;
3210 len_rtx = expand_normal (len);
3211 src_str = c_getstr (src);
3213 /* If SRC is a string constant and block move would be done
3214 by pieces, we can avoid loading the string from memory
3215 and only stored the computed constants. */
3216 if (src_str
3217 && CONST_INT_P (len_rtx)
3218 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3219 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3220 CONST_CAST (char *, src_str),
3221 dest_align, false))
3223 dest_mem = get_memory_rtx (dest, len);
3224 set_mem_align (dest_mem, dest_align);
3225 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3226 builtin_memcpy_read_str,
3227 CONST_CAST (char *, src_str),
3228 dest_align, false, endp);
3229 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3230 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3231 return dest_mem;
3234 if (CONST_INT_P (len_rtx)
3235 && can_move_by_pieces (INTVAL (len_rtx),
3236 MIN (dest_align, src_align)))
3238 dest_mem = get_memory_rtx (dest, len);
3239 set_mem_align (dest_mem, dest_align);
3240 src_mem = get_memory_rtx (src, len);
3241 set_mem_align (src_mem, src_align);
3242 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3243 MIN (dest_align, src_align), endp);
3244 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3245 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3246 return dest_mem;
3249 return NULL_RTX;
3253 #ifndef HAVE_movstr
3254 # define HAVE_movstr 0
3255 # define CODE_FOR_movstr CODE_FOR_nothing
3256 #endif
3258 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3259 we failed, the caller should emit a normal call, otherwise try to
3260 get the result in TARGET, if convenient. If ENDP is 0 return the
3261 destination pointer, if ENDP is 1 return the end pointer ala
3262 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3263 stpcpy. */
3265 static rtx
3266 expand_movstr (tree dest, tree src, rtx target, int endp)
3268 struct expand_operand ops[3];
3269 rtx dest_mem;
3270 rtx src_mem;
3272 if (!HAVE_movstr)
3273 return NULL_RTX;
3275 dest_mem = get_memory_rtx (dest, NULL);
3276 src_mem = get_memory_rtx (src, NULL);
3277 if (!endp)
3279 target = force_reg (Pmode, XEXP (dest_mem, 0));
3280 dest_mem = replace_equiv_address (dest_mem, target);
3283 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3284 create_fixed_operand (&ops[1], dest_mem);
3285 create_fixed_operand (&ops[2], src_mem);
3286 expand_insn (CODE_FOR_movstr, 3, ops);
3288 if (endp && target != const0_rtx)
3290 target = ops[0].value;
3291 /* movstr is supposed to set end to the address of the NUL
3292 terminator. If the caller requested a mempcpy-like return value,
3293 adjust it. */
3294 if (endp == 1)
3296 rtx tem = plus_constant (GET_MODE (target),
3297 gen_lowpart (GET_MODE (target), target), 1);
3298 emit_move_insn (target, force_operand (tem, NULL_RTX));
3301 return target;
3304 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3305 NULL_RTX if we failed the caller should emit a normal call, otherwise
3306 try to get the result in TARGET, if convenient (and in mode MODE if that's
3307 convenient). */
3309 static rtx
3310 expand_builtin_strcpy (tree exp, rtx target)
3312 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3314 tree dest = CALL_EXPR_ARG (exp, 0);
3315 tree src = CALL_EXPR_ARG (exp, 1);
3316 return expand_builtin_strcpy_args (dest, src, target);
3318 return NULL_RTX;
3321 /* Helper function to do the actual work for expand_builtin_strcpy. The
3322 arguments to the builtin_strcpy call DEST and SRC are broken out
3323 so that this can also be called without constructing an actual CALL_EXPR.
3324 The other arguments and return value are the same as for
3325 expand_builtin_strcpy. */
3327 static rtx
3328 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3330 return expand_movstr (dest, src, target, /*endp=*/0);
3333 /* Expand a call EXP to the stpcpy builtin.
3334 Return NULL_RTX if we failed the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). */
3338 static rtx
3339 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3341 tree dst, src;
3342 location_t loc = EXPR_LOCATION (exp);
3344 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3347 dst = CALL_EXPR_ARG (exp, 0);
3348 src = CALL_EXPR_ARG (exp, 1);
3350 /* If return value is ignored, transform stpcpy into strcpy. */
3351 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3353 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3354 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3355 return expand_expr (result, target, mode, EXPAND_NORMAL);
3357 else
3359 tree len, lenp1;
3360 rtx ret;
3362 /* Ensure we get an actual string whose length can be evaluated at
3363 compile-time, not an expression containing a string. This is
3364 because the latter will potentially produce pessimized code
3365 when used to produce the return value. */
3366 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3367 return expand_movstr (dst, src, target, /*endp=*/2);
3369 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3370 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3371 target, mode, /*endp=*/2);
3373 if (ret)
3374 return ret;
3376 if (TREE_CODE (len) == INTEGER_CST)
3378 rtx len_rtx = expand_normal (len);
3380 if (CONST_INT_P (len_rtx))
3382 ret = expand_builtin_strcpy_args (dst, src, target);
3384 if (ret)
3386 if (! target)
3388 if (mode != VOIDmode)
3389 target = gen_reg_rtx (mode);
3390 else
3391 target = gen_reg_rtx (GET_MODE (ret));
3393 if (GET_MODE (target) != GET_MODE (ret))
3394 ret = gen_lowpart (GET_MODE (target), ret);
3396 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3397 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3398 gcc_assert (ret);
3400 return target;
3405 return expand_movstr (dst, src, target, /*endp=*/2);
3409 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3410 bytes from constant string DATA + OFFSET and return it as target
3411 constant. */
3414 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3415 enum machine_mode mode)
3417 const char *str = (const char *) data;
3419 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3420 return const0_rtx;
3422 return c_readstr (str + offset, mode);
3425 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3426 NULL_RTX if we failed the caller should emit a normal call. */
3428 static rtx
3429 expand_builtin_strncpy (tree exp, rtx target)
3431 location_t loc = EXPR_LOCATION (exp);
3433 if (validate_arglist (exp,
3434 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3436 tree dest = CALL_EXPR_ARG (exp, 0);
3437 tree src = CALL_EXPR_ARG (exp, 1);
3438 tree len = CALL_EXPR_ARG (exp, 2);
3439 tree slen = c_strlen (src, 1);
3441 /* We must be passed a constant len and src parameter. */
3442 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3443 return NULL_RTX;
3445 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3447 /* We're required to pad with trailing zeros if the requested
3448 len is greater than strlen(s2)+1. In that case try to
3449 use store_by_pieces, if it fails, punt. */
3450 if (tree_int_cst_lt (slen, len))
3452 unsigned int dest_align = get_pointer_alignment (dest);
3453 const char *p = c_getstr (src);
3454 rtx dest_mem;
3456 if (!p || dest_align == 0 || !host_integerp (len, 1)
3457 || !can_store_by_pieces (tree_low_cst (len, 1),
3458 builtin_strncpy_read_str,
3459 CONST_CAST (char *, p),
3460 dest_align, false))
3461 return NULL_RTX;
3463 dest_mem = get_memory_rtx (dest, len);
3464 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3465 builtin_strncpy_read_str,
3466 CONST_CAST (char *, p), dest_align, false, 0);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3469 return dest_mem;
3472 return NULL_RTX;
3475 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3476 bytes from constant string DATA + OFFSET and return it as target
3477 constant. */
3480 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3481 enum machine_mode mode)
3483 const char *c = (const char *) data;
3484 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3486 memset (p, *c, GET_MODE_SIZE (mode));
3488 return c_readstr (p, mode);
3491 /* Callback routine for store_by_pieces. Return the RTL of a register
3492 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3493 char value given in the RTL register data. For example, if mode is
3494 4 bytes wide, return the RTL for 0x01010101*data. */
3496 static rtx
3497 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3498 enum machine_mode mode)
3500 rtx target, coeff;
3501 size_t size;
3502 char *p;
3504 size = GET_MODE_SIZE (mode);
3505 if (size == 1)
3506 return (rtx) data;
3508 p = XALLOCAVEC (char, size);
3509 memset (p, 1, size);
3510 coeff = c_readstr (p, mode);
3512 target = convert_to_mode (mode, (rtx) data, 1);
3513 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3514 return force_reg (mode, target);
3517 /* Expand expression EXP, which is a call to the memset builtin. Return
3518 NULL_RTX if we failed the caller should emit a normal call, otherwise
3519 try to get the result in TARGET, if convenient (and in mode MODE if that's
3520 convenient). */
3522 static rtx
3523 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3525 if (!validate_arglist (exp,
3526 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3527 return NULL_RTX;
3528 else
3530 tree dest = CALL_EXPR_ARG (exp, 0);
3531 tree val = CALL_EXPR_ARG (exp, 1);
3532 tree len = CALL_EXPR_ARG (exp, 2);
3533 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3537 /* Helper function to do the actual work for expand_builtin_memset. The
3538 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3539 so that this can also be called without constructing an actual CALL_EXPR.
3540 The other arguments and return value are the same as for
3541 expand_builtin_memset. */
3543 static rtx
3544 expand_builtin_memset_args (tree dest, tree val, tree len,
3545 rtx target, enum machine_mode mode, tree orig_exp)
3547 tree fndecl, fn;
3548 enum built_in_function fcode;
3549 enum machine_mode val_mode;
3550 char c;
3551 unsigned int dest_align;
3552 rtx dest_mem, dest_addr, len_rtx;
3553 HOST_WIDE_INT expected_size = -1;
3554 unsigned int expected_align = 0;
3556 dest_align = get_pointer_alignment (dest);
3558 /* If DEST is not a pointer type, don't do this operation in-line. */
3559 if (dest_align == 0)
3560 return NULL_RTX;
3562 if (currently_expanding_gimple_stmt)
3563 stringop_block_profile (currently_expanding_gimple_stmt,
3564 &expected_align, &expected_size);
3566 if (expected_align < dest_align)
3567 expected_align = dest_align;
3569 /* If the LEN parameter is zero, return DEST. */
3570 if (integer_zerop (len))
3572 /* Evaluate and ignore VAL in case it has side-effects. */
3573 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3574 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3577 /* Stabilize the arguments in case we fail. */
3578 dest = builtin_save_expr (dest);
3579 val = builtin_save_expr (val);
3580 len = builtin_save_expr (len);
3582 len_rtx = expand_normal (len);
3583 dest_mem = get_memory_rtx (dest, len);
3584 val_mode = TYPE_MODE (unsigned_char_type_node);
3586 if (TREE_CODE (val) != INTEGER_CST)
3588 rtx val_rtx;
3590 val_rtx = expand_normal (val);
3591 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3593 /* Assume that we can memset by pieces if we can store
3594 * the coefficients by pieces (in the required modes).
3595 * We can't pass builtin_memset_gen_str as that emits RTL. */
3596 c = 1;
3597 if (host_integerp (len, 1)
3598 && can_store_by_pieces (tree_low_cst (len, 1),
3599 builtin_memset_read_str, &c, dest_align,
3600 true))
3602 val_rtx = force_reg (val_mode, val_rtx);
3603 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3604 builtin_memset_gen_str, val_rtx, dest_align,
3605 true, 0);
3607 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3608 dest_align, expected_align,
3609 expected_size))
3610 goto do_libcall;
3612 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3613 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3614 return dest_mem;
3617 if (target_char_cast (val, &c))
3618 goto do_libcall;
3620 if (c)
3622 if (host_integerp (len, 1)
3623 && can_store_by_pieces (tree_low_cst (len, 1),
3624 builtin_memset_read_str, &c, dest_align,
3625 true))
3626 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3627 builtin_memset_read_str, &c, dest_align, true, 0);
3628 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3629 gen_int_mode (c, val_mode),
3630 dest_align, expected_align,
3631 expected_size))
3632 goto do_libcall;
3634 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3635 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3636 return dest_mem;
3639 set_mem_align (dest_mem, dest_align);
3640 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3641 CALL_EXPR_TAILCALL (orig_exp)
3642 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3643 expected_align, expected_size);
3645 if (dest_addr == 0)
3647 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3648 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3651 return dest_addr;
3653 do_libcall:
3654 fndecl = get_callee_fndecl (orig_exp);
3655 fcode = DECL_FUNCTION_CODE (fndecl);
3656 if (fcode == BUILT_IN_MEMSET)
3657 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3658 dest, val, len);
3659 else if (fcode == BUILT_IN_BZERO)
3660 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3661 dest, len);
3662 else
3663 gcc_unreachable ();
3664 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3665 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3666 return expand_call (fn, target, target == const0_rtx);
3669 /* Expand expression EXP, which is a call to the bzero builtin. Return
3670 NULL_RTX if we failed the caller should emit a normal call. */
3672 static rtx
3673 expand_builtin_bzero (tree exp)
3675 tree dest, size;
3676 location_t loc = EXPR_LOCATION (exp);
3678 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3679 return NULL_RTX;
3681 dest = CALL_EXPR_ARG (exp, 0);
3682 size = CALL_EXPR_ARG (exp, 1);
3684 /* New argument list transforming bzero(ptr x, int y) to
3685 memset(ptr x, int 0, size_t y). This is done this way
3686 so that if it isn't expanded inline, we fallback to
3687 calling bzero instead of memset. */
3689 return expand_builtin_memset_args (dest, integer_zero_node,
3690 fold_convert_loc (loc,
3691 size_type_node, size),
3692 const0_rtx, VOIDmode, exp);
3695 /* Expand expression EXP, which is a call to the memcmp built-in function.
3696 Return NULL_RTX if we failed and the caller should emit a normal call,
3697 otherwise try to get the result in TARGET, if convenient (and in mode
3698 MODE, if that's convenient). */
3700 static rtx
3701 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3702 ATTRIBUTE_UNUSED enum machine_mode mode)
3704 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3706 if (!validate_arglist (exp,
3707 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3708 return NULL_RTX;
3710 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3711 implementing memcmp because it will stop if it encounters two
3712 zero bytes. */
3713 #if defined HAVE_cmpmemsi
3715 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3716 rtx result;
3717 rtx insn;
3718 tree arg1 = CALL_EXPR_ARG (exp, 0);
3719 tree arg2 = CALL_EXPR_ARG (exp, 1);
3720 tree len = CALL_EXPR_ARG (exp, 2);
3722 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3723 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3724 enum machine_mode insn_mode;
3726 if (HAVE_cmpmemsi)
3727 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3728 else
3729 return NULL_RTX;
3731 /* If we don't have POINTER_TYPE, call the function. */
3732 if (arg1_align == 0 || arg2_align == 0)
3733 return NULL_RTX;
3735 /* Make a place to write the result of the instruction. */
3736 result = target;
3737 if (! (result != 0
3738 && REG_P (result) && GET_MODE (result) == insn_mode
3739 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3740 result = gen_reg_rtx (insn_mode);
3742 arg1_rtx = get_memory_rtx (arg1, len);
3743 arg2_rtx = get_memory_rtx (arg2, len);
3744 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3746 /* Set MEM_SIZE as appropriate. */
3747 if (CONST_INT_P (arg3_rtx))
3749 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3750 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3753 if (HAVE_cmpmemsi)
3754 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3755 GEN_INT (MIN (arg1_align, arg2_align)));
3756 else
3757 gcc_unreachable ();
3759 if (insn)
3760 emit_insn (insn);
3761 else
3762 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3763 TYPE_MODE (integer_type_node), 3,
3764 XEXP (arg1_rtx, 0), Pmode,
3765 XEXP (arg2_rtx, 0), Pmode,
3766 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3767 TYPE_UNSIGNED (sizetype)),
3768 TYPE_MODE (sizetype));
3770 /* Return the value in the proper mode for this function. */
3771 mode = TYPE_MODE (TREE_TYPE (exp));
3772 if (GET_MODE (result) == mode)
3773 return result;
3774 else if (target != 0)
3776 convert_move (target, result, 0);
3777 return target;
3779 else
3780 return convert_to_mode (mode, result, 0);
3782 #endif /* HAVE_cmpmemsi. */
3784 return NULL_RTX;
3787 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3788 if we failed the caller should emit a normal call, otherwise try to get
3789 the result in TARGET, if convenient. */
3791 static rtx
3792 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3794 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3795 return NULL_RTX;
3797 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3798 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3799 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3801 rtx arg1_rtx, arg2_rtx;
3802 rtx result, insn = NULL_RTX;
3803 tree fndecl, fn;
3804 tree arg1 = CALL_EXPR_ARG (exp, 0);
3805 tree arg2 = CALL_EXPR_ARG (exp, 1);
3807 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3808 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3810 /* If we don't have POINTER_TYPE, call the function. */
3811 if (arg1_align == 0 || arg2_align == 0)
3812 return NULL_RTX;
3814 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3815 arg1 = builtin_save_expr (arg1);
3816 arg2 = builtin_save_expr (arg2);
3818 arg1_rtx = get_memory_rtx (arg1, NULL);
3819 arg2_rtx = get_memory_rtx (arg2, NULL);
3821 #ifdef HAVE_cmpstrsi
3822 /* Try to call cmpstrsi. */
3823 if (HAVE_cmpstrsi)
3825 enum machine_mode insn_mode
3826 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3828 /* Make a place to write the result of the instruction. */
3829 result = target;
3830 if (! (result != 0
3831 && REG_P (result) && GET_MODE (result) == insn_mode
3832 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3833 result = gen_reg_rtx (insn_mode);
3835 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3836 GEN_INT (MIN (arg1_align, arg2_align)));
3838 #endif
3839 #ifdef HAVE_cmpstrnsi
3840 /* Try to determine at least one length and call cmpstrnsi. */
3841 if (!insn && HAVE_cmpstrnsi)
3843 tree len;
3844 rtx arg3_rtx;
3846 enum machine_mode insn_mode
3847 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3848 tree len1 = c_strlen (arg1, 1);
3849 tree len2 = c_strlen (arg2, 1);
3851 if (len1)
3852 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3853 if (len2)
3854 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3856 /* If we don't have a constant length for the first, use the length
3857 of the second, if we know it. We don't require a constant for
3858 this case; some cost analysis could be done if both are available
3859 but neither is constant. For now, assume they're equally cheap,
3860 unless one has side effects. If both strings have constant lengths,
3861 use the smaller. */
3863 if (!len1)
3864 len = len2;
3865 else if (!len2)
3866 len = len1;
3867 else if (TREE_SIDE_EFFECTS (len1))
3868 len = len2;
3869 else if (TREE_SIDE_EFFECTS (len2))
3870 len = len1;
3871 else if (TREE_CODE (len1) != INTEGER_CST)
3872 len = len2;
3873 else if (TREE_CODE (len2) != INTEGER_CST)
3874 len = len1;
3875 else if (tree_int_cst_lt (len1, len2))
3876 len = len1;
3877 else
3878 len = len2;
3880 /* If both arguments have side effects, we cannot optimize. */
3881 if (!len || TREE_SIDE_EFFECTS (len))
3882 goto do_libcall;
3884 arg3_rtx = expand_normal (len);
3886 /* Make a place to write the result of the instruction. */
3887 result = target;
3888 if (! (result != 0
3889 && REG_P (result) && GET_MODE (result) == insn_mode
3890 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3891 result = gen_reg_rtx (insn_mode);
3893 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3894 GEN_INT (MIN (arg1_align, arg2_align)));
3896 #endif
3898 if (insn)
3900 enum machine_mode mode;
3901 emit_insn (insn);
3903 /* Return the value in the proper mode for this function. */
3904 mode = TYPE_MODE (TREE_TYPE (exp));
3905 if (GET_MODE (result) == mode)
3906 return result;
3907 if (target == 0)
3908 return convert_to_mode (mode, result, 0);
3909 convert_move (target, result, 0);
3910 return target;
3913 /* Expand the library call ourselves using a stabilized argument
3914 list to avoid re-evaluating the function's arguments twice. */
3915 #ifdef HAVE_cmpstrnsi
3916 do_libcall:
3917 #endif
3918 fndecl = get_callee_fndecl (exp);
3919 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3920 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3921 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3922 return expand_call (fn, target, target == const0_rtx);
3924 #endif
3925 return NULL_RTX;
3928 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3929 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3930 the result in TARGET, if convenient. */
3932 static rtx
3933 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3934 ATTRIBUTE_UNUSED enum machine_mode mode)
3936 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3938 if (!validate_arglist (exp,
3939 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3940 return NULL_RTX;
3942 /* If c_strlen can determine an expression for one of the string
3943 lengths, and it doesn't have side effects, then emit cmpstrnsi
3944 using length MIN(strlen(string)+1, arg3). */
3945 #ifdef HAVE_cmpstrnsi
3946 if (HAVE_cmpstrnsi)
3948 tree len, len1, len2;
3949 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3950 rtx result, insn;
3951 tree fndecl, fn;
3952 tree arg1 = CALL_EXPR_ARG (exp, 0);
3953 tree arg2 = CALL_EXPR_ARG (exp, 1);
3954 tree arg3 = CALL_EXPR_ARG (exp, 2);
3956 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3957 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3958 enum machine_mode insn_mode
3959 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3961 len1 = c_strlen (arg1, 1);
3962 len2 = c_strlen (arg2, 1);
3964 if (len1)
3965 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3966 if (len2)
3967 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3969 /* If we don't have a constant length for the first, use the length
3970 of the second, if we know it. We don't require a constant for
3971 this case; some cost analysis could be done if both are available
3972 but neither is constant. For now, assume they're equally cheap,
3973 unless one has side effects. If both strings have constant lengths,
3974 use the smaller. */
3976 if (!len1)
3977 len = len2;
3978 else if (!len2)
3979 len = len1;
3980 else if (TREE_SIDE_EFFECTS (len1))
3981 len = len2;
3982 else if (TREE_SIDE_EFFECTS (len2))
3983 len = len1;
3984 else if (TREE_CODE (len1) != INTEGER_CST)
3985 len = len2;
3986 else if (TREE_CODE (len2) != INTEGER_CST)
3987 len = len1;
3988 else if (tree_int_cst_lt (len1, len2))
3989 len = len1;
3990 else
3991 len = len2;
3993 /* If both arguments have side effects, we cannot optimize. */
3994 if (!len || TREE_SIDE_EFFECTS (len))
3995 return NULL_RTX;
3997 /* The actual new length parameter is MIN(len,arg3). */
3998 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3999 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4001 /* If we don't have POINTER_TYPE, call the function. */
4002 if (arg1_align == 0 || arg2_align == 0)
4003 return NULL_RTX;
4005 /* Make a place to write the result of the instruction. */
4006 result = target;
4007 if (! (result != 0
4008 && REG_P (result) && GET_MODE (result) == insn_mode
4009 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4010 result = gen_reg_rtx (insn_mode);
4012 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4013 arg1 = builtin_save_expr (arg1);
4014 arg2 = builtin_save_expr (arg2);
4015 len = builtin_save_expr (len);
4017 arg1_rtx = get_memory_rtx (arg1, len);
4018 arg2_rtx = get_memory_rtx (arg2, len);
4019 arg3_rtx = expand_normal (len);
4020 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4021 GEN_INT (MIN (arg1_align, arg2_align)));
4022 if (insn)
4024 emit_insn (insn);
4026 /* Return the value in the proper mode for this function. */
4027 mode = TYPE_MODE (TREE_TYPE (exp));
4028 if (GET_MODE (result) == mode)
4029 return result;
4030 if (target == 0)
4031 return convert_to_mode (mode, result, 0);
4032 convert_move (target, result, 0);
4033 return target;
4036 /* Expand the library call ourselves using a stabilized argument
4037 list to avoid re-evaluating the function's arguments twice. */
4038 fndecl = get_callee_fndecl (exp);
4039 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4040 arg1, arg2, len);
4041 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4042 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4043 return expand_call (fn, target, target == const0_rtx);
4045 #endif
4046 return NULL_RTX;
4049 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4050 if that's convenient. */
4053 expand_builtin_saveregs (void)
4055 rtx val, seq;
4057 /* Don't do __builtin_saveregs more than once in a function.
4058 Save the result of the first call and reuse it. */
4059 if (saveregs_value != 0)
4060 return saveregs_value;
4062 /* When this function is called, it means that registers must be
4063 saved on entry to this function. So we migrate the call to the
4064 first insn of this function. */
4066 start_sequence ();
4068 /* Do whatever the machine needs done in this case. */
4069 val = targetm.calls.expand_builtin_saveregs ();
4071 seq = get_insns ();
4072 end_sequence ();
4074 saveregs_value = val;
4076 /* Put the insns after the NOTE that starts the function. If this
4077 is inside a start_sequence, make the outer-level insn chain current, so
4078 the code is placed at the start of the function. */
4079 push_topmost_sequence ();
4080 emit_insn_after (seq, entry_of_function ());
4081 pop_topmost_sequence ();
4083 return val;
4086 /* Expand a call to __builtin_next_arg. */
4088 static rtx
4089 expand_builtin_next_arg (void)
4091 /* Checking arguments is already done in fold_builtin_next_arg
4092 that must be called before this function. */
4093 return expand_binop (ptr_mode, add_optab,
4094 crtl->args.internal_arg_pointer,
4095 crtl->args.arg_offset_rtx,
4096 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4099 /* Make it easier for the backends by protecting the valist argument
4100 from multiple evaluations. */
4102 static tree
4103 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4105 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4107 /* The current way of determining the type of valist is completely
4108 bogus. We should have the information on the va builtin instead. */
4109 if (!vatype)
4110 vatype = targetm.fn_abi_va_list (cfun->decl);
4112 if (TREE_CODE (vatype) == ARRAY_TYPE)
4114 if (TREE_SIDE_EFFECTS (valist))
4115 valist = save_expr (valist);
4117 /* For this case, the backends will be expecting a pointer to
4118 vatype, but it's possible we've actually been given an array
4119 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4120 So fix it. */
4121 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4123 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4124 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4127 else
4129 tree pt = build_pointer_type (vatype);
4131 if (! needs_lvalue)
4133 if (! TREE_SIDE_EFFECTS (valist))
4134 return valist;
4136 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4137 TREE_SIDE_EFFECTS (valist) = 1;
4140 if (TREE_SIDE_EFFECTS (valist))
4141 valist = save_expr (valist);
4142 valist = fold_build2_loc (loc, MEM_REF,
4143 vatype, valist, build_int_cst (pt, 0));
4146 return valist;
4149 /* The "standard" definition of va_list is void*. */
4151 tree
4152 std_build_builtin_va_list (void)
4154 return ptr_type_node;
4157 /* The "standard" abi va_list is va_list_type_node. */
4159 tree
4160 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4162 return va_list_type_node;
4165 /* The "standard" type of va_list is va_list_type_node. */
4167 tree
4168 std_canonical_va_list_type (tree type)
4170 tree wtype, htype;
4172 if (INDIRECT_REF_P (type))
4173 type = TREE_TYPE (type);
4174 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4175 type = TREE_TYPE (type);
4176 wtype = va_list_type_node;
4177 htype = type;
4178 /* Treat structure va_list types. */
4179 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4180 htype = TREE_TYPE (htype);
4181 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4183 /* If va_list is an array type, the argument may have decayed
4184 to a pointer type, e.g. by being passed to another function.
4185 In that case, unwrap both types so that we can compare the
4186 underlying records. */
4187 if (TREE_CODE (htype) == ARRAY_TYPE
4188 || POINTER_TYPE_P (htype))
4190 wtype = TREE_TYPE (wtype);
4191 htype = TREE_TYPE (htype);
4194 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4195 return va_list_type_node;
4197 return NULL_TREE;
4200 /* The "standard" implementation of va_start: just assign `nextarg' to
4201 the variable. */
4203 void
4204 std_expand_builtin_va_start (tree valist, rtx nextarg)
4206 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4207 convert_move (va_r, nextarg, 0);
4210 /* Expand EXP, a call to __builtin_va_start. */
4212 static rtx
4213 expand_builtin_va_start (tree exp)
4215 rtx nextarg;
4216 tree valist;
4217 location_t loc = EXPR_LOCATION (exp);
4219 if (call_expr_nargs (exp) < 2)
4221 error_at (loc, "too few arguments to function %<va_start%>");
4222 return const0_rtx;
4225 if (fold_builtin_next_arg (exp, true))
4226 return const0_rtx;
4228 nextarg = expand_builtin_next_arg ();
4229 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4231 if (targetm.expand_builtin_va_start)
4232 targetm.expand_builtin_va_start (valist, nextarg);
4233 else
4234 std_expand_builtin_va_start (valist, nextarg);
4236 return const0_rtx;
4239 /* The "standard" implementation of va_arg: read the value from the
4240 current (padded) address and increment by the (padded) size. */
4242 tree
4243 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4244 gimple_seq *post_p)
4246 tree addr, t, type_size, rounded_size, valist_tmp;
4247 unsigned HOST_WIDE_INT align, boundary;
4248 bool indirect;
4250 #ifdef ARGS_GROW_DOWNWARD
4251 /* All of the alignment and movement below is for args-grow-up machines.
4252 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4253 implement their own specialized gimplify_va_arg_expr routines. */
4254 gcc_unreachable ();
4255 #endif
4257 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4258 if (indirect)
4259 type = build_pointer_type (type);
4261 align = PARM_BOUNDARY / BITS_PER_UNIT;
4262 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4264 /* When we align parameter on stack for caller, if the parameter
4265 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4266 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4267 here with caller. */
4268 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4269 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4271 boundary /= BITS_PER_UNIT;
4273 /* Hoist the valist value into a temporary for the moment. */
4274 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4276 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4277 requires greater alignment, we must perform dynamic alignment. */
4278 if (boundary > align
4279 && !integer_zerop (TYPE_SIZE (type)))
4281 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4282 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4283 gimplify_and_add (t, pre_p);
4285 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4286 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4287 valist_tmp,
4288 build_int_cst (TREE_TYPE (valist), -boundary)));
4289 gimplify_and_add (t, pre_p);
4291 else
4292 boundary = align;
4294 /* If the actual alignment is less than the alignment of the type,
4295 adjust the type accordingly so that we don't assume strict alignment
4296 when dereferencing the pointer. */
4297 boundary *= BITS_PER_UNIT;
4298 if (boundary < TYPE_ALIGN (type))
4300 type = build_variant_type_copy (type);
4301 TYPE_ALIGN (type) = boundary;
4304 /* Compute the rounded size of the type. */
4305 type_size = size_in_bytes (type);
4306 rounded_size = round_up (type_size, align);
4308 /* Reduce rounded_size so it's sharable with the postqueue. */
4309 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4311 /* Get AP. */
4312 addr = valist_tmp;
4313 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4315 /* Small args are padded downward. */
4316 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4317 rounded_size, size_int (align));
4318 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4319 size_binop (MINUS_EXPR, rounded_size, type_size));
4320 addr = fold_build_pointer_plus (addr, t);
4323 /* Compute new value for AP. */
4324 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4325 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4326 gimplify_and_add (t, pre_p);
4328 addr = fold_convert (build_pointer_type (type), addr);
4330 if (indirect)
4331 addr = build_va_arg_indirect_ref (addr);
4333 return build_va_arg_indirect_ref (addr);
4336 /* Build an indirect-ref expression over the given TREE, which represents a
4337 piece of a va_arg() expansion. */
4338 tree
4339 build_va_arg_indirect_ref (tree addr)
4341 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4343 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4344 mf_mark (addr);
4346 return addr;
4349 /* Return a dummy expression of type TYPE in order to keep going after an
4350 error. */
4352 static tree
4353 dummy_object (tree type)
4355 tree t = build_int_cst (build_pointer_type (type), 0);
4356 return build2 (MEM_REF, type, t, t);
4359 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4360 builtin function, but a very special sort of operator. */
4362 enum gimplify_status
4363 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4365 tree promoted_type, have_va_type;
4366 tree valist = TREE_OPERAND (*expr_p, 0);
4367 tree type = TREE_TYPE (*expr_p);
4368 tree t;
4369 location_t loc = EXPR_LOCATION (*expr_p);
4371 /* Verify that valist is of the proper type. */
4372 have_va_type = TREE_TYPE (valist);
4373 if (have_va_type == error_mark_node)
4374 return GS_ERROR;
4375 have_va_type = targetm.canonical_va_list_type (have_va_type);
4377 if (have_va_type == NULL_TREE)
4379 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4380 return GS_ERROR;
4383 /* Generate a diagnostic for requesting data of a type that cannot
4384 be passed through `...' due to type promotion at the call site. */
4385 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4386 != type)
4388 static bool gave_help;
4389 bool warned;
4391 /* Unfortunately, this is merely undefined, rather than a constraint
4392 violation, so we cannot make this an error. If this call is never
4393 executed, the program is still strictly conforming. */
4394 warned = warning_at (loc, 0,
4395 "%qT is promoted to %qT when passed through %<...%>",
4396 type, promoted_type);
4397 if (!gave_help && warned)
4399 gave_help = true;
4400 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4401 promoted_type, type);
4404 /* We can, however, treat "undefined" any way we please.
4405 Call abort to encourage the user to fix the program. */
4406 if (warned)
4407 inform (loc, "if this code is reached, the program will abort");
4408 /* Before the abort, allow the evaluation of the va_list
4409 expression to exit or longjmp. */
4410 gimplify_and_add (valist, pre_p);
4411 t = build_call_expr_loc (loc,
4412 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4413 gimplify_and_add (t, pre_p);
4415 /* This is dead code, but go ahead and finish so that the
4416 mode of the result comes out right. */
4417 *expr_p = dummy_object (type);
4418 return GS_ALL_DONE;
4420 else
4422 /* Make it easier for the backends by protecting the valist argument
4423 from multiple evaluations. */
4424 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4426 /* For this case, the backends will be expecting a pointer to
4427 TREE_TYPE (abi), but it's possible we've
4428 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4429 So fix it. */
4430 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4432 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4433 valist = fold_convert_loc (loc, p1,
4434 build_fold_addr_expr_loc (loc, valist));
4437 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4439 else
4440 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4442 if (!targetm.gimplify_va_arg_expr)
4443 /* FIXME: Once most targets are converted we should merely
4444 assert this is non-null. */
4445 return GS_ALL_DONE;
4447 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4448 return GS_OK;
4452 /* Expand EXP, a call to __builtin_va_end. */
4454 static rtx
4455 expand_builtin_va_end (tree exp)
4457 tree valist = CALL_EXPR_ARG (exp, 0);
4459 /* Evaluate for side effects, if needed. I hate macros that don't
4460 do that. */
4461 if (TREE_SIDE_EFFECTS (valist))
4462 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4464 return const0_rtx;
4467 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4468 builtin rather than just as an assignment in stdarg.h because of the
4469 nastiness of array-type va_list types. */
4471 static rtx
4472 expand_builtin_va_copy (tree exp)
4474 tree dst, src, t;
4475 location_t loc = EXPR_LOCATION (exp);
4477 dst = CALL_EXPR_ARG (exp, 0);
4478 src = CALL_EXPR_ARG (exp, 1);
4480 dst = stabilize_va_list_loc (loc, dst, 1);
4481 src = stabilize_va_list_loc (loc, src, 0);
4483 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4485 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4487 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4488 TREE_SIDE_EFFECTS (t) = 1;
4489 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4491 else
4493 rtx dstb, srcb, size;
4495 /* Evaluate to pointers. */
4496 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4497 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4498 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4499 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4501 dstb = convert_memory_address (Pmode, dstb);
4502 srcb = convert_memory_address (Pmode, srcb);
4504 /* "Dereference" to BLKmode memories. */
4505 dstb = gen_rtx_MEM (BLKmode, dstb);
4506 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4507 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4508 srcb = gen_rtx_MEM (BLKmode, srcb);
4509 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4510 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4512 /* Copy. */
4513 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4516 return const0_rtx;
4519 /* Expand a call to one of the builtin functions __builtin_frame_address or
4520 __builtin_return_address. */
4522 static rtx
4523 expand_builtin_frame_address (tree fndecl, tree exp)
4525 /* The argument must be a nonnegative integer constant.
4526 It counts the number of frames to scan up the stack.
4527 The value is the return address saved in that frame. */
4528 if (call_expr_nargs (exp) == 0)
4529 /* Warning about missing arg was already issued. */
4530 return const0_rtx;
4531 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4533 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4534 error ("invalid argument to %<__builtin_frame_address%>");
4535 else
4536 error ("invalid argument to %<__builtin_return_address%>");
4537 return const0_rtx;
4539 else
4541 rtx tem
4542 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4543 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4545 /* Some ports cannot access arbitrary stack frames. */
4546 if (tem == NULL)
4548 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4549 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4550 else
4551 warning (0, "unsupported argument to %<__builtin_return_address%>");
4552 return const0_rtx;
4555 /* For __builtin_frame_address, return what we've got. */
4556 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4557 return tem;
4559 if (!REG_P (tem)
4560 && ! CONSTANT_P (tem))
4561 tem = copy_addr_to_reg (tem);
4562 return tem;
4566 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4567 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4568 is the same as for allocate_dynamic_stack_space. */
4570 static rtx
4571 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4573 rtx op0;
4574 rtx result;
4575 bool valid_arglist;
4576 unsigned int align;
4577 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4578 == BUILT_IN_ALLOCA_WITH_ALIGN);
4580 /* Emit normal call if we use mudflap. */
4581 if (flag_mudflap)
4582 return NULL_RTX;
4584 valid_arglist
4585 = (alloca_with_align
4586 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4587 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4589 if (!valid_arglist)
4590 return NULL_RTX;
4592 /* Compute the argument. */
4593 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4595 /* Compute the alignment. */
4596 align = (alloca_with_align
4597 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4598 : BIGGEST_ALIGNMENT);
4600 /* Allocate the desired space. */
4601 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4602 result = convert_memory_address (ptr_mode, result);
4604 return result;
4607 /* Expand a call to bswap builtin in EXP.
4608 Return NULL_RTX if a normal call should be emitted rather than expanding the
4609 function in-line. If convenient, the result should be placed in TARGET.
4610 SUBTARGET may be used as the target for computing one of EXP's operands. */
4612 static rtx
4613 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4614 rtx subtarget)
4616 tree arg;
4617 rtx op0;
4619 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4620 return NULL_RTX;
4622 arg = CALL_EXPR_ARG (exp, 0);
4623 op0 = expand_expr (arg,
4624 subtarget && GET_MODE (subtarget) == target_mode
4625 ? subtarget : NULL_RTX,
4626 target_mode, EXPAND_NORMAL);
4627 if (GET_MODE (op0) != target_mode)
4628 op0 = convert_to_mode (target_mode, op0, 1);
4630 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4632 gcc_assert (target);
4634 return convert_to_mode (target_mode, target, 1);
4637 /* Expand a call to a unary builtin in EXP.
4638 Return NULL_RTX if a normal call should be emitted rather than expanding the
4639 function in-line. If convenient, the result should be placed in TARGET.
4640 SUBTARGET may be used as the target for computing one of EXP's operands. */
4642 static rtx
4643 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4644 rtx subtarget, optab op_optab)
4646 rtx op0;
4648 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4649 return NULL_RTX;
4651 /* Compute the argument. */
4652 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4653 (subtarget
4654 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4655 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4656 VOIDmode, EXPAND_NORMAL);
4657 /* Compute op, into TARGET if possible.
4658 Set TARGET to wherever the result comes back. */
4659 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4660 op_optab, op0, target, op_optab != clrsb_optab);
4661 gcc_assert (target);
4663 return convert_to_mode (target_mode, target, 0);
4666 /* Expand a call to __builtin_expect. We just return our argument
4667 as the builtin_expect semantic should've been already executed by
4668 tree branch prediction pass. */
4670 static rtx
4671 expand_builtin_expect (tree exp, rtx target)
4673 tree arg;
4675 if (call_expr_nargs (exp) < 2)
4676 return const0_rtx;
4677 arg = CALL_EXPR_ARG (exp, 0);
4679 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4680 /* When guessing was done, the hints should be already stripped away. */
4681 gcc_assert (!flag_guess_branch_prob
4682 || optimize == 0 || seen_error ());
4683 return target;
4686 /* Expand a call to __builtin_assume_aligned. We just return our first
4687 argument as the builtin_assume_aligned semantic should've been already
4688 executed by CCP. */
4690 static rtx
4691 expand_builtin_assume_aligned (tree exp, rtx target)
4693 if (call_expr_nargs (exp) < 2)
4694 return const0_rtx;
4695 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4696 EXPAND_NORMAL);
4697 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4698 && (call_expr_nargs (exp) < 3
4699 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4700 return target;
4703 void
4704 expand_builtin_trap (void)
4706 #ifdef HAVE_trap
4707 if (HAVE_trap)
4709 rtx insn = emit_insn (gen_trap ());
4710 /* For trap insns when not accumulating outgoing args force
4711 REG_ARGS_SIZE note to prevent crossjumping of calls with
4712 different args sizes. */
4713 if (!ACCUMULATE_OUTGOING_ARGS)
4714 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4716 else
4717 #endif
4718 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4719 emit_barrier ();
4722 /* Expand a call to __builtin_unreachable. We do nothing except emit
4723 a barrier saying that control flow will not pass here.
4725 It is the responsibility of the program being compiled to ensure
4726 that control flow does never reach __builtin_unreachable. */
4727 static void
4728 expand_builtin_unreachable (void)
4730 emit_barrier ();
4733 /* Expand EXP, a call to fabs, fabsf or fabsl.
4734 Return NULL_RTX if a normal call should be emitted rather than expanding
4735 the function inline. If convenient, the result should be placed
4736 in TARGET. SUBTARGET may be used as the target for computing
4737 the operand. */
4739 static rtx
4740 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4742 enum machine_mode mode;
4743 tree arg;
4744 rtx op0;
4746 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4747 return NULL_RTX;
4749 arg = CALL_EXPR_ARG (exp, 0);
4750 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4751 mode = TYPE_MODE (TREE_TYPE (arg));
4752 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4753 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4756 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4757 Return NULL is a normal call should be emitted rather than expanding the
4758 function inline. If convenient, the result should be placed in TARGET.
4759 SUBTARGET may be used as the target for computing the operand. */
4761 static rtx
4762 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4764 rtx op0, op1;
4765 tree arg;
4767 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4768 return NULL_RTX;
4770 arg = CALL_EXPR_ARG (exp, 0);
4771 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4773 arg = CALL_EXPR_ARG (exp, 1);
4774 op1 = expand_normal (arg);
4776 return expand_copysign (op0, op1, target);
4779 /* Create a new constant string literal and return a char* pointer to it.
4780 The STRING_CST value is the LEN characters at STR. */
4781 tree
4782 build_string_literal (int len, const char *str)
4784 tree t, elem, index, type;
4786 t = build_string (len, str);
4787 elem = build_type_variant (char_type_node, 1, 0);
4788 index = build_index_type (size_int (len - 1));
4789 type = build_array_type (elem, index);
4790 TREE_TYPE (t) = type;
4791 TREE_CONSTANT (t) = 1;
4792 TREE_READONLY (t) = 1;
4793 TREE_STATIC (t) = 1;
4795 type = build_pointer_type (elem);
4796 t = build1 (ADDR_EXPR, type,
4797 build4 (ARRAY_REF, elem,
4798 t, integer_zero_node, NULL_TREE, NULL_TREE));
4799 return t;
4802 /* Expand a call to __builtin___clear_cache. */
4804 static rtx
4805 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4807 #ifndef HAVE_clear_cache
4808 #ifdef CLEAR_INSN_CACHE
4809 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4810 does something. Just do the default expansion to a call to
4811 __clear_cache(). */
4812 return NULL_RTX;
4813 #else
4814 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4815 does nothing. There is no need to call it. Do nothing. */
4816 return const0_rtx;
4817 #endif /* CLEAR_INSN_CACHE */
4818 #else
4819 /* We have a "clear_cache" insn, and it will handle everything. */
4820 tree begin, end;
4821 rtx begin_rtx, end_rtx;
4823 /* We must not expand to a library call. If we did, any
4824 fallback library function in libgcc that might contain a call to
4825 __builtin___clear_cache() would recurse infinitely. */
4826 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4828 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4829 return const0_rtx;
4832 if (HAVE_clear_cache)
4834 struct expand_operand ops[2];
4836 begin = CALL_EXPR_ARG (exp, 0);
4837 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4839 end = CALL_EXPR_ARG (exp, 1);
4840 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4842 create_address_operand (&ops[0], begin_rtx);
4843 create_address_operand (&ops[1], end_rtx);
4844 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4845 return const0_rtx;
4847 return const0_rtx;
4848 #endif /* HAVE_clear_cache */
4851 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4853 static rtx
4854 round_trampoline_addr (rtx tramp)
4856 rtx temp, addend, mask;
4858 /* If we don't need too much alignment, we'll have been guaranteed
4859 proper alignment by get_trampoline_type. */
4860 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4861 return tramp;
4863 /* Round address up to desired boundary. */
4864 temp = gen_reg_rtx (Pmode);
4865 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4866 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4868 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4869 temp, 0, OPTAB_LIB_WIDEN);
4870 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4871 temp, 0, OPTAB_LIB_WIDEN);
4873 return tramp;
4876 static rtx
4877 expand_builtin_init_trampoline (tree exp, bool onstack)
4879 tree t_tramp, t_func, t_chain;
4880 rtx m_tramp, r_tramp, r_chain, tmp;
4882 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4883 POINTER_TYPE, VOID_TYPE))
4884 return NULL_RTX;
4886 t_tramp = CALL_EXPR_ARG (exp, 0);
4887 t_func = CALL_EXPR_ARG (exp, 1);
4888 t_chain = CALL_EXPR_ARG (exp, 2);
4890 r_tramp = expand_normal (t_tramp);
4891 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4892 MEM_NOTRAP_P (m_tramp) = 1;
4894 /* If ONSTACK, the TRAMP argument should be the address of a field
4895 within the local function's FRAME decl. Either way, let's see if
4896 we can fill in the MEM_ATTRs for this memory. */
4897 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4898 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4900 /* Creator of a heap trampoline is responsible for making sure the
4901 address is aligned to at least STACK_BOUNDARY. Normally malloc
4902 will ensure this anyhow. */
4903 tmp = round_trampoline_addr (r_tramp);
4904 if (tmp != r_tramp)
4906 m_tramp = change_address (m_tramp, BLKmode, tmp);
4907 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4908 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4911 /* The FUNC argument should be the address of the nested function.
4912 Extract the actual function decl to pass to the hook. */
4913 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4914 t_func = TREE_OPERAND (t_func, 0);
4915 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4917 r_chain = expand_normal (t_chain);
4919 /* Generate insns to initialize the trampoline. */
4920 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4922 if (onstack)
4924 trampolines_created = 1;
4926 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4927 "trampoline generated for nested function %qD", t_func);
4930 return const0_rtx;
4933 static rtx
4934 expand_builtin_adjust_trampoline (tree exp)
4936 rtx tramp;
4938 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4939 return NULL_RTX;
4941 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4942 tramp = round_trampoline_addr (tramp);
4943 if (targetm.calls.trampoline_adjust_address)
4944 tramp = targetm.calls.trampoline_adjust_address (tramp);
4946 return tramp;
4949 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4950 function. The function first checks whether the back end provides
4951 an insn to implement signbit for the respective mode. If not, it
4952 checks whether the floating point format of the value is such that
4953 the sign bit can be extracted. If that is not the case, the
4954 function returns NULL_RTX to indicate that a normal call should be
4955 emitted rather than expanding the function in-line. EXP is the
4956 expression that is a call to the builtin function; if convenient,
4957 the result should be placed in TARGET. */
4958 static rtx
4959 expand_builtin_signbit (tree exp, rtx target)
4961 const struct real_format *fmt;
4962 enum machine_mode fmode, imode, rmode;
4963 tree arg;
4964 int word, bitpos;
4965 enum insn_code icode;
4966 rtx temp;
4967 location_t loc = EXPR_LOCATION (exp);
4969 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4970 return NULL_RTX;
4972 arg = CALL_EXPR_ARG (exp, 0);
4973 fmode = TYPE_MODE (TREE_TYPE (arg));
4974 rmode = TYPE_MODE (TREE_TYPE (exp));
4975 fmt = REAL_MODE_FORMAT (fmode);
4977 arg = builtin_save_expr (arg);
4979 /* Expand the argument yielding a RTX expression. */
4980 temp = expand_normal (arg);
4982 /* Check if the back end provides an insn that handles signbit for the
4983 argument's mode. */
4984 icode = optab_handler (signbit_optab, fmode);
4985 if (icode != CODE_FOR_nothing)
4987 rtx last = get_last_insn ();
4988 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4989 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4990 return target;
4991 delete_insns_since (last);
4994 /* For floating point formats without a sign bit, implement signbit
4995 as "ARG < 0.0". */
4996 bitpos = fmt->signbit_ro;
4997 if (bitpos < 0)
4999 /* But we can't do this if the format supports signed zero. */
5000 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5001 return NULL_RTX;
5003 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5004 build_real (TREE_TYPE (arg), dconst0));
5005 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5008 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5010 imode = int_mode_for_mode (fmode);
5011 if (imode == BLKmode)
5012 return NULL_RTX;
5013 temp = gen_lowpart (imode, temp);
5015 else
5017 imode = word_mode;
5018 /* Handle targets with different FP word orders. */
5019 if (FLOAT_WORDS_BIG_ENDIAN)
5020 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5021 else
5022 word = bitpos / BITS_PER_WORD;
5023 temp = operand_subword_force (temp, word, fmode);
5024 bitpos = bitpos % BITS_PER_WORD;
5027 /* Force the intermediate word_mode (or narrower) result into a
5028 register. This avoids attempting to create paradoxical SUBREGs
5029 of floating point modes below. */
5030 temp = force_reg (imode, temp);
5032 /* If the bitpos is within the "result mode" lowpart, the operation
5033 can be implement with a single bitwise AND. Otherwise, we need
5034 a right shift and an AND. */
5036 if (bitpos < GET_MODE_BITSIZE (rmode))
5038 double_int mask = double_int_zero.set_bit (bitpos);
5040 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5041 temp = gen_lowpart (rmode, temp);
5042 temp = expand_binop (rmode, and_optab, temp,
5043 immed_double_int_const (mask, rmode),
5044 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5046 else
5048 /* Perform a logical right shift to place the signbit in the least
5049 significant bit, then truncate the result to the desired mode
5050 and mask just this bit. */
5051 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5052 temp = gen_lowpart (rmode, temp);
5053 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5054 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5057 return temp;
5060 /* Expand fork or exec calls. TARGET is the desired target of the
5061 call. EXP is the call. FN is the
5062 identificator of the actual function. IGNORE is nonzero if the
5063 value is to be ignored. */
5065 static rtx
5066 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5068 tree id, decl;
5069 tree call;
5071 /* If we are not profiling, just call the function. */
5072 if (!profile_arc_flag)
5073 return NULL_RTX;
5075 /* Otherwise call the wrapper. This should be equivalent for the rest of
5076 compiler, so the code does not diverge, and the wrapper may run the
5077 code necessary for keeping the profiling sane. */
5079 switch (DECL_FUNCTION_CODE (fn))
5081 case BUILT_IN_FORK:
5082 id = get_identifier ("__gcov_fork");
5083 break;
5085 case BUILT_IN_EXECL:
5086 id = get_identifier ("__gcov_execl");
5087 break;
5089 case BUILT_IN_EXECV:
5090 id = get_identifier ("__gcov_execv");
5091 break;
5093 case BUILT_IN_EXECLP:
5094 id = get_identifier ("__gcov_execlp");
5095 break;
5097 case BUILT_IN_EXECLE:
5098 id = get_identifier ("__gcov_execle");
5099 break;
5101 case BUILT_IN_EXECVP:
5102 id = get_identifier ("__gcov_execvp");
5103 break;
5105 case BUILT_IN_EXECVE:
5106 id = get_identifier ("__gcov_execve");
5107 break;
5109 default:
5110 gcc_unreachable ();
5113 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5114 FUNCTION_DECL, id, TREE_TYPE (fn));
5115 DECL_EXTERNAL (decl) = 1;
5116 TREE_PUBLIC (decl) = 1;
5117 DECL_ARTIFICIAL (decl) = 1;
5118 TREE_NOTHROW (decl) = 1;
5119 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5120 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5121 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5122 return expand_call (call, target, ignore);
5127 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5128 the pointer in these functions is void*, the tree optimizers may remove
5129 casts. The mode computed in expand_builtin isn't reliable either, due
5130 to __sync_bool_compare_and_swap.
5132 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5133 group of builtins. This gives us log2 of the mode size. */
5135 static inline enum machine_mode
5136 get_builtin_sync_mode (int fcode_diff)
5138 /* The size is not negotiable, so ask not to get BLKmode in return
5139 if the target indicates that a smaller size would be better. */
5140 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5143 /* Expand the memory expression LOC and return the appropriate memory operand
5144 for the builtin_sync operations. */
5146 static rtx
5147 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5149 rtx addr, mem;
5151 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5152 addr = convert_memory_address (Pmode, addr);
5154 /* Note that we explicitly do not want any alias information for this
5155 memory, so that we kill all other live memories. Otherwise we don't
5156 satisfy the full barrier semantics of the intrinsic. */
5157 mem = validize_mem (gen_rtx_MEM (mode, addr));
5159 /* The alignment needs to be at least according to that of the mode. */
5160 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5161 get_pointer_alignment (loc)));
5162 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5163 MEM_VOLATILE_P (mem) = 1;
5165 return mem;
5168 /* Make sure an argument is in the right mode.
5169 EXP is the tree argument.
5170 MODE is the mode it should be in. */
5172 static rtx
5173 expand_expr_force_mode (tree exp, enum machine_mode mode)
5175 rtx val;
5176 enum machine_mode old_mode;
5178 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5179 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5180 of CONST_INTs, where we know the old_mode only from the call argument. */
5182 old_mode = GET_MODE (val);
5183 if (old_mode == VOIDmode)
5184 old_mode = TYPE_MODE (TREE_TYPE (exp));
5185 val = convert_modes (mode, old_mode, val, 1);
5186 return val;
5190 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5191 EXP is the CALL_EXPR. CODE is the rtx code
5192 that corresponds to the arithmetic or logical operation from the name;
5193 an exception here is that NOT actually means NAND. TARGET is an optional
5194 place for us to store the results; AFTER is true if this is the
5195 fetch_and_xxx form. */
5197 static rtx
5198 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5199 enum rtx_code code, bool after,
5200 rtx target)
5202 rtx val, mem;
5203 location_t loc = EXPR_LOCATION (exp);
5205 if (code == NOT && warn_sync_nand)
5207 tree fndecl = get_callee_fndecl (exp);
5208 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5210 static bool warned_f_a_n, warned_n_a_f;
5212 switch (fcode)
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5217 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5218 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5219 if (warned_f_a_n)
5220 break;
5222 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5223 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5224 warned_f_a_n = true;
5225 break;
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5230 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5231 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5232 if (warned_n_a_f)
5233 break;
5235 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5236 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5237 warned_n_a_f = true;
5238 break;
5240 default:
5241 gcc_unreachable ();
5245 /* Expand the operands. */
5246 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5247 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5250 after);
5253 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5254 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5255 true if this is the boolean form. TARGET is a place for us to store the
5256 results; this is NOT optional if IS_BOOL is true. */
5258 static rtx
5259 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5260 bool is_bool, rtx target)
5262 rtx old_val, new_val, mem;
5263 rtx *pbool, *poval;
5265 /* Expand the operands. */
5266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5268 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5270 pbool = poval = NULL;
5271 if (target != const0_rtx)
5273 if (is_bool)
5274 pbool = &target;
5275 else
5276 poval = &target;
5278 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5279 false, MEMMODEL_SEQ_CST,
5280 MEMMODEL_SEQ_CST))
5281 return NULL_RTX;
5283 return target;
5286 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5287 general form is actually an atomic exchange, and some targets only
5288 support a reduced form with the second argument being a constant 1.
5289 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5290 the results. */
5292 static rtx
5293 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5294 rtx target)
5296 rtx val, mem;
5298 /* Expand the operands. */
5299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5300 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5302 return expand_sync_lock_test_and_set (target, mem, val);
5305 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5307 static void
5308 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5310 rtx mem;
5312 /* Expand the operands. */
5313 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5315 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5318 /* Given an integer representing an ``enum memmodel'', verify its
5319 correctness and return the memory model enum. */
5321 static enum memmodel
5322 get_memmodel (tree exp)
5324 rtx op;
5325 unsigned HOST_WIDE_INT val;
5327 /* If the parameter is not a constant, it's a run time value so we'll just
5328 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5329 if (TREE_CODE (exp) != INTEGER_CST)
5330 return MEMMODEL_SEQ_CST;
5332 op = expand_normal (exp);
5334 val = INTVAL (op);
5335 if (targetm.memmodel_check)
5336 val = targetm.memmodel_check (val);
5337 else if (val & ~MEMMODEL_MASK)
5339 warning (OPT_Winvalid_memory_model,
5340 "Unknown architecture specifier in memory model to builtin.");
5341 return MEMMODEL_SEQ_CST;
5344 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5346 warning (OPT_Winvalid_memory_model,
5347 "invalid memory model argument to builtin");
5348 return MEMMODEL_SEQ_CST;
5351 return (enum memmodel) val;
5354 /* Expand the __atomic_exchange intrinsic:
5355 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5356 EXP is the CALL_EXPR.
5357 TARGET is an optional place for us to store the results. */
5359 static rtx
5360 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5362 rtx val, mem;
5363 enum memmodel model;
5365 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5366 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5368 error ("invalid memory model for %<__atomic_exchange%>");
5369 return NULL_RTX;
5372 if (!flag_inline_atomics)
5373 return NULL_RTX;
5375 /* Expand the operands. */
5376 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5377 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5379 return expand_atomic_exchange (target, mem, val, model);
5382 /* Expand the __atomic_compare_exchange intrinsic:
5383 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5384 TYPE desired, BOOL weak,
5385 enum memmodel success,
5386 enum memmodel failure)
5387 EXP is the CALL_EXPR.
5388 TARGET is an optional place for us to store the results. */
5390 static rtx
5391 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5392 rtx target)
5394 rtx expect, desired, mem, oldval;
5395 enum memmodel success, failure;
5396 tree weak;
5397 bool is_weak;
5399 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5400 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5402 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5403 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5405 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5406 return NULL_RTX;
5409 if (failure > success)
5411 error ("failure memory model cannot be stronger than success "
5412 "memory model for %<__atomic_compare_exchange%>");
5413 return NULL_RTX;
5416 if (!flag_inline_atomics)
5417 return NULL_RTX;
5419 /* Expand the operands. */
5420 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5422 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5423 expect = convert_memory_address (Pmode, expect);
5424 expect = gen_rtx_MEM (mode, expect);
5425 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5427 weak = CALL_EXPR_ARG (exp, 3);
5428 is_weak = false;
5429 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5430 is_weak = true;
5432 oldval = expect;
5433 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5434 &oldval, mem, oldval, desired,
5435 is_weak, success, failure))
5436 return NULL_RTX;
5438 if (oldval != expect)
5439 emit_move_insn (expect, oldval);
5441 return target;
5444 /* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5449 static rtx
5450 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5452 rtx mem;
5453 enum memmodel model;
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5456 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5457 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5459 error ("invalid memory model for %<__atomic_load%>");
5460 return NULL_RTX;
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5469 return expand_atomic_load (target, mem, model);
5473 /* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5478 static rtx
5479 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5481 rtx mem, val;
5482 enum memmodel model;
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5485 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5486 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5487 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5489 error ("invalid memory model for %<__atomic_store%>");
5490 return NULL_RTX;
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5500 return expand_atomic_store (mem, val, model, false);
5503 /* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5508 FETCH_AFTER is true if returning the result of the operation.
5509 FETCH_AFTER is false if returning the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5514 static rtx
5515 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5538 /* Return if a different routine isn't needed for the library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
5559 if (code == NOT)
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5569 return ret;
5573 #ifndef HAVE_atomic_clear
5574 # define HAVE_atomic_clear 0
5575 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5576 #endif
5578 /* Expand an atomic clear operation.
5579 void _atomic_clear (BOOL *obj, enum memmodel)
5580 EXP is the call expression. */
5582 static rtx
5583 expand_builtin_atomic_clear (tree exp)
5585 enum machine_mode mode;
5586 rtx mem, ret;
5587 enum memmodel model;
5589 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5590 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5591 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5593 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5594 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5596 error ("invalid memory model for %<__atomic_store%>");
5597 return const0_rtx;
5600 if (HAVE_atomic_clear)
5602 emit_insn (gen_atomic_clear (mem, model));
5603 return const0_rtx;
5606 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5607 Failing that, a store is issued by __atomic_store. The only way this can
5608 fail is if the bool type is larger than a word size. Unlikely, but
5609 handle it anyway for completeness. Assume a single threaded model since
5610 there is no atomic support in this case, and no barriers are required. */
5611 ret = expand_atomic_store (mem, const0_rtx, model, true);
5612 if (!ret)
5613 emit_move_insn (mem, const0_rtx);
5614 return const0_rtx;
5617 /* Expand an atomic test_and_set operation.
5618 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5619 EXP is the call expression. */
5621 static rtx
5622 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5624 rtx mem;
5625 enum memmodel model;
5626 enum machine_mode mode;
5628 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5629 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5630 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5632 return expand_atomic_test_and_set (target, mem, model);
5636 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5637 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5639 static tree
5640 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5642 int size;
5643 enum machine_mode mode;
5644 unsigned int mode_align, type_align;
5646 if (TREE_CODE (arg0) != INTEGER_CST)
5647 return NULL_TREE;
5649 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5650 mode = mode_for_size (size, MODE_INT, 0);
5651 mode_align = GET_MODE_ALIGNMENT (mode);
5653 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5654 type_align = mode_align;
5655 else
5657 tree ttype = TREE_TYPE (arg1);
5659 /* This function is usually invoked and folded immediately by the front
5660 end before anything else has a chance to look at it. The pointer
5661 parameter at this point is usually cast to a void *, so check for that
5662 and look past the cast. */
5663 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5664 && VOID_TYPE_P (TREE_TYPE (ttype)))
5665 arg1 = TREE_OPERAND (arg1, 0);
5667 ttype = TREE_TYPE (arg1);
5668 gcc_assert (POINTER_TYPE_P (ttype));
5670 /* Get the underlying type of the object. */
5671 ttype = TREE_TYPE (ttype);
5672 type_align = TYPE_ALIGN (ttype);
5675 /* If the object has smaller alignment, the the lock free routines cannot
5676 be used. */
5677 if (type_align < mode_align)
5678 return boolean_false_node;
5680 /* Check if a compare_and_swap pattern exists for the mode which represents
5681 the required size. The pattern is not allowed to fail, so the existence
5682 of the pattern indicates support is present. */
5683 if (can_compare_and_swap_p (mode, true))
5684 return boolean_true_node;
5685 else
5686 return boolean_false_node;
5689 /* Return true if the parameters to call EXP represent an object which will
5690 always generate lock free instructions. The first argument represents the
5691 size of the object, and the second parameter is a pointer to the object
5692 itself. If NULL is passed for the object, then the result is based on
5693 typical alignment for an object of the specified size. Otherwise return
5694 false. */
5696 static rtx
5697 expand_builtin_atomic_always_lock_free (tree exp)
5699 tree size;
5700 tree arg0 = CALL_EXPR_ARG (exp, 0);
5701 tree arg1 = CALL_EXPR_ARG (exp, 1);
5703 if (TREE_CODE (arg0) != INTEGER_CST)
5705 error ("non-constant argument 1 to __atomic_always_lock_free");
5706 return const0_rtx;
5709 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5710 if (size == boolean_true_node)
5711 return const1_rtx;
5712 return const0_rtx;
5715 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5716 is lock free on this architecture. */
5718 static tree
5719 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5721 if (!flag_inline_atomics)
5722 return NULL_TREE;
5724 /* If it isn't always lock free, don't generate a result. */
5725 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5726 return boolean_true_node;
5728 return NULL_TREE;
5731 /* Return true if the parameters to call EXP represent an object which will
5732 always generate lock free instructions. The first argument represents the
5733 size of the object, and the second parameter is a pointer to the object
5734 itself. If NULL is passed for the object, then the result is based on
5735 typical alignment for an object of the specified size. Otherwise return
5736 NULL*/
5738 static rtx
5739 expand_builtin_atomic_is_lock_free (tree exp)
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5745 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5747 error ("non-integer argument 1 to __atomic_is_lock_free");
5748 return NULL_RTX;
5751 if (!flag_inline_atomics)
5752 return NULL_RTX;
5754 /* If the value is known at compile time, return the RTX for it. */
5755 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5756 if (size == boolean_true_node)
5757 return const1_rtx;
5759 return NULL_RTX;
5762 /* Expand the __atomic_thread_fence intrinsic:
5763 void __atomic_thread_fence (enum memmodel)
5764 EXP is the CALL_EXPR. */
5766 static void
5767 expand_builtin_atomic_thread_fence (tree exp)
5769 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5770 expand_mem_thread_fence (model);
5773 /* Expand the __atomic_signal_fence intrinsic:
5774 void __atomic_signal_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5777 static void
5778 expand_builtin_atomic_signal_fence (tree exp)
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_signal_fence (model);
5784 /* Expand the __sync_synchronize intrinsic. */
5786 static void
5787 expand_builtin_sync_synchronize (void)
5789 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5792 static rtx
5793 expand_builtin_thread_pointer (tree exp, rtx target)
5795 enum insn_code icode;
5796 if (!validate_arglist (exp, VOID_TYPE))
5797 return const0_rtx;
5798 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5799 if (icode != CODE_FOR_nothing)
5801 struct expand_operand op;
5802 if (!REG_P (target) || GET_MODE (target) != Pmode)
5803 target = gen_reg_rtx (Pmode);
5804 create_output_operand (&op, target, Pmode);
5805 expand_insn (icode, 1, &op);
5806 return target;
5808 error ("__builtin_thread_pointer is not supported on this target");
5809 return const0_rtx;
5812 static void
5813 expand_builtin_set_thread_pointer (tree exp)
5815 enum insn_code icode;
5816 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5817 return;
5818 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5819 if (icode != CODE_FOR_nothing)
5821 struct expand_operand op;
5822 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5823 Pmode, EXPAND_NORMAL);
5824 create_input_operand (&op, val, Pmode);
5825 expand_insn (icode, 1, &op);
5826 return;
5828 error ("__builtin_set_thread_pointer is not supported on this target");
5832 /* Expand an expression EXP that calls a built-in function,
5833 with result going to TARGET if that's convenient
5834 (and in mode MODE if that's convenient).
5835 SUBTARGET may be used as the target for computing one of EXP's operands.
5836 IGNORE is nonzero if the value is to be ignored. */
5839 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5840 int ignore)
5842 tree fndecl = get_callee_fndecl (exp);
5843 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5844 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5845 int flags;
5847 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5848 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5850 /* When not optimizing, generate calls to library functions for a certain
5851 set of builtins. */
5852 if (!optimize
5853 && !called_as_built_in (fndecl)
5854 && fcode != BUILT_IN_FORK
5855 && fcode != BUILT_IN_EXECL
5856 && fcode != BUILT_IN_EXECV
5857 && fcode != BUILT_IN_EXECLP
5858 && fcode != BUILT_IN_EXECLE
5859 && fcode != BUILT_IN_EXECVP
5860 && fcode != BUILT_IN_EXECVE
5861 && fcode != BUILT_IN_ALLOCA
5862 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5863 && fcode != BUILT_IN_FREE)
5864 return expand_call (exp, target, ignore);
5866 /* The built-in function expanders test for target == const0_rtx
5867 to determine whether the function's result will be ignored. */
5868 if (ignore)
5869 target = const0_rtx;
5871 /* If the result of a pure or const built-in function is ignored, and
5872 none of its arguments are volatile, we can avoid expanding the
5873 built-in call and just evaluate the arguments for side-effects. */
5874 if (target == const0_rtx
5875 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5876 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5878 bool volatilep = false;
5879 tree arg;
5880 call_expr_arg_iterator iter;
5882 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5883 if (TREE_THIS_VOLATILE (arg))
5885 volatilep = true;
5886 break;
5889 if (! volatilep)
5891 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5892 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5893 return const0_rtx;
5897 switch (fcode)
5899 CASE_FLT_FN (BUILT_IN_FABS):
5900 case BUILT_IN_FABSD32:
5901 case BUILT_IN_FABSD64:
5902 case BUILT_IN_FABSD128:
5903 target = expand_builtin_fabs (exp, target, subtarget);
5904 if (target)
5905 return target;
5906 break;
5908 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5909 target = expand_builtin_copysign (exp, target, subtarget);
5910 if (target)
5911 return target;
5912 break;
5914 /* Just do a normal library call if we were unable to fold
5915 the values. */
5916 CASE_FLT_FN (BUILT_IN_CABS):
5917 break;
5919 CASE_FLT_FN (BUILT_IN_EXP):
5920 CASE_FLT_FN (BUILT_IN_EXP10):
5921 CASE_FLT_FN (BUILT_IN_POW10):
5922 CASE_FLT_FN (BUILT_IN_EXP2):
5923 CASE_FLT_FN (BUILT_IN_EXPM1):
5924 CASE_FLT_FN (BUILT_IN_LOGB):
5925 CASE_FLT_FN (BUILT_IN_LOG):
5926 CASE_FLT_FN (BUILT_IN_LOG10):
5927 CASE_FLT_FN (BUILT_IN_LOG2):
5928 CASE_FLT_FN (BUILT_IN_LOG1P):
5929 CASE_FLT_FN (BUILT_IN_TAN):
5930 CASE_FLT_FN (BUILT_IN_ASIN):
5931 CASE_FLT_FN (BUILT_IN_ACOS):
5932 CASE_FLT_FN (BUILT_IN_ATAN):
5933 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5934 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5935 because of possible accuracy problems. */
5936 if (! flag_unsafe_math_optimizations)
5937 break;
5938 CASE_FLT_FN (BUILT_IN_SQRT):
5939 CASE_FLT_FN (BUILT_IN_FLOOR):
5940 CASE_FLT_FN (BUILT_IN_CEIL):
5941 CASE_FLT_FN (BUILT_IN_TRUNC):
5942 CASE_FLT_FN (BUILT_IN_ROUND):
5943 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5944 CASE_FLT_FN (BUILT_IN_RINT):
5945 target = expand_builtin_mathfn (exp, target, subtarget);
5946 if (target)
5947 return target;
5948 break;
5950 CASE_FLT_FN (BUILT_IN_FMA):
5951 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5952 if (target)
5953 return target;
5954 break;
5956 CASE_FLT_FN (BUILT_IN_ILOGB):
5957 if (! flag_unsafe_math_optimizations)
5958 break;
5959 CASE_FLT_FN (BUILT_IN_ISINF):
5960 CASE_FLT_FN (BUILT_IN_FINITE):
5961 case BUILT_IN_ISFINITE:
5962 case BUILT_IN_ISNORMAL:
5963 target = expand_builtin_interclass_mathfn (exp, target);
5964 if (target)
5965 return target;
5966 break;
5968 CASE_FLT_FN (BUILT_IN_ICEIL):
5969 CASE_FLT_FN (BUILT_IN_LCEIL):
5970 CASE_FLT_FN (BUILT_IN_LLCEIL):
5971 CASE_FLT_FN (BUILT_IN_LFLOOR):
5972 CASE_FLT_FN (BUILT_IN_IFLOOR):
5973 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5974 target = expand_builtin_int_roundingfn (exp, target);
5975 if (target)
5976 return target;
5977 break;
5979 CASE_FLT_FN (BUILT_IN_IRINT):
5980 CASE_FLT_FN (BUILT_IN_LRINT):
5981 CASE_FLT_FN (BUILT_IN_LLRINT):
5982 CASE_FLT_FN (BUILT_IN_IROUND):
5983 CASE_FLT_FN (BUILT_IN_LROUND):
5984 CASE_FLT_FN (BUILT_IN_LLROUND):
5985 target = expand_builtin_int_roundingfn_2 (exp, target);
5986 if (target)
5987 return target;
5988 break;
5990 CASE_FLT_FN (BUILT_IN_POWI):
5991 target = expand_builtin_powi (exp, target);
5992 if (target)
5993 return target;
5994 break;
5996 CASE_FLT_FN (BUILT_IN_ATAN2):
5997 CASE_FLT_FN (BUILT_IN_LDEXP):
5998 CASE_FLT_FN (BUILT_IN_SCALB):
5999 CASE_FLT_FN (BUILT_IN_SCALBN):
6000 CASE_FLT_FN (BUILT_IN_SCALBLN):
6001 if (! flag_unsafe_math_optimizations)
6002 break;
6004 CASE_FLT_FN (BUILT_IN_FMOD):
6005 CASE_FLT_FN (BUILT_IN_REMAINDER):
6006 CASE_FLT_FN (BUILT_IN_DREM):
6007 CASE_FLT_FN (BUILT_IN_POW):
6008 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6009 if (target)
6010 return target;
6011 break;
6013 CASE_FLT_FN (BUILT_IN_CEXPI):
6014 target = expand_builtin_cexpi (exp, target);
6015 gcc_assert (target);
6016 return target;
6018 CASE_FLT_FN (BUILT_IN_SIN):
6019 CASE_FLT_FN (BUILT_IN_COS):
6020 if (! flag_unsafe_math_optimizations)
6021 break;
6022 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6023 if (target)
6024 return target;
6025 break;
6027 CASE_FLT_FN (BUILT_IN_SINCOS):
6028 if (! flag_unsafe_math_optimizations)
6029 break;
6030 target = expand_builtin_sincos (exp);
6031 if (target)
6032 return target;
6033 break;
6035 case BUILT_IN_APPLY_ARGS:
6036 return expand_builtin_apply_args ();
6038 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6039 FUNCTION with a copy of the parameters described by
6040 ARGUMENTS, and ARGSIZE. It returns a block of memory
6041 allocated on the stack into which is stored all the registers
6042 that might possibly be used for returning the result of a
6043 function. ARGUMENTS is the value returned by
6044 __builtin_apply_args. ARGSIZE is the number of bytes of
6045 arguments that must be copied. ??? How should this value be
6046 computed? We'll also need a safe worst case value for varargs
6047 functions. */
6048 case BUILT_IN_APPLY:
6049 if (!validate_arglist (exp, POINTER_TYPE,
6050 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6051 && !validate_arglist (exp, REFERENCE_TYPE,
6052 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6053 return const0_rtx;
6054 else
6056 rtx ops[3];
6058 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6059 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6060 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6062 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6065 /* __builtin_return (RESULT) causes the function to return the
6066 value described by RESULT. RESULT is address of the block of
6067 memory returned by __builtin_apply. */
6068 case BUILT_IN_RETURN:
6069 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6070 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6071 return const0_rtx;
6073 case BUILT_IN_SAVEREGS:
6074 return expand_builtin_saveregs ();
6076 case BUILT_IN_VA_ARG_PACK:
6077 /* All valid uses of __builtin_va_arg_pack () are removed during
6078 inlining. */
6079 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6080 return const0_rtx;
6082 case BUILT_IN_VA_ARG_PACK_LEN:
6083 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6084 inlining. */
6085 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6086 return const0_rtx;
6088 /* Return the address of the first anonymous stack arg. */
6089 case BUILT_IN_NEXT_ARG:
6090 if (fold_builtin_next_arg (exp, false))
6091 return const0_rtx;
6092 return expand_builtin_next_arg ();
6094 case BUILT_IN_CLEAR_CACHE:
6095 target = expand_builtin___clear_cache (exp);
6096 if (target)
6097 return target;
6098 break;
6100 case BUILT_IN_CLASSIFY_TYPE:
6101 return expand_builtin_classify_type (exp);
6103 case BUILT_IN_CONSTANT_P:
6104 return const0_rtx;
6106 case BUILT_IN_FRAME_ADDRESS:
6107 case BUILT_IN_RETURN_ADDRESS:
6108 return expand_builtin_frame_address (fndecl, exp);
6110 /* Returns the address of the area where the structure is returned.
6111 0 otherwise. */
6112 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6113 if (call_expr_nargs (exp) != 0
6114 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6115 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6116 return const0_rtx;
6117 else
6118 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6120 case BUILT_IN_ALLOCA:
6121 case BUILT_IN_ALLOCA_WITH_ALIGN:
6122 /* If the allocation stems from the declaration of a variable-sized
6123 object, it cannot accumulate. */
6124 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6125 if (target)
6126 return target;
6127 break;
6129 case BUILT_IN_STACK_SAVE:
6130 return expand_stack_save ();
6132 case BUILT_IN_STACK_RESTORE:
6133 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6134 return const0_rtx;
6136 case BUILT_IN_BSWAP16:
6137 case BUILT_IN_BSWAP32:
6138 case BUILT_IN_BSWAP64:
6139 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6140 if (target)
6141 return target;
6142 break;
6144 CASE_INT_FN (BUILT_IN_FFS):
6145 target = expand_builtin_unop (target_mode, exp, target,
6146 subtarget, ffs_optab);
6147 if (target)
6148 return target;
6149 break;
6151 CASE_INT_FN (BUILT_IN_CLZ):
6152 target = expand_builtin_unop (target_mode, exp, target,
6153 subtarget, clz_optab);
6154 if (target)
6155 return target;
6156 break;
6158 CASE_INT_FN (BUILT_IN_CTZ):
6159 target = expand_builtin_unop (target_mode, exp, target,
6160 subtarget, ctz_optab);
6161 if (target)
6162 return target;
6163 break;
6165 CASE_INT_FN (BUILT_IN_CLRSB):
6166 target = expand_builtin_unop (target_mode, exp, target,
6167 subtarget, clrsb_optab);
6168 if (target)
6169 return target;
6170 break;
6172 CASE_INT_FN (BUILT_IN_POPCOUNT):
6173 target = expand_builtin_unop (target_mode, exp, target,
6174 subtarget, popcount_optab);
6175 if (target)
6176 return target;
6177 break;
6179 CASE_INT_FN (BUILT_IN_PARITY):
6180 target = expand_builtin_unop (target_mode, exp, target,
6181 subtarget, parity_optab);
6182 if (target)
6183 return target;
6184 break;
6186 case BUILT_IN_STRLEN:
6187 target = expand_builtin_strlen (exp, target, target_mode);
6188 if (target)
6189 return target;
6190 break;
6192 case BUILT_IN_STRCPY:
6193 target = expand_builtin_strcpy (exp, target);
6194 if (target)
6195 return target;
6196 break;
6198 case BUILT_IN_STRNCPY:
6199 target = expand_builtin_strncpy (exp, target);
6200 if (target)
6201 return target;
6202 break;
6204 case BUILT_IN_STPCPY:
6205 target = expand_builtin_stpcpy (exp, target, mode);
6206 if (target)
6207 return target;
6208 break;
6210 case BUILT_IN_MEMCPY:
6211 target = expand_builtin_memcpy (exp, target);
6212 if (target)
6213 return target;
6214 break;
6216 case BUILT_IN_MEMPCPY:
6217 target = expand_builtin_mempcpy (exp, target, mode);
6218 if (target)
6219 return target;
6220 break;
6222 case BUILT_IN_MEMSET:
6223 target = expand_builtin_memset (exp, target, mode);
6224 if (target)
6225 return target;
6226 break;
6228 case BUILT_IN_BZERO:
6229 target = expand_builtin_bzero (exp);
6230 if (target)
6231 return target;
6232 break;
6234 case BUILT_IN_STRCMP:
6235 target = expand_builtin_strcmp (exp, target);
6236 if (target)
6237 return target;
6238 break;
6240 case BUILT_IN_STRNCMP:
6241 target = expand_builtin_strncmp (exp, target, mode);
6242 if (target)
6243 return target;
6244 break;
6246 case BUILT_IN_BCMP:
6247 case BUILT_IN_MEMCMP:
6248 target = expand_builtin_memcmp (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6253 case BUILT_IN_SETJMP:
6254 /* This should have been lowered to the builtins below. */
6255 gcc_unreachable ();
6257 case BUILT_IN_SETJMP_SETUP:
6258 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6259 and the receiver label. */
6260 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6262 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6263 VOIDmode, EXPAND_NORMAL);
6264 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6265 rtx label_r = label_rtx (label);
6267 /* This is copied from the handling of non-local gotos. */
6268 expand_builtin_setjmp_setup (buf_addr, label_r);
6269 nonlocal_goto_handler_labels
6270 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6271 nonlocal_goto_handler_labels);
6272 /* ??? Do not let expand_label treat us as such since we would
6273 not want to be both on the list of non-local labels and on
6274 the list of forced labels. */
6275 FORCED_LABEL (label) = 0;
6276 return const0_rtx;
6278 break;
6280 case BUILT_IN_SETJMP_DISPATCHER:
6281 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6282 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6284 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6285 rtx label_r = label_rtx (label);
6287 /* Remove the dispatcher label from the list of non-local labels
6288 since the receiver labels have been added to it above. */
6289 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6290 return const0_rtx;
6292 break;
6294 case BUILT_IN_SETJMP_RECEIVER:
6295 /* __builtin_setjmp_receiver is passed the receiver label. */
6296 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6298 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6299 rtx label_r = label_rtx (label);
6301 expand_builtin_setjmp_receiver (label_r);
6302 return const0_rtx;
6304 break;
6306 /* __builtin_longjmp is passed a pointer to an array of five words.
6307 It's similar to the C library longjmp function but works with
6308 __builtin_setjmp above. */
6309 case BUILT_IN_LONGJMP:
6310 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6312 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6313 VOIDmode, EXPAND_NORMAL);
6314 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6316 if (value != const1_rtx)
6318 error ("%<__builtin_longjmp%> second argument must be 1");
6319 return const0_rtx;
6322 expand_builtin_longjmp (buf_addr, value);
6323 return const0_rtx;
6325 break;
6327 case BUILT_IN_NONLOCAL_GOTO:
6328 target = expand_builtin_nonlocal_goto (exp);
6329 if (target)
6330 return target;
6331 break;
6333 /* This updates the setjmp buffer that is its argument with the value
6334 of the current stack pointer. */
6335 case BUILT_IN_UPDATE_SETJMP_BUF:
6336 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6338 rtx buf_addr
6339 = expand_normal (CALL_EXPR_ARG (exp, 0));
6341 expand_builtin_update_setjmp_buf (buf_addr);
6342 return const0_rtx;
6344 break;
6346 case BUILT_IN_TRAP:
6347 expand_builtin_trap ();
6348 return const0_rtx;
6350 case BUILT_IN_UNREACHABLE:
6351 expand_builtin_unreachable ();
6352 return const0_rtx;
6354 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6355 case BUILT_IN_SIGNBITD32:
6356 case BUILT_IN_SIGNBITD64:
6357 case BUILT_IN_SIGNBITD128:
6358 target = expand_builtin_signbit (exp, target);
6359 if (target)
6360 return target;
6361 break;
6363 /* Various hooks for the DWARF 2 __throw routine. */
6364 case BUILT_IN_UNWIND_INIT:
6365 expand_builtin_unwind_init ();
6366 return const0_rtx;
6367 case BUILT_IN_DWARF_CFA:
6368 return virtual_cfa_rtx;
6369 #ifdef DWARF2_UNWIND_INFO
6370 case BUILT_IN_DWARF_SP_COLUMN:
6371 return expand_builtin_dwarf_sp_column ();
6372 case BUILT_IN_INIT_DWARF_REG_SIZES:
6373 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6374 return const0_rtx;
6375 #endif
6376 case BUILT_IN_FROB_RETURN_ADDR:
6377 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6378 case BUILT_IN_EXTRACT_RETURN_ADDR:
6379 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6380 case BUILT_IN_EH_RETURN:
6381 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6382 CALL_EXPR_ARG (exp, 1));
6383 return const0_rtx;
6384 #ifdef EH_RETURN_DATA_REGNO
6385 case BUILT_IN_EH_RETURN_DATA_REGNO:
6386 return expand_builtin_eh_return_data_regno (exp);
6387 #endif
6388 case BUILT_IN_EXTEND_POINTER:
6389 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6390 case BUILT_IN_EH_POINTER:
6391 return expand_builtin_eh_pointer (exp);
6392 case BUILT_IN_EH_FILTER:
6393 return expand_builtin_eh_filter (exp);
6394 case BUILT_IN_EH_COPY_VALUES:
6395 return expand_builtin_eh_copy_values (exp);
6397 case BUILT_IN_VA_START:
6398 return expand_builtin_va_start (exp);
6399 case BUILT_IN_VA_END:
6400 return expand_builtin_va_end (exp);
6401 case BUILT_IN_VA_COPY:
6402 return expand_builtin_va_copy (exp);
6403 case BUILT_IN_EXPECT:
6404 return expand_builtin_expect (exp, target);
6405 case BUILT_IN_ASSUME_ALIGNED:
6406 return expand_builtin_assume_aligned (exp, target);
6407 case BUILT_IN_PREFETCH:
6408 expand_builtin_prefetch (exp);
6409 return const0_rtx;
6411 case BUILT_IN_INIT_TRAMPOLINE:
6412 return expand_builtin_init_trampoline (exp, true);
6413 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6414 return expand_builtin_init_trampoline (exp, false);
6415 case BUILT_IN_ADJUST_TRAMPOLINE:
6416 return expand_builtin_adjust_trampoline (exp);
6418 case BUILT_IN_FORK:
6419 case BUILT_IN_EXECL:
6420 case BUILT_IN_EXECV:
6421 case BUILT_IN_EXECLP:
6422 case BUILT_IN_EXECLE:
6423 case BUILT_IN_EXECVP:
6424 case BUILT_IN_EXECVE:
6425 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6431 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6432 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6433 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6434 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6435 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6436 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6437 if (target)
6438 return target;
6439 break;
6441 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6442 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6443 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6444 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6445 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6447 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6448 if (target)
6449 return target;
6450 break;
6452 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6453 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6454 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6455 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6456 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6458 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6464 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6465 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6466 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6467 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6469 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6475 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6476 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6477 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6478 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6480 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6486 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6487 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6488 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6489 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6490 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6491 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6497 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6498 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6499 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6500 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6502 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6503 if (target)
6504 return target;
6505 break;
6507 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6508 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6509 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6510 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6511 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6513 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6514 if (target)
6515 return target;
6516 break;
6518 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6519 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6520 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6521 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6522 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6524 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6525 if (target)
6526 return target;
6527 break;
6529 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6530 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6531 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6532 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6533 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6534 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6535 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6536 if (target)
6537 return target;
6538 break;
6540 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6541 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6542 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6543 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6544 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6546 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6547 if (target)
6548 return target;
6549 break;
6551 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6552 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6553 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6554 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6555 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6557 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6563 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6567 if (mode == VOIDmode)
6568 mode = TYPE_MODE (boolean_type_node);
6569 if (!target || !register_operand (target, mode))
6570 target = gen_reg_rtx (mode);
6572 mode = get_builtin_sync_mode
6573 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6574 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6575 if (target)
6576 return target;
6577 break;
6579 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6580 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6584 mode = get_builtin_sync_mode
6585 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6586 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6587 if (target)
6588 return target;
6589 break;
6591 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6592 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6596 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6597 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6598 if (target)
6599 return target;
6600 break;
6602 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6603 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6604 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6605 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6606 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6608 expand_builtin_sync_lock_release (mode, exp);
6609 return const0_rtx;
6611 case BUILT_IN_SYNC_SYNCHRONIZE:
6612 expand_builtin_sync_synchronize ();
6613 return const0_rtx;
6615 case BUILT_IN_ATOMIC_EXCHANGE_1:
6616 case BUILT_IN_ATOMIC_EXCHANGE_2:
6617 case BUILT_IN_ATOMIC_EXCHANGE_4:
6618 case BUILT_IN_ATOMIC_EXCHANGE_8:
6619 case BUILT_IN_ATOMIC_EXCHANGE_16:
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6621 target = expand_builtin_atomic_exchange (mode, exp, target);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6627 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6632 unsigned int nargs, z;
6633 vec<tree, va_gc> *vec;
6635 mode =
6636 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6637 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6638 if (target)
6639 return target;
6641 /* If this is turned into an external library call, the weak parameter
6642 must be dropped to match the expected parameter list. */
6643 nargs = call_expr_nargs (exp);
6644 vec_alloc (vec, nargs - 1);
6645 for (z = 0; z < 3; z++)
6646 vec->quick_push (CALL_EXPR_ARG (exp, z));
6647 /* Skip the boolean weak parameter. */
6648 for (z = 4; z < 6; z++)
6649 vec->quick_push (CALL_EXPR_ARG (exp, z));
6650 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6651 break;
6654 case BUILT_IN_ATOMIC_LOAD_1:
6655 case BUILT_IN_ATOMIC_LOAD_2:
6656 case BUILT_IN_ATOMIC_LOAD_4:
6657 case BUILT_IN_ATOMIC_LOAD_8:
6658 case BUILT_IN_ATOMIC_LOAD_16:
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6660 target = expand_builtin_atomic_load (mode, exp, target);
6661 if (target)
6662 return target;
6663 break;
6665 case BUILT_IN_ATOMIC_STORE_1:
6666 case BUILT_IN_ATOMIC_STORE_2:
6667 case BUILT_IN_ATOMIC_STORE_4:
6668 case BUILT_IN_ATOMIC_STORE_8:
6669 case BUILT_IN_ATOMIC_STORE_16:
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6671 target = expand_builtin_atomic_store (mode, exp);
6672 if (target)
6673 return const0_rtx;
6674 break;
6676 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6677 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6678 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6679 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6680 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6682 enum built_in_function lib;
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6685 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6687 ignore, lib);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6693 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6694 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6695 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6696 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6698 enum built_in_function lib;
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6700 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6701 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6702 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6703 ignore, lib);
6704 if (target)
6705 return target;
6706 break;
6708 case BUILT_IN_ATOMIC_AND_FETCH_1:
6709 case BUILT_IN_ATOMIC_AND_FETCH_2:
6710 case BUILT_IN_ATOMIC_AND_FETCH_4:
6711 case BUILT_IN_ATOMIC_AND_FETCH_8:
6712 case BUILT_IN_ATOMIC_AND_FETCH_16:
6714 enum built_in_function lib;
6715 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6716 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6717 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6719 ignore, lib);
6720 if (target)
6721 return target;
6722 break;
6724 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6725 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6726 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6727 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6728 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6730 enum built_in_function lib;
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6732 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6733 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6734 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6735 ignore, lib);
6736 if (target)
6737 return target;
6738 break;
6740 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6741 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6742 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6743 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6744 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6746 enum built_in_function lib;
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6748 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6749 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6750 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6751 ignore, lib);
6752 if (target)
6753 return target;
6754 break;
6756 case BUILT_IN_ATOMIC_OR_FETCH_1:
6757 case BUILT_IN_ATOMIC_OR_FETCH_2:
6758 case BUILT_IN_ATOMIC_OR_FETCH_4:
6759 case BUILT_IN_ATOMIC_OR_FETCH_8:
6760 case BUILT_IN_ATOMIC_OR_FETCH_16:
6762 enum built_in_function lib;
6763 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6764 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6765 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6766 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6767 ignore, lib);
6768 if (target)
6769 return target;
6770 break;
6772 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6773 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6774 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6775 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6776 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6777 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6778 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6779 ignore, BUILT_IN_NONE);
6780 if (target)
6781 return target;
6782 break;
6784 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6785 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6786 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6787 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6788 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6789 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6790 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6791 ignore, BUILT_IN_NONE);
6792 if (target)
6793 return target;
6794 break;
6796 case BUILT_IN_ATOMIC_FETCH_AND_1:
6797 case BUILT_IN_ATOMIC_FETCH_AND_2:
6798 case BUILT_IN_ATOMIC_FETCH_AND_4:
6799 case BUILT_IN_ATOMIC_FETCH_AND_8:
6800 case BUILT_IN_ATOMIC_FETCH_AND_16:
6801 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6802 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6803 ignore, BUILT_IN_NONE);
6804 if (target)
6805 return target;
6806 break;
6808 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6809 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6810 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6811 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6812 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6813 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6814 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6815 ignore, BUILT_IN_NONE);
6816 if (target)
6817 return target;
6818 break;
6820 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6821 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6822 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6823 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6824 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6826 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6827 ignore, BUILT_IN_NONE);
6828 if (target)
6829 return target;
6830 break;
6832 case BUILT_IN_ATOMIC_FETCH_OR_1:
6833 case BUILT_IN_ATOMIC_FETCH_OR_2:
6834 case BUILT_IN_ATOMIC_FETCH_OR_4:
6835 case BUILT_IN_ATOMIC_FETCH_OR_8:
6836 case BUILT_IN_ATOMIC_FETCH_OR_16:
6837 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6838 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6839 ignore, BUILT_IN_NONE);
6840 if (target)
6841 return target;
6842 break;
6844 case BUILT_IN_ATOMIC_TEST_AND_SET:
6845 return expand_builtin_atomic_test_and_set (exp, target);
6847 case BUILT_IN_ATOMIC_CLEAR:
6848 return expand_builtin_atomic_clear (exp);
6850 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6851 return expand_builtin_atomic_always_lock_free (exp);
6853 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6854 target = expand_builtin_atomic_is_lock_free (exp);
6855 if (target)
6856 return target;
6857 break;
6859 case BUILT_IN_ATOMIC_THREAD_FENCE:
6860 expand_builtin_atomic_thread_fence (exp);
6861 return const0_rtx;
6863 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6864 expand_builtin_atomic_signal_fence (exp);
6865 return const0_rtx;
6867 case BUILT_IN_OBJECT_SIZE:
6868 return expand_builtin_object_size (exp);
6870 case BUILT_IN_MEMCPY_CHK:
6871 case BUILT_IN_MEMPCPY_CHK:
6872 case BUILT_IN_MEMMOVE_CHK:
6873 case BUILT_IN_MEMSET_CHK:
6874 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6875 if (target)
6876 return target;
6877 break;
6879 case BUILT_IN_STRCPY_CHK:
6880 case BUILT_IN_STPCPY_CHK:
6881 case BUILT_IN_STRNCPY_CHK:
6882 case BUILT_IN_STPNCPY_CHK:
6883 case BUILT_IN_STRCAT_CHK:
6884 case BUILT_IN_STRNCAT_CHK:
6885 case BUILT_IN_SNPRINTF_CHK:
6886 case BUILT_IN_VSNPRINTF_CHK:
6887 maybe_emit_chk_warning (exp, fcode);
6888 break;
6890 case BUILT_IN_SPRINTF_CHK:
6891 case BUILT_IN_VSPRINTF_CHK:
6892 maybe_emit_sprintf_chk_warning (exp, fcode);
6893 break;
6895 case BUILT_IN_FREE:
6896 if (warn_free_nonheap_object)
6897 maybe_emit_free_warning (exp);
6898 break;
6900 case BUILT_IN_THREAD_POINTER:
6901 return expand_builtin_thread_pointer (exp, target);
6903 case BUILT_IN_SET_THREAD_POINTER:
6904 expand_builtin_set_thread_pointer (exp);
6905 return const0_rtx;
6907 default: /* just do library call, if unknown builtin */
6908 break;
6911 /* The switch statement above can drop through to cause the function
6912 to be called normally. */
6913 return expand_call (exp, target, ignore);
6916 /* Determine whether a tree node represents a call to a built-in
6917 function. If the tree T is a call to a built-in function with
6918 the right number of arguments of the appropriate types, return
6919 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6920 Otherwise the return value is END_BUILTINS. */
6922 enum built_in_function
6923 builtin_mathfn_code (const_tree t)
6925 const_tree fndecl, arg, parmlist;
6926 const_tree argtype, parmtype;
6927 const_call_expr_arg_iterator iter;
6929 if (TREE_CODE (t) != CALL_EXPR
6930 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6931 return END_BUILTINS;
6933 fndecl = get_callee_fndecl (t);
6934 if (fndecl == NULL_TREE
6935 || TREE_CODE (fndecl) != FUNCTION_DECL
6936 || ! DECL_BUILT_IN (fndecl)
6937 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6938 return END_BUILTINS;
6940 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6941 init_const_call_expr_arg_iterator (t, &iter);
6942 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6944 /* If a function doesn't take a variable number of arguments,
6945 the last element in the list will have type `void'. */
6946 parmtype = TREE_VALUE (parmlist);
6947 if (VOID_TYPE_P (parmtype))
6949 if (more_const_call_expr_args_p (&iter))
6950 return END_BUILTINS;
6951 return DECL_FUNCTION_CODE (fndecl);
6954 if (! more_const_call_expr_args_p (&iter))
6955 return END_BUILTINS;
6957 arg = next_const_call_expr_arg (&iter);
6958 argtype = TREE_TYPE (arg);
6960 if (SCALAR_FLOAT_TYPE_P (parmtype))
6962 if (! SCALAR_FLOAT_TYPE_P (argtype))
6963 return END_BUILTINS;
6965 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6967 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6968 return END_BUILTINS;
6970 else if (POINTER_TYPE_P (parmtype))
6972 if (! POINTER_TYPE_P (argtype))
6973 return END_BUILTINS;
6975 else if (INTEGRAL_TYPE_P (parmtype))
6977 if (! INTEGRAL_TYPE_P (argtype))
6978 return END_BUILTINS;
6980 else
6981 return END_BUILTINS;
6984 /* Variable-length argument list. */
6985 return DECL_FUNCTION_CODE (fndecl);
6988 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6989 evaluate to a constant. */
6991 static tree
6992 fold_builtin_constant_p (tree arg)
6994 /* We return 1 for a numeric type that's known to be a constant
6995 value at compile-time or for an aggregate type that's a
6996 literal constant. */
6997 STRIP_NOPS (arg);
6999 /* If we know this is a constant, emit the constant of one. */
7000 if (CONSTANT_CLASS_P (arg)
7001 || (TREE_CODE (arg) == CONSTRUCTOR
7002 && TREE_CONSTANT (arg)))
7003 return integer_one_node;
7004 if (TREE_CODE (arg) == ADDR_EXPR)
7006 tree op = TREE_OPERAND (arg, 0);
7007 if (TREE_CODE (op) == STRING_CST
7008 || (TREE_CODE (op) == ARRAY_REF
7009 && integer_zerop (TREE_OPERAND (op, 1))
7010 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7011 return integer_one_node;
7014 /* If this expression has side effects, show we don't know it to be a
7015 constant. Likewise if it's a pointer or aggregate type since in
7016 those case we only want literals, since those are only optimized
7017 when generating RTL, not later.
7018 And finally, if we are compiling an initializer, not code, we
7019 need to return a definite result now; there's not going to be any
7020 more optimization done. */
7021 if (TREE_SIDE_EFFECTS (arg)
7022 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7023 || POINTER_TYPE_P (TREE_TYPE (arg))
7024 || cfun == 0
7025 || folding_initializer
7026 || force_folding_builtin_constant_p)
7027 return integer_zero_node;
7029 return NULL_TREE;
7032 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7033 return it as a truthvalue. */
7035 static tree
7036 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7038 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7040 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7041 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7042 ret_type = TREE_TYPE (TREE_TYPE (fn));
7043 pred_type = TREE_VALUE (arg_types);
7044 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7046 pred = fold_convert_loc (loc, pred_type, pred);
7047 expected = fold_convert_loc (loc, expected_type, expected);
7048 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7050 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7051 build_int_cst (ret_type, 0));
7054 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7055 NULL_TREE if no simplification is possible. */
7057 static tree
7058 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7060 tree inner, fndecl, inner_arg0;
7061 enum tree_code code;
7063 /* Distribute the expected value over short-circuiting operators.
7064 See through the cast from truthvalue_type_node to long. */
7065 inner_arg0 = arg0;
7066 while (TREE_CODE (inner_arg0) == NOP_EXPR
7067 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7068 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7069 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7071 /* If this is a builtin_expect within a builtin_expect keep the
7072 inner one. See through a comparison against a constant. It
7073 might have been added to create a thruthvalue. */
7074 inner = inner_arg0;
7076 if (COMPARISON_CLASS_P (inner)
7077 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7078 inner = TREE_OPERAND (inner, 0);
7080 if (TREE_CODE (inner) == CALL_EXPR
7081 && (fndecl = get_callee_fndecl (inner))
7082 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7083 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7084 return arg0;
7086 inner = inner_arg0;
7087 code = TREE_CODE (inner);
7088 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7090 tree op0 = TREE_OPERAND (inner, 0);
7091 tree op1 = TREE_OPERAND (inner, 1);
7093 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7094 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7095 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7097 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7100 /* If the argument isn't invariant then there's nothing else we can do. */
7101 if (!TREE_CONSTANT (inner_arg0))
7102 return NULL_TREE;
7104 /* If we expect that a comparison against the argument will fold to
7105 a constant return the constant. In practice, this means a true
7106 constant or the address of a non-weak symbol. */
7107 inner = inner_arg0;
7108 STRIP_NOPS (inner);
7109 if (TREE_CODE (inner) == ADDR_EXPR)
7113 inner = TREE_OPERAND (inner, 0);
7115 while (TREE_CODE (inner) == COMPONENT_REF
7116 || TREE_CODE (inner) == ARRAY_REF);
7117 if ((TREE_CODE (inner) == VAR_DECL
7118 || TREE_CODE (inner) == FUNCTION_DECL)
7119 && DECL_WEAK (inner))
7120 return NULL_TREE;
7123 /* Otherwise, ARG0 already has the proper type for the return value. */
7124 return arg0;
7127 /* Fold a call to __builtin_classify_type with argument ARG. */
7129 static tree
7130 fold_builtin_classify_type (tree arg)
7132 if (arg == 0)
7133 return build_int_cst (integer_type_node, no_type_class);
7135 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7138 /* Fold a call to __builtin_strlen with argument ARG. */
7140 static tree
7141 fold_builtin_strlen (location_t loc, tree type, tree arg)
7143 if (!validate_arg (arg, POINTER_TYPE))
7144 return NULL_TREE;
7145 else
7147 tree len = c_strlen (arg, 0);
7149 if (len)
7150 return fold_convert_loc (loc, type, len);
7152 return NULL_TREE;
7156 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7158 static tree
7159 fold_builtin_inf (location_t loc, tree type, int warn)
7161 REAL_VALUE_TYPE real;
7163 /* __builtin_inff is intended to be usable to define INFINITY on all
7164 targets. If an infinity is not available, INFINITY expands "to a
7165 positive constant of type float that overflows at translation
7166 time", footnote "In this case, using INFINITY will violate the
7167 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7168 Thus we pedwarn to ensure this constraint violation is
7169 diagnosed. */
7170 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7171 pedwarn (loc, 0, "target format does not support infinity");
7173 real_inf (&real);
7174 return build_real (type, real);
7177 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7179 static tree
7180 fold_builtin_nan (tree arg, tree type, int quiet)
7182 REAL_VALUE_TYPE real;
7183 const char *str;
7185 if (!validate_arg (arg, POINTER_TYPE))
7186 return NULL_TREE;
7187 str = c_getstr (arg);
7188 if (!str)
7189 return NULL_TREE;
7191 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7192 return NULL_TREE;
7194 return build_real (type, real);
7197 /* Return true if the floating point expression T has an integer value.
7198 We also allow +Inf, -Inf and NaN to be considered integer values. */
7200 static bool
7201 integer_valued_real_p (tree t)
7203 switch (TREE_CODE (t))
7205 case FLOAT_EXPR:
7206 return true;
7208 case ABS_EXPR:
7209 case SAVE_EXPR:
7210 return integer_valued_real_p (TREE_OPERAND (t, 0));
7212 case COMPOUND_EXPR:
7213 case MODIFY_EXPR:
7214 case BIND_EXPR:
7215 return integer_valued_real_p (TREE_OPERAND (t, 1));
7217 case PLUS_EXPR:
7218 case MINUS_EXPR:
7219 case MULT_EXPR:
7220 case MIN_EXPR:
7221 case MAX_EXPR:
7222 return integer_valued_real_p (TREE_OPERAND (t, 0))
7223 && integer_valued_real_p (TREE_OPERAND (t, 1));
7225 case COND_EXPR:
7226 return integer_valued_real_p (TREE_OPERAND (t, 1))
7227 && integer_valued_real_p (TREE_OPERAND (t, 2));
7229 case REAL_CST:
7230 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7232 case NOP_EXPR:
7234 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7235 if (TREE_CODE (type) == INTEGER_TYPE)
7236 return true;
7237 if (TREE_CODE (type) == REAL_TYPE)
7238 return integer_valued_real_p (TREE_OPERAND (t, 0));
7239 break;
7242 case CALL_EXPR:
7243 switch (builtin_mathfn_code (t))
7245 CASE_FLT_FN (BUILT_IN_CEIL):
7246 CASE_FLT_FN (BUILT_IN_FLOOR):
7247 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7248 CASE_FLT_FN (BUILT_IN_RINT):
7249 CASE_FLT_FN (BUILT_IN_ROUND):
7250 CASE_FLT_FN (BUILT_IN_TRUNC):
7251 return true;
7253 CASE_FLT_FN (BUILT_IN_FMIN):
7254 CASE_FLT_FN (BUILT_IN_FMAX):
7255 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7256 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7258 default:
7259 break;
7261 break;
7263 default:
7264 break;
7266 return false;
7269 /* FNDECL is assumed to be a builtin where truncation can be propagated
7270 across (for instance floor((double)f) == (double)floorf (f).
7271 Do the transformation for a call with argument ARG. */
7273 static tree
7274 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7276 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7278 if (!validate_arg (arg, REAL_TYPE))
7279 return NULL_TREE;
7281 /* Integer rounding functions are idempotent. */
7282 if (fcode == builtin_mathfn_code (arg))
7283 return arg;
7285 /* If argument is already integer valued, and we don't need to worry
7286 about setting errno, there's no need to perform rounding. */
7287 if (! flag_errno_math && integer_valued_real_p (arg))
7288 return arg;
7290 if (optimize)
7292 tree arg0 = strip_float_extensions (arg);
7293 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7294 tree newtype = TREE_TYPE (arg0);
7295 tree decl;
7297 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7298 && (decl = mathfn_built_in (newtype, fcode)))
7299 return fold_convert_loc (loc, ftype,
7300 build_call_expr_loc (loc, decl, 1,
7301 fold_convert_loc (loc,
7302 newtype,
7303 arg0)));
7305 return NULL_TREE;
7308 /* FNDECL is assumed to be builtin which can narrow the FP type of
7309 the argument, for instance lround((double)f) -> lroundf (f).
7310 Do the transformation for a call with argument ARG. */
7312 static tree
7313 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7315 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7317 if (!validate_arg (arg, REAL_TYPE))
7318 return NULL_TREE;
7320 /* If argument is already integer valued, and we don't need to worry
7321 about setting errno, there's no need to perform rounding. */
7322 if (! flag_errno_math && integer_valued_real_p (arg))
7323 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7324 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7326 if (optimize)
7328 tree ftype = TREE_TYPE (arg);
7329 tree arg0 = strip_float_extensions (arg);
7330 tree newtype = TREE_TYPE (arg0);
7331 tree decl;
7333 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7334 && (decl = mathfn_built_in (newtype, fcode)))
7335 return build_call_expr_loc (loc, decl, 1,
7336 fold_convert_loc (loc, newtype, arg0));
7339 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7340 sizeof (int) == sizeof (long). */
7341 if (TYPE_PRECISION (integer_type_node)
7342 == TYPE_PRECISION (long_integer_type_node))
7344 tree newfn = NULL_TREE;
7345 switch (fcode)
7347 CASE_FLT_FN (BUILT_IN_ICEIL):
7348 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7349 break;
7351 CASE_FLT_FN (BUILT_IN_IFLOOR):
7352 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7353 break;
7355 CASE_FLT_FN (BUILT_IN_IROUND):
7356 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7357 break;
7359 CASE_FLT_FN (BUILT_IN_IRINT):
7360 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7361 break;
7363 default:
7364 break;
7367 if (newfn)
7369 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7370 return fold_convert_loc (loc,
7371 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7375 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7376 sizeof (long long) == sizeof (long). */
7377 if (TYPE_PRECISION (long_long_integer_type_node)
7378 == TYPE_PRECISION (long_integer_type_node))
7380 tree newfn = NULL_TREE;
7381 switch (fcode)
7383 CASE_FLT_FN (BUILT_IN_LLCEIL):
7384 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7385 break;
7387 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7388 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7389 break;
7391 CASE_FLT_FN (BUILT_IN_LLROUND):
7392 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7393 break;
7395 CASE_FLT_FN (BUILT_IN_LLRINT):
7396 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7397 break;
7399 default:
7400 break;
7403 if (newfn)
7405 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7406 return fold_convert_loc (loc,
7407 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7411 return NULL_TREE;
7414 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7415 return type. Return NULL_TREE if no simplification can be made. */
7417 static tree
7418 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7420 tree res;
7422 if (!validate_arg (arg, COMPLEX_TYPE)
7423 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7424 return NULL_TREE;
7426 /* Calculate the result when the argument is a constant. */
7427 if (TREE_CODE (arg) == COMPLEX_CST
7428 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7429 type, mpfr_hypot)))
7430 return res;
7432 if (TREE_CODE (arg) == COMPLEX_EXPR)
7434 tree real = TREE_OPERAND (arg, 0);
7435 tree imag = TREE_OPERAND (arg, 1);
7437 /* If either part is zero, cabs is fabs of the other. */
7438 if (real_zerop (real))
7439 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7440 if (real_zerop (imag))
7441 return fold_build1_loc (loc, ABS_EXPR, type, real);
7443 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7444 if (flag_unsafe_math_optimizations
7445 && operand_equal_p (real, imag, OEP_PURE_SAME))
7447 const REAL_VALUE_TYPE sqrt2_trunc
7448 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7449 STRIP_NOPS (real);
7450 return fold_build2_loc (loc, MULT_EXPR, type,
7451 fold_build1_loc (loc, ABS_EXPR, type, real),
7452 build_real (type, sqrt2_trunc));
7456 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7457 if (TREE_CODE (arg) == NEGATE_EXPR
7458 || TREE_CODE (arg) == CONJ_EXPR)
7459 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7461 /* Don't do this when optimizing for size. */
7462 if (flag_unsafe_math_optimizations
7463 && optimize && optimize_function_for_speed_p (cfun))
7465 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7467 if (sqrtfn != NULL_TREE)
7469 tree rpart, ipart, result;
7471 arg = builtin_save_expr (arg);
7473 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7474 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7476 rpart = builtin_save_expr (rpart);
7477 ipart = builtin_save_expr (ipart);
7479 result = fold_build2_loc (loc, PLUS_EXPR, type,
7480 fold_build2_loc (loc, MULT_EXPR, type,
7481 rpart, rpart),
7482 fold_build2_loc (loc, MULT_EXPR, type,
7483 ipart, ipart));
7485 return build_call_expr_loc (loc, sqrtfn, 1, result);
7489 return NULL_TREE;
7492 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7493 complex tree type of the result. If NEG is true, the imaginary
7494 zero is negative. */
7496 static tree
7497 build_complex_cproj (tree type, bool neg)
7499 REAL_VALUE_TYPE rinf, rzero = dconst0;
7501 real_inf (&rinf);
7502 rzero.sign = neg;
7503 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7504 build_real (TREE_TYPE (type), rzero));
7507 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7508 return type. Return NULL_TREE if no simplification can be made. */
7510 static tree
7511 fold_builtin_cproj (location_t loc, tree arg, tree type)
7513 if (!validate_arg (arg, COMPLEX_TYPE)
7514 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7515 return NULL_TREE;
7517 /* If there are no infinities, return arg. */
7518 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7519 return non_lvalue_loc (loc, arg);
7521 /* Calculate the result when the argument is a constant. */
7522 if (TREE_CODE (arg) == COMPLEX_CST)
7524 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7525 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7527 if (real_isinf (real) || real_isinf (imag))
7528 return build_complex_cproj (type, imag->sign);
7529 else
7530 return arg;
7532 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7534 tree real = TREE_OPERAND (arg, 0);
7535 tree imag = TREE_OPERAND (arg, 1);
7537 STRIP_NOPS (real);
7538 STRIP_NOPS (imag);
7540 /* If the real part is inf and the imag part is known to be
7541 nonnegative, return (inf + 0i). Remember side-effects are
7542 possible in the imag part. */
7543 if (TREE_CODE (real) == REAL_CST
7544 && real_isinf (TREE_REAL_CST_PTR (real))
7545 && tree_expr_nonnegative_p (imag))
7546 return omit_one_operand_loc (loc, type,
7547 build_complex_cproj (type, false),
7548 arg);
7550 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7551 Remember side-effects are possible in the real part. */
7552 if (TREE_CODE (imag) == REAL_CST
7553 && real_isinf (TREE_REAL_CST_PTR (imag)))
7554 return
7555 omit_one_operand_loc (loc, type,
7556 build_complex_cproj (type, TREE_REAL_CST_PTR
7557 (imag)->sign), arg);
7560 return NULL_TREE;
7563 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7564 Return NULL_TREE if no simplification can be made. */
7566 static tree
7567 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7570 enum built_in_function fcode;
7571 tree res;
7573 if (!validate_arg (arg, REAL_TYPE))
7574 return NULL_TREE;
7576 /* Calculate the result when the argument is a constant. */
7577 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7578 return res;
7580 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7581 fcode = builtin_mathfn_code (arg);
7582 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7584 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7585 arg = fold_build2_loc (loc, MULT_EXPR, type,
7586 CALL_EXPR_ARG (arg, 0),
7587 build_real (type, dconsthalf));
7588 return build_call_expr_loc (loc, expfn, 1, arg);
7591 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7592 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7594 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7596 if (powfn)
7598 tree arg0 = CALL_EXPR_ARG (arg, 0);
7599 tree tree_root;
7600 /* The inner root was either sqrt or cbrt. */
7601 /* This was a conditional expression but it triggered a bug
7602 in Sun C 5.5. */
7603 REAL_VALUE_TYPE dconstroot;
7604 if (BUILTIN_SQRT_P (fcode))
7605 dconstroot = dconsthalf;
7606 else
7607 dconstroot = dconst_third ();
7609 /* Adjust for the outer root. */
7610 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7611 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7612 tree_root = build_real (type, dconstroot);
7613 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7617 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7618 if (flag_unsafe_math_optimizations
7619 && (fcode == BUILT_IN_POW
7620 || fcode == BUILT_IN_POWF
7621 || fcode == BUILT_IN_POWL))
7623 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7624 tree arg0 = CALL_EXPR_ARG (arg, 0);
7625 tree arg1 = CALL_EXPR_ARG (arg, 1);
7626 tree narg1;
7627 if (!tree_expr_nonnegative_p (arg0))
7628 arg0 = build1 (ABS_EXPR, type, arg0);
7629 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7630 build_real (type, dconsthalf));
7631 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7634 return NULL_TREE;
7637 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7638 Return NULL_TREE if no simplification can be made. */
7640 static tree
7641 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7643 const enum built_in_function fcode = builtin_mathfn_code (arg);
7644 tree res;
7646 if (!validate_arg (arg, REAL_TYPE))
7647 return NULL_TREE;
7649 /* Calculate the result when the argument is a constant. */
7650 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7651 return res;
7653 if (flag_unsafe_math_optimizations)
7655 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7656 if (BUILTIN_EXPONENT_P (fcode))
7658 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7659 const REAL_VALUE_TYPE third_trunc =
7660 real_value_truncate (TYPE_MODE (type), dconst_third ());
7661 arg = fold_build2_loc (loc, MULT_EXPR, type,
7662 CALL_EXPR_ARG (arg, 0),
7663 build_real (type, third_trunc));
7664 return build_call_expr_loc (loc, expfn, 1, arg);
7667 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7668 if (BUILTIN_SQRT_P (fcode))
7670 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7672 if (powfn)
7674 tree arg0 = CALL_EXPR_ARG (arg, 0);
7675 tree tree_root;
7676 REAL_VALUE_TYPE dconstroot = dconst_third ();
7678 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7679 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7680 tree_root = build_real (type, dconstroot);
7681 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7685 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7686 if (BUILTIN_CBRT_P (fcode))
7688 tree arg0 = CALL_EXPR_ARG (arg, 0);
7689 if (tree_expr_nonnegative_p (arg0))
7691 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7693 if (powfn)
7695 tree tree_root;
7696 REAL_VALUE_TYPE dconstroot;
7698 real_arithmetic (&dconstroot, MULT_EXPR,
7699 dconst_third_ptr (), dconst_third_ptr ());
7700 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7701 tree_root = build_real (type, dconstroot);
7702 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7707 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7708 if (fcode == BUILT_IN_POW
7709 || fcode == BUILT_IN_POWF
7710 || fcode == BUILT_IN_POWL)
7712 tree arg00 = CALL_EXPR_ARG (arg, 0);
7713 tree arg01 = CALL_EXPR_ARG (arg, 1);
7714 if (tree_expr_nonnegative_p (arg00))
7716 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7717 const REAL_VALUE_TYPE dconstroot
7718 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7719 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7720 build_real (type, dconstroot));
7721 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7725 return NULL_TREE;
7728 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7729 TYPE is the type of the return value. Return NULL_TREE if no
7730 simplification can be made. */
7732 static tree
7733 fold_builtin_cos (location_t loc,
7734 tree arg, tree type, tree fndecl)
7736 tree res, narg;
7738 if (!validate_arg (arg, REAL_TYPE))
7739 return NULL_TREE;
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7743 return res;
7745 /* Optimize cos(-x) into cos (x). */
7746 if ((narg = fold_strip_sign_ops (arg)))
7747 return build_call_expr_loc (loc, fndecl, 1, narg);
7749 return NULL_TREE;
7752 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7753 Return NULL_TREE if no simplification can be made. */
7755 static tree
7756 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7758 if (validate_arg (arg, REAL_TYPE))
7760 tree res, narg;
7762 /* Calculate the result when the argument is a constant. */
7763 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7764 return res;
7766 /* Optimize cosh(-x) into cosh (x). */
7767 if ((narg = fold_strip_sign_ops (arg)))
7768 return build_call_expr_loc (loc, fndecl, 1, narg);
7771 return NULL_TREE;
7774 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7775 argument ARG. TYPE is the type of the return value. Return
7776 NULL_TREE if no simplification can be made. */
7778 static tree
7779 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7780 bool hyper)
7782 if (validate_arg (arg, COMPLEX_TYPE)
7783 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7785 tree tmp;
7787 /* Calculate the result when the argument is a constant. */
7788 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7789 return tmp;
7791 /* Optimize fn(-x) into fn(x). */
7792 if ((tmp = fold_strip_sign_ops (arg)))
7793 return build_call_expr_loc (loc, fndecl, 1, tmp);
7796 return NULL_TREE;
7799 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7800 Return NULL_TREE if no simplification can be made. */
7802 static tree
7803 fold_builtin_tan (tree arg, tree type)
7805 enum built_in_function fcode;
7806 tree res;
7808 if (!validate_arg (arg, REAL_TYPE))
7809 return NULL_TREE;
7811 /* Calculate the result when the argument is a constant. */
7812 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7813 return res;
7815 /* Optimize tan(atan(x)) = x. */
7816 fcode = builtin_mathfn_code (arg);
7817 if (flag_unsafe_math_optimizations
7818 && (fcode == BUILT_IN_ATAN
7819 || fcode == BUILT_IN_ATANF
7820 || fcode == BUILT_IN_ATANL))
7821 return CALL_EXPR_ARG (arg, 0);
7823 return NULL_TREE;
7826 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7827 NULL_TREE if no simplification can be made. */
7829 static tree
7830 fold_builtin_sincos (location_t loc,
7831 tree arg0, tree arg1, tree arg2)
7833 tree type;
7834 tree res, fn, call;
7836 if (!validate_arg (arg0, REAL_TYPE)
7837 || !validate_arg (arg1, POINTER_TYPE)
7838 || !validate_arg (arg2, POINTER_TYPE))
7839 return NULL_TREE;
7841 type = TREE_TYPE (arg0);
7843 /* Calculate the result when the argument is a constant. */
7844 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7845 return res;
7847 /* Canonicalize sincos to cexpi. */
7848 if (!targetm.libc_has_function (function_c99_math_complex))
7849 return NULL_TREE;
7850 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7851 if (!fn)
7852 return NULL_TREE;
7854 call = build_call_expr_loc (loc, fn, 1, arg0);
7855 call = builtin_save_expr (call);
7857 return build2 (COMPOUND_EXPR, void_type_node,
7858 build2 (MODIFY_EXPR, void_type_node,
7859 build_fold_indirect_ref_loc (loc, arg1),
7860 build1 (IMAGPART_EXPR, type, call)),
7861 build2 (MODIFY_EXPR, void_type_node,
7862 build_fold_indirect_ref_loc (loc, arg2),
7863 build1 (REALPART_EXPR, type, call)));
7866 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7867 NULL_TREE if no simplification can be made. */
7869 static tree
7870 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7872 tree rtype;
7873 tree realp, imagp, ifn;
7874 tree res;
7876 if (!validate_arg (arg0, COMPLEX_TYPE)
7877 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7878 return NULL_TREE;
7880 /* Calculate the result when the argument is a constant. */
7881 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7882 return res;
7884 rtype = TREE_TYPE (TREE_TYPE (arg0));
7886 /* In case we can figure out the real part of arg0 and it is constant zero
7887 fold to cexpi. */
7888 if (!targetm.libc_has_function (function_c99_math_complex))
7889 return NULL_TREE;
7890 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7891 if (!ifn)
7892 return NULL_TREE;
7894 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7895 && real_zerop (realp))
7897 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7898 return build_call_expr_loc (loc, ifn, 1, narg);
7901 /* In case we can easily decompose real and imaginary parts split cexp
7902 to exp (r) * cexpi (i). */
7903 if (flag_unsafe_math_optimizations
7904 && realp)
7906 tree rfn, rcall, icall;
7908 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7909 if (!rfn)
7910 return NULL_TREE;
7912 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7913 if (!imagp)
7914 return NULL_TREE;
7916 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7917 icall = builtin_save_expr (icall);
7918 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7919 rcall = builtin_save_expr (rcall);
7920 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7921 fold_build2_loc (loc, MULT_EXPR, rtype,
7922 rcall,
7923 fold_build1_loc (loc, REALPART_EXPR,
7924 rtype, icall)),
7925 fold_build2_loc (loc, MULT_EXPR, rtype,
7926 rcall,
7927 fold_build1_loc (loc, IMAGPART_EXPR,
7928 rtype, icall)));
7931 return NULL_TREE;
7934 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7935 Return NULL_TREE if no simplification can be made. */
7937 static tree
7938 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7940 if (!validate_arg (arg, REAL_TYPE))
7941 return NULL_TREE;
7943 /* Optimize trunc of constant value. */
7944 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7946 REAL_VALUE_TYPE r, x;
7947 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7949 x = TREE_REAL_CST (arg);
7950 real_trunc (&r, TYPE_MODE (type), &x);
7951 return build_real (type, r);
7954 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7957 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7958 Return NULL_TREE if no simplification can be made. */
7960 static tree
7961 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7963 if (!validate_arg (arg, REAL_TYPE))
7964 return NULL_TREE;
7966 /* Optimize floor of constant value. */
7967 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7969 REAL_VALUE_TYPE x;
7971 x = TREE_REAL_CST (arg);
7972 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7974 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7975 REAL_VALUE_TYPE r;
7977 real_floor (&r, TYPE_MODE (type), &x);
7978 return build_real (type, r);
7982 /* Fold floor (x) where x is nonnegative to trunc (x). */
7983 if (tree_expr_nonnegative_p (arg))
7985 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7986 if (truncfn)
7987 return build_call_expr_loc (loc, truncfn, 1, arg);
7990 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7993 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7994 Return NULL_TREE if no simplification can be made. */
7996 static tree
7997 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7999 if (!validate_arg (arg, REAL_TYPE))
8000 return NULL_TREE;
8002 /* Optimize ceil of constant value. */
8003 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8005 REAL_VALUE_TYPE x;
8007 x = TREE_REAL_CST (arg);
8008 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8010 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8011 REAL_VALUE_TYPE r;
8013 real_ceil (&r, TYPE_MODE (type), &x);
8014 return build_real (type, r);
8018 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8021 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8022 Return NULL_TREE if no simplification can be made. */
8024 static tree
8025 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8027 if (!validate_arg (arg, REAL_TYPE))
8028 return NULL_TREE;
8030 /* Optimize round of constant value. */
8031 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8033 REAL_VALUE_TYPE x;
8035 x = TREE_REAL_CST (arg);
8036 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8038 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8039 REAL_VALUE_TYPE r;
8041 real_round (&r, TYPE_MODE (type), &x);
8042 return build_real (type, r);
8046 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8049 /* Fold function call to builtin lround, lroundf or lroundl (or the
8050 corresponding long long versions) and other rounding functions. ARG
8051 is the argument to the call. Return NULL_TREE if no simplification
8052 can be made. */
8054 static tree
8055 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8057 if (!validate_arg (arg, REAL_TYPE))
8058 return NULL_TREE;
8060 /* Optimize lround of constant value. */
8061 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8063 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8065 if (real_isfinite (&x))
8067 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8068 tree ftype = TREE_TYPE (arg);
8069 double_int val;
8070 REAL_VALUE_TYPE r;
8072 switch (DECL_FUNCTION_CODE (fndecl))
8074 CASE_FLT_FN (BUILT_IN_IFLOOR):
8075 CASE_FLT_FN (BUILT_IN_LFLOOR):
8076 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8077 real_floor (&r, TYPE_MODE (ftype), &x);
8078 break;
8080 CASE_FLT_FN (BUILT_IN_ICEIL):
8081 CASE_FLT_FN (BUILT_IN_LCEIL):
8082 CASE_FLT_FN (BUILT_IN_LLCEIL):
8083 real_ceil (&r, TYPE_MODE (ftype), &x);
8084 break;
8086 CASE_FLT_FN (BUILT_IN_IROUND):
8087 CASE_FLT_FN (BUILT_IN_LROUND):
8088 CASE_FLT_FN (BUILT_IN_LLROUND):
8089 real_round (&r, TYPE_MODE (ftype), &x);
8090 break;
8092 default:
8093 gcc_unreachable ();
8096 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8097 if (double_int_fits_to_tree_p (itype, val))
8098 return double_int_to_tree (itype, val);
8102 switch (DECL_FUNCTION_CODE (fndecl))
8104 CASE_FLT_FN (BUILT_IN_LFLOOR):
8105 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8106 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8107 if (tree_expr_nonnegative_p (arg))
8108 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8109 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8110 break;
8111 default:;
8114 return fold_fixed_mathfn (loc, fndecl, arg);
8117 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8118 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8119 the argument to the call. Return NULL_TREE if no simplification can
8120 be made. */
8122 static tree
8123 fold_builtin_bitop (tree fndecl, tree arg)
8125 if (!validate_arg (arg, INTEGER_TYPE))
8126 return NULL_TREE;
8128 /* Optimize for constant argument. */
8129 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8131 HOST_WIDE_INT hi, width, result;
8132 unsigned HOST_WIDE_INT lo;
8133 tree type;
8135 type = TREE_TYPE (arg);
8136 width = TYPE_PRECISION (type);
8137 lo = TREE_INT_CST_LOW (arg);
8139 /* Clear all the bits that are beyond the type's precision. */
8140 if (width > HOST_BITS_PER_WIDE_INT)
8142 hi = TREE_INT_CST_HIGH (arg);
8143 if (width < HOST_BITS_PER_DOUBLE_INT)
8144 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8146 else
8148 hi = 0;
8149 if (width < HOST_BITS_PER_WIDE_INT)
8150 lo &= ~(HOST_WIDE_INT_M1U << width);
8153 switch (DECL_FUNCTION_CODE (fndecl))
8155 CASE_INT_FN (BUILT_IN_FFS):
8156 if (lo != 0)
8157 result = ffs_hwi (lo);
8158 else if (hi != 0)
8159 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8160 else
8161 result = 0;
8162 break;
8164 CASE_INT_FN (BUILT_IN_CLZ):
8165 if (hi != 0)
8166 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8167 else if (lo != 0)
8168 result = width - floor_log2 (lo) - 1;
8169 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8170 result = width;
8171 break;
8173 CASE_INT_FN (BUILT_IN_CTZ):
8174 if (lo != 0)
8175 result = ctz_hwi (lo);
8176 else if (hi != 0)
8177 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8178 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8179 result = width;
8180 break;
8182 CASE_INT_FN (BUILT_IN_CLRSB):
8183 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8184 return NULL_TREE;
8185 if (width > HOST_BITS_PER_WIDE_INT
8186 && (hi & ((unsigned HOST_WIDE_INT) 1
8187 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8189 hi = ~hi & ~(HOST_WIDE_INT_M1U
8190 << (width - HOST_BITS_PER_WIDE_INT - 1));
8191 lo = ~lo;
8193 else if (width <= HOST_BITS_PER_WIDE_INT
8194 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8195 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8196 if (hi != 0)
8197 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8198 else if (lo != 0)
8199 result = width - floor_log2 (lo) - 2;
8200 else
8201 result = width - 1;
8202 break;
8204 CASE_INT_FN (BUILT_IN_POPCOUNT):
8205 result = 0;
8206 while (lo)
8207 result++, lo &= lo - 1;
8208 while (hi)
8209 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8210 break;
8212 CASE_INT_FN (BUILT_IN_PARITY):
8213 result = 0;
8214 while (lo)
8215 result++, lo &= lo - 1;
8216 while (hi)
8217 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8218 result &= 1;
8219 break;
8221 default:
8222 gcc_unreachable ();
8225 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8228 return NULL_TREE;
8231 /* Fold function call to builtin_bswap and the short, long and long long
8232 variants. Return NULL_TREE if no simplification can be made. */
8233 static tree
8234 fold_builtin_bswap (tree fndecl, tree arg)
8236 if (! validate_arg (arg, INTEGER_TYPE))
8237 return NULL_TREE;
8239 /* Optimize constant value. */
8240 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8242 HOST_WIDE_INT hi, width, r_hi = 0;
8243 unsigned HOST_WIDE_INT lo, r_lo = 0;
8244 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8246 width = TYPE_PRECISION (type);
8247 lo = TREE_INT_CST_LOW (arg);
8248 hi = TREE_INT_CST_HIGH (arg);
8250 switch (DECL_FUNCTION_CODE (fndecl))
8252 case BUILT_IN_BSWAP16:
8253 case BUILT_IN_BSWAP32:
8254 case BUILT_IN_BSWAP64:
8256 int s;
8258 for (s = 0; s < width; s += 8)
8260 int d = width - s - 8;
8261 unsigned HOST_WIDE_INT byte;
8263 if (s < HOST_BITS_PER_WIDE_INT)
8264 byte = (lo >> s) & 0xff;
8265 else
8266 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8268 if (d < HOST_BITS_PER_WIDE_INT)
8269 r_lo |= byte << d;
8270 else
8271 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8275 break;
8277 default:
8278 gcc_unreachable ();
8281 if (width < HOST_BITS_PER_WIDE_INT)
8282 return build_int_cst (type, r_lo);
8283 else
8284 return build_int_cst_wide (type, r_lo, r_hi);
8287 return NULL_TREE;
8290 /* A subroutine of fold_builtin to fold the various logarithmic
8291 functions. Return NULL_TREE if no simplification can me made.
8292 FUNC is the corresponding MPFR logarithm function. */
8294 static tree
8295 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8296 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8298 if (validate_arg (arg, REAL_TYPE))
8300 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8301 tree res;
8302 const enum built_in_function fcode = builtin_mathfn_code (arg);
8304 /* Calculate the result when the argument is a constant. */
8305 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8306 return res;
8308 /* Special case, optimize logN(expN(x)) = x. */
8309 if (flag_unsafe_math_optimizations
8310 && ((func == mpfr_log
8311 && (fcode == BUILT_IN_EXP
8312 || fcode == BUILT_IN_EXPF
8313 || fcode == BUILT_IN_EXPL))
8314 || (func == mpfr_log2
8315 && (fcode == BUILT_IN_EXP2
8316 || fcode == BUILT_IN_EXP2F
8317 || fcode == BUILT_IN_EXP2L))
8318 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8319 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8321 /* Optimize logN(func()) for various exponential functions. We
8322 want to determine the value "x" and the power "exponent" in
8323 order to transform logN(x**exponent) into exponent*logN(x). */
8324 if (flag_unsafe_math_optimizations)
8326 tree exponent = 0, x = 0;
8328 switch (fcode)
8330 CASE_FLT_FN (BUILT_IN_EXP):
8331 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8332 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8333 dconst_e ()));
8334 exponent = CALL_EXPR_ARG (arg, 0);
8335 break;
8336 CASE_FLT_FN (BUILT_IN_EXP2):
8337 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8338 x = build_real (type, dconst2);
8339 exponent = CALL_EXPR_ARG (arg, 0);
8340 break;
8341 CASE_FLT_FN (BUILT_IN_EXP10):
8342 CASE_FLT_FN (BUILT_IN_POW10):
8343 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8345 REAL_VALUE_TYPE dconst10;
8346 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8347 x = build_real (type, dconst10);
8349 exponent = CALL_EXPR_ARG (arg, 0);
8350 break;
8351 CASE_FLT_FN (BUILT_IN_SQRT):
8352 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8353 x = CALL_EXPR_ARG (arg, 0);
8354 exponent = build_real (type, dconsthalf);
8355 break;
8356 CASE_FLT_FN (BUILT_IN_CBRT):
8357 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8358 x = CALL_EXPR_ARG (arg, 0);
8359 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8360 dconst_third ()));
8361 break;
8362 CASE_FLT_FN (BUILT_IN_POW):
8363 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8364 x = CALL_EXPR_ARG (arg, 0);
8365 exponent = CALL_EXPR_ARG (arg, 1);
8366 break;
8367 default:
8368 break;
8371 /* Now perform the optimization. */
8372 if (x && exponent)
8374 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8375 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8380 return NULL_TREE;
8383 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8384 NULL_TREE if no simplification can be made. */
8386 static tree
8387 fold_builtin_hypot (location_t loc, tree fndecl,
8388 tree arg0, tree arg1, tree type)
8390 tree res, narg0, narg1;
8392 if (!validate_arg (arg0, REAL_TYPE)
8393 || !validate_arg (arg1, REAL_TYPE))
8394 return NULL_TREE;
8396 /* Calculate the result when the argument is a constant. */
8397 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8398 return res;
8400 /* If either argument to hypot has a negate or abs, strip that off.
8401 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8402 narg0 = fold_strip_sign_ops (arg0);
8403 narg1 = fold_strip_sign_ops (arg1);
8404 if (narg0 || narg1)
8406 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8407 narg1 ? narg1 : arg1);
8410 /* If either argument is zero, hypot is fabs of the other. */
8411 if (real_zerop (arg0))
8412 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8413 else if (real_zerop (arg1))
8414 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8416 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8417 if (flag_unsafe_math_optimizations
8418 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8420 const REAL_VALUE_TYPE sqrt2_trunc
8421 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8422 return fold_build2_loc (loc, MULT_EXPR, type,
8423 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8424 build_real (type, sqrt2_trunc));
8427 return NULL_TREE;
8431 /* Fold a builtin function call to pow, powf, or powl. Return
8432 NULL_TREE if no simplification can be made. */
8433 static tree
8434 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8436 tree res;
8438 if (!validate_arg (arg0, REAL_TYPE)
8439 || !validate_arg (arg1, REAL_TYPE))
8440 return NULL_TREE;
8442 /* Calculate the result when the argument is a constant. */
8443 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8444 return res;
8446 /* Optimize pow(1.0,y) = 1.0. */
8447 if (real_onep (arg0))
8448 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8450 if (TREE_CODE (arg1) == REAL_CST
8451 && !TREE_OVERFLOW (arg1))
8453 REAL_VALUE_TYPE cint;
8454 REAL_VALUE_TYPE c;
8455 HOST_WIDE_INT n;
8457 c = TREE_REAL_CST (arg1);
8459 /* Optimize pow(x,0.0) = 1.0. */
8460 if (REAL_VALUES_EQUAL (c, dconst0))
8461 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8462 arg0);
8464 /* Optimize pow(x,1.0) = x. */
8465 if (REAL_VALUES_EQUAL (c, dconst1))
8466 return arg0;
8468 /* Optimize pow(x,-1.0) = 1.0/x. */
8469 if (REAL_VALUES_EQUAL (c, dconstm1))
8470 return fold_build2_loc (loc, RDIV_EXPR, type,
8471 build_real (type, dconst1), arg0);
8473 /* Optimize pow(x,0.5) = sqrt(x). */
8474 if (flag_unsafe_math_optimizations
8475 && REAL_VALUES_EQUAL (c, dconsthalf))
8477 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8479 if (sqrtfn != NULL_TREE)
8480 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8483 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8484 if (flag_unsafe_math_optimizations)
8486 const REAL_VALUE_TYPE dconstroot
8487 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8489 if (REAL_VALUES_EQUAL (c, dconstroot))
8491 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8492 if (cbrtfn != NULL_TREE)
8493 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8497 /* Check for an integer exponent. */
8498 n = real_to_integer (&c);
8499 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8500 if (real_identical (&c, &cint))
8502 /* Attempt to evaluate pow at compile-time, unless this should
8503 raise an exception. */
8504 if (TREE_CODE (arg0) == REAL_CST
8505 && !TREE_OVERFLOW (arg0)
8506 && (n > 0
8507 || (!flag_trapping_math && !flag_errno_math)
8508 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8510 REAL_VALUE_TYPE x;
8511 bool inexact;
8513 x = TREE_REAL_CST (arg0);
8514 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8515 if (flag_unsafe_math_optimizations || !inexact)
8516 return build_real (type, x);
8519 /* Strip sign ops from even integer powers. */
8520 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8522 tree narg0 = fold_strip_sign_ops (arg0);
8523 if (narg0)
8524 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8529 if (flag_unsafe_math_optimizations)
8531 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8533 /* Optimize pow(expN(x),y) = expN(x*y). */
8534 if (BUILTIN_EXPONENT_P (fcode))
8536 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8537 tree arg = CALL_EXPR_ARG (arg0, 0);
8538 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8539 return build_call_expr_loc (loc, expfn, 1, arg);
8542 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8543 if (BUILTIN_SQRT_P (fcode))
8545 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8546 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8547 build_real (type, dconsthalf));
8548 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8551 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8552 if (BUILTIN_CBRT_P (fcode))
8554 tree arg = CALL_EXPR_ARG (arg0, 0);
8555 if (tree_expr_nonnegative_p (arg))
8557 const REAL_VALUE_TYPE dconstroot
8558 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8559 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8560 build_real (type, dconstroot));
8561 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8565 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8566 if (fcode == BUILT_IN_POW
8567 || fcode == BUILT_IN_POWF
8568 || fcode == BUILT_IN_POWL)
8570 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8571 if (tree_expr_nonnegative_p (arg00))
8573 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8574 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8575 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8580 return NULL_TREE;
8583 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8584 Return NULL_TREE if no simplification can be made. */
8585 static tree
8586 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8587 tree arg0, tree arg1, tree type)
8589 if (!validate_arg (arg0, REAL_TYPE)
8590 || !validate_arg (arg1, INTEGER_TYPE))
8591 return NULL_TREE;
8593 /* Optimize pow(1.0,y) = 1.0. */
8594 if (real_onep (arg0))
8595 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8597 if (host_integerp (arg1, 0))
8599 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8601 /* Evaluate powi at compile-time. */
8602 if (TREE_CODE (arg0) == REAL_CST
8603 && !TREE_OVERFLOW (arg0))
8605 REAL_VALUE_TYPE x;
8606 x = TREE_REAL_CST (arg0);
8607 real_powi (&x, TYPE_MODE (type), &x, c);
8608 return build_real (type, x);
8611 /* Optimize pow(x,0) = 1.0. */
8612 if (c == 0)
8613 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8614 arg0);
8616 /* Optimize pow(x,1) = x. */
8617 if (c == 1)
8618 return arg0;
8620 /* Optimize pow(x,-1) = 1.0/x. */
8621 if (c == -1)
8622 return fold_build2_loc (loc, RDIV_EXPR, type,
8623 build_real (type, dconst1), arg0);
8626 return NULL_TREE;
8629 /* A subroutine of fold_builtin to fold the various exponent
8630 functions. Return NULL_TREE if no simplification can be made.
8631 FUNC is the corresponding MPFR exponent function. */
8633 static tree
8634 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8635 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8637 if (validate_arg (arg, REAL_TYPE))
8639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8640 tree res;
8642 /* Calculate the result when the argument is a constant. */
8643 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8644 return res;
8646 /* Optimize expN(logN(x)) = x. */
8647 if (flag_unsafe_math_optimizations)
8649 const enum built_in_function fcode = builtin_mathfn_code (arg);
8651 if ((func == mpfr_exp
8652 && (fcode == BUILT_IN_LOG
8653 || fcode == BUILT_IN_LOGF
8654 || fcode == BUILT_IN_LOGL))
8655 || (func == mpfr_exp2
8656 && (fcode == BUILT_IN_LOG2
8657 || fcode == BUILT_IN_LOG2F
8658 || fcode == BUILT_IN_LOG2L))
8659 || (func == mpfr_exp10
8660 && (fcode == BUILT_IN_LOG10
8661 || fcode == BUILT_IN_LOG10F
8662 || fcode == BUILT_IN_LOG10L)))
8663 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8667 return NULL_TREE;
8670 /* Return true if VAR is a VAR_DECL or a component thereof. */
8672 static bool
8673 var_decl_component_p (tree var)
8675 tree inner = var;
8676 while (handled_component_p (inner))
8677 inner = TREE_OPERAND (inner, 0);
8678 return SSA_VAR_P (inner);
8681 /* Fold function call to builtin memset. Return
8682 NULL_TREE if no simplification can be made. */
8684 static tree
8685 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8686 tree type, bool ignore)
8688 tree var, ret, etype;
8689 unsigned HOST_WIDE_INT length, cval;
8691 if (! validate_arg (dest, POINTER_TYPE)
8692 || ! validate_arg (c, INTEGER_TYPE)
8693 || ! validate_arg (len, INTEGER_TYPE))
8694 return NULL_TREE;
8696 if (! host_integerp (len, 1))
8697 return NULL_TREE;
8699 /* If the LEN parameter is zero, return DEST. */
8700 if (integer_zerop (len))
8701 return omit_one_operand_loc (loc, type, dest, c);
8703 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8704 return NULL_TREE;
8706 var = dest;
8707 STRIP_NOPS (var);
8708 if (TREE_CODE (var) != ADDR_EXPR)
8709 return NULL_TREE;
8711 var = TREE_OPERAND (var, 0);
8712 if (TREE_THIS_VOLATILE (var))
8713 return NULL_TREE;
8715 etype = TREE_TYPE (var);
8716 if (TREE_CODE (etype) == ARRAY_TYPE)
8717 etype = TREE_TYPE (etype);
8719 if (!INTEGRAL_TYPE_P (etype)
8720 && !POINTER_TYPE_P (etype))
8721 return NULL_TREE;
8723 if (! var_decl_component_p (var))
8724 return NULL_TREE;
8726 length = tree_low_cst (len, 1);
8727 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8728 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8729 return NULL_TREE;
8731 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8732 return NULL_TREE;
8734 if (integer_zerop (c))
8735 cval = 0;
8736 else
8738 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8739 return NULL_TREE;
8741 cval = TREE_INT_CST_LOW (c);
8742 cval &= 0xff;
8743 cval |= cval << 8;
8744 cval |= cval << 16;
8745 cval |= (cval << 31) << 1;
8748 ret = build_int_cst_type (etype, cval);
8749 var = build_fold_indirect_ref_loc (loc,
8750 fold_convert_loc (loc,
8751 build_pointer_type (etype),
8752 dest));
8753 ret = build2 (MODIFY_EXPR, etype, var, ret);
8754 if (ignore)
8755 return ret;
8757 return omit_one_operand_loc (loc, type, dest, ret);
8760 /* Fold function call to builtin memset. Return
8761 NULL_TREE if no simplification can be made. */
8763 static tree
8764 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8766 if (! validate_arg (dest, POINTER_TYPE)
8767 || ! validate_arg (size, INTEGER_TYPE))
8768 return NULL_TREE;
8770 if (!ignore)
8771 return NULL_TREE;
8773 /* New argument list transforming bzero(ptr x, int y) to
8774 memset(ptr x, int 0, size_t y). This is done this way
8775 so that if it isn't expanded inline, we fallback to
8776 calling bzero instead of memset. */
8778 return fold_builtin_memset (loc, dest, integer_zero_node,
8779 fold_convert_loc (loc, size_type_node, size),
8780 void_type_node, ignore);
8783 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8784 NULL_TREE if no simplification can be made.
8785 If ENDP is 0, return DEST (like memcpy).
8786 If ENDP is 1, return DEST+LEN (like mempcpy).
8787 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8788 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8789 (memmove). */
8791 static tree
8792 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8793 tree len, tree type, bool ignore, int endp)
8795 tree destvar, srcvar, expr;
8797 if (! validate_arg (dest, POINTER_TYPE)
8798 || ! validate_arg (src, POINTER_TYPE)
8799 || ! validate_arg (len, INTEGER_TYPE))
8800 return NULL_TREE;
8802 /* If the LEN parameter is zero, return DEST. */
8803 if (integer_zerop (len))
8804 return omit_one_operand_loc (loc, type, dest, src);
8806 /* If SRC and DEST are the same (and not volatile), return
8807 DEST{,+LEN,+LEN-1}. */
8808 if (operand_equal_p (src, dest, 0))
8809 expr = len;
8810 else
8812 tree srctype, desttype;
8813 unsigned int src_align, dest_align;
8814 tree off0;
8816 if (endp == 3)
8818 src_align = get_pointer_alignment (src);
8819 dest_align = get_pointer_alignment (dest);
8821 /* Both DEST and SRC must be pointer types.
8822 ??? This is what old code did. Is the testing for pointer types
8823 really mandatory?
8825 If either SRC is readonly or length is 1, we can use memcpy. */
8826 if (!dest_align || !src_align)
8827 return NULL_TREE;
8828 if (readonly_data_expr (src)
8829 || (host_integerp (len, 1)
8830 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8831 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8833 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8834 if (!fn)
8835 return NULL_TREE;
8836 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8839 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8840 if (TREE_CODE (src) == ADDR_EXPR
8841 && TREE_CODE (dest) == ADDR_EXPR)
8843 tree src_base, dest_base, fn;
8844 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8845 HOST_WIDE_INT size = -1;
8846 HOST_WIDE_INT maxsize = -1;
8848 srcvar = TREE_OPERAND (src, 0);
8849 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8850 &size, &maxsize);
8851 destvar = TREE_OPERAND (dest, 0);
8852 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8853 &size, &maxsize);
8854 if (host_integerp (len, 1))
8855 maxsize = tree_low_cst (len, 1);
8856 else
8857 maxsize = -1;
8858 src_offset /= BITS_PER_UNIT;
8859 dest_offset /= BITS_PER_UNIT;
8860 if (SSA_VAR_P (src_base)
8861 && SSA_VAR_P (dest_base))
8863 if (operand_equal_p (src_base, dest_base, 0)
8864 && ranges_overlap_p (src_offset, maxsize,
8865 dest_offset, maxsize))
8866 return NULL_TREE;
8868 else if (TREE_CODE (src_base) == MEM_REF
8869 && TREE_CODE (dest_base) == MEM_REF)
8871 double_int off;
8872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8873 TREE_OPERAND (dest_base, 0), 0))
8874 return NULL_TREE;
8875 off = mem_ref_offset (src_base) +
8876 double_int::from_shwi (src_offset);
8877 if (!off.fits_shwi ())
8878 return NULL_TREE;
8879 src_offset = off.low;
8880 off = mem_ref_offset (dest_base) +
8881 double_int::from_shwi (dest_offset);
8882 if (!off.fits_shwi ())
8883 return NULL_TREE;
8884 dest_offset = off.low;
8885 if (ranges_overlap_p (src_offset, maxsize,
8886 dest_offset, maxsize))
8887 return NULL_TREE;
8889 else
8890 return NULL_TREE;
8892 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8893 if (!fn)
8894 return NULL_TREE;
8895 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8898 /* If the destination and source do not alias optimize into
8899 memcpy as well. */
8900 if ((is_gimple_min_invariant (dest)
8901 || TREE_CODE (dest) == SSA_NAME)
8902 && (is_gimple_min_invariant (src)
8903 || TREE_CODE (src) == SSA_NAME))
8905 ao_ref destr, srcr;
8906 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8907 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8908 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8910 tree fn;
8911 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8912 if (!fn)
8913 return NULL_TREE;
8914 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8918 return NULL_TREE;
8921 if (!host_integerp (len, 0))
8922 return NULL_TREE;
8923 /* FIXME:
8924 This logic lose for arguments like (type *)malloc (sizeof (type)),
8925 since we strip the casts of up to VOID return value from malloc.
8926 Perhaps we ought to inherit type from non-VOID argument here? */
8927 STRIP_NOPS (src);
8928 STRIP_NOPS (dest);
8929 if (!POINTER_TYPE_P (TREE_TYPE (src))
8930 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8931 return NULL_TREE;
8932 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8933 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8935 tree tem = TREE_OPERAND (src, 0);
8936 STRIP_NOPS (tem);
8937 if (tem != TREE_OPERAND (src, 0))
8938 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8940 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8942 tree tem = TREE_OPERAND (dest, 0);
8943 STRIP_NOPS (tem);
8944 if (tem != TREE_OPERAND (dest, 0))
8945 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8947 srctype = TREE_TYPE (TREE_TYPE (src));
8948 if (TREE_CODE (srctype) == ARRAY_TYPE
8949 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8951 srctype = TREE_TYPE (srctype);
8952 STRIP_NOPS (src);
8953 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8955 desttype = TREE_TYPE (TREE_TYPE (dest));
8956 if (TREE_CODE (desttype) == ARRAY_TYPE
8957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8959 desttype = TREE_TYPE (desttype);
8960 STRIP_NOPS (dest);
8961 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8963 if (TREE_ADDRESSABLE (srctype)
8964 || TREE_ADDRESSABLE (desttype))
8965 return NULL_TREE;
8967 src_align = get_pointer_alignment (src);
8968 dest_align = get_pointer_alignment (dest);
8969 if (dest_align < TYPE_ALIGN (desttype)
8970 || src_align < TYPE_ALIGN (srctype))
8971 return NULL_TREE;
8973 if (!ignore)
8974 dest = builtin_save_expr (dest);
8976 /* Build accesses at offset zero with a ref-all character type. */
8977 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8978 ptr_mode, true), 0);
8980 destvar = dest;
8981 STRIP_NOPS (destvar);
8982 if (TREE_CODE (destvar) == ADDR_EXPR
8983 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8985 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8986 else
8987 destvar = NULL_TREE;
8989 srcvar = src;
8990 STRIP_NOPS (srcvar);
8991 if (TREE_CODE (srcvar) == ADDR_EXPR
8992 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8993 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8995 if (!destvar
8996 || src_align >= TYPE_ALIGN (desttype))
8997 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8998 srcvar, off0);
8999 else if (!STRICT_ALIGNMENT)
9001 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9002 src_align);
9003 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
9005 else
9006 srcvar = NULL_TREE;
9008 else
9009 srcvar = NULL_TREE;
9011 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9012 return NULL_TREE;
9014 if (srcvar == NULL_TREE)
9016 STRIP_NOPS (src);
9017 if (src_align >= TYPE_ALIGN (desttype))
9018 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9019 else
9021 if (STRICT_ALIGNMENT)
9022 return NULL_TREE;
9023 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9024 src_align);
9025 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9028 else if (destvar == NULL_TREE)
9030 STRIP_NOPS (dest);
9031 if (dest_align >= TYPE_ALIGN (srctype))
9032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9033 else
9035 if (STRICT_ALIGNMENT)
9036 return NULL_TREE;
9037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9038 dest_align);
9039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9043 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9046 if (ignore)
9047 return expr;
9049 if (endp == 0 || endp == 3)
9050 return omit_one_operand_loc (loc, type, dest, expr);
9052 if (expr == len)
9053 expr = NULL_TREE;
9055 if (endp == 2)
9056 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9057 ssize_int (1));
9059 dest = fold_build_pointer_plus_loc (loc, dest, len);
9060 dest = fold_convert_loc (loc, type, dest);
9061 if (expr)
9062 dest = omit_one_operand_loc (loc, type, dest, expr);
9063 return dest;
9066 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9067 If LEN is not NULL, it represents the length of the string to be
9068 copied. Return NULL_TREE if no simplification can be made. */
9070 tree
9071 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9073 tree fn;
9075 if (!validate_arg (dest, POINTER_TYPE)
9076 || !validate_arg (src, POINTER_TYPE))
9077 return NULL_TREE;
9079 /* If SRC and DEST are the same (and not volatile), return DEST. */
9080 if (operand_equal_p (src, dest, 0))
9081 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9083 if (optimize_function_for_size_p (cfun))
9084 return NULL_TREE;
9086 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9087 if (!fn)
9088 return NULL_TREE;
9090 if (!len)
9092 len = c_strlen (src, 1);
9093 if (! len || TREE_SIDE_EFFECTS (len))
9094 return NULL_TREE;
9097 len = fold_convert_loc (loc, size_type_node, len);
9098 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9099 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9100 build_call_expr_loc (loc, fn, 3, dest, src, len));
9103 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9104 Return NULL_TREE if no simplification can be made. */
9106 static tree
9107 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9109 tree fn, len, lenp1, call, type;
9111 if (!validate_arg (dest, POINTER_TYPE)
9112 || !validate_arg (src, POINTER_TYPE))
9113 return NULL_TREE;
9115 len = c_strlen (src, 1);
9116 if (!len
9117 || TREE_CODE (len) != INTEGER_CST)
9118 return NULL_TREE;
9120 if (optimize_function_for_size_p (cfun)
9121 /* If length is zero it's small enough. */
9122 && !integer_zerop (len))
9123 return NULL_TREE;
9125 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9126 if (!fn)
9127 return NULL_TREE;
9129 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9130 fold_convert_loc (loc, size_type_node, len),
9131 build_int_cst (size_type_node, 1));
9132 /* We use dest twice in building our expression. Save it from
9133 multiple expansions. */
9134 dest = builtin_save_expr (dest);
9135 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9137 type = TREE_TYPE (TREE_TYPE (fndecl));
9138 dest = fold_build_pointer_plus_loc (loc, dest, len);
9139 dest = fold_convert_loc (loc, type, dest);
9140 dest = omit_one_operand_loc (loc, type, dest, call);
9141 return dest;
9144 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9145 If SLEN is not NULL, it represents the length of the source string.
9146 Return NULL_TREE if no simplification can be made. */
9148 tree
9149 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9150 tree src, tree len, tree slen)
9152 tree fn;
9154 if (!validate_arg (dest, POINTER_TYPE)
9155 || !validate_arg (src, POINTER_TYPE)
9156 || !validate_arg (len, INTEGER_TYPE))
9157 return NULL_TREE;
9159 /* If the LEN parameter is zero, return DEST. */
9160 if (integer_zerop (len))
9161 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9163 /* We can't compare slen with len as constants below if len is not a
9164 constant. */
9165 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9166 return NULL_TREE;
9168 if (!slen)
9169 slen = c_strlen (src, 1);
9171 /* Now, we must be passed a constant src ptr parameter. */
9172 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9173 return NULL_TREE;
9175 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9177 /* We do not support simplification of this case, though we do
9178 support it when expanding trees into RTL. */
9179 /* FIXME: generate a call to __builtin_memset. */
9180 if (tree_int_cst_lt (slen, len))
9181 return NULL_TREE;
9183 /* OK transform into builtin memcpy. */
9184 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9185 if (!fn)
9186 return NULL_TREE;
9188 len = fold_convert_loc (loc, size_type_node, len);
9189 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9190 build_call_expr_loc (loc, fn, 3, dest, src, len));
9193 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9194 arguments to the call, and TYPE is its return type.
9195 Return NULL_TREE if no simplification can be made. */
9197 static tree
9198 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9200 if (!validate_arg (arg1, POINTER_TYPE)
9201 || !validate_arg (arg2, INTEGER_TYPE)
9202 || !validate_arg (len, INTEGER_TYPE))
9203 return NULL_TREE;
9204 else
9206 const char *p1;
9208 if (TREE_CODE (arg2) != INTEGER_CST
9209 || !host_integerp (len, 1))
9210 return NULL_TREE;
9212 p1 = c_getstr (arg1);
9213 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9215 char c;
9216 const char *r;
9217 tree tem;
9219 if (target_char_cast (arg2, &c))
9220 return NULL_TREE;
9222 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9224 if (r == NULL)
9225 return build_int_cst (TREE_TYPE (arg1), 0);
9227 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9228 return fold_convert_loc (loc, type, tem);
9230 return NULL_TREE;
9234 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9235 Return NULL_TREE if no simplification can be made. */
9237 static tree
9238 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9240 const char *p1, *p2;
9242 if (!validate_arg (arg1, POINTER_TYPE)
9243 || !validate_arg (arg2, POINTER_TYPE)
9244 || !validate_arg (len, INTEGER_TYPE))
9245 return NULL_TREE;
9247 /* If the LEN parameter is zero, return zero. */
9248 if (integer_zerop (len))
9249 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9250 arg1, arg2);
9252 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9253 if (operand_equal_p (arg1, arg2, 0))
9254 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9256 p1 = c_getstr (arg1);
9257 p2 = c_getstr (arg2);
9259 /* If all arguments are constant, and the value of len is not greater
9260 than the lengths of arg1 and arg2, evaluate at compile-time. */
9261 if (host_integerp (len, 1) && p1 && p2
9262 && compare_tree_int (len, strlen (p1) + 1) <= 0
9263 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9265 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9267 if (r > 0)
9268 return integer_one_node;
9269 else if (r < 0)
9270 return integer_minus_one_node;
9271 else
9272 return integer_zero_node;
9275 /* If len parameter is one, return an expression corresponding to
9276 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9277 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9279 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9280 tree cst_uchar_ptr_node
9281 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9283 tree ind1
9284 = fold_convert_loc (loc, integer_type_node,
9285 build1 (INDIRECT_REF, cst_uchar_node,
9286 fold_convert_loc (loc,
9287 cst_uchar_ptr_node,
9288 arg1)));
9289 tree ind2
9290 = fold_convert_loc (loc, integer_type_node,
9291 build1 (INDIRECT_REF, cst_uchar_node,
9292 fold_convert_loc (loc,
9293 cst_uchar_ptr_node,
9294 arg2)));
9295 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9298 return NULL_TREE;
9301 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9302 Return NULL_TREE if no simplification can be made. */
9304 static tree
9305 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9307 const char *p1, *p2;
9309 if (!validate_arg (arg1, POINTER_TYPE)
9310 || !validate_arg (arg2, POINTER_TYPE))
9311 return NULL_TREE;
9313 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9314 if (operand_equal_p (arg1, arg2, 0))
9315 return integer_zero_node;
9317 p1 = c_getstr (arg1);
9318 p2 = c_getstr (arg2);
9320 if (p1 && p2)
9322 const int i = strcmp (p1, p2);
9323 if (i < 0)
9324 return integer_minus_one_node;
9325 else if (i > 0)
9326 return integer_one_node;
9327 else
9328 return integer_zero_node;
9331 /* If the second arg is "", return *(const unsigned char*)arg1. */
9332 if (p2 && *p2 == '\0')
9334 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9335 tree cst_uchar_ptr_node
9336 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9338 return fold_convert_loc (loc, integer_type_node,
9339 build1 (INDIRECT_REF, cst_uchar_node,
9340 fold_convert_loc (loc,
9341 cst_uchar_ptr_node,
9342 arg1)));
9345 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9346 if (p1 && *p1 == '\0')
9348 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9349 tree cst_uchar_ptr_node
9350 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9352 tree temp
9353 = fold_convert_loc (loc, integer_type_node,
9354 build1 (INDIRECT_REF, cst_uchar_node,
9355 fold_convert_loc (loc,
9356 cst_uchar_ptr_node,
9357 arg2)));
9358 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9361 return NULL_TREE;
9364 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9365 Return NULL_TREE if no simplification can be made. */
9367 static tree
9368 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9370 const char *p1, *p2;
9372 if (!validate_arg (arg1, POINTER_TYPE)
9373 || !validate_arg (arg2, POINTER_TYPE)
9374 || !validate_arg (len, INTEGER_TYPE))
9375 return NULL_TREE;
9377 /* If the LEN parameter is zero, return zero. */
9378 if (integer_zerop (len))
9379 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9380 arg1, arg2);
9382 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9383 if (operand_equal_p (arg1, arg2, 0))
9384 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9386 p1 = c_getstr (arg1);
9387 p2 = c_getstr (arg2);
9389 if (host_integerp (len, 1) && p1 && p2)
9391 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9392 if (i > 0)
9393 return integer_one_node;
9394 else if (i < 0)
9395 return integer_minus_one_node;
9396 else
9397 return integer_zero_node;
9400 /* If the second arg is "", and the length is greater than zero,
9401 return *(const unsigned char*)arg1. */
9402 if (p2 && *p2 == '\0'
9403 && TREE_CODE (len) == INTEGER_CST
9404 && tree_int_cst_sgn (len) == 1)
9406 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9407 tree cst_uchar_ptr_node
9408 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9410 return fold_convert_loc (loc, integer_type_node,
9411 build1 (INDIRECT_REF, cst_uchar_node,
9412 fold_convert_loc (loc,
9413 cst_uchar_ptr_node,
9414 arg1)));
9417 /* If the first arg is "", and the length is greater than zero,
9418 return -*(const unsigned char*)arg2. */
9419 if (p1 && *p1 == '\0'
9420 && TREE_CODE (len) == INTEGER_CST
9421 && tree_int_cst_sgn (len) == 1)
9423 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9424 tree cst_uchar_ptr_node
9425 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9427 tree temp = fold_convert_loc (loc, integer_type_node,
9428 build1 (INDIRECT_REF, cst_uchar_node,
9429 fold_convert_loc (loc,
9430 cst_uchar_ptr_node,
9431 arg2)));
9432 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9435 /* If len parameter is one, return an expression corresponding to
9436 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9437 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9439 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9440 tree cst_uchar_ptr_node
9441 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9443 tree ind1 = fold_convert_loc (loc, integer_type_node,
9444 build1 (INDIRECT_REF, cst_uchar_node,
9445 fold_convert_loc (loc,
9446 cst_uchar_ptr_node,
9447 arg1)));
9448 tree ind2 = fold_convert_loc (loc, integer_type_node,
9449 build1 (INDIRECT_REF, cst_uchar_node,
9450 fold_convert_loc (loc,
9451 cst_uchar_ptr_node,
9452 arg2)));
9453 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9456 return NULL_TREE;
9459 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9460 ARG. Return NULL_TREE if no simplification can be made. */
9462 static tree
9463 fold_builtin_signbit (location_t loc, tree arg, tree type)
9465 if (!validate_arg (arg, REAL_TYPE))
9466 return NULL_TREE;
9468 /* If ARG is a compile-time constant, determine the result. */
9469 if (TREE_CODE (arg) == REAL_CST
9470 && !TREE_OVERFLOW (arg))
9472 REAL_VALUE_TYPE c;
9474 c = TREE_REAL_CST (arg);
9475 return (REAL_VALUE_NEGATIVE (c)
9476 ? build_one_cst (type)
9477 : build_zero_cst (type));
9480 /* If ARG is non-negative, the result is always zero. */
9481 if (tree_expr_nonnegative_p (arg))
9482 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9484 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9485 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9486 return fold_convert (type,
9487 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9488 build_real (TREE_TYPE (arg), dconst0)));
9490 return NULL_TREE;
9493 /* Fold function call to builtin copysign, copysignf or copysignl with
9494 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9495 be made. */
9497 static tree
9498 fold_builtin_copysign (location_t loc, tree fndecl,
9499 tree arg1, tree arg2, tree type)
9501 tree tem;
9503 if (!validate_arg (arg1, REAL_TYPE)
9504 || !validate_arg (arg2, REAL_TYPE))
9505 return NULL_TREE;
9507 /* copysign(X,X) is X. */
9508 if (operand_equal_p (arg1, arg2, 0))
9509 return fold_convert_loc (loc, type, arg1);
9511 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9512 if (TREE_CODE (arg1) == REAL_CST
9513 && TREE_CODE (arg2) == REAL_CST
9514 && !TREE_OVERFLOW (arg1)
9515 && !TREE_OVERFLOW (arg2))
9517 REAL_VALUE_TYPE c1, c2;
9519 c1 = TREE_REAL_CST (arg1);
9520 c2 = TREE_REAL_CST (arg2);
9521 /* c1.sign := c2.sign. */
9522 real_copysign (&c1, &c2);
9523 return build_real (type, c1);
9526 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9527 Remember to evaluate Y for side-effects. */
9528 if (tree_expr_nonnegative_p (arg2))
9529 return omit_one_operand_loc (loc, type,
9530 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9531 arg2);
9533 /* Strip sign changing operations for the first argument. */
9534 tem = fold_strip_sign_ops (arg1);
9535 if (tem)
9536 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9538 return NULL_TREE;
9541 /* Fold a call to builtin isascii with argument ARG. */
9543 static tree
9544 fold_builtin_isascii (location_t loc, tree arg)
9546 if (!validate_arg (arg, INTEGER_TYPE))
9547 return NULL_TREE;
9548 else
9550 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9551 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9552 build_int_cst (integer_type_node,
9553 ~ (unsigned HOST_WIDE_INT) 0x7f));
9554 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9555 arg, integer_zero_node);
9559 /* Fold a call to builtin toascii with argument ARG. */
9561 static tree
9562 fold_builtin_toascii (location_t loc, tree arg)
9564 if (!validate_arg (arg, INTEGER_TYPE))
9565 return NULL_TREE;
9567 /* Transform toascii(c) -> (c & 0x7f). */
9568 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9569 build_int_cst (integer_type_node, 0x7f));
9572 /* Fold a call to builtin isdigit with argument ARG. */
9574 static tree
9575 fold_builtin_isdigit (location_t loc, tree arg)
9577 if (!validate_arg (arg, INTEGER_TYPE))
9578 return NULL_TREE;
9579 else
9581 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9582 /* According to the C standard, isdigit is unaffected by locale.
9583 However, it definitely is affected by the target character set. */
9584 unsigned HOST_WIDE_INT target_digit0
9585 = lang_hooks.to_target_charset ('0');
9587 if (target_digit0 == 0)
9588 return NULL_TREE;
9590 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9591 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9592 build_int_cst (unsigned_type_node, target_digit0));
9593 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9594 build_int_cst (unsigned_type_node, 9));
9598 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9600 static tree
9601 fold_builtin_fabs (location_t loc, tree arg, tree type)
9603 if (!validate_arg (arg, REAL_TYPE))
9604 return NULL_TREE;
9606 arg = fold_convert_loc (loc, type, arg);
9607 if (TREE_CODE (arg) == REAL_CST)
9608 return fold_abs_const (arg, type);
9609 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9612 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9614 static tree
9615 fold_builtin_abs (location_t loc, tree arg, tree type)
9617 if (!validate_arg (arg, INTEGER_TYPE))
9618 return NULL_TREE;
9620 arg = fold_convert_loc (loc, type, arg);
9621 if (TREE_CODE (arg) == INTEGER_CST)
9622 return fold_abs_const (arg, type);
9623 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9626 /* Fold a fma operation with arguments ARG[012]. */
9628 tree
9629 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9630 tree type, tree arg0, tree arg1, tree arg2)
9632 if (TREE_CODE (arg0) == REAL_CST
9633 && TREE_CODE (arg1) == REAL_CST
9634 && TREE_CODE (arg2) == REAL_CST)
9635 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9637 return NULL_TREE;
9640 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9642 static tree
9643 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9645 if (validate_arg (arg0, REAL_TYPE)
9646 && validate_arg(arg1, REAL_TYPE)
9647 && validate_arg(arg2, REAL_TYPE))
9649 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9650 if (tem)
9651 return tem;
9653 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9654 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9655 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9657 return NULL_TREE;
9660 /* Fold a call to builtin fmin or fmax. */
9662 static tree
9663 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9664 tree type, bool max)
9666 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9668 /* Calculate the result when the argument is a constant. */
9669 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9671 if (res)
9672 return res;
9674 /* If either argument is NaN, return the other one. Avoid the
9675 transformation if we get (and honor) a signalling NaN. Using
9676 omit_one_operand() ensures we create a non-lvalue. */
9677 if (TREE_CODE (arg0) == REAL_CST
9678 && real_isnan (&TREE_REAL_CST (arg0))
9679 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9680 || ! TREE_REAL_CST (arg0).signalling))
9681 return omit_one_operand_loc (loc, type, arg1, arg0);
9682 if (TREE_CODE (arg1) == REAL_CST
9683 && real_isnan (&TREE_REAL_CST (arg1))
9684 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9685 || ! TREE_REAL_CST (arg1).signalling))
9686 return omit_one_operand_loc (loc, type, arg0, arg1);
9688 /* Transform fmin/fmax(x,x) -> x. */
9689 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9690 return omit_one_operand_loc (loc, type, arg0, arg1);
9692 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9693 functions to return the numeric arg if the other one is NaN.
9694 These tree codes don't honor that, so only transform if
9695 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9696 handled, so we don't have to worry about it either. */
9697 if (flag_finite_math_only)
9698 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9699 fold_convert_loc (loc, type, arg0),
9700 fold_convert_loc (loc, type, arg1));
9702 return NULL_TREE;
9705 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9707 static tree
9708 fold_builtin_carg (location_t loc, tree arg, tree type)
9710 if (validate_arg (arg, COMPLEX_TYPE)
9711 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9713 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9715 if (atan2_fn)
9717 tree new_arg = builtin_save_expr (arg);
9718 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9719 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9720 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9724 return NULL_TREE;
9727 /* Fold a call to builtin logb/ilogb. */
9729 static tree
9730 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9732 if (! validate_arg (arg, REAL_TYPE))
9733 return NULL_TREE;
9735 STRIP_NOPS (arg);
9737 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9739 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9741 switch (value->cl)
9743 case rvc_nan:
9744 case rvc_inf:
9745 /* If arg is Inf or NaN and we're logb, return it. */
9746 if (TREE_CODE (rettype) == REAL_TYPE)
9748 /* For logb(-Inf) we have to return +Inf. */
9749 if (real_isinf (value) && real_isneg (value))
9751 REAL_VALUE_TYPE tem;
9752 real_inf (&tem);
9753 return build_real (rettype, tem);
9755 return fold_convert_loc (loc, rettype, arg);
9757 /* Fall through... */
9758 case rvc_zero:
9759 /* Zero may set errno and/or raise an exception for logb, also
9760 for ilogb we don't know FP_ILOGB0. */
9761 return NULL_TREE;
9762 case rvc_normal:
9763 /* For normal numbers, proceed iff radix == 2. In GCC,
9764 normalized significands are in the range [0.5, 1.0). We
9765 want the exponent as if they were [1.0, 2.0) so get the
9766 exponent and subtract 1. */
9767 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9768 return fold_convert_loc (loc, rettype,
9769 build_int_cst (integer_type_node,
9770 REAL_EXP (value)-1));
9771 break;
9775 return NULL_TREE;
9778 /* Fold a call to builtin significand, if radix == 2. */
9780 static tree
9781 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9783 if (! validate_arg (arg, REAL_TYPE))
9784 return NULL_TREE;
9786 STRIP_NOPS (arg);
9788 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9790 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9792 switch (value->cl)
9794 case rvc_zero:
9795 case rvc_nan:
9796 case rvc_inf:
9797 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9798 return fold_convert_loc (loc, rettype, arg);
9799 case rvc_normal:
9800 /* For normal numbers, proceed iff radix == 2. */
9801 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9803 REAL_VALUE_TYPE result = *value;
9804 /* In GCC, normalized significands are in the range [0.5,
9805 1.0). We want them to be [1.0, 2.0) so set the
9806 exponent to 1. */
9807 SET_REAL_EXP (&result, 1);
9808 return build_real (rettype, result);
9810 break;
9814 return NULL_TREE;
9817 /* Fold a call to builtin frexp, we can assume the base is 2. */
9819 static tree
9820 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9822 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9823 return NULL_TREE;
9825 STRIP_NOPS (arg0);
9827 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9828 return NULL_TREE;
9830 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9832 /* Proceed if a valid pointer type was passed in. */
9833 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9835 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9836 tree frac, exp;
9838 switch (value->cl)
9840 case rvc_zero:
9841 /* For +-0, return (*exp = 0, +-0). */
9842 exp = integer_zero_node;
9843 frac = arg0;
9844 break;
9845 case rvc_nan:
9846 case rvc_inf:
9847 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9848 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9849 case rvc_normal:
9851 /* Since the frexp function always expects base 2, and in
9852 GCC normalized significands are already in the range
9853 [0.5, 1.0), we have exactly what frexp wants. */
9854 REAL_VALUE_TYPE frac_rvt = *value;
9855 SET_REAL_EXP (&frac_rvt, 0);
9856 frac = build_real (rettype, frac_rvt);
9857 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9859 break;
9860 default:
9861 gcc_unreachable ();
9864 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9865 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9866 TREE_SIDE_EFFECTS (arg1) = 1;
9867 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9870 return NULL_TREE;
9873 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9874 then we can assume the base is two. If it's false, then we have to
9875 check the mode of the TYPE parameter in certain cases. */
9877 static tree
9878 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9879 tree type, bool ldexp)
9881 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9883 STRIP_NOPS (arg0);
9884 STRIP_NOPS (arg1);
9886 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9887 if (real_zerop (arg0) || integer_zerop (arg1)
9888 || (TREE_CODE (arg0) == REAL_CST
9889 && !real_isfinite (&TREE_REAL_CST (arg0))))
9890 return omit_one_operand_loc (loc, type, arg0, arg1);
9892 /* If both arguments are constant, then try to evaluate it. */
9893 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9894 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9895 && host_integerp (arg1, 0))
9897 /* Bound the maximum adjustment to twice the range of the
9898 mode's valid exponents. Use abs to ensure the range is
9899 positive as a sanity check. */
9900 const long max_exp_adj = 2 *
9901 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9902 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9904 /* Get the user-requested adjustment. */
9905 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9907 /* The requested adjustment must be inside this range. This
9908 is a preliminary cap to avoid things like overflow, we
9909 may still fail to compute the result for other reasons. */
9910 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9912 REAL_VALUE_TYPE initial_result;
9914 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9916 /* Ensure we didn't overflow. */
9917 if (! real_isinf (&initial_result))
9919 const REAL_VALUE_TYPE trunc_result
9920 = real_value_truncate (TYPE_MODE (type), initial_result);
9922 /* Only proceed if the target mode can hold the
9923 resulting value. */
9924 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9925 return build_real (type, trunc_result);
9931 return NULL_TREE;
9934 /* Fold a call to builtin modf. */
9936 static tree
9937 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9939 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9940 return NULL_TREE;
9942 STRIP_NOPS (arg0);
9944 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9945 return NULL_TREE;
9947 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9949 /* Proceed if a valid pointer type was passed in. */
9950 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9952 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9953 REAL_VALUE_TYPE trunc, frac;
9955 switch (value->cl)
9957 case rvc_nan:
9958 case rvc_zero:
9959 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9960 trunc = frac = *value;
9961 break;
9962 case rvc_inf:
9963 /* For +-Inf, return (*arg1 = arg0, +-0). */
9964 frac = dconst0;
9965 frac.sign = value->sign;
9966 trunc = *value;
9967 break;
9968 case rvc_normal:
9969 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9970 real_trunc (&trunc, VOIDmode, value);
9971 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9972 /* If the original number was negative and already
9973 integral, then the fractional part is -0.0. */
9974 if (value->sign && frac.cl == rvc_zero)
9975 frac.sign = value->sign;
9976 break;
9979 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9980 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9981 build_real (rettype, trunc));
9982 TREE_SIDE_EFFECTS (arg1) = 1;
9983 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9984 build_real (rettype, frac));
9987 return NULL_TREE;
9990 /* Given a location LOC, an interclass builtin function decl FNDECL
9991 and its single argument ARG, return an folded expression computing
9992 the same, or NULL_TREE if we either couldn't or didn't want to fold
9993 (the latter happen if there's an RTL instruction available). */
9995 static tree
9996 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9998 enum machine_mode mode;
10000 if (!validate_arg (arg, REAL_TYPE))
10001 return NULL_TREE;
10003 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
10004 return NULL_TREE;
10006 mode = TYPE_MODE (TREE_TYPE (arg));
10008 /* If there is no optab, try generic code. */
10009 switch (DECL_FUNCTION_CODE (fndecl))
10011 tree result;
10013 CASE_FLT_FN (BUILT_IN_ISINF):
10015 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10016 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10017 tree const type = TREE_TYPE (arg);
10018 REAL_VALUE_TYPE r;
10019 char buf[128];
10021 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10022 real_from_string (&r, buf);
10023 result = build_call_expr (isgr_fn, 2,
10024 fold_build1_loc (loc, ABS_EXPR, type, arg),
10025 build_real (type, r));
10026 return result;
10028 CASE_FLT_FN (BUILT_IN_FINITE):
10029 case BUILT_IN_ISFINITE:
10031 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10032 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10033 tree const type = TREE_TYPE (arg);
10034 REAL_VALUE_TYPE r;
10035 char buf[128];
10037 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10038 real_from_string (&r, buf);
10039 result = build_call_expr (isle_fn, 2,
10040 fold_build1_loc (loc, ABS_EXPR, type, arg),
10041 build_real (type, r));
10042 /*result = fold_build2_loc (loc, UNGT_EXPR,
10043 TREE_TYPE (TREE_TYPE (fndecl)),
10044 fold_build1_loc (loc, ABS_EXPR, type, arg),
10045 build_real (type, r));
10046 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10047 TREE_TYPE (TREE_TYPE (fndecl)),
10048 result);*/
10049 return result;
10051 case BUILT_IN_ISNORMAL:
10053 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10054 islessequal(fabs(x),DBL_MAX). */
10055 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10056 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10057 tree const type = TREE_TYPE (arg);
10058 REAL_VALUE_TYPE rmax, rmin;
10059 char buf[128];
10061 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10062 real_from_string (&rmax, buf);
10063 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10064 real_from_string (&rmin, buf);
10065 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10066 result = build_call_expr (isle_fn, 2, arg,
10067 build_real (type, rmax));
10068 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10069 build_call_expr (isge_fn, 2, arg,
10070 build_real (type, rmin)));
10071 return result;
10073 default:
10074 break;
10077 return NULL_TREE;
10080 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10081 ARG is the argument for the call. */
10083 static tree
10084 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10086 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10087 REAL_VALUE_TYPE r;
10089 if (!validate_arg (arg, REAL_TYPE))
10090 return NULL_TREE;
10092 switch (builtin_index)
10094 case BUILT_IN_ISINF:
10095 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10096 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10098 if (TREE_CODE (arg) == REAL_CST)
10100 r = TREE_REAL_CST (arg);
10101 if (real_isinf (&r))
10102 return real_compare (GT_EXPR, &r, &dconst0)
10103 ? integer_one_node : integer_minus_one_node;
10104 else
10105 return integer_zero_node;
10108 return NULL_TREE;
10110 case BUILT_IN_ISINF_SIGN:
10112 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10113 /* In a boolean context, GCC will fold the inner COND_EXPR to
10114 1. So e.g. "if (isinf_sign(x))" would be folded to just
10115 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10116 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10117 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10118 tree tmp = NULL_TREE;
10120 arg = builtin_save_expr (arg);
10122 if (signbit_fn && isinf_fn)
10124 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10125 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10127 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10128 signbit_call, integer_zero_node);
10129 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10130 isinf_call, integer_zero_node);
10132 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10133 integer_minus_one_node, integer_one_node);
10134 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10135 isinf_call, tmp,
10136 integer_zero_node);
10139 return tmp;
10142 case BUILT_IN_ISFINITE:
10143 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10144 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10145 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10147 if (TREE_CODE (arg) == REAL_CST)
10149 r = TREE_REAL_CST (arg);
10150 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10153 return NULL_TREE;
10155 case BUILT_IN_ISNAN:
10156 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10157 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10159 if (TREE_CODE (arg) == REAL_CST)
10161 r = TREE_REAL_CST (arg);
10162 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10165 arg = builtin_save_expr (arg);
10166 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10168 default:
10169 gcc_unreachable ();
10173 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10174 This builtin will generate code to return the appropriate floating
10175 point classification depending on the value of the floating point
10176 number passed in. The possible return values must be supplied as
10177 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10178 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10179 one floating point argument which is "type generic". */
10181 static tree
10182 fold_builtin_fpclassify (location_t loc, tree exp)
10184 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10185 arg, type, res, tmp;
10186 enum machine_mode mode;
10187 REAL_VALUE_TYPE r;
10188 char buf[128];
10190 /* Verify the required arguments in the original call. */
10191 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10192 INTEGER_TYPE, INTEGER_TYPE,
10193 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10194 return NULL_TREE;
10196 fp_nan = CALL_EXPR_ARG (exp, 0);
10197 fp_infinite = CALL_EXPR_ARG (exp, 1);
10198 fp_normal = CALL_EXPR_ARG (exp, 2);
10199 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10200 fp_zero = CALL_EXPR_ARG (exp, 4);
10201 arg = CALL_EXPR_ARG (exp, 5);
10202 type = TREE_TYPE (arg);
10203 mode = TYPE_MODE (type);
10204 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10206 /* fpclassify(x) ->
10207 isnan(x) ? FP_NAN :
10208 (fabs(x) == Inf ? FP_INFINITE :
10209 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10210 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10212 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10213 build_real (type, dconst0));
10214 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10215 tmp, fp_zero, fp_subnormal);
10217 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10218 real_from_string (&r, buf);
10219 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10220 arg, build_real (type, r));
10221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10223 if (HONOR_INFINITIES (mode))
10225 real_inf (&r);
10226 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10227 build_real (type, r));
10228 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10229 fp_infinite, res);
10232 if (HONOR_NANS (mode))
10234 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10235 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10238 return res;
10241 /* Fold a call to an unordered comparison function such as
10242 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10243 being called and ARG0 and ARG1 are the arguments for the call.
10244 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10245 the opposite of the desired result. UNORDERED_CODE is used
10246 for modes that can hold NaNs and ORDERED_CODE is used for
10247 the rest. */
10249 static tree
10250 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10251 enum tree_code unordered_code,
10252 enum tree_code ordered_code)
10254 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10255 enum tree_code code;
10256 tree type0, type1;
10257 enum tree_code code0, code1;
10258 tree cmp_type = NULL_TREE;
10260 type0 = TREE_TYPE (arg0);
10261 type1 = TREE_TYPE (arg1);
10263 code0 = TREE_CODE (type0);
10264 code1 = TREE_CODE (type1);
10266 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10267 /* Choose the wider of two real types. */
10268 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10269 ? type0 : type1;
10270 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10271 cmp_type = type0;
10272 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10273 cmp_type = type1;
10275 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10276 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10278 if (unordered_code == UNORDERED_EXPR)
10280 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10281 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10282 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10285 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10286 : ordered_code;
10287 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10288 fold_build2_loc (loc, code, type, arg0, arg1));
10291 /* Fold a call to built-in function FNDECL with 0 arguments.
10292 IGNORE is true if the result of the function call is ignored. This
10293 function returns NULL_TREE if no simplification was possible. */
10295 static tree
10296 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10298 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10299 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10300 switch (fcode)
10302 CASE_FLT_FN (BUILT_IN_INF):
10303 case BUILT_IN_INFD32:
10304 case BUILT_IN_INFD64:
10305 case BUILT_IN_INFD128:
10306 return fold_builtin_inf (loc, type, true);
10308 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10309 return fold_builtin_inf (loc, type, false);
10311 case BUILT_IN_CLASSIFY_TYPE:
10312 return fold_builtin_classify_type (NULL_TREE);
10314 case BUILT_IN_UNREACHABLE:
10315 if (flag_sanitize & SANITIZE_UNREACHABLE)
10316 return ubsan_instrument_unreachable (loc);
10317 break;
10319 default:
10320 break;
10322 return NULL_TREE;
10325 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10326 IGNORE is true if the result of the function call is ignored. This
10327 function returns NULL_TREE if no simplification was possible. */
10329 static tree
10330 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10332 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10333 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10334 switch (fcode)
10336 case BUILT_IN_CONSTANT_P:
10338 tree val = fold_builtin_constant_p (arg0);
10340 /* Gimplification will pull the CALL_EXPR for the builtin out of
10341 an if condition. When not optimizing, we'll not CSE it back.
10342 To avoid link error types of regressions, return false now. */
10343 if (!val && !optimize)
10344 val = integer_zero_node;
10346 return val;
10349 case BUILT_IN_CLASSIFY_TYPE:
10350 return fold_builtin_classify_type (arg0);
10352 case BUILT_IN_STRLEN:
10353 return fold_builtin_strlen (loc, type, arg0);
10355 CASE_FLT_FN (BUILT_IN_FABS):
10356 case BUILT_IN_FABSD32:
10357 case BUILT_IN_FABSD64:
10358 case BUILT_IN_FABSD128:
10359 return fold_builtin_fabs (loc, arg0, type);
10361 case BUILT_IN_ABS:
10362 case BUILT_IN_LABS:
10363 case BUILT_IN_LLABS:
10364 case BUILT_IN_IMAXABS:
10365 return fold_builtin_abs (loc, arg0, type);
10367 CASE_FLT_FN (BUILT_IN_CONJ):
10368 if (validate_arg (arg0, COMPLEX_TYPE)
10369 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10370 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10371 break;
10373 CASE_FLT_FN (BUILT_IN_CREAL):
10374 if (validate_arg (arg0, COMPLEX_TYPE)
10375 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10376 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10377 break;
10379 CASE_FLT_FN (BUILT_IN_CIMAG):
10380 if (validate_arg (arg0, COMPLEX_TYPE)
10381 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10382 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10383 break;
10385 CASE_FLT_FN (BUILT_IN_CCOS):
10386 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10388 CASE_FLT_FN (BUILT_IN_CCOSH):
10389 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10391 CASE_FLT_FN (BUILT_IN_CPROJ):
10392 return fold_builtin_cproj(loc, arg0, type);
10394 CASE_FLT_FN (BUILT_IN_CSIN):
10395 if (validate_arg (arg0, COMPLEX_TYPE)
10396 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10397 return do_mpc_arg1 (arg0, type, mpc_sin);
10398 break;
10400 CASE_FLT_FN (BUILT_IN_CSINH):
10401 if (validate_arg (arg0, COMPLEX_TYPE)
10402 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10403 return do_mpc_arg1 (arg0, type, mpc_sinh);
10404 break;
10406 CASE_FLT_FN (BUILT_IN_CTAN):
10407 if (validate_arg (arg0, COMPLEX_TYPE)
10408 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10409 return do_mpc_arg1 (arg0, type, mpc_tan);
10410 break;
10412 CASE_FLT_FN (BUILT_IN_CTANH):
10413 if (validate_arg (arg0, COMPLEX_TYPE)
10414 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10415 return do_mpc_arg1 (arg0, type, mpc_tanh);
10416 break;
10418 CASE_FLT_FN (BUILT_IN_CLOG):
10419 if (validate_arg (arg0, COMPLEX_TYPE)
10420 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10421 return do_mpc_arg1 (arg0, type, mpc_log);
10422 break;
10424 CASE_FLT_FN (BUILT_IN_CSQRT):
10425 if (validate_arg (arg0, COMPLEX_TYPE)
10426 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10427 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10428 break;
10430 CASE_FLT_FN (BUILT_IN_CASIN):
10431 if (validate_arg (arg0, COMPLEX_TYPE)
10432 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10433 return do_mpc_arg1 (arg0, type, mpc_asin);
10434 break;
10436 CASE_FLT_FN (BUILT_IN_CACOS):
10437 if (validate_arg (arg0, COMPLEX_TYPE)
10438 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10439 return do_mpc_arg1 (arg0, type, mpc_acos);
10440 break;
10442 CASE_FLT_FN (BUILT_IN_CATAN):
10443 if (validate_arg (arg0, COMPLEX_TYPE)
10444 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10445 return do_mpc_arg1 (arg0, type, mpc_atan);
10446 break;
10448 CASE_FLT_FN (BUILT_IN_CASINH):
10449 if (validate_arg (arg0, COMPLEX_TYPE)
10450 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10451 return do_mpc_arg1 (arg0, type, mpc_asinh);
10452 break;
10454 CASE_FLT_FN (BUILT_IN_CACOSH):
10455 if (validate_arg (arg0, COMPLEX_TYPE)
10456 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10457 return do_mpc_arg1 (arg0, type, mpc_acosh);
10458 break;
10460 CASE_FLT_FN (BUILT_IN_CATANH):
10461 if (validate_arg (arg0, COMPLEX_TYPE)
10462 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10463 return do_mpc_arg1 (arg0, type, mpc_atanh);
10464 break;
10466 CASE_FLT_FN (BUILT_IN_CABS):
10467 return fold_builtin_cabs (loc, arg0, type, fndecl);
10469 CASE_FLT_FN (BUILT_IN_CARG):
10470 return fold_builtin_carg (loc, arg0, type);
10472 CASE_FLT_FN (BUILT_IN_SQRT):
10473 return fold_builtin_sqrt (loc, arg0, type);
10475 CASE_FLT_FN (BUILT_IN_CBRT):
10476 return fold_builtin_cbrt (loc, arg0, type);
10478 CASE_FLT_FN (BUILT_IN_ASIN):
10479 if (validate_arg (arg0, REAL_TYPE))
10480 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10481 &dconstm1, &dconst1, true);
10482 break;
10484 CASE_FLT_FN (BUILT_IN_ACOS):
10485 if (validate_arg (arg0, REAL_TYPE))
10486 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10487 &dconstm1, &dconst1, true);
10488 break;
10490 CASE_FLT_FN (BUILT_IN_ATAN):
10491 if (validate_arg (arg0, REAL_TYPE))
10492 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10493 break;
10495 CASE_FLT_FN (BUILT_IN_ASINH):
10496 if (validate_arg (arg0, REAL_TYPE))
10497 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10498 break;
10500 CASE_FLT_FN (BUILT_IN_ACOSH):
10501 if (validate_arg (arg0, REAL_TYPE))
10502 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10503 &dconst1, NULL, true);
10504 break;
10506 CASE_FLT_FN (BUILT_IN_ATANH):
10507 if (validate_arg (arg0, REAL_TYPE))
10508 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10509 &dconstm1, &dconst1, false);
10510 break;
10512 CASE_FLT_FN (BUILT_IN_SIN):
10513 if (validate_arg (arg0, REAL_TYPE))
10514 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10515 break;
10517 CASE_FLT_FN (BUILT_IN_COS):
10518 return fold_builtin_cos (loc, arg0, type, fndecl);
10520 CASE_FLT_FN (BUILT_IN_TAN):
10521 return fold_builtin_tan (arg0, type);
10523 CASE_FLT_FN (BUILT_IN_CEXP):
10524 return fold_builtin_cexp (loc, arg0, type);
10526 CASE_FLT_FN (BUILT_IN_CEXPI):
10527 if (validate_arg (arg0, REAL_TYPE))
10528 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10529 break;
10531 CASE_FLT_FN (BUILT_IN_SINH):
10532 if (validate_arg (arg0, REAL_TYPE))
10533 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10534 break;
10536 CASE_FLT_FN (BUILT_IN_COSH):
10537 return fold_builtin_cosh (loc, arg0, type, fndecl);
10539 CASE_FLT_FN (BUILT_IN_TANH):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10542 break;
10544 CASE_FLT_FN (BUILT_IN_ERF):
10545 if (validate_arg (arg0, REAL_TYPE))
10546 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10547 break;
10549 CASE_FLT_FN (BUILT_IN_ERFC):
10550 if (validate_arg (arg0, REAL_TYPE))
10551 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10552 break;
10554 CASE_FLT_FN (BUILT_IN_TGAMMA):
10555 if (validate_arg (arg0, REAL_TYPE))
10556 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10557 break;
10559 CASE_FLT_FN (BUILT_IN_EXP):
10560 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10562 CASE_FLT_FN (BUILT_IN_EXP2):
10563 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10565 CASE_FLT_FN (BUILT_IN_EXP10):
10566 CASE_FLT_FN (BUILT_IN_POW10):
10567 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10569 CASE_FLT_FN (BUILT_IN_EXPM1):
10570 if (validate_arg (arg0, REAL_TYPE))
10571 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10572 break;
10574 CASE_FLT_FN (BUILT_IN_LOG):
10575 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10577 CASE_FLT_FN (BUILT_IN_LOG2):
10578 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10580 CASE_FLT_FN (BUILT_IN_LOG10):
10581 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10583 CASE_FLT_FN (BUILT_IN_LOG1P):
10584 if (validate_arg (arg0, REAL_TYPE))
10585 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10586 &dconstm1, NULL, false);
10587 break;
10589 CASE_FLT_FN (BUILT_IN_J0):
10590 if (validate_arg (arg0, REAL_TYPE))
10591 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10592 NULL, NULL, 0);
10593 break;
10595 CASE_FLT_FN (BUILT_IN_J1):
10596 if (validate_arg (arg0, REAL_TYPE))
10597 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10598 NULL, NULL, 0);
10599 break;
10601 CASE_FLT_FN (BUILT_IN_Y0):
10602 if (validate_arg (arg0, REAL_TYPE))
10603 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10604 &dconst0, NULL, false);
10605 break;
10607 CASE_FLT_FN (BUILT_IN_Y1):
10608 if (validate_arg (arg0, REAL_TYPE))
10609 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10610 &dconst0, NULL, false);
10611 break;
10613 CASE_FLT_FN (BUILT_IN_NAN):
10614 case BUILT_IN_NAND32:
10615 case BUILT_IN_NAND64:
10616 case BUILT_IN_NAND128:
10617 return fold_builtin_nan (arg0, type, true);
10619 CASE_FLT_FN (BUILT_IN_NANS):
10620 return fold_builtin_nan (arg0, type, false);
10622 CASE_FLT_FN (BUILT_IN_FLOOR):
10623 return fold_builtin_floor (loc, fndecl, arg0);
10625 CASE_FLT_FN (BUILT_IN_CEIL):
10626 return fold_builtin_ceil (loc, fndecl, arg0);
10628 CASE_FLT_FN (BUILT_IN_TRUNC):
10629 return fold_builtin_trunc (loc, fndecl, arg0);
10631 CASE_FLT_FN (BUILT_IN_ROUND):
10632 return fold_builtin_round (loc, fndecl, arg0);
10634 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10635 CASE_FLT_FN (BUILT_IN_RINT):
10636 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10638 CASE_FLT_FN (BUILT_IN_ICEIL):
10639 CASE_FLT_FN (BUILT_IN_LCEIL):
10640 CASE_FLT_FN (BUILT_IN_LLCEIL):
10641 CASE_FLT_FN (BUILT_IN_LFLOOR):
10642 CASE_FLT_FN (BUILT_IN_IFLOOR):
10643 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10644 CASE_FLT_FN (BUILT_IN_IROUND):
10645 CASE_FLT_FN (BUILT_IN_LROUND):
10646 CASE_FLT_FN (BUILT_IN_LLROUND):
10647 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10649 CASE_FLT_FN (BUILT_IN_IRINT):
10650 CASE_FLT_FN (BUILT_IN_LRINT):
10651 CASE_FLT_FN (BUILT_IN_LLRINT):
10652 return fold_fixed_mathfn (loc, fndecl, arg0);
10654 case BUILT_IN_BSWAP16:
10655 case BUILT_IN_BSWAP32:
10656 case BUILT_IN_BSWAP64:
10657 return fold_builtin_bswap (fndecl, arg0);
10659 CASE_INT_FN (BUILT_IN_FFS):
10660 CASE_INT_FN (BUILT_IN_CLZ):
10661 CASE_INT_FN (BUILT_IN_CTZ):
10662 CASE_INT_FN (BUILT_IN_CLRSB):
10663 CASE_INT_FN (BUILT_IN_POPCOUNT):
10664 CASE_INT_FN (BUILT_IN_PARITY):
10665 return fold_builtin_bitop (fndecl, arg0);
10667 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10668 return fold_builtin_signbit (loc, arg0, type);
10670 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10671 return fold_builtin_significand (loc, arg0, type);
10673 CASE_FLT_FN (BUILT_IN_ILOGB):
10674 CASE_FLT_FN (BUILT_IN_LOGB):
10675 return fold_builtin_logb (loc, arg0, type);
10677 case BUILT_IN_ISASCII:
10678 return fold_builtin_isascii (loc, arg0);
10680 case BUILT_IN_TOASCII:
10681 return fold_builtin_toascii (loc, arg0);
10683 case BUILT_IN_ISDIGIT:
10684 return fold_builtin_isdigit (loc, arg0);
10686 CASE_FLT_FN (BUILT_IN_FINITE):
10687 case BUILT_IN_FINITED32:
10688 case BUILT_IN_FINITED64:
10689 case BUILT_IN_FINITED128:
10690 case BUILT_IN_ISFINITE:
10692 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10693 if (ret)
10694 return ret;
10695 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10698 CASE_FLT_FN (BUILT_IN_ISINF):
10699 case BUILT_IN_ISINFD32:
10700 case BUILT_IN_ISINFD64:
10701 case BUILT_IN_ISINFD128:
10703 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10704 if (ret)
10705 return ret;
10706 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10709 case BUILT_IN_ISNORMAL:
10710 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10712 case BUILT_IN_ISINF_SIGN:
10713 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10715 CASE_FLT_FN (BUILT_IN_ISNAN):
10716 case BUILT_IN_ISNAND32:
10717 case BUILT_IN_ISNAND64:
10718 case BUILT_IN_ISNAND128:
10719 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10721 case BUILT_IN_PRINTF:
10722 case BUILT_IN_PRINTF_UNLOCKED:
10723 case BUILT_IN_VPRINTF:
10724 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10726 case BUILT_IN_FREE:
10727 if (integer_zerop (arg0))
10728 return build_empty_stmt (loc);
10729 break;
10731 default:
10732 break;
10735 return NULL_TREE;
10739 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10740 IGNORE is true if the result of the function call is ignored. This
10741 function returns NULL_TREE if no simplification was possible. */
10743 static tree
10744 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10746 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10747 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10749 switch (fcode)
10751 CASE_FLT_FN (BUILT_IN_JN):
10752 if (validate_arg (arg0, INTEGER_TYPE)
10753 && validate_arg (arg1, REAL_TYPE))
10754 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10755 break;
10757 CASE_FLT_FN (BUILT_IN_YN):
10758 if (validate_arg (arg0, INTEGER_TYPE)
10759 && validate_arg (arg1, REAL_TYPE))
10760 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10761 &dconst0, false);
10762 break;
10764 CASE_FLT_FN (BUILT_IN_DREM):
10765 CASE_FLT_FN (BUILT_IN_REMAINDER):
10766 if (validate_arg (arg0, REAL_TYPE)
10767 && validate_arg(arg1, REAL_TYPE))
10768 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10769 break;
10771 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10772 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10773 if (validate_arg (arg0, REAL_TYPE)
10774 && validate_arg(arg1, POINTER_TYPE))
10775 return do_mpfr_lgamma_r (arg0, arg1, type);
10776 break;
10778 CASE_FLT_FN (BUILT_IN_ATAN2):
10779 if (validate_arg (arg0, REAL_TYPE)
10780 && validate_arg(arg1, REAL_TYPE))
10781 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10782 break;
10784 CASE_FLT_FN (BUILT_IN_FDIM):
10785 if (validate_arg (arg0, REAL_TYPE)
10786 && validate_arg(arg1, REAL_TYPE))
10787 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10788 break;
10790 CASE_FLT_FN (BUILT_IN_HYPOT):
10791 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10793 CASE_FLT_FN (BUILT_IN_CPOW):
10794 if (validate_arg (arg0, COMPLEX_TYPE)
10795 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10796 && validate_arg (arg1, COMPLEX_TYPE)
10797 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10798 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10799 break;
10801 CASE_FLT_FN (BUILT_IN_LDEXP):
10802 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10803 CASE_FLT_FN (BUILT_IN_SCALBN):
10804 CASE_FLT_FN (BUILT_IN_SCALBLN):
10805 return fold_builtin_load_exponent (loc, arg0, arg1,
10806 type, /*ldexp=*/false);
10808 CASE_FLT_FN (BUILT_IN_FREXP):
10809 return fold_builtin_frexp (loc, arg0, arg1, type);
10811 CASE_FLT_FN (BUILT_IN_MODF):
10812 return fold_builtin_modf (loc, arg0, arg1, type);
10814 case BUILT_IN_BZERO:
10815 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10817 case BUILT_IN_FPUTS:
10818 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10820 case BUILT_IN_FPUTS_UNLOCKED:
10821 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10823 case BUILT_IN_STRSTR:
10824 return fold_builtin_strstr (loc, arg0, arg1, type);
10826 case BUILT_IN_STRCAT:
10827 return fold_builtin_strcat (loc, arg0, arg1);
10829 case BUILT_IN_STRSPN:
10830 return fold_builtin_strspn (loc, arg0, arg1);
10832 case BUILT_IN_STRCSPN:
10833 return fold_builtin_strcspn (loc, arg0, arg1);
10835 case BUILT_IN_STRCHR:
10836 case BUILT_IN_INDEX:
10837 return fold_builtin_strchr (loc, arg0, arg1, type);
10839 case BUILT_IN_STRRCHR:
10840 case BUILT_IN_RINDEX:
10841 return fold_builtin_strrchr (loc, arg0, arg1, type);
10843 case BUILT_IN_STRCPY:
10844 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10846 case BUILT_IN_STPCPY:
10847 if (ignore)
10849 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10850 if (!fn)
10851 break;
10853 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10855 else
10856 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10857 break;
10859 case BUILT_IN_STRCMP:
10860 return fold_builtin_strcmp (loc, arg0, arg1);
10862 case BUILT_IN_STRPBRK:
10863 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10865 case BUILT_IN_EXPECT:
10866 return fold_builtin_expect (loc, arg0, arg1);
10868 CASE_FLT_FN (BUILT_IN_POW):
10869 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10871 CASE_FLT_FN (BUILT_IN_POWI):
10872 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10874 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10875 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10877 CASE_FLT_FN (BUILT_IN_FMIN):
10878 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10880 CASE_FLT_FN (BUILT_IN_FMAX):
10881 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10883 case BUILT_IN_ISGREATER:
10884 return fold_builtin_unordered_cmp (loc, fndecl,
10885 arg0, arg1, UNLE_EXPR, LE_EXPR);
10886 case BUILT_IN_ISGREATEREQUAL:
10887 return fold_builtin_unordered_cmp (loc, fndecl,
10888 arg0, arg1, UNLT_EXPR, LT_EXPR);
10889 case BUILT_IN_ISLESS:
10890 return fold_builtin_unordered_cmp (loc, fndecl,
10891 arg0, arg1, UNGE_EXPR, GE_EXPR);
10892 case BUILT_IN_ISLESSEQUAL:
10893 return fold_builtin_unordered_cmp (loc, fndecl,
10894 arg0, arg1, UNGT_EXPR, GT_EXPR);
10895 case BUILT_IN_ISLESSGREATER:
10896 return fold_builtin_unordered_cmp (loc, fndecl,
10897 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10898 case BUILT_IN_ISUNORDERED:
10899 return fold_builtin_unordered_cmp (loc, fndecl,
10900 arg0, arg1, UNORDERED_EXPR,
10901 NOP_EXPR);
10903 /* We do the folding for va_start in the expander. */
10904 case BUILT_IN_VA_START:
10905 break;
10907 case BUILT_IN_SPRINTF:
10908 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10910 case BUILT_IN_OBJECT_SIZE:
10911 return fold_builtin_object_size (arg0, arg1);
10913 case BUILT_IN_PRINTF:
10914 case BUILT_IN_PRINTF_UNLOCKED:
10915 case BUILT_IN_VPRINTF:
10916 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10918 case BUILT_IN_PRINTF_CHK:
10919 case BUILT_IN_VPRINTF_CHK:
10920 if (!validate_arg (arg0, INTEGER_TYPE)
10921 || TREE_SIDE_EFFECTS (arg0))
10922 return NULL_TREE;
10923 else
10924 return fold_builtin_printf (loc, fndecl,
10925 arg1, NULL_TREE, ignore, fcode);
10926 break;
10928 case BUILT_IN_FPRINTF:
10929 case BUILT_IN_FPRINTF_UNLOCKED:
10930 case BUILT_IN_VFPRINTF:
10931 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10932 ignore, fcode);
10934 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10935 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10937 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10938 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10940 default:
10941 break;
10943 return NULL_TREE;
10946 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10947 and ARG2. IGNORE is true if the result of the function call is ignored.
10948 This function returns NULL_TREE if no simplification was possible. */
10950 static tree
10951 fold_builtin_3 (location_t loc, tree fndecl,
10952 tree arg0, tree arg1, tree arg2, bool ignore)
10954 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10955 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10956 switch (fcode)
10959 CASE_FLT_FN (BUILT_IN_SINCOS):
10960 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10962 CASE_FLT_FN (BUILT_IN_FMA):
10963 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10964 break;
10966 CASE_FLT_FN (BUILT_IN_REMQUO):
10967 if (validate_arg (arg0, REAL_TYPE)
10968 && validate_arg(arg1, REAL_TYPE)
10969 && validate_arg(arg2, POINTER_TYPE))
10970 return do_mpfr_remquo (arg0, arg1, arg2);
10971 break;
10973 case BUILT_IN_MEMSET:
10974 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10976 case BUILT_IN_BCOPY:
10977 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10978 void_type_node, true, /*endp=*/3);
10980 case BUILT_IN_MEMCPY:
10981 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10982 type, ignore, /*endp=*/0);
10984 case BUILT_IN_MEMPCPY:
10985 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10986 type, ignore, /*endp=*/1);
10988 case BUILT_IN_MEMMOVE:
10989 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10990 type, ignore, /*endp=*/3);
10992 case BUILT_IN_STRNCAT:
10993 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10995 case BUILT_IN_STRNCPY:
10996 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10998 case BUILT_IN_STRNCMP:
10999 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
11001 case BUILT_IN_MEMCHR:
11002 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
11004 case BUILT_IN_BCMP:
11005 case BUILT_IN_MEMCMP:
11006 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
11008 case BUILT_IN_SPRINTF:
11009 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11011 case BUILT_IN_SNPRINTF:
11012 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11014 case BUILT_IN_STRCPY_CHK:
11015 case BUILT_IN_STPCPY_CHK:
11016 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11017 ignore, fcode);
11019 case BUILT_IN_STRCAT_CHK:
11020 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11022 case BUILT_IN_PRINTF_CHK:
11023 case BUILT_IN_VPRINTF_CHK:
11024 if (!validate_arg (arg0, INTEGER_TYPE)
11025 || TREE_SIDE_EFFECTS (arg0))
11026 return NULL_TREE;
11027 else
11028 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11029 break;
11031 case BUILT_IN_FPRINTF:
11032 case BUILT_IN_FPRINTF_UNLOCKED:
11033 case BUILT_IN_VFPRINTF:
11034 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11035 ignore, fcode);
11037 case BUILT_IN_FPRINTF_CHK:
11038 case BUILT_IN_VFPRINTF_CHK:
11039 if (!validate_arg (arg1, INTEGER_TYPE)
11040 || TREE_SIDE_EFFECTS (arg1))
11041 return NULL_TREE;
11042 else
11043 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11044 ignore, fcode);
11046 default:
11047 break;
11049 return NULL_TREE;
11052 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11053 ARG2, and ARG3. IGNORE is true if the result of the function call is
11054 ignored. This function returns NULL_TREE if no simplification was
11055 possible. */
11057 static tree
11058 fold_builtin_4 (location_t loc, tree fndecl,
11059 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11061 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11063 switch (fcode)
11065 case BUILT_IN_MEMCPY_CHK:
11066 case BUILT_IN_MEMPCPY_CHK:
11067 case BUILT_IN_MEMMOVE_CHK:
11068 case BUILT_IN_MEMSET_CHK:
11069 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11070 NULL_TREE, ignore,
11071 DECL_FUNCTION_CODE (fndecl));
11073 case BUILT_IN_STRNCPY_CHK:
11074 case BUILT_IN_STPNCPY_CHK:
11075 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11076 ignore, fcode);
11078 case BUILT_IN_STRNCAT_CHK:
11079 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11081 case BUILT_IN_SNPRINTF:
11082 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11084 case BUILT_IN_FPRINTF_CHK:
11085 case BUILT_IN_VFPRINTF_CHK:
11086 if (!validate_arg (arg1, INTEGER_TYPE)
11087 || TREE_SIDE_EFFECTS (arg1))
11088 return NULL_TREE;
11089 else
11090 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11091 ignore, fcode);
11092 break;
11094 default:
11095 break;
11097 return NULL_TREE;
11100 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11101 arguments, where NARGS <= 4. IGNORE is true if the result of the
11102 function call is ignored. This function returns NULL_TREE if no
11103 simplification was possible. Note that this only folds builtins with
11104 fixed argument patterns. Foldings that do varargs-to-varargs
11105 transformations, or that match calls with more than 4 arguments,
11106 need to be handled with fold_builtin_varargs instead. */
11108 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11110 static tree
11111 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11113 tree ret = NULL_TREE;
11115 switch (nargs)
11117 case 0:
11118 ret = fold_builtin_0 (loc, fndecl, ignore);
11119 break;
11120 case 1:
11121 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11122 break;
11123 case 2:
11124 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11125 break;
11126 case 3:
11127 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11128 break;
11129 case 4:
11130 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11131 ignore);
11132 break;
11133 default:
11134 break;
11136 if (ret)
11138 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11139 SET_EXPR_LOCATION (ret, loc);
11140 TREE_NO_WARNING (ret) = 1;
11141 return ret;
11143 return NULL_TREE;
11146 /* Builtins with folding operations that operate on "..." arguments
11147 need special handling; we need to store the arguments in a convenient
11148 data structure before attempting any folding. Fortunately there are
11149 only a few builtins that fall into this category. FNDECL is the
11150 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11151 result of the function call is ignored. */
11153 static tree
11154 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11155 bool ignore ATTRIBUTE_UNUSED)
11157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11158 tree ret = NULL_TREE;
11160 switch (fcode)
11162 case BUILT_IN_SPRINTF_CHK:
11163 case BUILT_IN_VSPRINTF_CHK:
11164 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11165 break;
11167 case BUILT_IN_SNPRINTF_CHK:
11168 case BUILT_IN_VSNPRINTF_CHK:
11169 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11170 break;
11172 case BUILT_IN_FPCLASSIFY:
11173 ret = fold_builtin_fpclassify (loc, exp);
11174 break;
11176 default:
11177 break;
11179 if (ret)
11181 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11182 SET_EXPR_LOCATION (ret, loc);
11183 TREE_NO_WARNING (ret) = 1;
11184 return ret;
11186 return NULL_TREE;
11189 /* Return true if FNDECL shouldn't be folded right now.
11190 If a built-in function has an inline attribute always_inline
11191 wrapper, defer folding it after always_inline functions have
11192 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11193 might not be performed. */
11195 bool
11196 avoid_folding_inline_builtin (tree fndecl)
11198 return (DECL_DECLARED_INLINE_P (fndecl)
11199 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11200 && cfun
11201 && !cfun->always_inline_functions_inlined
11202 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11205 /* A wrapper function for builtin folding that prevents warnings for
11206 "statement without effect" and the like, caused by removing the
11207 call node earlier than the warning is generated. */
11209 tree
11210 fold_call_expr (location_t loc, tree exp, bool ignore)
11212 tree ret = NULL_TREE;
11213 tree fndecl = get_callee_fndecl (exp);
11214 if (fndecl
11215 && TREE_CODE (fndecl) == FUNCTION_DECL
11216 && DECL_BUILT_IN (fndecl)
11217 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11218 yet. Defer folding until we see all the arguments
11219 (after inlining). */
11220 && !CALL_EXPR_VA_ARG_PACK (exp))
11222 int nargs = call_expr_nargs (exp);
11224 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11225 instead last argument is __builtin_va_arg_pack (). Defer folding
11226 even in that case, until arguments are finalized. */
11227 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11229 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11230 if (fndecl2
11231 && TREE_CODE (fndecl2) == FUNCTION_DECL
11232 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11233 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11234 return NULL_TREE;
11237 if (avoid_folding_inline_builtin (fndecl))
11238 return NULL_TREE;
11240 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11241 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11242 CALL_EXPR_ARGP (exp), ignore);
11243 else
11245 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11247 tree *args = CALL_EXPR_ARGP (exp);
11248 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11250 if (!ret)
11251 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11252 if (ret)
11253 return ret;
11256 return NULL_TREE;
11259 /* Conveniently construct a function call expression. FNDECL names the
11260 function to be called and N arguments are passed in the array
11261 ARGARRAY. */
11263 tree
11264 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11266 tree fntype = TREE_TYPE (fndecl);
11267 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11269 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11272 /* Conveniently construct a function call expression. FNDECL names the
11273 function to be called and the arguments are passed in the vector
11274 VEC. */
11276 tree
11277 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11279 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11280 vec_safe_address (vec));
11284 /* Conveniently construct a function call expression. FNDECL names the
11285 function to be called, N is the number of arguments, and the "..."
11286 parameters are the argument expressions. */
11288 tree
11289 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11291 va_list ap;
11292 tree *argarray = XALLOCAVEC (tree, n);
11293 int i;
11295 va_start (ap, n);
11296 for (i = 0; i < n; i++)
11297 argarray[i] = va_arg (ap, tree);
11298 va_end (ap);
11299 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11302 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11303 varargs macros aren't supported by all bootstrap compilers. */
11305 tree
11306 build_call_expr (tree fndecl, int n, ...)
11308 va_list ap;
11309 tree *argarray = XALLOCAVEC (tree, n);
11310 int i;
11312 va_start (ap, n);
11313 for (i = 0; i < n; i++)
11314 argarray[i] = va_arg (ap, tree);
11315 va_end (ap);
11316 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11319 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11320 N arguments are passed in the array ARGARRAY. */
11322 tree
11323 fold_builtin_call_array (location_t loc, tree type,
11324 tree fn,
11325 int n,
11326 tree *argarray)
11328 tree ret = NULL_TREE;
11329 tree exp;
11331 if (TREE_CODE (fn) == ADDR_EXPR)
11333 tree fndecl = TREE_OPERAND (fn, 0);
11334 if (TREE_CODE (fndecl) == FUNCTION_DECL
11335 && DECL_BUILT_IN (fndecl))
11337 /* If last argument is __builtin_va_arg_pack (), arguments to this
11338 function are not finalized yet. Defer folding until they are. */
11339 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11341 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11342 if (fndecl2
11343 && TREE_CODE (fndecl2) == FUNCTION_DECL
11344 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11345 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11346 return build_call_array_loc (loc, type, fn, n, argarray);
11348 if (avoid_folding_inline_builtin (fndecl))
11349 return build_call_array_loc (loc, type, fn, n, argarray);
11350 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11352 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11353 if (ret)
11354 return ret;
11356 return build_call_array_loc (loc, type, fn, n, argarray);
11358 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11360 /* First try the transformations that don't require consing up
11361 an exp. */
11362 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11363 if (ret)
11364 return ret;
11367 /* If we got this far, we need to build an exp. */
11368 exp = build_call_array_loc (loc, type, fn, n, argarray);
11369 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11370 return ret ? ret : exp;
11374 return build_call_array_loc (loc, type, fn, n, argarray);
11377 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11378 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11379 of arguments in ARGS to be omitted. OLDNARGS is the number of
11380 elements in ARGS. */
11382 static tree
11383 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11384 int skip, tree fndecl, int n, va_list newargs)
11386 int nargs = oldnargs - skip + n;
11387 tree *buffer;
11389 if (n > 0)
11391 int i, j;
11393 buffer = XALLOCAVEC (tree, nargs);
11394 for (i = 0; i < n; i++)
11395 buffer[i] = va_arg (newargs, tree);
11396 for (j = skip; j < oldnargs; j++, i++)
11397 buffer[i] = args[j];
11399 else
11400 buffer = args + skip;
11402 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11405 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11406 list ARGS along with N new arguments specified as the "..."
11407 parameters. SKIP is the number of arguments in ARGS to be omitted.
11408 OLDNARGS is the number of elements in ARGS. */
11410 static tree
11411 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11412 int skip, tree fndecl, int n, ...)
11414 va_list ap;
11415 tree t;
11417 va_start (ap, n);
11418 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11419 va_end (ap);
11421 return t;
11424 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11425 along with N new arguments specified as the "..." parameters. SKIP
11426 is the number of arguments in EXP to be omitted. This function is used
11427 to do varargs-to-varargs transformations. */
11429 static tree
11430 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11432 va_list ap;
11433 tree t;
11435 va_start (ap, n);
11436 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11437 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11438 va_end (ap);
11440 return t;
11443 /* Validate a single argument ARG against a tree code CODE representing
11444 a type. */
11446 static bool
11447 validate_arg (const_tree arg, enum tree_code code)
11449 if (!arg)
11450 return false;
11451 else if (code == POINTER_TYPE)
11452 return POINTER_TYPE_P (TREE_TYPE (arg));
11453 else if (code == INTEGER_TYPE)
11454 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11455 return code == TREE_CODE (TREE_TYPE (arg));
11458 /* This function validates the types of a function call argument list
11459 against a specified list of tree_codes. If the last specifier is a 0,
11460 that represents an ellipses, otherwise the last specifier must be a
11461 VOID_TYPE.
11463 This is the GIMPLE version of validate_arglist. Eventually we want to
11464 completely convert builtins.c to work from GIMPLEs and the tree based
11465 validate_arglist will then be removed. */
11467 bool
11468 validate_gimple_arglist (const_gimple call, ...)
11470 enum tree_code code;
11471 bool res = 0;
11472 va_list ap;
11473 const_tree arg;
11474 size_t i;
11476 va_start (ap, call);
11477 i = 0;
11481 code = (enum tree_code) va_arg (ap, int);
11482 switch (code)
11484 case 0:
11485 /* This signifies an ellipses, any further arguments are all ok. */
11486 res = true;
11487 goto end;
11488 case VOID_TYPE:
11489 /* This signifies an endlink, if no arguments remain, return
11490 true, otherwise return false. */
11491 res = (i == gimple_call_num_args (call));
11492 goto end;
11493 default:
11494 /* If no parameters remain or the parameter's code does not
11495 match the specified code, return false. Otherwise continue
11496 checking any remaining arguments. */
11497 arg = gimple_call_arg (call, i++);
11498 if (!validate_arg (arg, code))
11499 goto end;
11500 break;
11503 while (1);
11505 /* We need gotos here since we can only have one VA_CLOSE in a
11506 function. */
11507 end: ;
11508 va_end (ap);
11510 return res;
11513 /* This function validates the types of a function call argument list
11514 against a specified list of tree_codes. If the last specifier is a 0,
11515 that represents an ellipses, otherwise the last specifier must be a
11516 VOID_TYPE. */
11518 bool
11519 validate_arglist (const_tree callexpr, ...)
11521 enum tree_code code;
11522 bool res = 0;
11523 va_list ap;
11524 const_call_expr_arg_iterator iter;
11525 const_tree arg;
11527 va_start (ap, callexpr);
11528 init_const_call_expr_arg_iterator (callexpr, &iter);
11532 code = (enum tree_code) va_arg (ap, int);
11533 switch (code)
11535 case 0:
11536 /* This signifies an ellipses, any further arguments are all ok. */
11537 res = true;
11538 goto end;
11539 case VOID_TYPE:
11540 /* This signifies an endlink, if no arguments remain, return
11541 true, otherwise return false. */
11542 res = !more_const_call_expr_args_p (&iter);
11543 goto end;
11544 default:
11545 /* If no parameters remain or the parameter's code does not
11546 match the specified code, return false. Otherwise continue
11547 checking any remaining arguments. */
11548 arg = next_const_call_expr_arg (&iter);
11549 if (!validate_arg (arg, code))
11550 goto end;
11551 break;
11554 while (1);
11556 /* We need gotos here since we can only have one VA_CLOSE in a
11557 function. */
11558 end: ;
11559 va_end (ap);
11561 return res;
11564 /* Default target-specific builtin expander that does nothing. */
11567 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11568 rtx target ATTRIBUTE_UNUSED,
11569 rtx subtarget ATTRIBUTE_UNUSED,
11570 enum machine_mode mode ATTRIBUTE_UNUSED,
11571 int ignore ATTRIBUTE_UNUSED)
11573 return NULL_RTX;
11576 /* Returns true is EXP represents data that would potentially reside
11577 in a readonly section. */
11579 static bool
11580 readonly_data_expr (tree exp)
11582 STRIP_NOPS (exp);
11584 if (TREE_CODE (exp) != ADDR_EXPR)
11585 return false;
11587 exp = get_base_address (TREE_OPERAND (exp, 0));
11588 if (!exp)
11589 return false;
11591 /* Make sure we call decl_readonly_section only for trees it
11592 can handle (since it returns true for everything it doesn't
11593 understand). */
11594 if (TREE_CODE (exp) == STRING_CST
11595 || TREE_CODE (exp) == CONSTRUCTOR
11596 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11597 return decl_readonly_section (exp, 0);
11598 else
11599 return false;
11602 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11603 to the call, and TYPE is its return type.
11605 Return NULL_TREE if no simplification was possible, otherwise return the
11606 simplified form of the call as a tree.
11608 The simplified form may be a constant or other expression which
11609 computes the same value, but in a more efficient manner (including
11610 calls to other builtin functions).
11612 The call may contain arguments which need to be evaluated, but
11613 which are not useful to determine the result of the call. In
11614 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11615 COMPOUND_EXPR will be an argument which must be evaluated.
11616 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11617 COMPOUND_EXPR in the chain will contain the tree for the simplified
11618 form of the builtin function call. */
11620 static tree
11621 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11623 if (!validate_arg (s1, POINTER_TYPE)
11624 || !validate_arg (s2, POINTER_TYPE))
11625 return NULL_TREE;
11626 else
11628 tree fn;
11629 const char *p1, *p2;
11631 p2 = c_getstr (s2);
11632 if (p2 == NULL)
11633 return NULL_TREE;
11635 p1 = c_getstr (s1);
11636 if (p1 != NULL)
11638 const char *r = strstr (p1, p2);
11639 tree tem;
11641 if (r == NULL)
11642 return build_int_cst (TREE_TYPE (s1), 0);
11644 /* Return an offset into the constant string argument. */
11645 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11646 return fold_convert_loc (loc, type, tem);
11649 /* The argument is const char *, and the result is char *, so we need
11650 a type conversion here to avoid a warning. */
11651 if (p2[0] == '\0')
11652 return fold_convert_loc (loc, type, s1);
11654 if (p2[1] != '\0')
11655 return NULL_TREE;
11657 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11658 if (!fn)
11659 return NULL_TREE;
11661 /* New argument list transforming strstr(s1, s2) to
11662 strchr(s1, s2[0]). */
11663 return build_call_expr_loc (loc, fn, 2, s1,
11664 build_int_cst (integer_type_node, p2[0]));
11668 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11669 the call, and TYPE is its return type.
11671 Return NULL_TREE if no simplification was possible, otherwise return the
11672 simplified form of the call as a tree.
11674 The simplified form may be a constant or other expression which
11675 computes the same value, but in a more efficient manner (including
11676 calls to other builtin functions).
11678 The call may contain arguments which need to be evaluated, but
11679 which are not useful to determine the result of the call. In
11680 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11681 COMPOUND_EXPR will be an argument which must be evaluated.
11682 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11683 COMPOUND_EXPR in the chain will contain the tree for the simplified
11684 form of the builtin function call. */
11686 static tree
11687 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11689 if (!validate_arg (s1, POINTER_TYPE)
11690 || !validate_arg (s2, INTEGER_TYPE))
11691 return NULL_TREE;
11692 else
11694 const char *p1;
11696 if (TREE_CODE (s2) != INTEGER_CST)
11697 return NULL_TREE;
11699 p1 = c_getstr (s1);
11700 if (p1 != NULL)
11702 char c;
11703 const char *r;
11704 tree tem;
11706 if (target_char_cast (s2, &c))
11707 return NULL_TREE;
11709 r = strchr (p1, c);
11711 if (r == NULL)
11712 return build_int_cst (TREE_TYPE (s1), 0);
11714 /* Return an offset into the constant string argument. */
11715 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11716 return fold_convert_loc (loc, type, tem);
11718 return NULL_TREE;
11722 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11723 the call, and TYPE is its return type.
11725 Return NULL_TREE if no simplification was possible, otherwise return the
11726 simplified form of the call as a tree.
11728 The simplified form may be a constant or other expression which
11729 computes the same value, but in a more efficient manner (including
11730 calls to other builtin functions).
11732 The call may contain arguments which need to be evaluated, but
11733 which are not useful to determine the result of the call. In
11734 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11735 COMPOUND_EXPR will be an argument which must be evaluated.
11736 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11737 COMPOUND_EXPR in the chain will contain the tree for the simplified
11738 form of the builtin function call. */
11740 static tree
11741 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11743 if (!validate_arg (s1, POINTER_TYPE)
11744 || !validate_arg (s2, INTEGER_TYPE))
11745 return NULL_TREE;
11746 else
11748 tree fn;
11749 const char *p1;
11751 if (TREE_CODE (s2) != INTEGER_CST)
11752 return NULL_TREE;
11754 p1 = c_getstr (s1);
11755 if (p1 != NULL)
11757 char c;
11758 const char *r;
11759 tree tem;
11761 if (target_char_cast (s2, &c))
11762 return NULL_TREE;
11764 r = strrchr (p1, c);
11766 if (r == NULL)
11767 return build_int_cst (TREE_TYPE (s1), 0);
11769 /* Return an offset into the constant string argument. */
11770 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11771 return fold_convert_loc (loc, type, tem);
11774 if (! integer_zerop (s2))
11775 return NULL_TREE;
11777 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11778 if (!fn)
11779 return NULL_TREE;
11781 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11782 return build_call_expr_loc (loc, fn, 2, s1, s2);
11786 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11787 to the call, and TYPE is its return type.
11789 Return NULL_TREE if no simplification was possible, otherwise return the
11790 simplified form of the call as a tree.
11792 The simplified form may be a constant or other expression which
11793 computes the same value, but in a more efficient manner (including
11794 calls to other builtin functions).
11796 The call may contain arguments which need to be evaluated, but
11797 which are not useful to determine the result of the call. In
11798 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11799 COMPOUND_EXPR will be an argument which must be evaluated.
11800 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11801 COMPOUND_EXPR in the chain will contain the tree for the simplified
11802 form of the builtin function call. */
11804 static tree
11805 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11807 if (!validate_arg (s1, POINTER_TYPE)
11808 || !validate_arg (s2, POINTER_TYPE))
11809 return NULL_TREE;
11810 else
11812 tree fn;
11813 const char *p1, *p2;
11815 p2 = c_getstr (s2);
11816 if (p2 == NULL)
11817 return NULL_TREE;
11819 p1 = c_getstr (s1);
11820 if (p1 != NULL)
11822 const char *r = strpbrk (p1, p2);
11823 tree tem;
11825 if (r == NULL)
11826 return build_int_cst (TREE_TYPE (s1), 0);
11828 /* Return an offset into the constant string argument. */
11829 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11830 return fold_convert_loc (loc, type, tem);
11833 if (p2[0] == '\0')
11834 /* strpbrk(x, "") == NULL.
11835 Evaluate and ignore s1 in case it had side-effects. */
11836 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11838 if (p2[1] != '\0')
11839 return NULL_TREE; /* Really call strpbrk. */
11841 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11842 if (!fn)
11843 return NULL_TREE;
11845 /* New argument list transforming strpbrk(s1, s2) to
11846 strchr(s1, s2[0]). */
11847 return build_call_expr_loc (loc, fn, 2, s1,
11848 build_int_cst (integer_type_node, p2[0]));
11852 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11853 to the call.
11855 Return NULL_TREE if no simplification was possible, otherwise return the
11856 simplified form of the call as a tree.
11858 The simplified form may be a constant or other expression which
11859 computes the same value, but in a more efficient manner (including
11860 calls to other builtin functions).
11862 The call may contain arguments which need to be evaluated, but
11863 which are not useful to determine the result of the call. In
11864 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11865 COMPOUND_EXPR will be an argument which must be evaluated.
11866 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11867 COMPOUND_EXPR in the chain will contain the tree for the simplified
11868 form of the builtin function call. */
11870 static tree
11871 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11873 if (!validate_arg (dst, POINTER_TYPE)
11874 || !validate_arg (src, POINTER_TYPE))
11875 return NULL_TREE;
11876 else
11878 const char *p = c_getstr (src);
11880 /* If the string length is zero, return the dst parameter. */
11881 if (p && *p == '\0')
11882 return dst;
11884 if (optimize_insn_for_speed_p ())
11886 /* See if we can store by pieces into (dst + strlen(dst)). */
11887 tree newdst, call;
11888 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11889 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11891 if (!strlen_fn || !strcpy_fn)
11892 return NULL_TREE;
11894 /* If we don't have a movstr we don't want to emit an strcpy
11895 call. We have to do that if the length of the source string
11896 isn't computable (in that case we can use memcpy probably
11897 later expanding to a sequence of mov instructions). If we
11898 have movstr instructions we can emit strcpy calls. */
11899 if (!HAVE_movstr)
11901 tree len = c_strlen (src, 1);
11902 if (! len || TREE_SIDE_EFFECTS (len))
11903 return NULL_TREE;
11906 /* Stabilize the argument list. */
11907 dst = builtin_save_expr (dst);
11909 /* Create strlen (dst). */
11910 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11911 /* Create (dst p+ strlen (dst)). */
11913 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11914 newdst = builtin_save_expr (newdst);
11916 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11917 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11919 return NULL_TREE;
11923 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11924 arguments to the call.
11926 Return NULL_TREE if no simplification was possible, otherwise return the
11927 simplified form of the call as a tree.
11929 The simplified form may be a constant or other expression which
11930 computes the same value, but in a more efficient manner (including
11931 calls to other builtin functions).
11933 The call may contain arguments which need to be evaluated, but
11934 which are not useful to determine the result of the call. In
11935 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11936 COMPOUND_EXPR will be an argument which must be evaluated.
11937 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11938 COMPOUND_EXPR in the chain will contain the tree for the simplified
11939 form of the builtin function call. */
11941 static tree
11942 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11944 if (!validate_arg (dst, POINTER_TYPE)
11945 || !validate_arg (src, POINTER_TYPE)
11946 || !validate_arg (len, INTEGER_TYPE))
11947 return NULL_TREE;
11948 else
11950 const char *p = c_getstr (src);
11952 /* If the requested length is zero, or the src parameter string
11953 length is zero, return the dst parameter. */
11954 if (integer_zerop (len) || (p && *p == '\0'))
11955 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11957 /* If the requested len is greater than or equal to the string
11958 length, call strcat. */
11959 if (TREE_CODE (len) == INTEGER_CST && p
11960 && compare_tree_int (len, strlen (p)) >= 0)
11962 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11964 /* If the replacement _DECL isn't initialized, don't do the
11965 transformation. */
11966 if (!fn)
11967 return NULL_TREE;
11969 return build_call_expr_loc (loc, fn, 2, dst, src);
11971 return NULL_TREE;
11975 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11976 to the call.
11978 Return NULL_TREE if no simplification was possible, otherwise return the
11979 simplified form of the call as a tree.
11981 The simplified form may be a constant or other expression which
11982 computes the same value, but in a more efficient manner (including
11983 calls to other builtin functions).
11985 The call may contain arguments which need to be evaluated, but
11986 which are not useful to determine the result of the call. In
11987 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11988 COMPOUND_EXPR will be an argument which must be evaluated.
11989 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11990 COMPOUND_EXPR in the chain will contain the tree for the simplified
11991 form of the builtin function call. */
11993 static tree
11994 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11996 if (!validate_arg (s1, POINTER_TYPE)
11997 || !validate_arg (s2, POINTER_TYPE))
11998 return NULL_TREE;
11999 else
12001 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12003 /* If both arguments are constants, evaluate at compile-time. */
12004 if (p1 && p2)
12006 const size_t r = strspn (p1, p2);
12007 return build_int_cst (size_type_node, r);
12010 /* If either argument is "", return NULL_TREE. */
12011 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12012 /* Evaluate and ignore both arguments in case either one has
12013 side-effects. */
12014 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12015 s1, s2);
12016 return NULL_TREE;
12020 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12021 to the call.
12023 Return NULL_TREE if no simplification was possible, otherwise return the
12024 simplified form of the call as a tree.
12026 The simplified form may be a constant or other expression which
12027 computes the same value, but in a more efficient manner (including
12028 calls to other builtin functions).
12030 The call may contain arguments which need to be evaluated, but
12031 which are not useful to determine the result of the call. In
12032 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12033 COMPOUND_EXPR will be an argument which must be evaluated.
12034 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12035 COMPOUND_EXPR in the chain will contain the tree for the simplified
12036 form of the builtin function call. */
12038 static tree
12039 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
12041 if (!validate_arg (s1, POINTER_TYPE)
12042 || !validate_arg (s2, POINTER_TYPE))
12043 return NULL_TREE;
12044 else
12046 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12048 /* If both arguments are constants, evaluate at compile-time. */
12049 if (p1 && p2)
12051 const size_t r = strcspn (p1, p2);
12052 return build_int_cst (size_type_node, r);
12055 /* If the first argument is "", return NULL_TREE. */
12056 if (p1 && *p1 == '\0')
12058 /* Evaluate and ignore argument s2 in case it has
12059 side-effects. */
12060 return omit_one_operand_loc (loc, size_type_node,
12061 size_zero_node, s2);
12064 /* If the second argument is "", return __builtin_strlen(s1). */
12065 if (p2 && *p2 == '\0')
12067 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12069 /* If the replacement _DECL isn't initialized, don't do the
12070 transformation. */
12071 if (!fn)
12072 return NULL_TREE;
12074 return build_call_expr_loc (loc, fn, 1, s1);
12076 return NULL_TREE;
12080 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12081 to the call. IGNORE is true if the value returned
12082 by the builtin will be ignored. UNLOCKED is true is true if this
12083 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12084 the known length of the string. Return NULL_TREE if no simplification
12085 was possible. */
12087 tree
12088 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12089 bool ignore, bool unlocked, tree len)
12091 /* If we're using an unlocked function, assume the other unlocked
12092 functions exist explicitly. */
12093 tree const fn_fputc = (unlocked
12094 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12095 : builtin_decl_implicit (BUILT_IN_FPUTC));
12096 tree const fn_fwrite = (unlocked
12097 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12098 : builtin_decl_implicit (BUILT_IN_FWRITE));
12100 /* If the return value is used, don't do the transformation. */
12101 if (!ignore)
12102 return NULL_TREE;
12104 /* Verify the arguments in the original call. */
12105 if (!validate_arg (arg0, POINTER_TYPE)
12106 || !validate_arg (arg1, POINTER_TYPE))
12107 return NULL_TREE;
12109 if (! len)
12110 len = c_strlen (arg0, 0);
12112 /* Get the length of the string passed to fputs. If the length
12113 can't be determined, punt. */
12114 if (!len
12115 || TREE_CODE (len) != INTEGER_CST)
12116 return NULL_TREE;
12118 switch (compare_tree_int (len, 1))
12120 case -1: /* length is 0, delete the call entirely . */
12121 return omit_one_operand_loc (loc, integer_type_node,
12122 integer_zero_node, arg1);;
12124 case 0: /* length is 1, call fputc. */
12126 const char *p = c_getstr (arg0);
12128 if (p != NULL)
12130 if (fn_fputc)
12131 return build_call_expr_loc (loc, fn_fputc, 2,
12132 build_int_cst
12133 (integer_type_node, p[0]), arg1);
12134 else
12135 return NULL_TREE;
12138 /* FALLTHROUGH */
12139 case 1: /* length is greater than 1, call fwrite. */
12141 /* If optimizing for size keep fputs. */
12142 if (optimize_function_for_size_p (cfun))
12143 return NULL_TREE;
12144 /* New argument list transforming fputs(string, stream) to
12145 fwrite(string, 1, len, stream). */
12146 if (fn_fwrite)
12147 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12148 size_one_node, len, arg1);
12149 else
12150 return NULL_TREE;
12152 default:
12153 gcc_unreachable ();
12155 return NULL_TREE;
12158 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12159 produced. False otherwise. This is done so that we don't output the error
12160 or warning twice or three times. */
12162 bool
12163 fold_builtin_next_arg (tree exp, bool va_start_p)
12165 tree fntype = TREE_TYPE (current_function_decl);
12166 int nargs = call_expr_nargs (exp);
12167 tree arg;
12168 /* There is good chance the current input_location points inside the
12169 definition of the va_start macro (perhaps on the token for
12170 builtin) in a system header, so warnings will not be emitted.
12171 Use the location in real source code. */
12172 source_location current_location =
12173 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12174 NULL);
12176 if (!stdarg_p (fntype))
12178 error ("%<va_start%> used in function with fixed args");
12179 return true;
12182 if (va_start_p)
12184 if (va_start_p && (nargs != 2))
12186 error ("wrong number of arguments to function %<va_start%>");
12187 return true;
12189 arg = CALL_EXPR_ARG (exp, 1);
12191 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12192 when we checked the arguments and if needed issued a warning. */
12193 else
12195 if (nargs == 0)
12197 /* Evidently an out of date version of <stdarg.h>; can't validate
12198 va_start's second argument, but can still work as intended. */
12199 warning_at (current_location,
12200 OPT_Wvarargs,
12201 "%<__builtin_next_arg%> called without an argument");
12202 return true;
12204 else if (nargs > 1)
12206 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12207 return true;
12209 arg = CALL_EXPR_ARG (exp, 0);
12212 if (TREE_CODE (arg) == SSA_NAME)
12213 arg = SSA_NAME_VAR (arg);
12215 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12216 or __builtin_next_arg (0) the first time we see it, after checking
12217 the arguments and if needed issuing a warning. */
12218 if (!integer_zerop (arg))
12220 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12222 /* Strip off all nops for the sake of the comparison. This
12223 is not quite the same as STRIP_NOPS. It does more.
12224 We must also strip off INDIRECT_EXPR for C++ reference
12225 parameters. */
12226 while (CONVERT_EXPR_P (arg)
12227 || TREE_CODE (arg) == INDIRECT_REF)
12228 arg = TREE_OPERAND (arg, 0);
12229 if (arg != last_parm)
12231 /* FIXME: Sometimes with the tree optimizers we can get the
12232 not the last argument even though the user used the last
12233 argument. We just warn and set the arg to be the last
12234 argument so that we will get wrong-code because of
12235 it. */
12236 warning_at (current_location,
12237 OPT_Wvarargs,
12238 "second parameter of %<va_start%> not last named argument");
12241 /* Undefined by C99 7.15.1.4p4 (va_start):
12242 "If the parameter parmN is declared with the register storage
12243 class, with a function or array type, or with a type that is
12244 not compatible with the type that results after application of
12245 the default argument promotions, the behavior is undefined."
12247 else if (DECL_REGISTER (arg))
12249 warning_at (current_location,
12250 OPT_Wvarargs,
12251 "undefined behaviour when second parameter of "
12252 "%<va_start%> is declared with %<register%> storage");
12255 /* We want to verify the second parameter just once before the tree
12256 optimizers are run and then avoid keeping it in the tree,
12257 as otherwise we could warn even for correct code like:
12258 void foo (int i, ...)
12259 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12260 if (va_start_p)
12261 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12262 else
12263 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12265 return false;
12269 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12270 ORIG may be null if this is a 2-argument call. We don't attempt to
12271 simplify calls with more than 3 arguments.
12273 Return NULL_TREE if no simplification was possible, otherwise return the
12274 simplified form of the call as a tree. If IGNORED is true, it means that
12275 the caller does not use the returned value of the function. */
12277 static tree
12278 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12279 tree orig, int ignored)
12281 tree call, retval;
12282 const char *fmt_str = NULL;
12284 /* Verify the required arguments in the original call. We deal with two
12285 types of sprintf() calls: 'sprintf (str, fmt)' and
12286 'sprintf (dest, "%s", orig)'. */
12287 if (!validate_arg (dest, POINTER_TYPE)
12288 || !validate_arg (fmt, POINTER_TYPE))
12289 return NULL_TREE;
12290 if (orig && !validate_arg (orig, POINTER_TYPE))
12291 return NULL_TREE;
12293 /* Check whether the format is a literal string constant. */
12294 fmt_str = c_getstr (fmt);
12295 if (fmt_str == NULL)
12296 return NULL_TREE;
12298 call = NULL_TREE;
12299 retval = NULL_TREE;
12301 if (!init_target_chars ())
12302 return NULL_TREE;
12304 /* If the format doesn't contain % args or %%, use strcpy. */
12305 if (strchr (fmt_str, target_percent) == NULL)
12307 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12309 if (!fn)
12310 return NULL_TREE;
12312 /* Don't optimize sprintf (buf, "abc", ptr++). */
12313 if (orig)
12314 return NULL_TREE;
12316 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12317 'format' is known to contain no % formats. */
12318 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12319 if (!ignored)
12320 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12323 /* If the format is "%s", use strcpy if the result isn't used. */
12324 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12326 tree fn;
12327 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12329 if (!fn)
12330 return NULL_TREE;
12332 /* Don't crash on sprintf (str1, "%s"). */
12333 if (!orig)
12334 return NULL_TREE;
12336 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12337 if (!ignored)
12339 retval = c_strlen (orig, 1);
12340 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12341 return NULL_TREE;
12343 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12346 if (call && retval)
12348 retval = fold_convert_loc
12349 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12350 retval);
12351 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12353 else
12354 return call;
12357 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12358 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12359 attempt to simplify calls with more than 4 arguments.
12361 Return NULL_TREE if no simplification was possible, otherwise return the
12362 simplified form of the call as a tree. If IGNORED is true, it means that
12363 the caller does not use the returned value of the function. */
12365 static tree
12366 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12367 tree orig, int ignored)
12369 tree call, retval;
12370 const char *fmt_str = NULL;
12371 unsigned HOST_WIDE_INT destlen;
12373 /* Verify the required arguments in the original call. We deal with two
12374 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12375 'snprintf (dest, cst, "%s", orig)'. */
12376 if (!validate_arg (dest, POINTER_TYPE)
12377 || !validate_arg (destsize, INTEGER_TYPE)
12378 || !validate_arg (fmt, POINTER_TYPE))
12379 return NULL_TREE;
12380 if (orig && !validate_arg (orig, POINTER_TYPE))
12381 return NULL_TREE;
12383 if (!host_integerp (destsize, 1))
12384 return NULL_TREE;
12386 /* Check whether the format is a literal string constant. */
12387 fmt_str = c_getstr (fmt);
12388 if (fmt_str == NULL)
12389 return NULL_TREE;
12391 call = NULL_TREE;
12392 retval = NULL_TREE;
12394 if (!init_target_chars ())
12395 return NULL_TREE;
12397 destlen = tree_low_cst (destsize, 1);
12399 /* If the format doesn't contain % args or %%, use strcpy. */
12400 if (strchr (fmt_str, target_percent) == NULL)
12402 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12403 size_t len = strlen (fmt_str);
12405 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12406 if (orig)
12407 return NULL_TREE;
12409 /* We could expand this as
12410 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12411 or to
12412 memcpy (str, fmt_with_nul_at_cstm1, cst);
12413 but in the former case that might increase code size
12414 and in the latter case grow .rodata section too much.
12415 So punt for now. */
12416 if (len >= destlen)
12417 return NULL_TREE;
12419 if (!fn)
12420 return NULL_TREE;
12422 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12423 'format' is known to contain no % formats and
12424 strlen (fmt) < cst. */
12425 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12427 if (!ignored)
12428 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12431 /* If the format is "%s", use strcpy if the result isn't used. */
12432 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12434 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12435 unsigned HOST_WIDE_INT origlen;
12437 /* Don't crash on snprintf (str1, cst, "%s"). */
12438 if (!orig)
12439 return NULL_TREE;
12441 retval = c_strlen (orig, 1);
12442 if (!retval || !host_integerp (retval, 1))
12443 return NULL_TREE;
12445 origlen = tree_low_cst (retval, 1);
12446 /* We could expand this as
12447 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12448 or to
12449 memcpy (str1, str2_with_nul_at_cstm1, cst);
12450 but in the former case that might increase code size
12451 and in the latter case grow .rodata section too much.
12452 So punt for now. */
12453 if (origlen >= destlen)
12454 return NULL_TREE;
12456 /* Convert snprintf (str1, cst, "%s", str2) into
12457 strcpy (str1, str2) if strlen (str2) < cst. */
12458 if (!fn)
12459 return NULL_TREE;
12461 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12463 if (ignored)
12464 retval = NULL_TREE;
12467 if (call && retval)
12469 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12470 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12471 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12473 else
12474 return call;
12477 /* Expand a call EXP to __builtin_object_size. */
12480 expand_builtin_object_size (tree exp)
12482 tree ost;
12483 int object_size_type;
12484 tree fndecl = get_callee_fndecl (exp);
12486 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12488 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12489 exp, fndecl);
12490 expand_builtin_trap ();
12491 return const0_rtx;
12494 ost = CALL_EXPR_ARG (exp, 1);
12495 STRIP_NOPS (ost);
12497 if (TREE_CODE (ost) != INTEGER_CST
12498 || tree_int_cst_sgn (ost) < 0
12499 || compare_tree_int (ost, 3) > 0)
12501 error ("%Klast argument of %D is not integer constant between 0 and 3",
12502 exp, fndecl);
12503 expand_builtin_trap ();
12504 return const0_rtx;
12507 object_size_type = tree_low_cst (ost, 0);
12509 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12512 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12513 FCODE is the BUILT_IN_* to use.
12514 Return NULL_RTX if we failed; the caller should emit a normal call,
12515 otherwise try to get the result in TARGET, if convenient (and in
12516 mode MODE if that's convenient). */
12518 static rtx
12519 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12520 enum built_in_function fcode)
12522 tree dest, src, len, size;
12524 if (!validate_arglist (exp,
12525 POINTER_TYPE,
12526 fcode == BUILT_IN_MEMSET_CHK
12527 ? INTEGER_TYPE : POINTER_TYPE,
12528 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12529 return NULL_RTX;
12531 dest = CALL_EXPR_ARG (exp, 0);
12532 src = CALL_EXPR_ARG (exp, 1);
12533 len = CALL_EXPR_ARG (exp, 2);
12534 size = CALL_EXPR_ARG (exp, 3);
12536 if (! host_integerp (size, 1))
12537 return NULL_RTX;
12539 if (host_integerp (len, 1) || integer_all_onesp (size))
12541 tree fn;
12543 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12545 warning_at (tree_nonartificial_location (exp),
12546 0, "%Kcall to %D will always overflow destination buffer",
12547 exp, get_callee_fndecl (exp));
12548 return NULL_RTX;
12551 fn = NULL_TREE;
12552 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12553 mem{cpy,pcpy,move,set} is available. */
12554 switch (fcode)
12556 case BUILT_IN_MEMCPY_CHK:
12557 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12558 break;
12559 case BUILT_IN_MEMPCPY_CHK:
12560 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12561 break;
12562 case BUILT_IN_MEMMOVE_CHK:
12563 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12564 break;
12565 case BUILT_IN_MEMSET_CHK:
12566 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12567 break;
12568 default:
12569 break;
12572 if (! fn)
12573 return NULL_RTX;
12575 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12576 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12577 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12578 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12580 else if (fcode == BUILT_IN_MEMSET_CHK)
12581 return NULL_RTX;
12582 else
12584 unsigned int dest_align = get_pointer_alignment (dest);
12586 /* If DEST is not a pointer type, call the normal function. */
12587 if (dest_align == 0)
12588 return NULL_RTX;
12590 /* If SRC and DEST are the same (and not volatile), do nothing. */
12591 if (operand_equal_p (src, dest, 0))
12593 tree expr;
12595 if (fcode != BUILT_IN_MEMPCPY_CHK)
12597 /* Evaluate and ignore LEN in case it has side-effects. */
12598 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12599 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12602 expr = fold_build_pointer_plus (dest, len);
12603 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12606 /* __memmove_chk special case. */
12607 if (fcode == BUILT_IN_MEMMOVE_CHK)
12609 unsigned int src_align = get_pointer_alignment (src);
12611 if (src_align == 0)
12612 return NULL_RTX;
12614 /* If src is categorized for a readonly section we can use
12615 normal __memcpy_chk. */
12616 if (readonly_data_expr (src))
12618 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12619 if (!fn)
12620 return NULL_RTX;
12621 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12622 dest, src, len, size);
12623 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12624 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12625 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12628 return NULL_RTX;
12632 /* Emit warning if a buffer overflow is detected at compile time. */
12634 static void
12635 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12637 int is_strlen = 0;
12638 tree len, size;
12639 location_t loc = tree_nonartificial_location (exp);
12641 switch (fcode)
12643 case BUILT_IN_STRCPY_CHK:
12644 case BUILT_IN_STPCPY_CHK:
12645 /* For __strcat_chk the warning will be emitted only if overflowing
12646 by at least strlen (dest) + 1 bytes. */
12647 case BUILT_IN_STRCAT_CHK:
12648 len = CALL_EXPR_ARG (exp, 1);
12649 size = CALL_EXPR_ARG (exp, 2);
12650 is_strlen = 1;
12651 break;
12652 case BUILT_IN_STRNCAT_CHK:
12653 case BUILT_IN_STRNCPY_CHK:
12654 case BUILT_IN_STPNCPY_CHK:
12655 len = CALL_EXPR_ARG (exp, 2);
12656 size = CALL_EXPR_ARG (exp, 3);
12657 break;
12658 case BUILT_IN_SNPRINTF_CHK:
12659 case BUILT_IN_VSNPRINTF_CHK:
12660 len = CALL_EXPR_ARG (exp, 1);
12661 size = CALL_EXPR_ARG (exp, 3);
12662 break;
12663 default:
12664 gcc_unreachable ();
12667 if (!len || !size)
12668 return;
12670 if (! host_integerp (size, 1) || integer_all_onesp (size))
12671 return;
12673 if (is_strlen)
12675 len = c_strlen (len, 1);
12676 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12677 return;
12679 else if (fcode == BUILT_IN_STRNCAT_CHK)
12681 tree src = CALL_EXPR_ARG (exp, 1);
12682 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12683 return;
12684 src = c_strlen (src, 1);
12685 if (! src || ! host_integerp (src, 1))
12687 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12688 exp, get_callee_fndecl (exp));
12689 return;
12691 else if (tree_int_cst_lt (src, size))
12692 return;
12694 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12695 return;
12697 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12698 exp, get_callee_fndecl (exp));
12701 /* Emit warning if a buffer overflow is detected at compile time
12702 in __sprintf_chk/__vsprintf_chk calls. */
12704 static void
12705 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12707 tree size, len, fmt;
12708 const char *fmt_str;
12709 int nargs = call_expr_nargs (exp);
12711 /* Verify the required arguments in the original call. */
12713 if (nargs < 4)
12714 return;
12715 size = CALL_EXPR_ARG (exp, 2);
12716 fmt = CALL_EXPR_ARG (exp, 3);
12718 if (! host_integerp (size, 1) || integer_all_onesp (size))
12719 return;
12721 /* Check whether the format is a literal string constant. */
12722 fmt_str = c_getstr (fmt);
12723 if (fmt_str == NULL)
12724 return;
12726 if (!init_target_chars ())
12727 return;
12729 /* If the format doesn't contain % args or %%, we know its size. */
12730 if (strchr (fmt_str, target_percent) == 0)
12731 len = build_int_cstu (size_type_node, strlen (fmt_str));
12732 /* If the format is "%s" and first ... argument is a string literal,
12733 we know it too. */
12734 else if (fcode == BUILT_IN_SPRINTF_CHK
12735 && strcmp (fmt_str, target_percent_s) == 0)
12737 tree arg;
12739 if (nargs < 5)
12740 return;
12741 arg = CALL_EXPR_ARG (exp, 4);
12742 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12743 return;
12745 len = c_strlen (arg, 1);
12746 if (!len || ! host_integerp (len, 1))
12747 return;
12749 else
12750 return;
12752 if (! tree_int_cst_lt (len, size))
12753 warning_at (tree_nonartificial_location (exp),
12754 0, "%Kcall to %D will always overflow destination buffer",
12755 exp, get_callee_fndecl (exp));
12758 /* Emit warning if a free is called with address of a variable. */
12760 static void
12761 maybe_emit_free_warning (tree exp)
12763 tree arg = CALL_EXPR_ARG (exp, 0);
12765 STRIP_NOPS (arg);
12766 if (TREE_CODE (arg) != ADDR_EXPR)
12767 return;
12769 arg = get_base_address (TREE_OPERAND (arg, 0));
12770 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12771 return;
12773 if (SSA_VAR_P (arg))
12774 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12775 "%Kattempt to free a non-heap object %qD", exp, arg);
12776 else
12777 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12778 "%Kattempt to free a non-heap object", exp);
12781 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12782 if possible. */
12784 tree
12785 fold_builtin_object_size (tree ptr, tree ost)
12787 unsigned HOST_WIDE_INT bytes;
12788 int object_size_type;
12790 if (!validate_arg (ptr, POINTER_TYPE)
12791 || !validate_arg (ost, INTEGER_TYPE))
12792 return NULL_TREE;
12794 STRIP_NOPS (ost);
12796 if (TREE_CODE (ost) != INTEGER_CST
12797 || tree_int_cst_sgn (ost) < 0
12798 || compare_tree_int (ost, 3) > 0)
12799 return NULL_TREE;
12801 object_size_type = tree_low_cst (ost, 0);
12803 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12804 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12805 and (size_t) 0 for types 2 and 3. */
12806 if (TREE_SIDE_EFFECTS (ptr))
12807 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12809 if (TREE_CODE (ptr) == ADDR_EXPR)
12811 bytes = compute_builtin_object_size (ptr, object_size_type);
12812 if (double_int_fits_to_tree_p (size_type_node,
12813 double_int::from_uhwi (bytes)))
12814 return build_int_cstu (size_type_node, bytes);
12816 else if (TREE_CODE (ptr) == SSA_NAME)
12818 /* If object size is not known yet, delay folding until
12819 later. Maybe subsequent passes will help determining
12820 it. */
12821 bytes = compute_builtin_object_size (ptr, object_size_type);
12822 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12823 && double_int_fits_to_tree_p (size_type_node,
12824 double_int::from_uhwi (bytes)))
12825 return build_int_cstu (size_type_node, bytes);
12828 return NULL_TREE;
12831 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12832 DEST, SRC, LEN, and SIZE are the arguments to the call.
12833 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12834 code of the builtin. If MAXLEN is not NULL, it is maximum length
12835 passed as third argument. */
12837 tree
12838 fold_builtin_memory_chk (location_t loc, tree fndecl,
12839 tree dest, tree src, tree len, tree size,
12840 tree maxlen, bool ignore,
12841 enum built_in_function fcode)
12843 tree fn;
12845 if (!validate_arg (dest, POINTER_TYPE)
12846 || !validate_arg (src,
12847 (fcode == BUILT_IN_MEMSET_CHK
12848 ? INTEGER_TYPE : POINTER_TYPE))
12849 || !validate_arg (len, INTEGER_TYPE)
12850 || !validate_arg (size, INTEGER_TYPE))
12851 return NULL_TREE;
12853 /* If SRC and DEST are the same (and not volatile), return DEST
12854 (resp. DEST+LEN for __mempcpy_chk). */
12855 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12857 if (fcode != BUILT_IN_MEMPCPY_CHK)
12858 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12859 dest, len);
12860 else
12862 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12863 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12867 if (! host_integerp (size, 1))
12868 return NULL_TREE;
12870 if (! integer_all_onesp (size))
12872 if (! host_integerp (len, 1))
12874 /* If LEN is not constant, try MAXLEN too.
12875 For MAXLEN only allow optimizing into non-_ocs function
12876 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12877 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12879 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12881 /* (void) __mempcpy_chk () can be optimized into
12882 (void) __memcpy_chk (). */
12883 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12884 if (!fn)
12885 return NULL_TREE;
12887 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12889 return NULL_TREE;
12892 else
12893 maxlen = len;
12895 if (tree_int_cst_lt (size, maxlen))
12896 return NULL_TREE;
12899 fn = NULL_TREE;
12900 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12901 mem{cpy,pcpy,move,set} is available. */
12902 switch (fcode)
12904 case BUILT_IN_MEMCPY_CHK:
12905 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12906 break;
12907 case BUILT_IN_MEMPCPY_CHK:
12908 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12909 break;
12910 case BUILT_IN_MEMMOVE_CHK:
12911 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12912 break;
12913 case BUILT_IN_MEMSET_CHK:
12914 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12915 break;
12916 default:
12917 break;
12920 if (!fn)
12921 return NULL_TREE;
12923 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12926 /* Fold a call to the __st[rp]cpy_chk builtin.
12927 DEST, SRC, and SIZE are the arguments to the call.
12928 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12929 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12930 strings passed as second argument. */
12932 tree
12933 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12934 tree src, tree size,
12935 tree maxlen, bool ignore,
12936 enum built_in_function fcode)
12938 tree len, fn;
12940 if (!validate_arg (dest, POINTER_TYPE)
12941 || !validate_arg (src, POINTER_TYPE)
12942 || !validate_arg (size, INTEGER_TYPE))
12943 return NULL_TREE;
12945 /* If SRC and DEST are the same (and not volatile), return DEST. */
12946 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12947 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12949 if (! host_integerp (size, 1))
12950 return NULL_TREE;
12952 if (! integer_all_onesp (size))
12954 len = c_strlen (src, 1);
12955 if (! len || ! host_integerp (len, 1))
12957 /* If LEN is not constant, try MAXLEN too.
12958 For MAXLEN only allow optimizing into non-_ocs function
12959 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12960 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12962 if (fcode == BUILT_IN_STPCPY_CHK)
12964 if (! ignore)
12965 return NULL_TREE;
12967 /* If return value of __stpcpy_chk is ignored,
12968 optimize into __strcpy_chk. */
12969 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12970 if (!fn)
12971 return NULL_TREE;
12973 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12976 if (! len || TREE_SIDE_EFFECTS (len))
12977 return NULL_TREE;
12979 /* If c_strlen returned something, but not a constant,
12980 transform __strcpy_chk into __memcpy_chk. */
12981 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12982 if (!fn)
12983 return NULL_TREE;
12985 len = fold_convert_loc (loc, size_type_node, len);
12986 len = size_binop_loc (loc, PLUS_EXPR, len,
12987 build_int_cst (size_type_node, 1));
12988 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12989 build_call_expr_loc (loc, fn, 4,
12990 dest, src, len, size));
12993 else
12994 maxlen = len;
12996 if (! tree_int_cst_lt (maxlen, size))
12997 return NULL_TREE;
13000 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
13001 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
13002 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
13003 if (!fn)
13004 return NULL_TREE;
13006 return build_call_expr_loc (loc, fn, 2, dest, src);
13009 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
13010 are the arguments to the call. If MAXLEN is not NULL, it is maximum
13011 length passed as third argument. IGNORE is true if return value can be
13012 ignored. FCODE is the BUILT_IN_* code of the builtin. */
13014 tree
13015 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
13016 tree len, tree size, tree maxlen, bool ignore,
13017 enum built_in_function fcode)
13019 tree fn;
13021 if (!validate_arg (dest, POINTER_TYPE)
13022 || !validate_arg (src, POINTER_TYPE)
13023 || !validate_arg (len, INTEGER_TYPE)
13024 || !validate_arg (size, INTEGER_TYPE))
13025 return NULL_TREE;
13027 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13029 /* If return value of __stpncpy_chk is ignored,
13030 optimize into __strncpy_chk. */
13031 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13032 if (fn)
13033 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13036 if (! host_integerp (size, 1))
13037 return NULL_TREE;
13039 if (! integer_all_onesp (size))
13041 if (! host_integerp (len, 1))
13043 /* If LEN is not constant, try MAXLEN too.
13044 For MAXLEN only allow optimizing into non-_ocs function
13045 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13046 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13047 return NULL_TREE;
13049 else
13050 maxlen = len;
13052 if (tree_int_cst_lt (size, maxlen))
13053 return NULL_TREE;
13056 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13057 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13058 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13059 if (!fn)
13060 return NULL_TREE;
13062 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13065 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13066 are the arguments to the call. */
13068 static tree
13069 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13070 tree src, tree size)
13072 tree fn;
13073 const char *p;
13075 if (!validate_arg (dest, POINTER_TYPE)
13076 || !validate_arg (src, POINTER_TYPE)
13077 || !validate_arg (size, INTEGER_TYPE))
13078 return NULL_TREE;
13080 p = c_getstr (src);
13081 /* If the SRC parameter is "", return DEST. */
13082 if (p && *p == '\0')
13083 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13085 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13086 return NULL_TREE;
13088 /* If __builtin_strcat_chk is used, assume strcat is available. */
13089 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13090 if (!fn)
13091 return NULL_TREE;
13093 return build_call_expr_loc (loc, fn, 2, dest, src);
13096 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13097 LEN, and SIZE. */
13099 static tree
13100 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13101 tree dest, tree src, tree len, tree size)
13103 tree fn;
13104 const char *p;
13106 if (!validate_arg (dest, POINTER_TYPE)
13107 || !validate_arg (src, POINTER_TYPE)
13108 || !validate_arg (size, INTEGER_TYPE)
13109 || !validate_arg (size, INTEGER_TYPE))
13110 return NULL_TREE;
13112 p = c_getstr (src);
13113 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13114 if (p && *p == '\0')
13115 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13116 else if (integer_zerop (len))
13117 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13119 if (! host_integerp (size, 1))
13120 return NULL_TREE;
13122 if (! integer_all_onesp (size))
13124 tree src_len = c_strlen (src, 1);
13125 if (src_len
13126 && host_integerp (src_len, 1)
13127 && host_integerp (len, 1)
13128 && ! tree_int_cst_lt (len, src_len))
13130 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13131 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13132 if (!fn)
13133 return NULL_TREE;
13135 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13137 return NULL_TREE;
13140 /* If __builtin_strncat_chk is used, assume strncat is available. */
13141 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13142 if (!fn)
13143 return NULL_TREE;
13145 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13148 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13149 Return NULL_TREE if a normal call should be emitted rather than
13150 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13151 or BUILT_IN_VSPRINTF_CHK. */
13153 static tree
13154 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13155 enum built_in_function fcode)
13157 tree dest, size, len, fn, fmt, flag;
13158 const char *fmt_str;
13160 /* Verify the required arguments in the original call. */
13161 if (nargs < 4)
13162 return NULL_TREE;
13163 dest = args[0];
13164 if (!validate_arg (dest, POINTER_TYPE))
13165 return NULL_TREE;
13166 flag = args[1];
13167 if (!validate_arg (flag, INTEGER_TYPE))
13168 return NULL_TREE;
13169 size = args[2];
13170 if (!validate_arg (size, INTEGER_TYPE))
13171 return NULL_TREE;
13172 fmt = args[3];
13173 if (!validate_arg (fmt, POINTER_TYPE))
13174 return NULL_TREE;
13176 if (! host_integerp (size, 1))
13177 return NULL_TREE;
13179 len = NULL_TREE;
13181 if (!init_target_chars ())
13182 return NULL_TREE;
13184 /* Check whether the format is a literal string constant. */
13185 fmt_str = c_getstr (fmt);
13186 if (fmt_str != NULL)
13188 /* If the format doesn't contain % args or %%, we know the size. */
13189 if (strchr (fmt_str, target_percent) == 0)
13191 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13192 len = build_int_cstu (size_type_node, strlen (fmt_str));
13194 /* If the format is "%s" and first ... argument is a string literal,
13195 we know the size too. */
13196 else if (fcode == BUILT_IN_SPRINTF_CHK
13197 && strcmp (fmt_str, target_percent_s) == 0)
13199 tree arg;
13201 if (nargs == 5)
13203 arg = args[4];
13204 if (validate_arg (arg, POINTER_TYPE))
13206 len = c_strlen (arg, 1);
13207 if (! len || ! host_integerp (len, 1))
13208 len = NULL_TREE;
13214 if (! integer_all_onesp (size))
13216 if (! len || ! tree_int_cst_lt (len, size))
13217 return NULL_TREE;
13220 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13221 or if format doesn't contain % chars or is "%s". */
13222 if (! integer_zerop (flag))
13224 if (fmt_str == NULL)
13225 return NULL_TREE;
13226 if (strchr (fmt_str, target_percent) != NULL
13227 && strcmp (fmt_str, target_percent_s))
13228 return NULL_TREE;
13231 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13232 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13233 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13234 if (!fn)
13235 return NULL_TREE;
13237 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13240 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13241 a normal call should be emitted rather than expanding the function
13242 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13244 static tree
13245 fold_builtin_sprintf_chk (location_t loc, tree exp,
13246 enum built_in_function fcode)
13248 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13249 CALL_EXPR_ARGP (exp), fcode);
13252 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13253 NULL_TREE if a normal call should be emitted rather than expanding
13254 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13255 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13256 passed as second argument. */
13258 static tree
13259 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13260 tree maxlen, enum built_in_function fcode)
13262 tree dest, size, len, fn, fmt, flag;
13263 const char *fmt_str;
13265 /* Verify the required arguments in the original call. */
13266 if (nargs < 5)
13267 return NULL_TREE;
13268 dest = args[0];
13269 if (!validate_arg (dest, POINTER_TYPE))
13270 return NULL_TREE;
13271 len = args[1];
13272 if (!validate_arg (len, INTEGER_TYPE))
13273 return NULL_TREE;
13274 flag = args[2];
13275 if (!validate_arg (flag, INTEGER_TYPE))
13276 return NULL_TREE;
13277 size = args[3];
13278 if (!validate_arg (size, INTEGER_TYPE))
13279 return NULL_TREE;
13280 fmt = args[4];
13281 if (!validate_arg (fmt, POINTER_TYPE))
13282 return NULL_TREE;
13284 if (! host_integerp (size, 1))
13285 return NULL_TREE;
13287 if (! integer_all_onesp (size))
13289 if (! host_integerp (len, 1))
13291 /* If LEN is not constant, try MAXLEN too.
13292 For MAXLEN only allow optimizing into non-_ocs function
13293 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13294 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13295 return NULL_TREE;
13297 else
13298 maxlen = len;
13300 if (tree_int_cst_lt (size, maxlen))
13301 return NULL_TREE;
13304 if (!init_target_chars ())
13305 return NULL_TREE;
13307 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13308 or if format doesn't contain % chars or is "%s". */
13309 if (! integer_zerop (flag))
13311 fmt_str = c_getstr (fmt);
13312 if (fmt_str == NULL)
13313 return NULL_TREE;
13314 if (strchr (fmt_str, target_percent) != NULL
13315 && strcmp (fmt_str, target_percent_s))
13316 return NULL_TREE;
13319 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13320 available. */
13321 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13322 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13323 if (!fn)
13324 return NULL_TREE;
13326 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13329 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13330 a normal call should be emitted rather than expanding the function
13331 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13332 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13333 passed as second argument. */
13335 tree
13336 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13337 enum built_in_function fcode)
13339 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13340 CALL_EXPR_ARGP (exp), maxlen, fcode);
13343 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13344 FMT and ARG are the arguments to the call; we don't fold cases with
13345 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13347 Return NULL_TREE if no simplification was possible, otherwise return the
13348 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13349 code of the function to be simplified. */
13351 static tree
13352 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13353 tree arg, bool ignore,
13354 enum built_in_function fcode)
13356 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13357 const char *fmt_str = NULL;
13359 /* If the return value is used, don't do the transformation. */
13360 if (! ignore)
13361 return NULL_TREE;
13363 /* Verify the required arguments in the original call. */
13364 if (!validate_arg (fmt, POINTER_TYPE))
13365 return NULL_TREE;
13367 /* Check whether the format is a literal string constant. */
13368 fmt_str = c_getstr (fmt);
13369 if (fmt_str == NULL)
13370 return NULL_TREE;
13372 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13374 /* If we're using an unlocked function, assume the other
13375 unlocked functions exist explicitly. */
13376 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13377 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13379 else
13381 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13382 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13385 if (!init_target_chars ())
13386 return NULL_TREE;
13388 if (strcmp (fmt_str, target_percent_s) == 0
13389 || strchr (fmt_str, target_percent) == NULL)
13391 const char *str;
13393 if (strcmp (fmt_str, target_percent_s) == 0)
13395 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13396 return NULL_TREE;
13398 if (!arg || !validate_arg (arg, POINTER_TYPE))
13399 return NULL_TREE;
13401 str = c_getstr (arg);
13402 if (str == NULL)
13403 return NULL_TREE;
13405 else
13407 /* The format specifier doesn't contain any '%' characters. */
13408 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13409 && arg)
13410 return NULL_TREE;
13411 str = fmt_str;
13414 /* If the string was "", printf does nothing. */
13415 if (str[0] == '\0')
13416 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13418 /* If the string has length of 1, call putchar. */
13419 if (str[1] == '\0')
13421 /* Given printf("c"), (where c is any one character,)
13422 convert "c"[0] to an int and pass that to the replacement
13423 function. */
13424 newarg = build_int_cst (integer_type_node, str[0]);
13425 if (fn_putchar)
13426 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13428 else
13430 /* If the string was "string\n", call puts("string"). */
13431 size_t len = strlen (str);
13432 if ((unsigned char)str[len - 1] == target_newline
13433 && (size_t) (int) len == len
13434 && (int) len > 0)
13436 char *newstr;
13437 tree offset_node, string_cst;
13439 /* Create a NUL-terminated string that's one char shorter
13440 than the original, stripping off the trailing '\n'. */
13441 newarg = build_string_literal (len, str);
13442 string_cst = string_constant (newarg, &offset_node);
13443 gcc_checking_assert (string_cst
13444 && (TREE_STRING_LENGTH (string_cst)
13445 == (int) len)
13446 && integer_zerop (offset_node)
13447 && (unsigned char)
13448 TREE_STRING_POINTER (string_cst)[len - 1]
13449 == target_newline);
13450 /* build_string_literal creates a new STRING_CST,
13451 modify it in place to avoid double copying. */
13452 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13453 newstr[len - 1] = '\0';
13454 if (fn_puts)
13455 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13457 else
13458 /* We'd like to arrange to call fputs(string,stdout) here,
13459 but we need stdout and don't have a way to get it yet. */
13460 return NULL_TREE;
13464 /* The other optimizations can be done only on the non-va_list variants. */
13465 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13466 return NULL_TREE;
13468 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13469 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13471 if (!arg || !validate_arg (arg, POINTER_TYPE))
13472 return NULL_TREE;
13473 if (fn_puts)
13474 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13477 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13478 else if (strcmp (fmt_str, target_percent_c) == 0)
13480 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13481 return NULL_TREE;
13482 if (fn_putchar)
13483 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13486 if (!call)
13487 return NULL_TREE;
13489 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13492 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13493 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13494 more than 3 arguments, and ARG may be null in the 2-argument case.
13496 Return NULL_TREE if no simplification was possible, otherwise return the
13497 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13498 code of the function to be simplified. */
13500 static tree
13501 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13502 tree fmt, tree arg, bool ignore,
13503 enum built_in_function fcode)
13505 tree fn_fputc, fn_fputs, call = NULL_TREE;
13506 const char *fmt_str = NULL;
13508 /* If the return value is used, don't do the transformation. */
13509 if (! ignore)
13510 return NULL_TREE;
13512 /* Verify the required arguments in the original call. */
13513 if (!validate_arg (fp, POINTER_TYPE))
13514 return NULL_TREE;
13515 if (!validate_arg (fmt, POINTER_TYPE))
13516 return NULL_TREE;
13518 /* Check whether the format is a literal string constant. */
13519 fmt_str = c_getstr (fmt);
13520 if (fmt_str == NULL)
13521 return NULL_TREE;
13523 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13525 /* If we're using an unlocked function, assume the other
13526 unlocked functions exist explicitly. */
13527 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13528 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13530 else
13532 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13533 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13536 if (!init_target_chars ())
13537 return NULL_TREE;
13539 /* If the format doesn't contain % args or %%, use strcpy. */
13540 if (strchr (fmt_str, target_percent) == NULL)
13542 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13543 && arg)
13544 return NULL_TREE;
13546 /* If the format specifier was "", fprintf does nothing. */
13547 if (fmt_str[0] == '\0')
13549 /* If FP has side-effects, just wait until gimplification is
13550 done. */
13551 if (TREE_SIDE_EFFECTS (fp))
13552 return NULL_TREE;
13554 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13557 /* When "string" doesn't contain %, replace all cases of
13558 fprintf (fp, string) with fputs (string, fp). The fputs
13559 builtin will take care of special cases like length == 1. */
13560 if (fn_fputs)
13561 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13564 /* The other optimizations can be done only on the non-va_list variants. */
13565 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13566 return NULL_TREE;
13568 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13569 else if (strcmp (fmt_str, target_percent_s) == 0)
13571 if (!arg || !validate_arg (arg, POINTER_TYPE))
13572 return NULL_TREE;
13573 if (fn_fputs)
13574 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13577 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13578 else if (strcmp (fmt_str, target_percent_c) == 0)
13580 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13581 return NULL_TREE;
13582 if (fn_fputc)
13583 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13586 if (!call)
13587 return NULL_TREE;
13588 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13591 /* Initialize format string characters in the target charset. */
13593 static bool
13594 init_target_chars (void)
13596 static bool init;
13597 if (!init)
13599 target_newline = lang_hooks.to_target_charset ('\n');
13600 target_percent = lang_hooks.to_target_charset ('%');
13601 target_c = lang_hooks.to_target_charset ('c');
13602 target_s = lang_hooks.to_target_charset ('s');
13603 if (target_newline == 0 || target_percent == 0 || target_c == 0
13604 || target_s == 0)
13605 return false;
13607 target_percent_c[0] = target_percent;
13608 target_percent_c[1] = target_c;
13609 target_percent_c[2] = '\0';
13611 target_percent_s[0] = target_percent;
13612 target_percent_s[1] = target_s;
13613 target_percent_s[2] = '\0';
13615 target_percent_s_newline[0] = target_percent;
13616 target_percent_s_newline[1] = target_s;
13617 target_percent_s_newline[2] = target_newline;
13618 target_percent_s_newline[3] = '\0';
13620 init = true;
13622 return true;
13625 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13626 and no overflow/underflow occurred. INEXACT is true if M was not
13627 exactly calculated. TYPE is the tree type for the result. This
13628 function assumes that you cleared the MPFR flags and then
13629 calculated M to see if anything subsequently set a flag prior to
13630 entering this function. Return NULL_TREE if any checks fail. */
13632 static tree
13633 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13635 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13636 overflow/underflow occurred. If -frounding-math, proceed iff the
13637 result of calling FUNC was exact. */
13638 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13639 && (!flag_rounding_math || !inexact))
13641 REAL_VALUE_TYPE rr;
13643 real_from_mpfr (&rr, m, type, GMP_RNDN);
13644 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13645 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13646 but the mpft_t is not, then we underflowed in the
13647 conversion. */
13648 if (real_isfinite (&rr)
13649 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13651 REAL_VALUE_TYPE rmode;
13653 real_convert (&rmode, TYPE_MODE (type), &rr);
13654 /* Proceed iff the specified mode can hold the value. */
13655 if (real_identical (&rmode, &rr))
13656 return build_real (type, rmode);
13659 return NULL_TREE;
13662 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13663 number and no overflow/underflow occurred. INEXACT is true if M
13664 was not exactly calculated. TYPE is the tree type for the result.
13665 This function assumes that you cleared the MPFR flags and then
13666 calculated M to see if anything subsequently set a flag prior to
13667 entering this function. Return NULL_TREE if any checks fail, if
13668 FORCE_CONVERT is true, then bypass the checks. */
13670 static tree
13671 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13673 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13674 overflow/underflow occurred. If -frounding-math, proceed iff the
13675 result of calling FUNC was exact. */
13676 if (force_convert
13677 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13678 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13679 && (!flag_rounding_math || !inexact)))
13681 REAL_VALUE_TYPE re, im;
13683 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13684 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13685 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13686 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13687 but the mpft_t is not, then we underflowed in the
13688 conversion. */
13689 if (force_convert
13690 || (real_isfinite (&re) && real_isfinite (&im)
13691 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13692 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13694 REAL_VALUE_TYPE re_mode, im_mode;
13696 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13697 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13698 /* Proceed iff the specified mode can hold the value. */
13699 if (force_convert
13700 || (real_identical (&re_mode, &re)
13701 && real_identical (&im_mode, &im)))
13702 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13703 build_real (TREE_TYPE (type), im_mode));
13706 return NULL_TREE;
13709 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13710 FUNC on it and return the resulting value as a tree with type TYPE.
13711 If MIN and/or MAX are not NULL, then the supplied ARG must be
13712 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13713 acceptable values, otherwise they are not. The mpfr precision is
13714 set to the precision of TYPE. We assume that function FUNC returns
13715 zero if the result could be calculated exactly within the requested
13716 precision. */
13718 static tree
13719 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13720 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13721 bool inclusive)
13723 tree result = NULL_TREE;
13725 STRIP_NOPS (arg);
13727 /* To proceed, MPFR must exactly represent the target floating point
13728 format, which only happens when the target base equals two. */
13729 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13730 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13732 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13734 if (real_isfinite (ra)
13735 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13736 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13738 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13739 const int prec = fmt->p;
13740 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13741 int inexact;
13742 mpfr_t m;
13744 mpfr_init2 (m, prec);
13745 mpfr_from_real (m, ra, GMP_RNDN);
13746 mpfr_clear_flags ();
13747 inexact = func (m, m, rnd);
13748 result = do_mpfr_ckconv (m, type, inexact);
13749 mpfr_clear (m);
13753 return result;
13756 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13757 FUNC on it and return the resulting value as a tree with type TYPE.
13758 The mpfr precision is set to the precision of TYPE. We assume that
13759 function FUNC returns zero if the result could be calculated
13760 exactly within the requested precision. */
13762 static tree
13763 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13764 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13766 tree result = NULL_TREE;
13768 STRIP_NOPS (arg1);
13769 STRIP_NOPS (arg2);
13771 /* To proceed, MPFR must exactly represent the target floating point
13772 format, which only happens when the target base equals two. */
13773 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13774 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13775 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13777 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13778 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13780 if (real_isfinite (ra1) && real_isfinite (ra2))
13782 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13783 const int prec = fmt->p;
13784 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13785 int inexact;
13786 mpfr_t m1, m2;
13788 mpfr_inits2 (prec, m1, m2, NULL);
13789 mpfr_from_real (m1, ra1, GMP_RNDN);
13790 mpfr_from_real (m2, ra2, GMP_RNDN);
13791 mpfr_clear_flags ();
13792 inexact = func (m1, m1, m2, rnd);
13793 result = do_mpfr_ckconv (m1, type, inexact);
13794 mpfr_clears (m1, m2, NULL);
13798 return result;
13801 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13802 FUNC on it and return the resulting value as a tree with type TYPE.
13803 The mpfr precision is set to the precision of TYPE. We assume that
13804 function FUNC returns zero if the result could be calculated
13805 exactly within the requested precision. */
13807 static tree
13808 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13809 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13811 tree result = NULL_TREE;
13813 STRIP_NOPS (arg1);
13814 STRIP_NOPS (arg2);
13815 STRIP_NOPS (arg3);
13817 /* To proceed, MPFR must exactly represent the target floating point
13818 format, which only happens when the target base equals two. */
13819 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13820 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13821 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13822 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13824 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13825 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13826 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13828 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13830 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13831 const int prec = fmt->p;
13832 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13833 int inexact;
13834 mpfr_t m1, m2, m3;
13836 mpfr_inits2 (prec, m1, m2, m3, NULL);
13837 mpfr_from_real (m1, ra1, GMP_RNDN);
13838 mpfr_from_real (m2, ra2, GMP_RNDN);
13839 mpfr_from_real (m3, ra3, GMP_RNDN);
13840 mpfr_clear_flags ();
13841 inexact = func (m1, m1, m2, m3, rnd);
13842 result = do_mpfr_ckconv (m1, type, inexact);
13843 mpfr_clears (m1, m2, m3, NULL);
13847 return result;
13850 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13851 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13852 If ARG_SINP and ARG_COSP are NULL then the result is returned
13853 as a complex value.
13854 The type is taken from the type of ARG and is used for setting the
13855 precision of the calculation and results. */
13857 static tree
13858 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13860 tree const type = TREE_TYPE (arg);
13861 tree result = NULL_TREE;
13863 STRIP_NOPS (arg);
13865 /* To proceed, MPFR must exactly represent the target floating point
13866 format, which only happens when the target base equals two. */
13867 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13868 && TREE_CODE (arg) == REAL_CST
13869 && !TREE_OVERFLOW (arg))
13871 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13873 if (real_isfinite (ra))
13875 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13876 const int prec = fmt->p;
13877 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13878 tree result_s, result_c;
13879 int inexact;
13880 mpfr_t m, ms, mc;
13882 mpfr_inits2 (prec, m, ms, mc, NULL);
13883 mpfr_from_real (m, ra, GMP_RNDN);
13884 mpfr_clear_flags ();
13885 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13886 result_s = do_mpfr_ckconv (ms, type, inexact);
13887 result_c = do_mpfr_ckconv (mc, type, inexact);
13888 mpfr_clears (m, ms, mc, NULL);
13889 if (result_s && result_c)
13891 /* If we are to return in a complex value do so. */
13892 if (!arg_sinp && !arg_cosp)
13893 return build_complex (build_complex_type (type),
13894 result_c, result_s);
13896 /* Dereference the sin/cos pointer arguments. */
13897 arg_sinp = build_fold_indirect_ref (arg_sinp);
13898 arg_cosp = build_fold_indirect_ref (arg_cosp);
13899 /* Proceed if valid pointer type were passed in. */
13900 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13901 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13903 /* Set the values. */
13904 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13905 result_s);
13906 TREE_SIDE_EFFECTS (result_s) = 1;
13907 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13908 result_c);
13909 TREE_SIDE_EFFECTS (result_c) = 1;
13910 /* Combine the assignments into a compound expr. */
13911 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13912 result_s, result_c));
13917 return result;
13920 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13921 two-argument mpfr order N Bessel function FUNC on them and return
13922 the resulting value as a tree with type TYPE. The mpfr precision
13923 is set to the precision of TYPE. We assume that function FUNC
13924 returns zero if the result could be calculated exactly within the
13925 requested precision. */
13926 static tree
13927 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13928 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13929 const REAL_VALUE_TYPE *min, bool inclusive)
13931 tree result = NULL_TREE;
13933 STRIP_NOPS (arg1);
13934 STRIP_NOPS (arg2);
13936 /* To proceed, MPFR must exactly represent the target floating point
13937 format, which only happens when the target base equals two. */
13938 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13939 && host_integerp (arg1, 0)
13940 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13942 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13943 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13945 if (n == (long)n
13946 && real_isfinite (ra)
13947 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13949 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13950 const int prec = fmt->p;
13951 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13952 int inexact;
13953 mpfr_t m;
13955 mpfr_init2 (m, prec);
13956 mpfr_from_real (m, ra, GMP_RNDN);
13957 mpfr_clear_flags ();
13958 inexact = func (m, n, m, rnd);
13959 result = do_mpfr_ckconv (m, type, inexact);
13960 mpfr_clear (m);
13964 return result;
13967 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13968 the pointer *(ARG_QUO) and return the result. The type is taken
13969 from the type of ARG0 and is used for setting the precision of the
13970 calculation and results. */
13972 static tree
13973 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13975 tree const type = TREE_TYPE (arg0);
13976 tree result = NULL_TREE;
13978 STRIP_NOPS (arg0);
13979 STRIP_NOPS (arg1);
13981 /* To proceed, MPFR must exactly represent the target floating point
13982 format, which only happens when the target base equals two. */
13983 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13984 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13985 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13987 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13988 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13990 if (real_isfinite (ra0) && real_isfinite (ra1))
13992 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13993 const int prec = fmt->p;
13994 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13995 tree result_rem;
13996 long integer_quo;
13997 mpfr_t m0, m1;
13999 mpfr_inits2 (prec, m0, m1, NULL);
14000 mpfr_from_real (m0, ra0, GMP_RNDN);
14001 mpfr_from_real (m1, ra1, GMP_RNDN);
14002 mpfr_clear_flags ();
14003 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
14004 /* Remquo is independent of the rounding mode, so pass
14005 inexact=0 to do_mpfr_ckconv(). */
14006 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
14007 mpfr_clears (m0, m1, NULL);
14008 if (result_rem)
14010 /* MPFR calculates quo in the host's long so it may
14011 return more bits in quo than the target int can hold
14012 if sizeof(host long) > sizeof(target int). This can
14013 happen even for native compilers in LP64 mode. In
14014 these cases, modulo the quo value with the largest
14015 number that the target int can hold while leaving one
14016 bit for the sign. */
14017 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14018 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14020 /* Dereference the quo pointer argument. */
14021 arg_quo = build_fold_indirect_ref (arg_quo);
14022 /* Proceed iff a valid pointer type was passed in. */
14023 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14025 /* Set the value. */
14026 tree result_quo
14027 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14028 build_int_cst (TREE_TYPE (arg_quo),
14029 integer_quo));
14030 TREE_SIDE_EFFECTS (result_quo) = 1;
14031 /* Combine the quo assignment with the rem. */
14032 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14033 result_quo, result_rem));
14038 return result;
14041 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14042 resulting value as a tree with type TYPE. The mpfr precision is
14043 set to the precision of TYPE. We assume that this mpfr function
14044 returns zero if the result could be calculated exactly within the
14045 requested precision. In addition, the integer pointer represented
14046 by ARG_SG will be dereferenced and set to the appropriate signgam
14047 (-1,1) value. */
14049 static tree
14050 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14052 tree result = NULL_TREE;
14054 STRIP_NOPS (arg);
14056 /* To proceed, MPFR must exactly represent the target floating point
14057 format, which only happens when the target base equals two. Also
14058 verify ARG is a constant and that ARG_SG is an int pointer. */
14059 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14060 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14061 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14062 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14064 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14066 /* In addition to NaN and Inf, the argument cannot be zero or a
14067 negative integer. */
14068 if (real_isfinite (ra)
14069 && ra->cl != rvc_zero
14070 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
14072 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14073 const int prec = fmt->p;
14074 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14075 int inexact, sg;
14076 mpfr_t m;
14077 tree result_lg;
14079 mpfr_init2 (m, prec);
14080 mpfr_from_real (m, ra, GMP_RNDN);
14081 mpfr_clear_flags ();
14082 inexact = mpfr_lgamma (m, &sg, m, rnd);
14083 result_lg = do_mpfr_ckconv (m, type, inexact);
14084 mpfr_clear (m);
14085 if (result_lg)
14087 tree result_sg;
14089 /* Dereference the arg_sg pointer argument. */
14090 arg_sg = build_fold_indirect_ref (arg_sg);
14091 /* Assign the signgam value into *arg_sg. */
14092 result_sg = fold_build2 (MODIFY_EXPR,
14093 TREE_TYPE (arg_sg), arg_sg,
14094 build_int_cst (TREE_TYPE (arg_sg), sg));
14095 TREE_SIDE_EFFECTS (result_sg) = 1;
14096 /* Combine the signgam assignment with the lgamma result. */
14097 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14098 result_sg, result_lg));
14103 return result;
14106 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14107 function FUNC on it and return the resulting value as a tree with
14108 type TYPE. The mpfr precision is set to the precision of TYPE. We
14109 assume that function FUNC returns zero if the result could be
14110 calculated exactly within the requested precision. */
14112 static tree
14113 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14115 tree result = NULL_TREE;
14117 STRIP_NOPS (arg);
14119 /* To proceed, MPFR must exactly represent the target floating point
14120 format, which only happens when the target base equals two. */
14121 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14122 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14123 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14125 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14126 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14128 if (real_isfinite (re) && real_isfinite (im))
14130 const struct real_format *const fmt =
14131 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14132 const int prec = fmt->p;
14133 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14134 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14135 int inexact;
14136 mpc_t m;
14138 mpc_init2 (m, prec);
14139 mpfr_from_real (mpc_realref(m), re, rnd);
14140 mpfr_from_real (mpc_imagref(m), im, rnd);
14141 mpfr_clear_flags ();
14142 inexact = func (m, m, crnd);
14143 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14144 mpc_clear (m);
14148 return result;
14151 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14152 mpc function FUNC on it and return the resulting value as a tree
14153 with type TYPE. The mpfr precision is set to the precision of
14154 TYPE. We assume that function FUNC returns zero if the result
14155 could be calculated exactly within the requested precision. If
14156 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14157 in the arguments and/or results. */
14159 tree
14160 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14161 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14163 tree result = NULL_TREE;
14165 STRIP_NOPS (arg0);
14166 STRIP_NOPS (arg1);
14168 /* To proceed, MPFR must exactly represent the target floating point
14169 format, which only happens when the target base equals two. */
14170 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14171 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14172 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14173 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14174 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14176 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14177 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14178 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14179 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14181 if (do_nonfinite
14182 || (real_isfinite (re0) && real_isfinite (im0)
14183 && real_isfinite (re1) && real_isfinite (im1)))
14185 const struct real_format *const fmt =
14186 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14187 const int prec = fmt->p;
14188 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14189 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14190 int inexact;
14191 mpc_t m0, m1;
14193 mpc_init2 (m0, prec);
14194 mpc_init2 (m1, prec);
14195 mpfr_from_real (mpc_realref(m0), re0, rnd);
14196 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14197 mpfr_from_real (mpc_realref(m1), re1, rnd);
14198 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14199 mpfr_clear_flags ();
14200 inexact = func (m0, m0, m1, crnd);
14201 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14202 mpc_clear (m0);
14203 mpc_clear (m1);
14207 return result;
14210 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14211 a normal call should be emitted rather than expanding the function
14212 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14214 static tree
14215 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14217 int nargs = gimple_call_num_args (stmt);
14219 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14220 (nargs > 0
14221 ? gimple_call_arg_ptr (stmt, 0)
14222 : &error_mark_node), fcode);
14225 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14226 a normal call should be emitted rather than expanding the function
14227 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14228 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14229 passed as second argument. */
14231 tree
14232 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14233 enum built_in_function fcode)
14235 int nargs = gimple_call_num_args (stmt);
14237 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14238 (nargs > 0
14239 ? gimple_call_arg_ptr (stmt, 0)
14240 : &error_mark_node), maxlen, fcode);
14243 /* Builtins with folding operations that operate on "..." arguments
14244 need special handling; we need to store the arguments in a convenient
14245 data structure before attempting any folding. Fortunately there are
14246 only a few builtins that fall into this category. FNDECL is the
14247 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14248 result of the function call is ignored. */
14250 static tree
14251 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14252 bool ignore ATTRIBUTE_UNUSED)
14254 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14255 tree ret = NULL_TREE;
14257 switch (fcode)
14259 case BUILT_IN_SPRINTF_CHK:
14260 case BUILT_IN_VSPRINTF_CHK:
14261 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14262 break;
14264 case BUILT_IN_SNPRINTF_CHK:
14265 case BUILT_IN_VSNPRINTF_CHK:
14266 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14268 default:
14269 break;
14271 if (ret)
14273 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14274 TREE_NO_WARNING (ret) = 1;
14275 return ret;
14277 return NULL_TREE;
14280 /* A wrapper function for builtin folding that prevents warnings for
14281 "statement without effect" and the like, caused by removing the
14282 call node earlier than the warning is generated. */
14284 tree
14285 fold_call_stmt (gimple stmt, bool ignore)
14287 tree ret = NULL_TREE;
14288 tree fndecl = gimple_call_fndecl (stmt);
14289 location_t loc = gimple_location (stmt);
14290 if (fndecl
14291 && TREE_CODE (fndecl) == FUNCTION_DECL
14292 && DECL_BUILT_IN (fndecl)
14293 && !gimple_call_va_arg_pack_p (stmt))
14295 int nargs = gimple_call_num_args (stmt);
14296 tree *args = (nargs > 0
14297 ? gimple_call_arg_ptr (stmt, 0)
14298 : &error_mark_node);
14300 if (avoid_folding_inline_builtin (fndecl))
14301 return NULL_TREE;
14302 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14304 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14306 else
14308 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14309 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14310 if (!ret)
14311 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14312 if (ret)
14314 /* Propagate location information from original call to
14315 expansion of builtin. Otherwise things like
14316 maybe_emit_chk_warning, that operate on the expansion
14317 of a builtin, will use the wrong location information. */
14318 if (gimple_has_location (stmt))
14320 tree realret = ret;
14321 if (TREE_CODE (ret) == NOP_EXPR)
14322 realret = TREE_OPERAND (ret, 0);
14323 if (CAN_HAVE_LOCATION_P (realret)
14324 && !EXPR_HAS_LOCATION (realret))
14325 SET_EXPR_LOCATION (realret, loc);
14326 return realret;
14328 return ret;
14332 return NULL_TREE;
14335 /* Look up the function in builtin_decl that corresponds to DECL
14336 and set ASMSPEC as its user assembler name. DECL must be a
14337 function decl that declares a builtin. */
14339 void
14340 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14342 tree builtin;
14343 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14344 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14345 && asmspec != 0);
14347 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14348 set_user_assembler_name (builtin, asmspec);
14349 switch (DECL_FUNCTION_CODE (decl))
14351 case BUILT_IN_MEMCPY:
14352 init_block_move_fn (asmspec);
14353 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14354 break;
14355 case BUILT_IN_MEMSET:
14356 init_block_clear_fn (asmspec);
14357 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14358 break;
14359 case BUILT_IN_MEMMOVE:
14360 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14361 break;
14362 case BUILT_IN_MEMCMP:
14363 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14364 break;
14365 case BUILT_IN_ABORT:
14366 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14367 break;
14368 case BUILT_IN_FFS:
14369 if (INT_TYPE_SIZE < BITS_PER_WORD)
14371 set_user_assembler_libfunc ("ffs", asmspec);
14372 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14373 MODE_INT, 0), "ffs");
14375 break;
14376 default:
14377 break;
14381 /* Return true if DECL is a builtin that expands to a constant or similarly
14382 simple code. */
14383 bool
14384 is_simple_builtin (tree decl)
14386 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14387 switch (DECL_FUNCTION_CODE (decl))
14389 /* Builtins that expand to constants. */
14390 case BUILT_IN_CONSTANT_P:
14391 case BUILT_IN_EXPECT:
14392 case BUILT_IN_OBJECT_SIZE:
14393 case BUILT_IN_UNREACHABLE:
14394 /* Simple register moves or loads from stack. */
14395 case BUILT_IN_ASSUME_ALIGNED:
14396 case BUILT_IN_RETURN_ADDRESS:
14397 case BUILT_IN_EXTRACT_RETURN_ADDR:
14398 case BUILT_IN_FROB_RETURN_ADDR:
14399 case BUILT_IN_RETURN:
14400 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14401 case BUILT_IN_FRAME_ADDRESS:
14402 case BUILT_IN_VA_END:
14403 case BUILT_IN_STACK_SAVE:
14404 case BUILT_IN_STACK_RESTORE:
14405 /* Exception state returns or moves registers around. */
14406 case BUILT_IN_EH_FILTER:
14407 case BUILT_IN_EH_POINTER:
14408 case BUILT_IN_EH_COPY_VALUES:
14409 return true;
14411 default:
14412 return false;
14415 return false;
14418 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14419 most probably expanded inline into reasonably simple code. This is a
14420 superset of is_simple_builtin. */
14421 bool
14422 is_inexpensive_builtin (tree decl)
14424 if (!decl)
14425 return false;
14426 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14427 return true;
14428 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14429 switch (DECL_FUNCTION_CODE (decl))
14431 case BUILT_IN_ABS:
14432 case BUILT_IN_ALLOCA:
14433 case BUILT_IN_ALLOCA_WITH_ALIGN:
14434 case BUILT_IN_BSWAP16:
14435 case BUILT_IN_BSWAP32:
14436 case BUILT_IN_BSWAP64:
14437 case BUILT_IN_CLZ:
14438 case BUILT_IN_CLZIMAX:
14439 case BUILT_IN_CLZL:
14440 case BUILT_IN_CLZLL:
14441 case BUILT_IN_CTZ:
14442 case BUILT_IN_CTZIMAX:
14443 case BUILT_IN_CTZL:
14444 case BUILT_IN_CTZLL:
14445 case BUILT_IN_FFS:
14446 case BUILT_IN_FFSIMAX:
14447 case BUILT_IN_FFSL:
14448 case BUILT_IN_FFSLL:
14449 case BUILT_IN_IMAXABS:
14450 case BUILT_IN_FINITE:
14451 case BUILT_IN_FINITEF:
14452 case BUILT_IN_FINITEL:
14453 case BUILT_IN_FINITED32:
14454 case BUILT_IN_FINITED64:
14455 case BUILT_IN_FINITED128:
14456 case BUILT_IN_FPCLASSIFY:
14457 case BUILT_IN_ISFINITE:
14458 case BUILT_IN_ISINF_SIGN:
14459 case BUILT_IN_ISINF:
14460 case BUILT_IN_ISINFF:
14461 case BUILT_IN_ISINFL:
14462 case BUILT_IN_ISINFD32:
14463 case BUILT_IN_ISINFD64:
14464 case BUILT_IN_ISINFD128:
14465 case BUILT_IN_ISNAN:
14466 case BUILT_IN_ISNANF:
14467 case BUILT_IN_ISNANL:
14468 case BUILT_IN_ISNAND32:
14469 case BUILT_IN_ISNAND64:
14470 case BUILT_IN_ISNAND128:
14471 case BUILT_IN_ISNORMAL:
14472 case BUILT_IN_ISGREATER:
14473 case BUILT_IN_ISGREATEREQUAL:
14474 case BUILT_IN_ISLESS:
14475 case BUILT_IN_ISLESSEQUAL:
14476 case BUILT_IN_ISLESSGREATER:
14477 case BUILT_IN_ISUNORDERED:
14478 case BUILT_IN_VA_ARG_PACK:
14479 case BUILT_IN_VA_ARG_PACK_LEN:
14480 case BUILT_IN_VA_COPY:
14481 case BUILT_IN_TRAP:
14482 case BUILT_IN_SAVEREGS:
14483 case BUILT_IN_POPCOUNTL:
14484 case BUILT_IN_POPCOUNTLL:
14485 case BUILT_IN_POPCOUNTIMAX:
14486 case BUILT_IN_POPCOUNT:
14487 case BUILT_IN_PARITYL:
14488 case BUILT_IN_PARITYLL:
14489 case BUILT_IN_PARITYIMAX:
14490 case BUILT_IN_PARITY:
14491 case BUILT_IN_LABS:
14492 case BUILT_IN_LLABS:
14493 case BUILT_IN_PREFETCH:
14494 return true;
14496 default:
14497 return is_simple_builtin (decl);
14500 return false;