2013-09-12 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / builtins.c
blob0ab6d9b5d7058c27c5ac38f597cdd3026d3bd5c5
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-mudflap.h"
47 #include "tree-ssa.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51 #include "ubsan.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59 struct target_builtins default_target_builtins;
60 #if SWITCHABLE_TARGET
61 struct target_builtins *this_target_builtins = &default_target_builtins;
62 #endif
64 /* Define the names of the builtin function types and codes. */
65 const char *const built_in_class_names[BUILT_IN_LAST]
66 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
69 const char * built_in_names[(int) END_BUILTINS] =
71 #include "builtins.def"
73 #undef DEF_BUILTIN
75 /* Setup an array of _DECL trees, make sure each element is
76 initialized to NULL_TREE. */
77 builtin_info_type builtin_info;
79 /* Non-zero if __builtin_constant_p should be folded right away. */
80 bool force_folding_builtin_constant_p;
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
104 static rtx expand_builtin_interclass_mathfn (tree, rtx);
105 static rtx expand_builtin_sincos (tree);
106 static rtx expand_builtin_cexpi (tree, rtx);
107 static rtx expand_builtin_int_roundingfn (tree, rtx);
108 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, bool);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
194 static rtx expand_builtin_object_size (tree);
195 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
196 enum built_in_function);
197 static void maybe_emit_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_free_warning (tree);
200 static tree fold_builtin_object_size (tree, tree);
201 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
203 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
204 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
205 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
206 enum built_in_function);
207 static bool init_target_chars (void);
209 static unsigned HOST_WIDE_INT target_newline;
210 static unsigned HOST_WIDE_INT target_percent;
211 static unsigned HOST_WIDE_INT target_c;
212 static unsigned HOST_WIDE_INT target_s;
213 static char target_percent_c[3];
214 static char target_percent_s[3];
215 static char target_percent_s_newline[4];
216 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
217 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
218 static tree do_mpfr_arg2 (tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_arg3 (tree, tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_sincos (tree, tree, tree);
223 static tree do_mpfr_bessel_n (tree, tree, tree,
224 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
225 const REAL_VALUE_TYPE *, bool);
226 static tree do_mpfr_remquo (tree, tree, tree);
227 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME is the "internal" name of a built-in, i.e. it
   starts with one of the reserved prefixes __builtin_, __sync_ or
   __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  /* Prefixes that mark a function name as referring to a built-in.  */
  static const struct { const char *prefix; size_t len; } builtin_prefixes[] =
    {
      { "__builtin_", 10 },
      { "__sync_", 7 },
      { "__atomic_", 9 }
    };
  size_t i;

  for (i = 0; i < sizeof builtin_prefixes / sizeof builtin_prefixes[0]; i++)
    if (strncmp (name, builtin_prefixes[i].prefix,
		 builtin_prefixes[i].len) == 0)
      return true;

  return false;
}
245 /* Return true if DECL is a function symbol representing a built-in. */
247 bool
248 is_builtin_fn (tree decl)
250 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* By default we assume that c99 functions are present at the runtime,
254 but sincos is not. */
255 bool
256 default_libc_has_function (enum function_class fn_class)
258 if (fn_class == function_c94
259 || fn_class == function_c99_misc
260 || fn_class == function_c99_math_complex)
261 return true;
263 return false;
266 bool
267 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
269 return true;
272 bool
273 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
275 return false;
278 /* Return true if NODE should be considered for inline expansion regardless
279 of the optimization level. This means whenever a function is invoked with
280 its "internal" name, which normally contains the prefix "__builtin". */
282 static bool
283 called_as_built_in (tree node)
285 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
286 we want the name used to call the function, not the name it
287 will have. */
288 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
289 return is_builtin_name (name);
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Nothing is known about label alignment; keep BITS_PER_UNIT.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* Let the target boost the alignment of constants (e.g. string
	 constants on word-aligned targets).  */
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      /* Only the type alignment is usable here; this is a conservative
	 guess, so KNOWN_ALIGNMENT stays false.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* An address of the form (addr & -C) guarantees C-byte alignment;
	 extract that and strip the masking operation.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      /* index * step contributes at most the alignment of STEP's
		 lowest set bit.  */
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    {
	      /* A second, unscaled index defeats any alignment
		 knowledge.  */
	      align = BITS_PER_UNIT;
	      known_alignment = false;
	    }
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  INNER accumulates, as a bitmask-style
     power of two, the guaranteed alignment of the variable offset.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      /* Peel one addend off a PLUS_EXPR chain; process it, then
	 continue with the rest.  */
      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* An offset of unknown form: only byte alignment survives.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
474 /* For a memory reference expression EXP compute values M and N such that M
475 divides (&EXP - N) and such that N < M. If these numbers can be determined,
476 store M in alignp and N in *BITPOSP and return true. Otherwise return false
477 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
479 bool
480 get_object_alignment_1 (tree exp, unsigned int *alignp,
481 unsigned HOST_WIDE_INT *bitposp)
483 return get_object_alignment_2 (exp, alignp, bitposp, false);
486 /* Return the alignment in bits of EXP, an object. */
488 unsigned int
489 get_object_alignment (tree exp)
491 unsigned HOST_WIDE_INT bitpos = 0;
492 unsigned int align;
494 get_object_alignment_1 (exp, &align, &bitpos);
496 /* align and bitpos now specify known low bits of the pointer.
497 ptr & (align - 1) == bitpos. */
499 if (bitpos != 0)
500 align = (bitpos & -bitpos);
501 return align;
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: defer to the object's alignment; ADDR_P is true since
       the access may never take place.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      /* Alignment info recorded on the SSA name by earlier analyses
	 (e.g. CCP), if any.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  /* Recorded values are in byte units; scale to bits.  */
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  /* No info recorded: only byte alignment can be assumed.  */
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: its low bits are known exactly up to
	 BIGGEST_ALIGNMENT.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else (including non-pointers): conservative answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
553 /* Return the alignment in bits of EXP, a pointer valued expression.
554 The alignment returned is, by default, the alignment of the thing that
555 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
557 Otherwise, look at the expression to see if we can do better, i.e., if the
558 expression is actually pointing at an object whose alignment is tighter. */
560 unsigned int
561 get_pointer_alignment (tree exp)
563 unsigned HOST_WIDE_INT bitpos = 0;
564 unsigned int align;
566 get_pointer_alignment_1 (exp, &align, &bitpos);
568 /* align and bitpos now specify known low bits of the pointer.
569 ptr & (align - 1) == bitpos. */
571 if (bitpos != 0)
572 align = (bitpos & -bitpos);
574 return align;
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For cond ? "a" : "b", the length is known if both arms agree;
     but only when the condition is side-effect free (or we are not
     going to emit the result — see ONLY_VALUE above).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, under the same
     side-effect restriction on e1.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the largest possible strlen: array size minus the
     terminating NUL.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;		/* Sentinel: offset not host-representable.  */
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
679 /* Return a char pointer for a C string if it is a string constant
680 or sum of string constant and integer constant. */
682 static const char *
683 c_getstr (tree src)
685 tree offset_node;
687 src = string_constant (src, &offset_node);
688 if (src == 0)
689 return 0;
691 if (offset_node == 0)
692 return TREE_STRING_POINTER (src);
693 else if (!host_integerp (offset_node, 1)
694 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
695 return 0;
697 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];	/* Low and high halves of the result value.  */
  HOST_WIDE_INT ch;	/* Current byte; doubles as "still in string" flag.  */
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* J is the bit position for byte I, adjusted for the target's
	 word and byte order.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	/* Mixed endianness: reverse byte order within each word.  */
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      /* Once a NUL is read CH stays 0, so the remaining bytes of the
	 value are zero-filled (build_string appends NULs, so reading
	 past the literal is safe).  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  return immed_double_const (c[0], c[1], mode);
}
733 /* Cast a target constant CST to target CHAR and if that value fits into
734 host char type, return zero and put that value into variable pointed to by
735 P. */
737 static int
738 target_char_cast (tree cst, char *p)
740 unsigned HOST_WIDE_INT val, hostval;
742 if (TREE_CODE (cst) != INTEGER_CST
743 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
744 return 1;
746 val = TREE_INT_CST_LOW (cst);
747 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
748 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
750 hostval = val;
751 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
752 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
754 if (val != hostval)
755 return 1;
757 *p = hostval;
758 return 0;
761 /* Similar to save_expr, but assumes that arbitrary code is not executed
762 in between the multiple evaluations. In particular, we assume that a
763 non-addressable local variable will not be modified. */
765 static tree
766 builtin_save_expr (tree exp)
768 if (TREE_CODE (exp) == SSA_NAME
769 || (TREE_ADDRESSABLE (exp) == 0
770 && (TREE_CODE (exp) == PARM_DECL
771 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
772 return exp;
774 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  /* The target supplies the starting frame address directly.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address is stored one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
861 /* Alias set used for setjmp buffer. */
862 static alias_set_type setjmp_alias_set = -1;
864 /* Construct the leading half of a __builtin_setjmp call. Control will
865 return to RECEIVER_LABEL. This is also called directly by the SJLJ
866 exception handling code. */
868 void
869 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
871 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
872 rtx stack_save;
873 rtx mem;
875 if (setjmp_alias_set == -1)
876 setjmp_alias_set = new_alias_set ();
878 buf_addr = convert_memory_address (Pmode, buf_addr);
880 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
882 /* We store the frame pointer and the address of receiver_label in
883 the buffer and use the rest of it for the stack save area, which
884 is machine-dependent. */
886 mem = gen_rtx_MEM (Pmode, buf_addr);
887 set_mem_alias_set (mem, setjmp_alias_set);
888 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
890 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
891 GET_MODE_SIZE (Pmode))),
892 set_mem_alias_set (mem, setjmp_alias_set);
894 emit_move_insn (validize_mem (mem),
895 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
897 stack_save = gen_rtx_MEM (sa_mode,
898 plus_constant (Pmode, buf_addr,
899 2 * GET_MODE_SIZE (Pmode)));
900 set_mem_alias_set (stack_save, setjmp_alias_set);
901 emit_stack_save (SAVE_NONLOCAL, &stack_save);
903 /* If there is further processing to do, do it. */
904 #ifdef HAVE_builtin_setjmp_setup
905 if (HAVE_builtin_setjmp_setup)
906 emit_insn (gen_builtin_setjmp_setup (buf_addr));
907 #endif
909 /* We have a nonlocal label. */
910 cfun->has_nonlocal_label = 1;
913 /* Construct the trailing part of a __builtin_setjmp call.  This is
914 also called directly by the SJLJ exception handling code.
915 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
917 void
918 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
920 rtx chain;
922 /* Mark the FP as used when we get here, so we have to make sure it's
923 marked as used by this function.  */
924 emit_use (hard_frame_pointer_rtx);
926 /* Mark the static chain as clobbered here so life information
927 doesn't get messed up for it.  */
928 chain = targetm.calls.static_chain (current_function_decl, true);
929 if (chain && REG_P (chain))
930 emit_clobber (chain);
932 /* Now put in the code to restore the frame pointer, and argument
933 pointer, if needed.  */
/* Targets that provide a nonlocal_goto insn pattern restore the frame
   pointer themselves, so the manual restore below is only emitted when
   that pattern is absent or disabled.  */
934 #ifdef HAVE_nonlocal_goto
935 if (! HAVE_nonlocal_goto)
936 #endif
937 /* First adjust our frame pointer to its actual value.  It was
938 previously set to the start of the virtual area corresponding to
939 the stacked variables when we branched here and now needs to be
940 adjusted to the actual hardware fp value.
942 Assignments to virtual registers are converted by
943 instantiate_virtual_regs into the corresponding assignment
944 to the underlying register (fp in this case) that makes
945 the original assignment true.
946 So the following insn will actually be decrementing fp by
947 STARTING_FRAME_OFFSET.  */
948 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx)
950 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
951 if (fixed_regs[ARG_POINTER_REGNUM])
953 #ifdef ELIMINABLE_REGS
954 /* If the argument pointer can be eliminated in favor of the
955 frame pointer, we don't need to restore it.  We assume here
956 that if such an elimination is present, it can always be used.
957 This is the case on all known machines; if we don't make this
958 assumption, we do unnecessary saving on many machines.  */
959 size_t i;
960 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
962 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
963 if (elim_regs[i].from == ARG_POINTER_REGNUM
964 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
965 break;
/* Loop fell through without break: no arg-pointer -> hard-FP elimination
   exists, so the arg pointer really must be reloaded from its save slot.  */
967 if (i == ARRAY_SIZE (elim_regs))
968 #endif
970 /* Now restore our arg pointer from the address at which it
971 was saved in our stack frame.  */
972 emit_move_insn (crtl->args.internal_arg_pointer,
973 copy_to_reg (get_arg_pointer_save_area ()));
976 #endif
/* Receiver hook dispatch: prefer the target's builtin_setjmp_receiver
   pattern (when we have a real receiver label), then fall back to
   nonlocal_goto_receiver, else emit nothing.  The #ifdef'd else
   branches chain the two conditionals together.  */
978 #ifdef HAVE_builtin_setjmp_receiver
979 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
980 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
981 else
982 #endif
983 #ifdef HAVE_nonlocal_goto_receiver
984 if (HAVE_nonlocal_goto_receiver)
985 emit_insn (gen_nonlocal_goto_receiver ());
986 else
987 #endif
988 { /* Nothing */ }
990 /* We must not allow the code we just generated to be reordered by
991 scheduling.  Specifically, the update of the frame pointer must
992 happen immediately, not later.  Similarly, we must block
993 (frame-related) register values to be used across this code.  */
994 emit_insn (gen_blockage ());
997 /* __builtin_longjmp is passed a pointer to an array of five words (not
998 all will be used on all machines).  It operates similarly to the C
999 library function of the same name, but is more efficient.  Much of
1000 the code below is copied from the handling of non-local gotos.  */
/* Buffer layout (must mirror expand_builtin_setjmp_setup): word 0 = saved
   frame pointer, word 1 = receiver label address, word 2.. = stack save
   area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
1002 static void
1003 expand_builtin_longjmp (rtx buf_addr, rtx value)
1005 rtx fp, lab, stack, insn, last;
1006 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1008 /* DRAP is needed for stack realign if longjmp is expanded to current
1009 function */
1010 if (SUPPORTS_STACK_ALIGNMENT)
1011 crtl->need_drap = true;
1013 if (setjmp_alias_set == -1)
1014 setjmp_alias_set = new_alias_set ();
1016 buf_addr = convert_memory_address (Pmode, buf_addr);
1018 buf_addr = force_reg (Pmode, buf_addr);
1020 /* We require that the user must pass a second argument of 1, because
1021 that is what builtin_setjmp will return.  */
1022 gcc_assert (value == const1_rtx);
/* Remember the insn emitted before our expansion so the backwards search
   below can assert it never walks past the code we generated.  */
1024 last = get_last_insn ();
1025 #ifdef HAVE_builtin_longjmp
1026 if (HAVE_builtin_longjmp)
1027 emit_insn (gen_builtin_longjmp (buf_addr))
1028 else
1029 #endif
1031 fp = gen_rtx_MEM (Pmode, buf_addr);
1032 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1033 GET_MODE_SIZE (Pmode)));
1035 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1036 2 * GET_MODE_SIZE (Pmode)));
1037 set_mem_alias_set (fp, setjmp_alias_set);
1038 set_mem_alias_set (lab, setjmp_alias_set);
1039 set_mem_alias_set (stack, setjmp_alias_set);
1041 /* Pick up FP, label, and SP from the block and jump.  This code is
1042 from expand_goto in stmt.c; see there for detailed comments.  */
1043 #ifdef HAVE_nonlocal_goto
1044 if (HAVE_nonlocal_goto)
1045 /* We have to pass a value to the nonlocal_goto pattern that will
1046 get copied into the static_chain pointer, but it does not matter
1047 what that value is, because builtin_setjmp does not use it.  */
1048 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1049 else
1050 #endif
/* Generic fallback: copy the label out before clobbering the frame,
   since LAB is a memory reference relative to the old frame.  */
1052 lab = copy_to_reg (lab);
1054 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1055 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1057 emit_move_insn (hard_frame_pointer_rtx, fp);
1058 emit_stack_restore (SAVE_NONLOCAL, stack);
1060 emit_use (hard_frame_pointer_rtx);
1061 emit_use (stack_pointer_rtx);
1062 emit_indirect_jump (lab);
1066 /* Search backwards and mark the jump insn as a non-local goto.
1067 Note that this precludes the use of __builtin_longjmp to a
1068 __builtin_setjmp target in the same function.  However, we've
1069 already cautioned the user that these functions are for
1070 internal exception handling use only.  */
1071 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1073 gcc_assert (insn != last);
1075 if (JUMP_P (insn))
1077 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1078 break;
1080 else if (CALL_P (insn))
1081 break;
1085 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1086 and the address of the save area.  */
/* Returns const0_rtx on success, NULL_RTX if the argument list does not
   match (pointer, pointer).  The save area holds the frame pointer at
   offset 0 and the stack save data one Pmode word in.  */
1088 static rtx
1089 expand_builtin_nonlocal_goto (tree exp)
1091 tree t_label, t_save_area;
1092 rtx r_label, r_save_area, r_fp, r_sp, insn;
1094 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1095 return NULL_RTX;
1097 t_label = CALL_EXPR_ARG (exp, 0);
1098 t_save_area = CALL_EXPR_ARG (exp, 1);
1100 r_label = expand_normal (t_label);
1101 r_label = convert_memory_address (Pmode, r_label);
1102 r_save_area = expand_normal (t_save_area);
1103 r_save_area = convert_memory_address (Pmode, r_save_area);
1104 /* Copy the address of the save location to a register just in case it was
1105 based on the frame pointer.  */
1106 r_save_area = copy_to_reg (r_save_area);
1107 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1108 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1109 plus_constant (Pmode, r_save_area,
1110 GET_MODE_SIZE (Pmode)));
1112 crtl->has_nonlocal_goto = 1;
1114 #ifdef HAVE_nonlocal_goto
1115 /* ??? We no longer need to pass the static chain value, afaik.  */
1116 if (HAVE_nonlocal_goto)
1117 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1118 else
1119 #endif
/* Generic fallback, mirroring expand_builtin_longjmp: copy the label
   to a register before the frame pointer is overwritten.  */
1121 r_label = copy_to_reg (r_label);
1123 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1124 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1126 /* Restore frame pointer for containing function.  */
1127 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1128 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1130 /* USE of hard_frame_pointer_rtx added for consistency;
1131 not clear if really needed.  */
1132 emit_use (hard_frame_pointer_rtx);
1133 emit_use (stack_pointer_rtx);
1135 /* If the architecture is using a GP register, we must
1136 conservatively assume that the target function makes use of it.
1137 The prologue of functions with nonlocal gotos must therefore
1138 initialize the GP register to the appropriate value, and we
1139 must then make sure that this value is live at the point
1140 of the jump.  (Note that this doesn't necessarily apply
1141 to targets with a nonlocal_goto pattern; they are free
1142 to implement it in their own way.  Note also that this is
1143 a no-op if the GP register is a global invariant.)  */
1144 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1145 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1146 emit_use (pic_offset_table_rtx);
1148 emit_indirect_jump (r_label);
1151 /* Search backwards to the jump insn and mark it as a
1152 non-local goto.  */
1153 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1155 if (JUMP_P (insn))
1157 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1158 break;
1160 else if (CALL_P (insn))
1161 break;
1164 return const0_rtx;
1167 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1168 (not all will be used on all machines) that was passed to __builtin_setjmp.
1169 It updates the stack pointer in that block to correspond to the current
1170 stack pointer. */
1172 static void
1173 expand_builtin_update_setjmp_buf (rtx buf_addr)
1175 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1176 rtx stack_save
1177 = gen_rtx_MEM (sa_mode,
1178 memory_address
1179 (sa_mode,
1180 plus_constant (Pmode, buf_addr,
1181 2 * GET_MODE_SIZE (Pmode))));
1183 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1186 /* Expand a call to __builtin_prefetch.  For a target that does not support
1187 data prefetch, evaluate the memory address argument in case it has side
1188 effects.  */
/* Invalid constant arguments are diagnosed (error for non-constants,
   warning for out-of-range values) and then replaced with safe defaults,
   so expansion always proceeds.  */
1190 static void
1191 expand_builtin_prefetch (tree exp)
1193 tree arg0, arg1, arg2;
1194 int nargs;
1195 rtx op0, op1, op2;
1197 if (!validate_arglist (exp, POINTER_TYPE, 0))
1198 return;
1200 arg0 = CALL_EXPR_ARG (exp, 0);
1202 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1203 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1204 locality).  */
1205 nargs = call_expr_nargs (exp);
1206 if (nargs > 1)
1207 arg1 = CALL_EXPR_ARG (exp, 1);
1208 else
1209 arg1 = integer_zero_node;
1210 if (nargs > 2)
1211 arg2 = CALL_EXPR_ARG (exp, 2);
1212 else
1213 arg2 = integer_three_node;
1215 /* Argument 0 is an address.  */
1216 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1218 /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1219 if (TREE_CODE (arg1) != INTEGER_CST)
1221 error ("second argument to %<__builtin_prefetch%> must be a constant");
1222 arg1 = integer_zero_node;
1224 op1 = expand_normal (arg1);
1225 /* Argument 1 must be either zero or one.  */
1226 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1228 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1229 " using zero");
1230 op1 = const0_rtx;
1233 /* Argument 2 (locality) must be a compile-time constant int.  */
1234 if (TREE_CODE (arg2) != INTEGER_CST)
1236 error ("third argument to %<__builtin_prefetch%> must be a constant");
1237 arg2 = integer_zero_node;
1239 op2 = expand_normal (arg2);
1240 /* Argument 2 must be 0, 1, 2, or 3.  */
1241 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1243 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1244 op2 = const0_rtx;
/* Emit the real prefetch insn if the target has one; on success we are
   done.  Falling through (no pattern, or maybe_expand_insn failure)
   degrades to evaluating the address for side effects only.  */
1247 #ifdef HAVE_prefetch
1248 if (HAVE_prefetch)
1250 struct expand_operand ops[3];
1252 create_address_operand (&ops[0], op0);
1253 create_integer_operand (&ops[1], INTVAL (op1));
1254 create_integer_operand (&ops[2], INTVAL (op2));
1255 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1256 return;
1258 #endif
1260 /* Don't do anything with direct references to volatile memory, but
1261 generate code to handle other side effects.  */
1262 if (!MEM_P (op0) && side_effects_p (op0))
1263 emit_insn (op0);
1266 /* Get a MEM rtx for expression EXP which is the address of an operand
1267 to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1268 the maximum length of the block of memory that might be accessed or
1269 NULL if unknown.  */
/* The returned MEM is always given alias set 0 (it may alias anything),
   but we still attach MEM_ATTRS derived from EXP when possible so the
   expanders can see alignment and base-object information.  */
1271 static rtx
1272 get_memory_rtx (tree exp, tree len)
1274 tree orig_exp = exp;
1275 rtx addr, mem;
1277 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1278 from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1279 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1280 exp = TREE_OPERAND (exp, 0);
1282 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1283 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1285 /* Get an expression we can use to find the attributes to assign to MEM.
1286 First remove any nops.  */
1287 while (CONVERT_EXPR_P (exp)
1288 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1289 exp = TREE_OPERAND (exp, 0);
1291 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1292 (as builtin stringops may alias with anything).  */
1293 exp = fold_build2 (MEM_REF,
1294 build_array_type (char_type_node,
1295 build_range_type (sizetype,
1296 size_one_node, len)),
1297 exp, build_int_cst (ptr_type_node, 0));
1299 /* If the MEM_REF has no acceptable address, try to get the base object
1300 from the original address we got, and build an all-aliasing
1301 unknown-sized access to that one.  */
1302 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1303 set_mem_attributes (mem, exp, 0);
1304 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1305 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1306 0))))
/* Fallback: wrap the base object in a fresh, unbounded byte-array
   MEM_REF so that at least the base object's attributes survive.  */
1308 exp = build_fold_addr_expr (exp);
1309 exp = fold_build2 (MEM_REF,
1310 build_array_type (char_type_node,
1311 build_range_type (sizetype,
1312 size_zero_node,
1313 NULL)),
1314 exp, build_int_cst (ptr_type_node, 0));
1315 set_mem_attributes (mem, exp, 0);
1317 set_mem_alias_set (mem, 0);
1318 return mem;
1321 /* Built-in functions to perform an untyped call and return.  */
/* Per-target mode tables (indexed by hard register number) used by the
   __builtin_apply machinery; kept in this_target_builtins so that
   switchable targets each get their own copy.  */
1323 #define apply_args_mode \
1324 (this_target_builtins->x_apply_args_mode)
1325 #define apply_result_mode \
1326 (this_target_builtins->x_apply_result_mode)
1328 /* Return the size required for the block returned by __builtin_apply_args,
1329 and initialize apply_args_mode.  */
/* The computed size is cached in a function-local static; the first call
   also fills in apply_args_mode[regno] for every argument register
   (VOIDmode for registers not used to pass arguments).  */
1331 static int
1332 apply_args_size (void)
1334 static int size = -1;
1335 int align;
1336 unsigned int regno;
1337 enum machine_mode mode;
1339 /* The values computed by this function never change.  */
1340 if (size < 0)
1342 /* The first value is the incoming arg-pointer.  */
1343 size = GET_MODE_SIZE (Pmode);
1345 /* The second value is the structure value address unless this is
1346 passed as an "invisible" first argument.  */
1347 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1348 size += GET_MODE_SIZE (Pmode);
1350 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1351 if (FUNCTION_ARG_REGNO_P (regno))
1353 mode = targetm.calls.get_raw_arg_mode (regno);
1355 gcc_assert (mode != VOIDmode);
/* Round SIZE up so each register's save slot is naturally aligned
   for its mode.  */
1357 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1358 if (size % align != 0)
1359 size = CEIL (size, align) * align;
1360 size += GET_MODE_SIZE (mode);
1361 apply_args_mode[regno] = mode;
1363 else
1365 apply_args_mode[regno] = VOIDmode;
1368 return size;
1371 /* Return the size required for the block returned by __builtin_apply,
1372 and initialize apply_result_mode.  */
/* Counterpart of apply_args_size for the return-value block: cached in
   a local static; fills apply_result_mode[regno] for every register that
   can hold a function return value.  */
1374 static int
1375 apply_result_size (void)
1377 static int size = -1;
1378 int align, regno;
1379 enum machine_mode mode;
1381 /* The values computed by this function never change.  */
1382 if (size < 0)
1384 size = 0;
1386 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1387 if (targetm.calls.function_value_regno_p (regno))
1389 mode = targetm.calls.get_raw_result_mode (regno);
1391 gcc_assert (mode != VOIDmode);
/* Align each slot to the natural alignment of its mode.  */
1393 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1394 if (size % align != 0)
1395 size = CEIL (size, align) * align;
1396 size += GET_MODE_SIZE (mode);
1397 apply_result_mode[regno] = mode;
1399 else
1400 apply_result_mode[regno] = VOIDmode;
1402 /* Allow targets that use untyped_call and untyped_return to override
1403 the size so that machine-specific information can be stored here.  */
1404 #ifdef APPLY_RESULT_SIZE
1405 size = APPLY_RESULT_SIZE;
1406 #endif
1408 return size;
1411 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1412 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1413 the result block is used to save the values; otherwise it is used to
1414 restore the values.  */
/* Returns a PARALLEL of SETs: memory <- register when SAVEP, or
   register <- memory otherwise.  Offsets follow the same alignment
   walk as apply_result_size so the two always agree on the layout.  */
1416 static rtx
1417 result_vector (int savep, rtx result)
1419 int regno, size, align, nelts;
1420 enum machine_mode mode;
1421 rtx reg, mem;
1422 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1424 size = nelts = 0;
1425 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1426 if ((mode = apply_result_mode[regno]) != VOIDmode)
1428 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1429 if (size % align != 0)
1430 size = CEIL (size, align) * align;
/* When restoring, the value lives in the outgoing (INCOMING_REGNO)
   register of the callee's frame.  */
1431 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1432 mem = adjust_address (result, mode, size);
1433 savevec[nelts++] = (savep
1434 ? gen_rtx_SET (VOIDmode, mem, reg)
1435 : gen_rtx_SET (VOIDmode, reg, mem));
1436 size += GET_MODE_SIZE (mode);
1438 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1440 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1442 /* Save the state required to perform an untyped call with the same
1443 arguments as were passed to the current function.  */
/* Allocates a stack block sized by apply_args_size () and stores, in
   order: the incoming arg pointer (offset 0), the structure value
   address (if any), then every incoming argument register.  Returns the
   block's address in a fresh register.  */
1445 static rtx
1446 expand_builtin_apply_args_1 (void)
1448 rtx registers, tem;
1449 int size, align, regno;
1450 enum machine_mode mode;
1451 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1453 /* Create a block where the arg-pointer, structure value address,
1454 and argument registers can be saved.  */
1455 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1457 /* Walk past the arg-pointer and structure value address.  */
1458 size = GET_MODE_SIZE (Pmode);
1459 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1460 size += GET_MODE_SIZE (Pmode);
1462 /* Save each register used in calling a function to the block.  */
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if ((mode = apply_args_mode[regno]) != VOIDmode)
1466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1467 if (size % align != 0)
1468 size = CEIL (size, align) * align;
1470 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1472 emit_move_insn (adjust_address (registers, mode, size), tem);
1473 size += GET_MODE_SIZE (mode);
1476 /* Save the arg pointer to the block.  */
1477 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1478 #ifdef STACK_GROWS_DOWNWARD
1479 /* We need the pointer as the caller actually passed them to us, not
1480 as we might have pretended they were passed.  Make sure it's a valid
1481 operand, as emit_move_insn isn't expected to handle a PLUS.  */
/* NOTE(review): this assignment appears to have lost its left-hand side
   ("tem") in this extraction of the file -- verify against the full
   source before relying on this listing.  */
1483 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1484 NULL_RTX);
1485 #endif
1486 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1488 size = GET_MODE_SIZE (Pmode);
1490 /* Save the structure value address unless this is passed as an
1491 "invisible" first argument.  */
1492 if (struct_incoming_value)
1494 emit_move_insn (adjust_address (registers, Pmode, size),
1495 copy_to_reg (struct_incoming_value));
1496 size += GET_MODE_SIZE (Pmode);
1499 /* Return the address of the block.  */
1500 return copy_addr_to_reg (XEXP (registers, 0));
1503 /* __builtin_apply_args returns block of memory allocated on
1504 the stack into which is stored the arg pointer, structure
1505 value address, static chain, and all the registers that might
1506 possibly be used in performing a function call.  The code is
1507 moved to the start of the function so the incoming values are
1508 saved.  */
1510 static rtx
1511 expand_builtin_apply_args (void)
1513 /* Don't do __builtin_apply_args more than once in a function.
1514 Save the result of the first call and reuse it.  */
1515 if (apply_args_value != 0)
1516 return apply_args_value;
1518 /* When this function is called, it means that registers must be
1519 saved on entry to this function.  So we migrate the
1520 call to the first insn of this function.  */
1521 rtx temp;
1522 rtx seq;
/* Expand the save sequence in isolation so it can be spliced into the
   function entry rather than at the current insertion point.  */
1524 start_sequence ();
1525 temp = expand_builtin_apply_args_1 ();
1526 seq = get_insns ();
1527 end_sequence ();
1529 apply_args_value = temp;
1531 /* Put the insns after the NOTE that starts the function.
1532 If this is inside a start_sequence, make the outer-level insn
1533 chain current, so the code is placed at the start of the
1534 function.  If internal_arg_pointer is a non-virtual pseudo,
1535 it needs to be placed after the function that initializes
1536 that pseudo.  */
1537 push_topmost_sequence ();
1538 if (REG_P (crtl->args.internal_arg_pointer)
1539 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1540 emit_insn_before (seq, parm_birth_insn);
1541 else
1542 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1543 pop_topmost_sequence ();
1544 return temp;
1548 /* Perform an untyped call and save the state required to perform an
1549 untyped return of whatever value was returned by the given function.  */
/* FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, ARGSIZE the number of bytes of stack arguments
   to copy.  Returns (in ptr_mode) the address of a block holding the
   callee's return registers, laid out per apply_result_size.  */
1551 static rtx
1552 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1554 int size, align, regno;
1555 enum machine_mode mode;
1556 rtx incoming_args, result, reg, dest, src, call_insn;
1557 rtx old_stack_level = 0;
1558 rtx call_fusage = 0;
1559 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1561 arguments = convert_memory_address (Pmode, arguments);
1563 /* Create a block where the return registers can be saved.  */
1564 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1566 /* Fetch the arg pointer from the ARGUMENTS block.  */
1567 incoming_args = gen_reg_rtx (Pmode);
1568 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1569 #ifndef STACK_GROWS_DOWNWARD
1570 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1571 incoming_args, 0, OPTAB_LIB_WIDEN);
1572 #endif
1574 /* Push a new argument block and copy the arguments.  Do not allow
1575 the (potential) memcpy call below to interfere with our stack
1576 manipulations.  */
1577 do_pending_stack_adjust ();
1578 NO_DEFER_POP;
1580 /* Save the stack with nonlocal if available.  */
1581 #ifdef HAVE_save_stack_nonlocal
1582 if (HAVE_save_stack_nonlocal)
1583 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1584 else
1585 #endif
1586 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1588 /* Allocate a block of memory onto the stack and copy the memory
1589 arguments to the outgoing arguments address.  We can pass TRUE
1590 as the 4th argument because we just saved the stack pointer
1591 and will restore it right after the call.  */
1592 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1594 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1595 may have already set current_function_calls_alloca to true.
1596 current_function_calls_alloca won't be set if argsize is zero,
1597 so we have to guarantee need_drap is true here.  */
1598 if (SUPPORTS_STACK_ALIGNMENT)
1599 crtl->need_drap = true;
1601 dest = virtual_outgoing_args_rtx;
1602 #ifndef STACK_GROWS_DOWNWARD
1603 if (CONST_INT_P (argsize))
1604 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1605 else
1606 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1607 #endif
1608 dest = gen_rtx_MEM (BLKmode, dest);
1609 set_mem_align (dest, PARM_BOUNDARY);
1610 src = gen_rtx_MEM (BLKmode, incoming_args);
1611 set_mem_align (src, PARM_BOUNDARY);
1612 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1614 /* Refer to the argument block.  */
/* apply_args_size () is called for its side effect of initializing
   apply_args_mode, which the register-restore loop below reads.  */
1615 apply_args_size ();
1616 arguments = gen_rtx_MEM (BLKmode, arguments);
1617 set_mem_align (arguments, PARM_BOUNDARY);
1619 /* Walk past the arg-pointer and structure value address.  */
1620 size = GET_MODE_SIZE (Pmode);
1621 if (struct_value)
1622 size += GET_MODE_SIZE (Pmode);
1624 /* Restore each of the registers previously saved.  Make USE insns
1625 for each of these registers for use in making the call.  */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_args_mode[regno]) != VOIDmode)
1629 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 if (size % align != 0)
1631 size = CEIL (size, align) * align;
1632 reg = gen_rtx_REG (mode, regno);
1633 emit_move_insn (reg, adjust_address (arguments, mode, size));
1634 use_reg (&call_fusage, reg);
1635 size += GET_MODE_SIZE (mode);
1638 /* Restore the structure value address unless this is passed as an
1639 "invisible" first argument.  */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1643 rtx value = gen_reg_rtx (Pmode);
1644 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1645 emit_move_insn (struct_value, value);
1646 if (REG_P (struct_value))
1647 use_reg (&call_fusage, struct_value);
1648 size += GET_MODE_SIZE (Pmode);
1651 /* All arguments and registers used for the call are set up by now!  */
1652 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1654 /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1655 and we don't want to load it into a register as an optimization,
1656 because prepare_call_address already did it if it should be done.  */
1657 if (GET_CODE (function) != SYMBOL_REF)
1658 function = memory_address (FUNCTION_MODE, function);
1660 /* Generate the actual call instruction and save the return value.  */
/* Prefer the target's untyped_call pattern; otherwise fall back to
   call_value, which can only express a single return register.  A
   target providing neither cannot support __builtin_apply.  */
1661 #ifdef HAVE_untyped_call
1662 if (HAVE_untyped_call)
1663 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1664 result, result_vector (1, result)));
1665 else
1666 #endif
1667 #ifdef HAVE_call_value
1668 if (HAVE_call_value)
1670 rtx valreg = 0;
1672 /* Locate the unique return register.  It is not possible to
1673 express a call that sets more than one return register using
1674 call_value; use untyped_call for that.  In fact, untyped_call
1675 only needs to save the return registers in the given block.  */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 gcc_assert (!valreg); /* HAVE_untyped_call required.  */
1681 valreg = gen_rtx_REG (mode, regno);
1684 emit_call_insn (GEN_CALL_VALUE (valreg,
1685 gen_rtx_MEM (FUNCTION_MODE, function),
1686 const0_rtx, NULL_RTX, const0_rtx));
1688 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1690 else
1691 #endif
1692 gcc_unreachable ();
1694 /* Find the CALL insn we just emitted, and attach the register usage
1695 information.  */
1696 call_insn = last_call_insn ();
1697 add_function_usage_to (call_insn, call_fusage);
1699 /* Restore the stack.  */
1700 #ifdef HAVE_save_stack_nonlocal
1701 if (HAVE_save_stack_nonlocal)
1702 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1703 else
1704 #endif
1705 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1706 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1708 OK_DEFER_POP;
1710 /* Return the address of the result block.  */
1711 result = copy_addr_to_reg (XEXP (result, 0));
1712 return convert_memory_address (ptr_mode, result);
1715 /* Perform an untyped return.  */
/* RESULT is the address of a block laid out by expand_builtin_apply;
   reload the return registers from it and return from the current
   function.  Uses the target's untyped_return pattern when present,
   otherwise restores register by register and emits a naked return.  */
1717 static void
1718 expand_builtin_return (rtx result)
1720 int size, align, regno;
1721 enum machine_mode mode;
1722 rtx reg;
1723 rtx call_fusage = 0;
1725 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1727 apply_result_size ();
1728 result = gen_rtx_MEM (BLKmode, result);
1730 #ifdef HAVE_untyped_return
1731 if (HAVE_untyped_return)
1733 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1734 emit_barrier ();
1735 return;
1737 #endif
1739 /* Restore the return value and note that each value is used.  */
1740 size = 0;
1741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1742 if ((mode = apply_result_mode[regno]) != VOIDmode)
1744 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1745 if (size % align != 0)
1746 size = CEIL (size, align) * align;
1747 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1748 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs for every restored register so the return does
   not appear to kill them.  */
1750 push_to_sequence (call_fusage);
1751 emit_use (reg);
1752 call_fusage = get_insns ();
1753 end_sequence ();
1754 size += GET_MODE_SIZE (mode);
1757 /* Put the USE insns before the return.  */
1758 emit_insn (call_fusage);
1760 /* Return whatever values was restored by jumping directly to the end
1761 of the function.  */
1762 expand_naked_return ();
1765 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1767 static enum type_class
1768 type_to_class (tree type)
1770 switch (TREE_CODE (type))
1772 case VOID_TYPE: return void_type_class;
1773 case INTEGER_TYPE: return integer_type_class;
1774 case ENUMERAL_TYPE: return enumeral_type_class;
1775 case BOOLEAN_TYPE: return boolean_type_class;
1776 case POINTER_TYPE: return pointer_type_class;
1777 case REFERENCE_TYPE: return reference_type_class;
1778 case OFFSET_TYPE: return offset_type_class;
1779 case REAL_TYPE: return real_type_class;
1780 case COMPLEX_TYPE: return complex_type_class;
1781 case FUNCTION_TYPE: return function_type_class;
1782 case METHOD_TYPE: return method_type_class;
1783 case RECORD_TYPE: return record_type_class;
1784 case UNION_TYPE:
1785 case QUAL_UNION_TYPE: return union_type_class;
1786 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1787 ? string_type_class : array_type_class);
1788 case LANG_TYPE: return lang_type_class;
1789 default: return no_type_class;
1793 /* Expand a call EXP to __builtin_classify_type. */
1795 static rtx
1796 expand_builtin_classify_type (tree exp)
1798 if (call_expr_nargs (exp))
1799 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1800 return GEN_INT (no_type_class);
1803 /* This helper macro, meant to be used in mathfn_built_in below,
1804 determines which among a set of three builtin math functions is
1805 appropriate for a given type mode.  The `F' and `L' cases are
1806 automatically generated from the `double' case.  */
/* Expands to three case labels plus assignments filling the fcode
   (double), fcodef (float) and fcodel (long double) locals that the
   enclosing switch declares.  */
1807 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1808 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1809 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1810 fcodel = BUILT_IN_MATHFN##L ; break;
1811 /* Similar to above, but appends _R after any F/L suffix.  */
1812 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1813 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1814 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1815 fcodel = BUILT_IN_MATHFN##L_R ; break;
1817 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1818 if available. If IMPLICIT is true use the implicit builtin declaration,
1819 otherwise use the explicit declaration. If we can't do the conversion,
1820 return zero. */
1822 static tree
1823 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1825 enum built_in_function fcode, fcodef, fcodel, fcode2;
1827 switch (fn)
1829 CASE_MATHFN (BUILT_IN_ACOS)
1830 CASE_MATHFN (BUILT_IN_ACOSH)
1831 CASE_MATHFN (BUILT_IN_ASIN)
1832 CASE_MATHFN (BUILT_IN_ASINH)
1833 CASE_MATHFN (BUILT_IN_ATAN)
1834 CASE_MATHFN (BUILT_IN_ATAN2)
1835 CASE_MATHFN (BUILT_IN_ATANH)
1836 CASE_MATHFN (BUILT_IN_CBRT)
1837 CASE_MATHFN (BUILT_IN_CEIL)
1838 CASE_MATHFN (BUILT_IN_CEXPI)
1839 CASE_MATHFN (BUILT_IN_COPYSIGN)
1840 CASE_MATHFN (BUILT_IN_COS)
1841 CASE_MATHFN (BUILT_IN_COSH)
1842 CASE_MATHFN (BUILT_IN_DREM)
1843 CASE_MATHFN (BUILT_IN_ERF)
1844 CASE_MATHFN (BUILT_IN_ERFC)
1845 CASE_MATHFN (BUILT_IN_EXP)
1846 CASE_MATHFN (BUILT_IN_EXP10)
1847 CASE_MATHFN (BUILT_IN_EXP2)
1848 CASE_MATHFN (BUILT_IN_EXPM1)
1849 CASE_MATHFN (BUILT_IN_FABS)
1850 CASE_MATHFN (BUILT_IN_FDIM)
1851 CASE_MATHFN (BUILT_IN_FLOOR)
1852 CASE_MATHFN (BUILT_IN_FMA)
1853 CASE_MATHFN (BUILT_IN_FMAX)
1854 CASE_MATHFN (BUILT_IN_FMIN)
1855 CASE_MATHFN (BUILT_IN_FMOD)
1856 CASE_MATHFN (BUILT_IN_FREXP)
1857 CASE_MATHFN (BUILT_IN_GAMMA)
1858 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1859 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1860 CASE_MATHFN (BUILT_IN_HYPOT)
1861 CASE_MATHFN (BUILT_IN_ILOGB)
1862 CASE_MATHFN (BUILT_IN_ICEIL)
1863 CASE_MATHFN (BUILT_IN_IFLOOR)
1864 CASE_MATHFN (BUILT_IN_INF)
1865 CASE_MATHFN (BUILT_IN_IRINT)
1866 CASE_MATHFN (BUILT_IN_IROUND)
1867 CASE_MATHFN (BUILT_IN_ISINF)
1868 CASE_MATHFN (BUILT_IN_J0)
1869 CASE_MATHFN (BUILT_IN_J1)
1870 CASE_MATHFN (BUILT_IN_JN)
1871 CASE_MATHFN (BUILT_IN_LCEIL)
1872 CASE_MATHFN (BUILT_IN_LDEXP)
1873 CASE_MATHFN (BUILT_IN_LFLOOR)
1874 CASE_MATHFN (BUILT_IN_LGAMMA)
1875 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1876 CASE_MATHFN (BUILT_IN_LLCEIL)
1877 CASE_MATHFN (BUILT_IN_LLFLOOR)
1878 CASE_MATHFN (BUILT_IN_LLRINT)
1879 CASE_MATHFN (BUILT_IN_LLROUND)
1880 CASE_MATHFN (BUILT_IN_LOG)
1881 CASE_MATHFN (BUILT_IN_LOG10)
1882 CASE_MATHFN (BUILT_IN_LOG1P)
1883 CASE_MATHFN (BUILT_IN_LOG2)
1884 CASE_MATHFN (BUILT_IN_LOGB)
1885 CASE_MATHFN (BUILT_IN_LRINT)
1886 CASE_MATHFN (BUILT_IN_LROUND)
1887 CASE_MATHFN (BUILT_IN_MODF)
1888 CASE_MATHFN (BUILT_IN_NAN)
1889 CASE_MATHFN (BUILT_IN_NANS)
1890 CASE_MATHFN (BUILT_IN_NEARBYINT)
1891 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1892 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1893 CASE_MATHFN (BUILT_IN_POW)
1894 CASE_MATHFN (BUILT_IN_POWI)
1895 CASE_MATHFN (BUILT_IN_POW10)
1896 CASE_MATHFN (BUILT_IN_REMAINDER)
1897 CASE_MATHFN (BUILT_IN_REMQUO)
1898 CASE_MATHFN (BUILT_IN_RINT)
1899 CASE_MATHFN (BUILT_IN_ROUND)
1900 CASE_MATHFN (BUILT_IN_SCALB)
1901 CASE_MATHFN (BUILT_IN_SCALBLN)
1902 CASE_MATHFN (BUILT_IN_SCALBN)
1903 CASE_MATHFN (BUILT_IN_SIGNBIT)
1904 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1905 CASE_MATHFN (BUILT_IN_SIN)
1906 CASE_MATHFN (BUILT_IN_SINCOS)
1907 CASE_MATHFN (BUILT_IN_SINH)
1908 CASE_MATHFN (BUILT_IN_SQRT)
1909 CASE_MATHFN (BUILT_IN_TAN)
1910 CASE_MATHFN (BUILT_IN_TANH)
1911 CASE_MATHFN (BUILT_IN_TGAMMA)
1912 CASE_MATHFN (BUILT_IN_TRUNC)
1913 CASE_MATHFN (BUILT_IN_Y0)
1914 CASE_MATHFN (BUILT_IN_Y1)
1915 CASE_MATHFN (BUILT_IN_YN)
1917 default:
1918 return NULL_TREE;
1921 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1922 fcode2 = fcode;
1923 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1924 fcode2 = fcodef;
1925 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1926 fcode2 = fcodel;
1927 else
1928 return NULL_TREE;
1930 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1931 return NULL_TREE;
1933 return builtin_decl_explicit (fcode2);
1936 /* Like mathfn_built_in_1(), but always use the implicit array. */
1938 tree
1939 mathfn_built_in (tree type, enum built_in_function fn)
1941 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1944 /* If errno must be maintained, expand the RTL to check if the result,
1945 TARGET, of a built-in function call, EXP, is NaN, and if so set
1946 errno to EDOM. */
1948 static void
1949 expand_errno_check (tree exp, rtx target)
1951 rtx lab = gen_label_rtx ();
1953 /* Test the result; if it is NaN, set errno=EDOM because
1954 the argument was not in the domain. */
1955 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1956 NULL_RTX, NULL_RTX, lab,
1957 /* The jump is very likely. */
1958 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1960 #ifdef TARGET_EDOM
1961 /* If this built-in doesn't throw an exception, set errno directly. */
1962 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1964 #ifdef GEN_ERRNO_RTX
1965 rtx errno_rtx = GEN_ERRNO_RTX;
1966 #else
1967 rtx errno_rtx
1968 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1969 #endif
1970 emit_move_insn (errno_rtx,
1971 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1972 emit_label (lab);
1973 return;
1975 #endif
1977 /* Make sure the library call isn't expanded as a tail call. */
1978 CALL_EXPR_TAILCALL (exp) = 0;
1980 /* We can't set errno=EDOM directly; let the library call do it.
1981 Pop the arguments right away in case the call gets deleted. */
1982 NO_DEFER_POP;
1983 expand_call (exp, target, 0);
1984 OK_DEFER_POP;
1985 emit_label (lab);
1988 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1989 Return NULL_RTX if a normal call should be emitted rather than expanding
1990 the function in-line. EXP is the expression that is a call to the builtin
1991 function; if convenient, the result should be placed in TARGET.
1992 SUBTARGET may be used as the target for computing one of EXP's operands. */
1994 static rtx
1995 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1997 optab builtin_optab;
1998 rtx op0, insns;
1999 tree fndecl = get_callee_fndecl (exp);
2000 enum machine_mode mode;
2001 bool errno_set = false;
2002 bool try_widening = false;
2003 tree arg;
2005 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2006 return NULL_RTX;
2008 arg = CALL_EXPR_ARG (exp, 0);
2010 switch (DECL_FUNCTION_CODE (fndecl))
2012 CASE_FLT_FN (BUILT_IN_SQRT):
2013 errno_set = ! tree_expr_nonnegative_p (arg);
2014 try_widening = true;
2015 builtin_optab = sqrt_optab;
2016 break;
2017 CASE_FLT_FN (BUILT_IN_EXP):
2018 errno_set = true; builtin_optab = exp_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP10):
2020 CASE_FLT_FN (BUILT_IN_POW10):
2021 errno_set = true; builtin_optab = exp10_optab; break;
2022 CASE_FLT_FN (BUILT_IN_EXP2):
2023 errno_set = true; builtin_optab = exp2_optab; break;
2024 CASE_FLT_FN (BUILT_IN_EXPM1):
2025 errno_set = true; builtin_optab = expm1_optab; break;
2026 CASE_FLT_FN (BUILT_IN_LOGB):
2027 errno_set = true; builtin_optab = logb_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOG):
2029 errno_set = true; builtin_optab = log_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG10):
2031 errno_set = true; builtin_optab = log10_optab; break;
2032 CASE_FLT_FN (BUILT_IN_LOG2):
2033 errno_set = true; builtin_optab = log2_optab; break;
2034 CASE_FLT_FN (BUILT_IN_LOG1P):
2035 errno_set = true; builtin_optab = log1p_optab; break;
2036 CASE_FLT_FN (BUILT_IN_ASIN):
2037 builtin_optab = asin_optab; break;
2038 CASE_FLT_FN (BUILT_IN_ACOS):
2039 builtin_optab = acos_optab; break;
2040 CASE_FLT_FN (BUILT_IN_TAN):
2041 builtin_optab = tan_optab; break;
2042 CASE_FLT_FN (BUILT_IN_ATAN):
2043 builtin_optab = atan_optab; break;
2044 CASE_FLT_FN (BUILT_IN_FLOOR):
2045 builtin_optab = floor_optab; break;
2046 CASE_FLT_FN (BUILT_IN_CEIL):
2047 builtin_optab = ceil_optab; break;
2048 CASE_FLT_FN (BUILT_IN_TRUNC):
2049 builtin_optab = btrunc_optab; break;
2050 CASE_FLT_FN (BUILT_IN_ROUND):
2051 builtin_optab = round_optab; break;
2052 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2053 builtin_optab = nearbyint_optab;
2054 if (flag_trapping_math)
2055 break;
2056 /* Else fallthrough and expand as rint. */
2057 CASE_FLT_FN (BUILT_IN_RINT):
2058 builtin_optab = rint_optab; break;
2059 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2060 builtin_optab = significand_optab; break;
2061 default:
2062 gcc_unreachable ();
2065 /* Make a suitable register to place result in. */
2066 mode = TYPE_MODE (TREE_TYPE (exp));
2068 if (! flag_errno_math || ! HONOR_NANS (mode))
2069 errno_set = false;
2071 /* Before working hard, check whether the instruction is available, but try
2072 to widen the mode for specific operations. */
2073 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2074 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2075 && (!errno_set || !optimize_insn_for_size_p ()))
2077 rtx result = gen_reg_rtx (mode);
2079 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2080 need to expand the argument again. This way, we will not perform
2081 side-effects more the once. */
2082 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2084 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2086 start_sequence ();
2088 /* Compute into RESULT.
2089 Set RESULT to wherever the result comes back. */
2090 result = expand_unop (mode, builtin_optab, op0, result, 0);
2092 if (result != 0)
2094 if (errno_set)
2095 expand_errno_check (exp, result);
2097 /* Output the entire sequence. */
2098 insns = get_insns ();
2099 end_sequence ();
2100 emit_insn (insns);
2101 return result;
2104 /* If we were unable to expand via the builtin, stop the sequence
2105 (without outputting the insns) and call to the library function
2106 with the stabilized argument list. */
2107 end_sequence ();
2110 return expand_call (exp, target, target == const0_rtx);
2113 /* Expand a call to the builtin binary math functions (pow and atan2).
2114 Return NULL_RTX if a normal call should be emitted rather than expanding the
2115 function in-line. EXP is the expression that is a call to the builtin
2116 function; if convenient, the result should be placed in TARGET.
2117 SUBTARGET may be used as the target for computing one of EXP's
2118 operands. */
2120 static rtx
2121 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2123 optab builtin_optab;
2124 rtx op0, op1, insns, result;
2125 int op1_type = REAL_TYPE;
2126 tree fndecl = get_callee_fndecl (exp);
2127 tree arg0, arg1;
2128 enum machine_mode mode;
2129 bool errno_set = true;
2131 switch (DECL_FUNCTION_CODE (fndecl))
2133 CASE_FLT_FN (BUILT_IN_SCALBN):
2134 CASE_FLT_FN (BUILT_IN_SCALBLN):
2135 CASE_FLT_FN (BUILT_IN_LDEXP):
2136 op1_type = INTEGER_TYPE;
2137 default:
2138 break;
2141 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2142 return NULL_RTX;
2144 arg0 = CALL_EXPR_ARG (exp, 0);
2145 arg1 = CALL_EXPR_ARG (exp, 1);
2147 switch (DECL_FUNCTION_CODE (fndecl))
2149 CASE_FLT_FN (BUILT_IN_POW):
2150 builtin_optab = pow_optab; break;
2151 CASE_FLT_FN (BUILT_IN_ATAN2):
2152 builtin_optab = atan2_optab; break;
2153 CASE_FLT_FN (BUILT_IN_SCALB):
2154 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2155 return 0;
2156 builtin_optab = scalb_optab; break;
2157 CASE_FLT_FN (BUILT_IN_SCALBN):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN):
2159 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2160 return 0;
2161 /* Fall through... */
2162 CASE_FLT_FN (BUILT_IN_LDEXP):
2163 builtin_optab = ldexp_optab; break;
2164 CASE_FLT_FN (BUILT_IN_FMOD):
2165 builtin_optab = fmod_optab; break;
2166 CASE_FLT_FN (BUILT_IN_REMAINDER):
2167 CASE_FLT_FN (BUILT_IN_DREM):
2168 builtin_optab = remainder_optab; break;
2169 default:
2170 gcc_unreachable ();
2173 /* Make a suitable register to place result in. */
2174 mode = TYPE_MODE (TREE_TYPE (exp));
2176 /* Before working hard, check whether the instruction is available. */
2177 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2178 return NULL_RTX;
2180 result = gen_reg_rtx (mode);
2182 if (! flag_errno_math || ! HONOR_NANS (mode))
2183 errno_set = false;
2185 if (errno_set && optimize_insn_for_size_p ())
2186 return 0;
2188 /* Always stabilize the argument list. */
2189 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2190 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2192 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2193 op1 = expand_normal (arg1);
2195 start_sequence ();
2197 /* Compute into RESULT.
2198 Set RESULT to wherever the result comes back. */
2199 result = expand_binop (mode, builtin_optab, op0, op1,
2200 result, 0, OPTAB_DIRECT);
2202 /* If we were unable to expand via the builtin, stop the sequence
2203 (without outputting the insns) and call to the library function
2204 with the stabilized argument list. */
2205 if (result == 0)
2207 end_sequence ();
2208 return expand_call (exp, target, target == const0_rtx);
2211 if (errno_set)
2212 expand_errno_check (exp, result);
2214 /* Output the entire sequence. */
2215 insns = get_insns ();
2216 end_sequence ();
2217 emit_insn (insns);
2219 return result;
2222 /* Expand a call to the builtin trinary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2232 optab builtin_optab;
2233 rtx op0, op1, op2, insns, result;
2234 tree fndecl = get_callee_fndecl (exp);
2235 tree arg0, arg1, arg2;
2236 enum machine_mode mode;
2238 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2239 return NULL_RTX;
2241 arg0 = CALL_EXPR_ARG (exp, 0);
2242 arg1 = CALL_EXPR_ARG (exp, 1);
2243 arg2 = CALL_EXPR_ARG (exp, 2);
2245 switch (DECL_FUNCTION_CODE (fndecl))
2247 CASE_FLT_FN (BUILT_IN_FMA):
2248 builtin_optab = fma_optab; break;
2249 default:
2250 gcc_unreachable ();
2253 /* Make a suitable register to place result in. */
2254 mode = TYPE_MODE (TREE_TYPE (exp));
2256 /* Before working hard, check whether the instruction is available. */
2257 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2258 return NULL_RTX;
2260 result = gen_reg_rtx (mode);
2262 /* Always stabilize the argument list. */
2263 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2264 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2265 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2267 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2268 op1 = expand_normal (arg1);
2269 op2 = expand_normal (arg2);
2271 start_sequence ();
2273 /* Compute into RESULT.
2274 Set RESULT to wherever the result comes back. */
2275 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2276 result, 0);
2278 /* If we were unable to expand via the builtin, stop the sequence
2279 (without outputting the insns) and call to the library function
2280 with the stabilized argument list. */
2281 if (result == 0)
2283 end_sequence ();
2284 return expand_call (exp, target, target == const0_rtx);
2287 /* Output the entire sequence. */
2288 insns = get_insns ();
2289 end_sequence ();
2290 emit_insn (insns);
2292 return result;
2295 /* Expand a call to the builtin sin and cos math functions.
2296 Return NULL_RTX if a normal call should be emitted rather than expanding the
2297 function in-line. EXP is the expression that is a call to the builtin
2298 function; if convenient, the result should be placed in TARGET.
2299 SUBTARGET may be used as the target for computing one of EXP's
2300 operands. */
2302 static rtx
2303 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2305 optab builtin_optab;
2306 rtx op0, insns;
2307 tree fndecl = get_callee_fndecl (exp);
2308 enum machine_mode mode;
2309 tree arg;
2311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2312 return NULL_RTX;
2314 arg = CALL_EXPR_ARG (exp, 0);
2316 switch (DECL_FUNCTION_CODE (fndecl))
2318 CASE_FLT_FN (BUILT_IN_SIN):
2319 CASE_FLT_FN (BUILT_IN_COS):
2320 builtin_optab = sincos_optab; break;
2321 default:
2322 gcc_unreachable ();
2325 /* Make a suitable register to place result in. */
2326 mode = TYPE_MODE (TREE_TYPE (exp));
2328 /* Check if sincos insn is available, otherwise fallback
2329 to sin or cos insn. */
2330 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2331 switch (DECL_FUNCTION_CODE (fndecl))
2333 CASE_FLT_FN (BUILT_IN_SIN):
2334 builtin_optab = sin_optab; break;
2335 CASE_FLT_FN (BUILT_IN_COS):
2336 builtin_optab = cos_optab; break;
2337 default:
2338 gcc_unreachable ();
2341 /* Before working hard, check whether the instruction is available. */
2342 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2344 rtx result = gen_reg_rtx (mode);
2346 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2347 need to expand the argument again. This way, we will not perform
2348 side-effects more the once. */
2349 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2351 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2353 start_sequence ();
2355 /* Compute into RESULT.
2356 Set RESULT to wherever the result comes back. */
2357 if (builtin_optab == sincos_optab)
2359 int ok;
2361 switch (DECL_FUNCTION_CODE (fndecl))
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2365 break;
2366 CASE_FLT_FN (BUILT_IN_COS):
2367 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2368 break;
2369 default:
2370 gcc_unreachable ();
2372 gcc_assert (ok);
2374 else
2375 result = expand_unop (mode, builtin_optab, op0, result, 0);
2377 if (result != 0)
2379 /* Output the entire sequence. */
2380 insns = get_insns ();
2381 end_sequence ();
2382 emit_insn (insns);
2383 return result;
2386 /* If we were unable to expand via the builtin, stop the sequence
2387 (without outputting the insns) and call to the library function
2388 with the stabilized argument list. */
2389 end_sequence ();
2392 return expand_call (exp, target, target == const0_rtx);
2395 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2396 return an RTL instruction code that implements the functionality.
2397 If that isn't possible or available return CODE_FOR_nothing. */
2399 static enum insn_code
2400 interclass_mathfn_icode (tree arg, tree fndecl)
2402 bool errno_set = false;
2403 optab builtin_optab = unknown_optab;
2404 enum machine_mode mode;
2406 switch (DECL_FUNCTION_CODE (fndecl))
2408 CASE_FLT_FN (BUILT_IN_ILOGB):
2409 errno_set = true; builtin_optab = ilogb_optab; break;
2410 CASE_FLT_FN (BUILT_IN_ISINF):
2411 builtin_optab = isinf_optab; break;
2412 case BUILT_IN_ISNORMAL:
2413 case BUILT_IN_ISFINITE:
2414 CASE_FLT_FN (BUILT_IN_FINITE):
2415 case BUILT_IN_FINITED32:
2416 case BUILT_IN_FINITED64:
2417 case BUILT_IN_FINITED128:
2418 case BUILT_IN_ISINFD32:
2419 case BUILT_IN_ISINFD64:
2420 case BUILT_IN_ISINFD128:
2421 /* These builtins have no optabs (yet). */
2422 break;
2423 default:
2424 gcc_unreachable ();
2427 /* There's no easy way to detect the case we need to set EDOM. */
2428 if (flag_errno_math && errno_set)
2429 return CODE_FOR_nothing;
2431 /* Optab mode depends on the mode of the input argument. */
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 if (builtin_optab)
2435 return optab_handler (builtin_optab, mode);
2436 return CODE_FOR_nothing;
2439 /* Expand a call to one of the builtin math functions that operate on
2440 floating point argument and output an integer result (ilogb, isinf,
2441 isnan, etc).
2442 Return 0 if a normal call should be emitted rather than expanding the
2443 function in-line. EXP is the expression that is a call to the builtin
2444 function; if convenient, the result should be placed in TARGET. */
2446 static rtx
2447 expand_builtin_interclass_mathfn (tree exp, rtx target)
2449 enum insn_code icode = CODE_FOR_nothing;
2450 rtx op0;
2451 tree fndecl = get_callee_fndecl (exp);
2452 enum machine_mode mode;
2453 tree arg;
2455 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2456 return NULL_RTX;
2458 arg = CALL_EXPR_ARG (exp, 0);
2459 icode = interclass_mathfn_icode (arg, fndecl);
2460 mode = TYPE_MODE (TREE_TYPE (arg));
2462 if (icode != CODE_FOR_nothing)
2464 struct expand_operand ops[1];
2465 rtx last = get_last_insn ();
2466 tree orig_arg = arg;
2468 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2469 need to expand the argument again. This way, we will not perform
2470 side-effects more the once. */
2471 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2473 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2475 if (mode != GET_MODE (op0))
2476 op0 = convert_to_mode (mode, op0, 0);
2478 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2479 if (maybe_legitimize_operands (icode, 0, 1, ops)
2480 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2481 return ops[0].value;
2483 delete_insns_since (last);
2484 CALL_EXPR_ARG (exp, 0) = orig_arg;
2487 return NULL_RTX;
2490 /* Expand a call to the builtin sincos math function.
2491 Return NULL_RTX if a normal call should be emitted rather than expanding the
2492 function in-line. EXP is the expression that is a call to the builtin
2493 function. */
2495 static rtx
2496 expand_builtin_sincos (tree exp)
2498 rtx op0, op1, op2, target1, target2;
2499 enum machine_mode mode;
2500 tree arg, sinp, cosp;
2501 int result;
2502 location_t loc = EXPR_LOCATION (exp);
2503 tree alias_type, alias_off;
2505 if (!validate_arglist (exp, REAL_TYPE,
2506 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2507 return NULL_RTX;
2509 arg = CALL_EXPR_ARG (exp, 0);
2510 sinp = CALL_EXPR_ARG (exp, 1);
2511 cosp = CALL_EXPR_ARG (exp, 2);
2513 /* Make a suitable register to place result in. */
2514 mode = TYPE_MODE (TREE_TYPE (arg));
2516 /* Check if sincos insn is available, otherwise emit the call. */
2517 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2518 return NULL_RTX;
2520 target1 = gen_reg_rtx (mode);
2521 target2 = gen_reg_rtx (mode);
2523 op0 = expand_normal (arg);
2524 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2525 alias_off = build_int_cst (alias_type, 0);
2526 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2527 sinp, alias_off));
2528 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2529 cosp, alias_off));
2531 /* Compute into target1 and target2.
2532 Set TARGET to wherever the result comes back. */
2533 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2534 gcc_assert (result);
2536 /* Move target1 and target2 to the memory locations indicated
2537 by op1 and op2. */
2538 emit_move_insn (op1, target1);
2539 emit_move_insn (op2, target2);
2541 return const0_rtx;
2544 /* Expand a call to the internal cexpi builtin to the sincos math function.
2545 EXP is the expression that is a call to the builtin function; if convenient,
2546 the result should be placed in TARGET. */
2548 static rtx
2549 expand_builtin_cexpi (tree exp, rtx target)
2551 tree fndecl = get_callee_fndecl (exp);
2552 tree arg, type;
2553 enum machine_mode mode;
2554 rtx op0, op1, op2;
2555 location_t loc = EXPR_LOCATION (exp);
2557 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2558 return NULL_RTX;
2560 arg = CALL_EXPR_ARG (exp, 0);
2561 type = TREE_TYPE (arg);
2562 mode = TYPE_MODE (TREE_TYPE (arg));
2564 /* Try expanding via a sincos optab, fall back to emitting a libcall
2565 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2566 is only generated from sincos, cexp or if we have either of them. */
2567 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2569 op1 = gen_reg_rtx (mode);
2570 op2 = gen_reg_rtx (mode);
2572 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2574 /* Compute into op1 and op2. */
2575 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2577 else if (targetm.libc_has_function (function_sincos))
2579 tree call, fn = NULL_TREE;
2580 tree top1, top2;
2581 rtx op1a, op2a;
2583 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2584 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2588 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2589 else
2590 gcc_unreachable ();
2592 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2593 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2594 op1a = copy_addr_to_reg (XEXP (op1, 0));
2595 op2a = copy_addr_to_reg (XEXP (op2, 0));
2596 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2597 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2599 /* Make sure not to fold the sincos call again. */
2600 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2601 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2602 call, 3, arg, top1, top2));
2604 else
2606 tree call, fn = NULL_TREE, narg;
2607 tree ctype = build_complex_type (type);
2609 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2610 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2614 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2615 else
2616 gcc_unreachable ();
2618 /* If we don't have a decl for cexp create one. This is the
2619 friendliest fallback if the user calls __builtin_cexpi
2620 without full target C99 function support. */
2621 if (fn == NULL_TREE)
2623 tree fntype;
2624 const char *name = NULL;
2626 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2627 name = "cexpf";
2628 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2629 name = "cexp";
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2631 name = "cexpl";
2633 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2634 fn = build_fn_decl (name, fntype);
2637 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2638 build_real (type, dconst0), arg);
2640 /* Make sure not to fold the cexp call again. */
2641 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2642 return expand_expr (build_call_nary (ctype, call, 1, narg),
2643 target, VOIDmode, EXPAND_NORMAL);
2646 /* Now build the proper return type. */
2647 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2648 make_tree (TREE_TYPE (arg), op2),
2649 make_tree (TREE_TYPE (arg), op1)),
2650 target, VOIDmode, EXPAND_NORMAL);
2653 /* Conveniently construct a function call expression. FNDECL names the
2654 function to be called, N is the number of arguments, and the "..."
2655 parameters are the argument expressions. Unlike build_call_exr
2656 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2658 static tree
2659 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2661 va_list ap;
2662 tree fntype = TREE_TYPE (fndecl);
2663 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2665 va_start (ap, n);
2666 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2667 va_end (ap);
2668 SET_EXPR_LOCATION (fn, loc);
2669 return fn;
2672 /* Expand a call to one of the builtin rounding functions gcc defines
2673 as an extension (lfloor and lceil). As these are gcc extensions we
2674 do not need to worry about setting errno to EDOM.
2675 If expanding via optab fails, lower expression to (int)(floor(x)).
2676 EXP is the expression that is a call to the builtin function;
2677 if convenient, the result should be placed in TARGET. */
2679 static rtx
2680 expand_builtin_int_roundingfn (tree exp, rtx target)
2682 convert_optab builtin_optab;
2683 rtx op0, insns, tmp;
2684 tree fndecl = get_callee_fndecl (exp);
2685 enum built_in_function fallback_fn;
2686 tree fallback_fndecl;
2687 enum machine_mode mode;
2688 tree arg;
2690 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2691 gcc_unreachable ();
2693 arg = CALL_EXPR_ARG (exp, 0);
2695 switch (DECL_FUNCTION_CODE (fndecl))
2697 CASE_FLT_FN (BUILT_IN_ICEIL):
2698 CASE_FLT_FN (BUILT_IN_LCEIL):
2699 CASE_FLT_FN (BUILT_IN_LLCEIL):
2700 builtin_optab = lceil_optab;
2701 fallback_fn = BUILT_IN_CEIL;
2702 break;
2704 CASE_FLT_FN (BUILT_IN_IFLOOR):
2705 CASE_FLT_FN (BUILT_IN_LFLOOR):
2706 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2707 builtin_optab = lfloor_optab;
2708 fallback_fn = BUILT_IN_FLOOR;
2709 break;
2711 default:
2712 gcc_unreachable ();
2715 /* Make a suitable register to place result in. */
2716 mode = TYPE_MODE (TREE_TYPE (exp));
2718 target = gen_reg_rtx (mode);
2720 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2721 need to expand the argument again. This way, we will not perform
2722 side-effects more the once. */
2723 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2725 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2727 start_sequence ();
2729 /* Compute into TARGET. */
2730 if (expand_sfix_optab (target, op0, builtin_optab))
2732 /* Output the entire sequence. */
2733 insns = get_insns ();
2734 end_sequence ();
2735 emit_insn (insns);
2736 return target;
2739 /* If we were unable to expand via the builtin, stop the sequence
2740 (without outputting the insns). */
2741 end_sequence ();
2743 /* Fall back to floating point rounding optab. */
2744 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2746 /* For non-C99 targets we may end up without a fallback fndecl here
2747 if the user called __builtin_lfloor directly. In this case emit
2748 a call to the floor/ceil variants nevertheless. This should result
2749 in the best user experience for not full C99 targets. */
2750 if (fallback_fndecl == NULL_TREE)
2752 tree fntype;
2753 const char *name = NULL;
2755 switch (DECL_FUNCTION_CODE (fndecl))
2757 case BUILT_IN_ICEIL:
2758 case BUILT_IN_LCEIL:
2759 case BUILT_IN_LLCEIL:
2760 name = "ceil";
2761 break;
2762 case BUILT_IN_ICEILF:
2763 case BUILT_IN_LCEILF:
2764 case BUILT_IN_LLCEILF:
2765 name = "ceilf";
2766 break;
2767 case BUILT_IN_ICEILL:
2768 case BUILT_IN_LCEILL:
2769 case BUILT_IN_LLCEILL:
2770 name = "ceill";
2771 break;
2772 case BUILT_IN_IFLOOR:
2773 case BUILT_IN_LFLOOR:
2774 case BUILT_IN_LLFLOOR:
2775 name = "floor";
2776 break;
2777 case BUILT_IN_IFLOORF:
2778 case BUILT_IN_LFLOORF:
2779 case BUILT_IN_LLFLOORF:
2780 name = "floorf";
2781 break;
2782 case BUILT_IN_IFLOORL:
2783 case BUILT_IN_LFLOORL:
2784 case BUILT_IN_LLFLOORL:
2785 name = "floorl";
2786 break;
2787 default:
2788 gcc_unreachable ();
2791 fntype = build_function_type_list (TREE_TYPE (arg),
2792 TREE_TYPE (arg), NULL_TREE);
2793 fallback_fndecl = build_fn_decl (name, fntype);
2796 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2798 tmp = expand_normal (exp);
2799 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2801 /* Truncate the result of floating point optab to integer
2802 via expand_fix (). */
2803 target = gen_reg_rtx (mode);
2804 expand_fix (target, tmp, 0);
2806 return target;
2809 /* Expand a call to one of the builtin math functions doing integer
2810 conversion (lrint).
2811 Return 0 if a normal call should be emitted rather than expanding the
2812 function in-line. EXP is the expression that is a call to the builtin
2813 function; if convenient, the result should be placed in TARGET. */
2815 static rtx
2816 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2818 convert_optab builtin_optab;
2819 rtx op0, insns;
2820 tree fndecl = get_callee_fndecl (exp);
2821 tree arg;
2822 enum machine_mode mode;
2823 enum built_in_function fallback_fn = BUILT_IN_NONE;
2825 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2826 gcc_unreachable ();
2828 arg = CALL_EXPR_ARG (exp, 0);
2830 switch (DECL_FUNCTION_CODE (fndecl))
2832 CASE_FLT_FN (BUILT_IN_IRINT):
2833 fallback_fn = BUILT_IN_LRINT;
2834 /* FALLTHRU */
2835 CASE_FLT_FN (BUILT_IN_LRINT):
2836 CASE_FLT_FN (BUILT_IN_LLRINT):
2837 builtin_optab = lrint_optab;
2838 break;
2840 CASE_FLT_FN (BUILT_IN_IROUND):
2841 fallback_fn = BUILT_IN_LROUND;
2842 /* FALLTHRU */
2843 CASE_FLT_FN (BUILT_IN_LROUND):
2844 CASE_FLT_FN (BUILT_IN_LLROUND):
2845 builtin_optab = lround_optab;
2846 break;
2848 default:
2849 gcc_unreachable ();
2852 /* There's no easy way to detect the case we need to set EDOM. */
2853 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2854 return NULL_RTX;
2856 /* Make a suitable register to place result in. */
2857 mode = TYPE_MODE (TREE_TYPE (exp));
2859 /* There's no easy way to detect the case we need to set EDOM. */
2860 if (!flag_errno_math)
2862 rtx result = gen_reg_rtx (mode);
2864 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2865 need to expand the argument again. This way, we will not perform
2866 side-effects more the once. */
2867 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2869 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2871 start_sequence ();
2873 if (expand_sfix_optab (result, op0, builtin_optab))
2875 /* Output the entire sequence. */
2876 insns = get_insns ();
2877 end_sequence ();
2878 emit_insn (insns);
2879 return result;
2882 /* If we were unable to expand via the builtin, stop the sequence
2883 (without outputting the insns) and call to the library function
2884 with the stabilized argument list. */
2885 end_sequence ();
2888 if (fallback_fn != BUILT_IN_NONE)
2890 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2891 targets, (int) round (x) should never be transformed into
2892 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2893 a call to lround in the hope that the target provides at least some
2894 C99 functions. This should result in the best user experience for
2895 not full C99 targets. */
2896 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2897 fallback_fn, 0);
2899 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2900 fallback_fndecl, 1, arg);
2902 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2903 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2904 return convert_to_mode (mode, target, 0);
2907 return expand_call (exp, target, target == const0_rtx);
2910 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2911 a normal call should be emitted rather than expanding the function
2912 in-line. EXP is the expression that is a call to the builtin
2913 function; if convenient, the result should be placed in TARGET. */
2915 static rtx
2916 expand_builtin_powi (tree exp, rtx target)
2918 tree arg0, arg1;
2919 rtx op0, op1;
2920 enum machine_mode mode;
2921 enum machine_mode mode2;
2923 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2924 return NULL_RTX;
2926 arg0 = CALL_EXPR_ARG (exp, 0);
2927 arg1 = CALL_EXPR_ARG (exp, 1);
2928 mode = TYPE_MODE (TREE_TYPE (exp));
2930 /* Emit a libcall to libgcc. */
2932 /* Mode of the 2nd argument must match that of an int. */
2933 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2935 if (target == NULL_RTX)
2936 target = gen_reg_rtx (mode);
2938 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2939 if (GET_MODE (op0) != mode)
2940 op0 = convert_to_mode (mode, op0, 0);
2941 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2942 if (GET_MODE (op1) != mode2)
2943 op1 = convert_to_mode (mode2, op1, 0);
2945 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2946 target, LCT_CONST, mode, 2,
2947 op0, mode, op1, mode2);
2949 return target;
2952 /* Expand expression EXP which is a call to the strlen builtin. Return
2953 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2954 try to get the result in TARGET, if convenient. */
2956 static rtx
2957 expand_builtin_strlen (tree exp, rtx target,
2958 enum machine_mode target_mode)
2960 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2961 return NULL_RTX;
2962 else
2964 struct expand_operand ops[4];
2965 rtx pat;
2966 tree len;
2967 tree src = CALL_EXPR_ARG (exp, 0);
2968 rtx src_reg, before_strlen;
2969 enum machine_mode insn_mode = target_mode;
2970 enum insn_code icode = CODE_FOR_nothing;
2971 unsigned int align;
2973 /* If the length can be computed at compile-time, return it. */
2974 len = c_strlen (src, 0);
2975 if (len)
2976 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2978 /* If the length can be computed at compile-time and is constant
2979 integer, but there are side-effects in src, evaluate
2980 src for side-effects, then return len.
2981 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2982 can be optimized into: i++; x = 3; */
2983 len = c_strlen (src, 1);
2984 if (len && TREE_CODE (len) == INTEGER_CST)
2986 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2987 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2990 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2992 /* If SRC is not a pointer type, don't do this operation inline. */
2993 if (align == 0)
2994 return NULL_RTX;
/* Search for a strlen pattern, widening the mode until one is found
   or the widest mode is exhausted.  */
2996 /* Bail out if we can't compute strlen in the right mode. */
2997 while (insn_mode != VOIDmode)
2999 icode = optab_handler (strlen_optab, insn_mode);
3000 if (icode != CODE_FOR_nothing)
3001 break;
3003 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3005 if (insn_mode == VOIDmode)
3006 return NULL_RTX;
3008 /* Make a place to hold the source address. We will not expand
3009 the actual source until we are sure that the expansion will
3010 not fail -- there are trees that cannot be expanded twice. */
3011 src_reg = gen_reg_rtx (Pmode);
3013 /* Mark the beginning of the strlen sequence so we can emit the
3014 source operand later. */
3015 before_strlen = get_last_insn ();
/* Pattern operands: result, the string memory, the searched-for
   character (NUL), and the known alignment in bytes.  */
3017 create_output_operand (&ops[0], target, insn_mode);
3018 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3019 create_integer_operand (&ops[2], 0);
3020 create_integer_operand (&ops[3], align);
3021 if (!maybe_expand_insn (icode, 4, ops))
3022 return NULL_RTX;
3024 /* Now that we are assured of success, expand the source. */
3025 start_sequence ();
3026 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3027 if (pat != src_reg)
3029 #ifdef POINTERS_EXTEND_UNSIGNED
3030 if (GET_MODE (pat) != Pmode)
3031 pat = convert_to_mode (Pmode, pat,
3032 POINTERS_EXTEND_UNSIGNED);
3033 #endif
3034 emit_move_insn (src_reg, pat);
3036 pat = get_insns ();
3037 end_sequence ();
/* Splice the address computation ahead of the strlen insn that is
   already in the instruction stream.  */
3039 if (before_strlen)
3040 emit_insn_after (pat, before_strlen);
3041 else
3042 emit_insn_before (pat, get_insns ());
3044 /* Return the value in the proper mode for this function. */
3045 if (GET_MODE (ops[0].value) == target_mode)
3046 target = ops[0].value;
3047 else if (target != 0)
3048 convert_move (target, ops[0].value, 0);
3049 else
3050 target = convert_to_mode (target_mode, ops[0].value, 0);
3052 return target;
3056 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3057 bytes from constant string DATA + OFFSET and return it as target
3058 constant. */
3060 static rtx
3061 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3062 enum machine_mode mode)
3064 const char *str = (const char *) data;
3066 gcc_assert (offset >= 0
3067 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3068 <= strlen (str) + 1));
3070 return c_readstr (str + offset, mode);
3073 /* Expand a call EXP to the memcpy builtin.
3074 Return NULL_RTX if we failed, the caller should emit a normal call,
3075 otherwise try to get the result in TARGET, if convenient (and in
3076 mode MODE if that's convenient). */
3078 static rtx
3079 expand_builtin_memcpy (tree exp, rtx target)
3081 if (!validate_arglist (exp,
3082 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3083 return NULL_RTX;
3084 else
3086 tree dest = CALL_EXPR_ARG (exp, 0);
3087 tree src = CALL_EXPR_ARG (exp, 1);
3088 tree len = CALL_EXPR_ARG (exp, 2);
3089 const char *src_str;
3090 unsigned int src_align = get_pointer_alignment (src);
3091 unsigned int dest_align = get_pointer_alignment (dest);
3092 rtx dest_mem, src_mem, dest_addr, len_rtx;
3093 HOST_WIDE_INT expected_size = -1;
3094 unsigned int expected_align = 0;
3096 /* If DEST is not a pointer type, call the normal function. */
3097 if (dest_align == 0)
3098 return NULL_RTX;
3100 /* If either SRC is not a pointer type, don't do this
3101 operation in-line. */
3102 if (src_align == 0)
3103 return NULL_RTX;
/* Profile-feedback hints about the typical alignment/size of this
   string operation, if we are expanding a gimple statement.  */
3105 if (currently_expanding_gimple_stmt)
3106 stringop_block_profile (currently_expanding_gimple_stmt,
3107 &expected_align, &expected_size);
3109 if (expected_align < dest_align)
3110 expected_align = dest_align;
3111 dest_mem = get_memory_rtx (dest, len);
3112 set_mem_align (dest_mem, dest_align);
3113 len_rtx = expand_normal (len);
3114 src_str = c_getstr (src);
3116 /* If SRC is a string constant and block move would be done
3117 by pieces, we can avoid loading the string from memory
3118 and only store the computed constants. */
3119 if (src_str
3120 && CONST_INT_P (len_rtx)
3121 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3122 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3123 CONST_CAST (char *, src_str),
3124 dest_align, false))
3126 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3127 builtin_memcpy_read_str,
3128 CONST_CAST (char *, src_str),
3129 dest_align, false, 0);
3130 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3131 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3132 return dest_mem;
3135 src_mem = get_memory_rtx (src, len);
3136 set_mem_align (src_mem, src_align);
3138 /* Copy word part most expediently. */
3139 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3140 CALL_EXPR_TAILCALL (exp)
3141 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3142 expected_align, expected_size);
/* memcpy returns DEST; materialize that address if the block move
   did not already produce it.  */
3144 if (dest_addr == 0)
3146 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3147 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3149 return dest_addr;
3153 /* Expand a call EXP to the mempcpy builtin.
3154 Return NULL_RTX if we failed; the caller should emit a normal call,
3155 otherwise try to get the result in TARGET, if convenient (and in
3156 mode MODE if that's convenient). If ENDP is 0 return the
3157 destination pointer, if ENDP is 1 return the end pointer ala
3158 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3159 stpcpy. */
3161 static rtx
3162 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3164 if (!validate_arglist (exp,
3165 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3166 return NULL_RTX;
3167 else
3169 tree dest = CALL_EXPR_ARG (exp, 0);
3170 tree src = CALL_EXPR_ARG (exp, 1);
3171 tree len = CALL_EXPR_ARG (exp, 2);
3172 return expand_builtin_mempcpy_args (dest, src, len,
3173 target, mode, /*endp=*/ 1);
3177 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3178 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3179 so that this can also be called without constructing an actual CALL_EXPR.
3180 The other arguments and return value are the same as for
3181 expand_builtin_mempcpy. */
3183 static rtx
3184 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3185 rtx target, enum machine_mode mode, int endp)
3187 /* If return value is ignored, transform mempcpy into memcpy. */
3188 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3190 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3191 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3192 dest, src, len);
3193 return expand_expr (result, target, mode, EXPAND_NORMAL);
3195 else
3197 const char *src_str;
3198 unsigned int src_align = get_pointer_alignment (src);
3199 unsigned int dest_align = get_pointer_alignment (dest);
3200 rtx dest_mem, src_mem, len_rtx;
3202 /* If either SRC or DEST is not a pointer type, don't do this
3203 operation in-line. */
3204 if (dest_align == 0 || src_align == 0)
3205 return NULL_RTX;
3207 /* If LEN is not constant, call the normal function. */
3208 if (! host_integerp (len, 1))
3209 return NULL_RTX;
3211 len_rtx = expand_normal (len);
3212 src_str = c_getstr (src);
3214 /* If SRC is a string constant and block move would be done
3215 by pieces, we can avoid loading the string from memory
3216 and only store the computed constants. */
3217 if (src_str
3218 && CONST_INT_P (len_rtx)
3219 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3220 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3221 CONST_CAST (char *, src_str),
3222 dest_align, false))
3224 dest_mem = get_memory_rtx (dest, len);
3225 set_mem_align (dest_mem, dest_align);
/* store_by_pieces honors ENDP and returns the requested pointer
   (start, end, or end-1) for us.  */
3226 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3227 builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false, endp);
3230 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3231 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3232 return dest_mem;
3235 if (CONST_INT_P (len_rtx)
3236 && can_move_by_pieces (INTVAL (len_rtx),
3237 MIN (dest_align, src_align)))
3239 dest_mem = get_memory_rtx (dest, len);
3240 set_mem_align (dest_mem, dest_align);
3241 src_mem = get_memory_rtx (src, len);
3242 set_mem_align (src_mem, src_align);
3243 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3244 MIN (dest_align, src_align), endp);
3245 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3246 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3247 return dest_mem;
3250 return NULL_RTX;
3254 #ifndef HAVE_movstr
3255 # define HAVE_movstr 0
3256 # define CODE_FOR_movstr CODE_FOR_nothing
3257 #endif
3259 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3260 we failed, the caller should emit a normal call, otherwise try to
3261 get the result in TARGET, if convenient. If ENDP is 0 return the
3262 destination pointer, if ENDP is 1 return the end pointer ala
3263 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3264 stpcpy. */
3266 static rtx
3267 expand_movstr (tree dest, tree src, rtx target, int endp)
3269 struct expand_operand ops[3];
3270 rtx dest_mem;
3271 rtx src_mem;
3273 if (!HAVE_movstr)
3274 return NULL_RTX;
3276 dest_mem = get_memory_rtx (dest, NULL);
3277 src_mem = get_memory_rtx (src, NULL);
/* For a strcpy-style return (ENDP == 0) the result is the destination
   address itself, so latch it into TARGET before the pattern runs.  */
3278 if (!endp)
3280 target = force_reg (Pmode, XEXP (dest_mem, 0));
3281 dest_mem = replace_equiv_address (dest_mem, target);
/* Only request the pattern's output operand (the end pointer) when
   the caller actually wants an end-style result.  */
3284 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3285 create_fixed_operand (&ops[1], dest_mem);
3286 create_fixed_operand (&ops[2], src_mem);
3287 expand_insn (CODE_FOR_movstr, 3, ops);
3289 if (endp && target != const0_rtx)
3291 target = ops[0].value;
3292 /* movstr is supposed to set end to the address of the NUL
3293 terminator. If the caller requested a mempcpy-like return value,
3294 adjust it. */
3295 if (endp == 1)
3297 rtx tem = plus_constant (GET_MODE (target),
3298 gen_lowpart (GET_MODE (target), target), 1);
3299 emit_move_insn (target, force_operand (tem, NULL_RTX));
3302 return target;
3305 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3306 NULL_RTX if we failed the caller should emit a normal call, otherwise
3307 try to get the result in TARGET, if convenient (and in mode MODE if that's
3308 convenient). */
3310 static rtx
3311 expand_builtin_strcpy (tree exp, rtx target)
3313 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3315 tree dest = CALL_EXPR_ARG (exp, 0);
3316 tree src = CALL_EXPR_ARG (exp, 1);
3317 return expand_builtin_strcpy_args (dest, src, target);
3319 return NULL_RTX;
3322 /* Helper function to do the actual work for expand_builtin_strcpy. The
3323 arguments to the builtin_strcpy call DEST and SRC are broken out
3324 so that this can also be called without constructing an actual CALL_EXPR.
3325 The other arguments and return value are the same as for
3326 expand_builtin_strcpy. */
3328 static rtx
3329 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3331 return expand_movstr (dest, src, target, /*endp=*/0);
3334 /* Expand a call EXP to the stpcpy builtin.
3335 Return NULL_RTX if we failed the caller should emit a normal call,
3336 otherwise try to get the result in TARGET, if convenient (and in
3337 mode MODE if that's convenient). */
3339 static rtx
3340 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3342 tree dst, src;
3343 location_t loc = EXPR_LOCATION (exp);
3345 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3346 return NULL_RTX;
3348 dst = CALL_EXPR_ARG (exp, 0);
3349 src = CALL_EXPR_ARG (exp, 1);
3351 /* If return value is ignored, transform stpcpy into strcpy. */
3352 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3354 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3355 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3356 return expand_expr (result, target, mode, EXPAND_NORMAL);
3358 else
3360 tree len, lenp1;
3361 rtx ret;
3363 /* Ensure we get an actual string whose length can be evaluated at
3364 compile-time, not an expression containing a string. This is
3365 because the latter will potentially produce pessimized code
3366 when used to produce the return value. */
3367 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3368 return expand_movstr (dst, src, target, /*endp=*/2);
3370 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3371 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3372 target, mode, /*endp=*/2);
3374 if (ret)
3375 return ret;
3377 if (TREE_CODE (len) == INTEGER_CST)
3379 rtx len_rtx = expand_normal (len);
3381 if (CONST_INT_P (len_rtx))
3383 ret = expand_builtin_strcpy_args (dst, src, target);
3385 if (ret)
3387 if (! target)
3389 if (mode != VOIDmode)
3390 target = gen_reg_rtx (mode);
3391 else
3392 target = gen_reg_rtx (GET_MODE (ret));
3394 if (GET_MODE (target) != GET_MODE (ret))
3395 ret = gen_lowpart (GET_MODE (target), ret);
3397 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3398 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3399 gcc_assert (ret);
3401 return target;
3406 return expand_movstr (dst, src, target, /*endp=*/2);
3410 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3411 bytes from constant string DATA + OFFSET and return it as target
3412 constant. */
3415 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3416 enum machine_mode mode)
3418 const char *str = (const char *) data;
3420 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3421 return const0_rtx;
3423 return c_readstr (str + offset, mode);
3426 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3427 NULL_RTX if we failed; the caller should emit a normal call. */
3429 static rtx
3430 expand_builtin_strncpy (tree exp, rtx target)
3432 location_t loc = EXPR_LOCATION (exp);
3434 if (validate_arglist (exp,
3435 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3437 tree dest = CALL_EXPR_ARG (exp, 0);
3438 tree src = CALL_EXPR_ARG (exp, 1);
3439 tree len = CALL_EXPR_ARG (exp, 2);
3440 tree slen = c_strlen (src, 1);
3442 /* We must be passed a constant len and src parameter. */
3443 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3444 return NULL_RTX;
/* SLEN now counts the source bytes including the terminating NUL.  */
3446 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3448 /* We're required to pad with trailing zeros if the requested
3449 len is greater than strlen(s2)+1. In that case try to
3450 use store_by_pieces, if it fails, punt. */
3451 if (tree_int_cst_lt (slen, len))
3453 unsigned int dest_align = get_pointer_alignment (dest);
3454 const char *p = c_getstr (src);
3455 rtx dest_mem;
3457 if (!p || dest_align == 0 || !host_integerp (len, 1)
3458 || !can_store_by_pieces (tree_low_cst (len, 1),
3459 builtin_strncpy_read_str,
3460 CONST_CAST (char *, p),
3461 dest_align, false))
3462 return NULL_RTX;
/* builtin_strncpy_read_str supplies zeros past the NUL, giving the
   required zero padding for free.  */
3464 dest_mem = get_memory_rtx (dest, len);
3465 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3466 builtin_strncpy_read_str,
3467 CONST_CAST (char *, p), dest_align, false, 0);
3468 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3469 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3470 return dest_mem;
3473 return NULL_RTX;
3476 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3477 bytes from constant string DATA + OFFSET and return it as target
3478 constant. */
3481 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3482 enum machine_mode mode)
3484 const char *c = (const char *) data;
3485 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3487 memset (p, *c, GET_MODE_SIZE (mode));
3489 return c_readstr (p, mode);
3492 /* Callback routine for store_by_pieces. Return the RTL of a register
3493 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3494 char value given in the RTL register data. For example, if mode is
3495 4 bytes wide, return the RTL for 0x01010101*data. */
3497 static rtx
3498 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3499 enum machine_mode mode)
3501 rtx target, coeff;
3502 size_t size;
3503 char *p;
3505 size = GET_MODE_SIZE (mode);
3506 if (size == 1)
3507 return (rtx) data;
3509 p = XALLOCAVEC (char, size);
3510 memset (p, 1, size);
3511 coeff = c_readstr (p, mode);
3513 target = convert_to_mode (mode, (rtx) data, 1);
3514 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3515 return force_reg (mode, target);
3518 /* Expand expression EXP, which is a call to the memset builtin. Return
3519 NULL_RTX if we failed the caller should emit a normal call, otherwise
3520 try to get the result in TARGET, if convenient (and in mode MODE if that's
3521 convenient). */
3523 static rtx
3524 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3526 if (!validate_arglist (exp,
3527 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529 else
3531 tree dest = CALL_EXPR_ARG (exp, 0);
3532 tree val = CALL_EXPR_ARG (exp, 1);
3533 tree len = CALL_EXPR_ARG (exp, 2);
3534 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3538 /* Helper function to do the actual work for expand_builtin_memset. The
3539 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3540 so that this can also be called without constructing an actual CALL_EXPR.
3541 The other arguments and return value are the same as for
3542 expand_builtin_memset. */
3544 static rtx
3545 expand_builtin_memset_args (tree dest, tree val, tree len,
3546 rtx target, enum machine_mode mode, tree orig_exp)
3548 tree fndecl, fn;
3549 enum built_in_function fcode;
3550 enum machine_mode val_mode;
3551 char c;
3552 unsigned int dest_align;
3553 rtx dest_mem, dest_addr, len_rtx;
3554 HOST_WIDE_INT expected_size = -1;
3555 unsigned int expected_align = 0;
3557 dest_align = get_pointer_alignment (dest);
3559 /* If DEST is not a pointer type, don't do this operation in-line. */
3560 if (dest_align == 0)
3561 return NULL_RTX;
/* Profile-feedback hints about the typical alignment/size of this
   string operation, if we are expanding a gimple statement.  */
3563 if (currently_expanding_gimple_stmt)
3564 stringop_block_profile (currently_expanding_gimple_stmt,
3565 &expected_align, &expected_size);
3567 if (expected_align < dest_align)
3568 expected_align = dest_align;
3570 /* If the LEN parameter is zero, return DEST. */
3571 if (integer_zerop (len))
3573 /* Evaluate and ignore VAL in case it has side-effects. */
3574 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3575 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3578 /* Stabilize the arguments in case we fail. */
3579 dest = builtin_save_expr (dest);
3580 val = builtin_save_expr (val);
3581 len = builtin_save_expr (len);
3583 len_rtx = expand_normal (len);
3584 dest_mem = get_memory_rtx (dest, len);
3585 val_mode = TYPE_MODE (unsigned_char_type_node);
/* Non-constant fill value: replicate it at run time.  */
3587 if (TREE_CODE (val) != INTEGER_CST)
3589 rtx val_rtx;
3591 val_rtx = expand_normal (val);
3592 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3594 /* Assume that we can memset by pieces if we can store
3595 * the coefficients by pieces (in the required modes).
3596 * We can't pass builtin_memset_gen_str as that emits RTL. */
3597 c = 1;
3598 if (host_integerp (len, 1)
3599 && can_store_by_pieces (tree_low_cst (len, 1),
3600 builtin_memset_read_str, &c, dest_align,
3601 true))
3603 val_rtx = force_reg (val_mode, val_rtx);
3604 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3605 builtin_memset_gen_str, val_rtx, dest_align,
3606 true, 0);
3608 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3609 dest_align, expected_align,
3610 expected_size))
3611 goto do_libcall;
3613 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3614 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3615 return dest_mem;
/* Constant fill value: extract the byte; punt if it does not fit in a
   target char.  */
3618 if (target_char_cast (val, &c))
3619 goto do_libcall;
/* Non-zero constant byte: store by pieces or via the setmem pattern.  */
3621 if (c)
3623 if (host_integerp (len, 1)
3624 && can_store_by_pieces (tree_low_cst (len, 1),
3625 builtin_memset_read_str, &c, dest_align,
3626 true))
3627 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3628 builtin_memset_read_str, &c, dest_align, true, 0);
3629 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3630 gen_int_mode (c, val_mode),
3631 dest_align, expected_align,
3632 expected_size))
3633 goto do_libcall;
3635 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3636 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3637 return dest_mem;
/* Zero fill value: use the block-clear expander.  */
3640 set_mem_align (dest_mem, dest_align);
3641 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3642 CALL_EXPR_TAILCALL (orig_exp)
3643 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3644 expected_align, expected_size);
3646 if (dest_addr == 0)
3648 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3649 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3652 return dest_addr;
/* Inline expansion failed: emit a library call to whichever of
   memset/bzero ORIG_EXP originally named.  */
3654 do_libcall:
3655 fndecl = get_callee_fndecl (orig_exp);
3656 fcode = DECL_FUNCTION_CODE (fndecl);
3657 if (fcode == BUILT_IN_MEMSET)
3658 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3659 dest, val, len);
3660 else if (fcode == BUILT_IN_BZERO)
3661 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3662 dest, len);
3663 else
3664 gcc_unreachable ();
3665 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3666 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3667 return expand_call (fn, target, target == const0_rtx);
3670 /* Expand expression EXP, which is a call to the bzero builtin. Return
3671 NULL_RTX if we failed the caller should emit a normal call. */
3673 static rtx
3674 expand_builtin_bzero (tree exp)
3676 tree dest, size;
3677 location_t loc = EXPR_LOCATION (exp);
3679 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3680 return NULL_RTX;
3682 dest = CALL_EXPR_ARG (exp, 0);
3683 size = CALL_EXPR_ARG (exp, 1);
3685 /* New argument list transforming bzero(ptr x, int y) to
3686 memset(ptr x, int 0, size_t y). This is done this way
3687 so that if it isn't expanded inline, we fallback to
3688 calling bzero instead of memset. */
3690 return expand_builtin_memset_args (dest, integer_zero_node,
3691 fold_convert_loc (loc,
3692 size_type_node, size),
3693 const0_rtx, VOIDmode, exp);
3696 /* Expand expression EXP, which is a call to the memcmp built-in function.
3697 Return NULL_RTX if we failed and the caller should emit a normal call,
3698 otherwise try to get the result in TARGET, if convenient (and in mode
3699 MODE, if that's convenient). */
3701 static rtx
3702 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3703 ATTRIBUTE_UNUSED enum machine_mode mode)
3705 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3709 return NULL_RTX;
3711 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3712 implementing memcmp because it will stop if it encounters two
3713 zero bytes. */
3714 #if defined HAVE_cmpmemsi
3716 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3717 rtx result;
3718 rtx insn;
3719 tree arg1 = CALL_EXPR_ARG (exp, 0);
3720 tree arg2 = CALL_EXPR_ARG (exp, 1);
3721 tree len = CALL_EXPR_ARG (exp, 2);
3723 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3724 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3725 enum machine_mode insn_mode;
3727 if (HAVE_cmpmemsi)
3728 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3729 else
3730 return NULL_RTX;
3732 /* If we don't have POINTER_TYPE, call the function. */
3733 if (arg1_align == 0 || arg2_align == 0)
3734 return NULL_RTX;
3736 /* Make a place to write the result of the instruction. */
3737 result = target;
/* Reuse TARGET only if it is a pseudo register of the mode the
   cmpmemsi pattern produces.  */
3738 if (! (result != 0
3739 && REG_P (result) && GET_MODE (result) == insn_mode
3740 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3741 result = gen_reg_rtx (insn_mode);
3743 arg1_rtx = get_memory_rtx (arg1, len);
3744 arg2_rtx = get_memory_rtx (arg2, len);
3745 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3747 /* Set MEM_SIZE as appropriate. */
3748 if (CONST_INT_P (arg3_rtx))
3750 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3751 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3754 if (HAVE_cmpmemsi)
3755 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3756 GEN_INT (MIN (arg1_align, arg2_align)));
3757 else
3758 gcc_unreachable ();
/* The pattern may decline at expansion time; fall back to a libcall
   to memcmp in that case.  */
3760 if (insn)
3761 emit_insn (insn);
3762 else
3763 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3764 TYPE_MODE (integer_type_node), 3,
3765 XEXP (arg1_rtx, 0), Pmode,
3766 XEXP (arg2_rtx, 0), Pmode,
3767 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3768 TYPE_UNSIGNED (sizetype)),
3769 TYPE_MODE (sizetype));
3771 /* Return the value in the proper mode for this function. */
3772 mode = TYPE_MODE (TREE_TYPE (exp));
3773 if (GET_MODE (result) == mode)
3774 return result;
3775 else if (target != 0)
3777 convert_move (target, result, 0);
3778 return target;
3780 else
3781 return convert_to_mode (mode, result, 0);
3783 #endif /* HAVE_cmpmemsi. */
3785 return NULL_RTX;
3788 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3789 if we failed; the caller should emit a normal call, otherwise try to get
3790 the result in TARGET, if convenient. */
3792 static rtx
3793 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3795 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3796 return NULL_RTX;
3798 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3799 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3800 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3802 rtx arg1_rtx, arg2_rtx;
3803 rtx result, insn = NULL_RTX;
3804 tree fndecl, fn;
3805 tree arg1 = CALL_EXPR_ARG (exp, 0);
3806 tree arg2 = CALL_EXPR_ARG (exp, 1);
3808 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3809 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3811 /* If we don't have POINTER_TYPE, call the function. */
3812 if (arg1_align == 0 || arg2_align == 0)
3813 return NULL_RTX;
3815 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3816 arg1 = builtin_save_expr (arg1);
3817 arg2 = builtin_save_expr (arg2);
3819 arg1_rtx = get_memory_rtx (arg1, NULL);
3820 arg2_rtx = get_memory_rtx (arg2, NULL);
3822 #ifdef HAVE_cmpstrsi
3823 /* Try to call cmpstrsi. */
3824 if (HAVE_cmpstrsi)
3826 enum machine_mode insn_mode
3827 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3829 /* Make a place to write the result of the instruction. */
3830 result = target;
3831 if (! (result != 0
3832 && REG_P (result) && GET_MODE (result) == insn_mode
3833 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3834 result = gen_reg_rtx (insn_mode);
3836 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3837 GEN_INT (MIN (arg1_align, arg2_align)));
3839 #endif
3840 #ifdef HAVE_cmpstrnsi
3841 /* Try to determine at least one length and call cmpstrnsi. */
3842 if (!insn && HAVE_cmpstrnsi)
3844 tree len;
3845 rtx arg3_rtx;
3847 enum machine_mode insn_mode
3848 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Use strlen + 1 so the comparison covers the terminating NUL.  */
3849 tree len1 = c_strlen (arg1, 1);
3850 tree len2 = c_strlen (arg2, 1);
3852 if (len1)
3853 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3854 if (len2)
3855 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3857 /* If we don't have a constant length for the first, use the length
3858 of the second, if we know it. We don't require a constant for
3859 this case; some cost analysis could be done if both are available
3860 but neither is constant. For now, assume they're equally cheap,
3861 unless one has side effects. If both strings have constant lengths,
3862 use the smaller. */
3864 if (!len1)
3865 len = len2;
3866 else if (!len2)
3867 len = len1;
3868 else if (TREE_SIDE_EFFECTS (len1))
3869 len = len2;
3870 else if (TREE_SIDE_EFFECTS (len2))
3871 len = len1;
3872 else if (TREE_CODE (len1) != INTEGER_CST)
3873 len = len2;
3874 else if (TREE_CODE (len2) != INTEGER_CST)
3875 len = len1;
3876 else if (tree_int_cst_lt (len1, len2))
3877 len = len1;
3878 else
3879 len = len2;
3881 /* If both arguments have side effects, we cannot optimize. */
3882 if (!len || TREE_SIDE_EFFECTS (len))
3883 goto do_libcall;
3885 arg3_rtx = expand_normal (len);
3887 /* Make a place to write the result of the instruction. */
3888 result = target;
3889 if (! (result != 0
3890 && REG_P (result) && GET_MODE (result) == insn_mode
3891 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3892 result = gen_reg_rtx (insn_mode);
3894 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3895 GEN_INT (MIN (arg1_align, arg2_align)));
3897 #endif
3899 if (insn)
3901 enum machine_mode mode;
3902 emit_insn (insn);
3904 /* Return the value in the proper mode for this function. */
3905 mode = TYPE_MODE (TREE_TYPE (exp));
3906 if (GET_MODE (result) == mode)
3907 return result;
3908 if (target == 0)
3909 return convert_to_mode (mode, result, 0);
3910 convert_move (target, result, 0);
3911 return target;
3914 /* Expand the library call ourselves using a stabilized argument
3915 list to avoid re-evaluating the function's arguments twice. */
3916 #ifdef HAVE_cmpstrnsi
3917 do_libcall:
3918 #endif
3919 fndecl = get_callee_fndecl (exp);
3920 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3921 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3922 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3923 return expand_call (fn, target, target == const0_rtx);
3925 #endif
3926 return NULL_RTX;
3929 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
3930 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
3931 the result in TARGET, if convenient.  */
3933 static rtx
3934 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3935 ATTRIBUTE_UNUSED enum machine_mode mode)
3937 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3939 if (!validate_arglist (exp,
3940 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3941 return NULL_RTX;
3943 /* If c_strlen can determine an expression for one of the string
3944 lengths, and it doesn't have side effects, then emit cmpstrnsi
3945 using length MIN(strlen(string)+1, arg3). */
3946 #ifdef HAVE_cmpstrnsi
3947 if (HAVE_cmpstrnsi)
3949 tree len, len1, len2;
3950 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3951 rtx result, insn;
3952 tree fndecl, fn;
3953 tree arg1 = CALL_EXPR_ARG (exp, 0);
3954 tree arg2 = CALL_EXPR_ARG (exp, 1);
3955 tree arg3 = CALL_EXPR_ARG (exp, 2);
3957 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3958 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3959 enum machine_mode insn_mode
3960 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Try to compute the length of each string at compile time; a non-NULL
   result is the string length, which is then bumped by one below to
   include the terminating NUL.  */
3962 len1 = c_strlen (arg1, 1);
3963 len2 = c_strlen (arg2, 1);
3965 if (len1)
3966 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3967 if (len2)
3968 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3970 /* If we don't have a constant length for the first, use the length
3971 of the second, if we know it. We don't require a constant for
3972 this case; some cost analysis could be done if both are available
3973 but neither is constant. For now, assume they're equally cheap,
3974 unless one has side effects. If both strings have constant lengths,
3975 use the smaller. */
3977 if (!len1)
3978 len = len2;
3979 else if (!len2)
3980 len = len1;
3981 else if (TREE_SIDE_EFFECTS (len1))
3982 len = len2;
3983 else if (TREE_SIDE_EFFECTS (len2))
3984 len = len1;
3985 else if (TREE_CODE (len1) != INTEGER_CST)
3986 len = len2;
3987 else if (TREE_CODE (len2) != INTEGER_CST)
3988 len = len1;
3989 else if (tree_int_cst_lt (len1, len2))
3990 len = len1;
3991 else
3992 len = len2;
3994 /* If both arguments have side effects, we cannot optimize. */
3995 if (!len || TREE_SIDE_EFFECTS (len))
3996 return NULL_RTX;
3998 /* The actual new length parameter is MIN(len,arg3). */
3999 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4000 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4002 /* If we don't have POINTER_TYPE, call the function.  (An alignment of
   zero units means the pointer alignment is unknown / below one byte.)  */
4003 if (arg1_align == 0 || arg2_align == 0)
4004 return NULL_RTX;
4006 /* Make a place to write the result of the instruction. */
4007 result = target;
4008 if (! (result != 0
4009 && REG_P (result) && GET_MODE (result) == insn_mode
4010 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4011 result = gen_reg_rtx (insn_mode);
4013 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4014 arg1 = builtin_save_expr (arg1);
4015 arg2 = builtin_save_expr (arg2);
4016 len = builtin_save_expr (len);
4018 arg1_rtx = get_memory_rtx (arg1, len);
4019 arg2_rtx = get_memory_rtx (arg2, len);
4020 arg3_rtx = expand_normal (len);
4021 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4022 GEN_INT (MIN (arg1_align, arg2_align)));
4023 if (insn)
4025 emit_insn (insn);
4027 /* Return the value in the proper mode for this function. */
4028 mode = TYPE_MODE (TREE_TYPE (exp));
4029 if (GET_MODE (result) == mode)
4030 return result;
4031 if (target == 0)
4032 return convert_to_mode (mode, result, 0);
4033 convert_move (target, result, 0);
4034 return target;
4037 /* Expand the library call ourselves using a stabilized argument
4038 list to avoid re-evaluating the function's arguments twice. */
4039 fndecl = get_callee_fndecl (exp);
4040 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4041 arg1, arg2, len);
4042 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4043 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4044 return expand_call (fn, target, target == const0_rtx);
4046 #endif
4047 return NULL_RTX;
4050 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4051 if that's convenient. */
4054 expand_builtin_saveregs (void)
4056 rtx val, seq;
4058 /* Don't do __builtin_saveregs more than once in a function.
4059 Save the result of the first call and reuse it. */
4060 if (saveregs_value != 0)
4061 return saveregs_value;
4063 /* When this function is called, it means that registers must be
4064 saved on entry to this function. So we migrate the call to the
4065 first insn of this function. */
/* Capture the target's register-save code in a detached sequence so it
   can be moved to the function entry below.  */
4067 start_sequence ();
4069 /* Do whatever the machine needs done in this case. */
4070 val = targetm.calls.expand_builtin_saveregs ();
4072 seq = get_insns ();
4073 end_sequence ();
/* Cache the result for any later __builtin_saveregs in this function.  */
4075 saveregs_value = val;
4077 /* Put the insns after the NOTE that starts the function. If this
4078 is inside a start_sequence, make the outer-level insn chain current, so
4079 the code is placed at the start of the function. */
4080 push_topmost_sequence ();
4081 emit_insn_after (seq, entry_of_function ());
4082 pop_topmost_sequence ();
4084 return val;
4087 /* Expand a call to __builtin_next_arg. */
4089 static rtx
4090 expand_builtin_next_arg (void)
4092 /* Checking arguments is already done in fold_builtin_next_arg
4093 that must be called before this function. */
4094 return expand_binop (ptr_mode, add_optab,
4095 crtl->args.internal_arg_pointer,
4096 crtl->args.arg_offset_rtx,
4097 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4100 /* Make it easier for the backends by protecting the valist argument
4101 from multiple evaluations.  LOC is the location to use for new trees;
   NEEDS_LVALUE is nonzero when the caller will write through the result.  */
4103 static tree
4104 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4106 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4108 /* The current way of determining the type of valist is completely
4109 bogus. We should have the information on the va builtin instead. */
4110 if (!vatype)
4111 vatype = targetm.fn_abi_va_list (cfun->decl);
4113 if (TREE_CODE (vatype) == ARRAY_TYPE)
4115 if (TREE_SIDE_EFFECTS (valist))
4116 valist = save_expr (valist);
4118 /* For this case, the backends will be expecting a pointer to
4119 vatype, but it's possible we've actually been given an array
4120 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4121 So fix it. */
4122 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4124 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4125 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4128 else
4130 tree pt = build_pointer_type (vatype);
/* Non-array va_list: if no lvalue is needed and the expression is
   side-effect free, it is already stable; otherwise take its address,
   stabilize that, and dereference via a MEM_REF.  */
4132 if (! needs_lvalue)
4134 if (! TREE_SIDE_EFFECTS (valist))
4135 return valist;
4137 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4138 TREE_SIDE_EFFECTS (valist) = 1;
4141 if (TREE_SIDE_EFFECTS (valist))
4142 valist = save_expr (valist);
4143 valist = fold_build2_loc (loc, MEM_REF,
4144 vatype, valist, build_int_cst (pt, 0));
4147 return valist;
4150 /* The "standard" definition of va_list is void*. */
4152 tree
4153 std_build_builtin_va_list (void)
4155 return ptr_type_node;
4158 /* The "standard" abi va_list is va_list_type_node. */
4160 tree
4161 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4163 return va_list_type_node;
4166 /* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE is (a possibly decayed form of) it,
   otherwise NULL_TREE.  */
4168 tree
4169 std_canonical_va_list_type (tree type)
4171 tree wtype, htype;
/* Strip one level of indirection so a pointer-to-va_list argument is
   compared against the underlying va_list type.  */
4173 if (INDIRECT_REF_P (type))
4174 type = TREE_TYPE (type);
4175 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4176 type = TREE_TYPE (type);
4177 wtype = va_list_type_node;
4178 htype = type;
4179 /* Treat structure va_list types. */
4180 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4181 htype = TREE_TYPE (htype);
4182 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4184 /* If va_list is an array type, the argument may have decayed
4185 to a pointer type, e.g. by being passed to another function.
4186 In that case, unwrap both types so that we can compare the
4187 underlying records. */
4188 if (TREE_CODE (htype) == ARRAY_TYPE
4189 || POINTER_TYPE_P (htype))
4191 wtype = TREE_TYPE (wtype);
4192 htype = TREE_TYPE (htype);
4195 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4196 return va_list_type_node;
4198 return NULL_TREE;
4201 /* The "standard" implementation of va_start: just assign `nextarg' to
4202 the variable. */
4204 void
4205 std_expand_builtin_va_start (tree valist, rtx nextarg)
4207 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4208 convert_move (va_r, nextarg, 0);
4211 /* Expand EXP, a call to __builtin_va_start. */
4213 static rtx
4214 expand_builtin_va_start (tree exp)
4216 rtx nextarg;
4217 tree valist;
4218 location_t loc = EXPR_LOCATION (exp);
4220 if (call_expr_nargs (exp) < 2)
4222 error_at (loc, "too few arguments to function %<va_start%>");
4223 return const0_rtx;
4226 if (fold_builtin_next_arg (exp, true))
4227 return const0_rtx;
4229 nextarg = expand_builtin_next_arg ();
4230 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4232 if (targetm.expand_builtin_va_start)
4233 targetm.expand_builtin_va_start (valist, nextarg);
4234 else
4235 std_expand_builtin_va_start (valist, nextarg);
4237 return const0_rtx;
4240 /* The "standard" implementation of va_arg: read the value from the
4241 current (padded) address and increment by the (padded) size.  VALIST is
   the va_list expression, TYPE the requested argument type; gimplified
   statements are appended to PRE_P / POST_P.  Returns the value tree.  */
4243 tree
4244 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4245 gimple_seq *post_p)
4247 tree addr, t, type_size, rounded_size, valist_tmp;
4248 unsigned HOST_WIDE_INT align, boundary;
4249 bool indirect;
4251 #ifdef ARGS_GROW_DOWNWARD
4252 /* All of the alignment and movement below is for args-grow-up machines.
4253 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4254 implement their own specialized gimplify_va_arg_expr routines. */
4255 gcc_unreachable ();
4256 #endif
/* Arguments passed by invisible reference are fetched as pointers and
   dereferenced at the end.  */
4258 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4259 if (indirect)
4260 type = build_pointer_type (type);
4262 align = PARM_BOUNDARY / BITS_PER_UNIT;
4263 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4265 /* When we align parameter on stack for caller, if the parameter
4266 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4267 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4268 here with caller. */
4269 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4270 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4272 boundary /= BITS_PER_UNIT;
4274 /* Hoist the valist value into a temporary for the moment. */
4275 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4277 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4278 requires greater alignment, we must perform dynamic alignment. */
4279 if (boundary > align
4280 && !integer_zerop (TYPE_SIZE (type)))
/* valist_tmp = (valist_tmp + boundary-1) & -boundary.  */
4282 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4283 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4284 gimplify_and_add (t, pre_p);
4286 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4287 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4288 valist_tmp,
4289 build_int_cst (TREE_TYPE (valist), -boundary)));
4290 gimplify_and_add (t, pre_p);
4292 else
4293 boundary = align;
4295 /* If the actual alignment is less than the alignment of the type,
4296 adjust the type accordingly so that we don't assume strict alignment
4297 when dereferencing the pointer. */
4298 boundary *= BITS_PER_UNIT;
4299 if (boundary < TYPE_ALIGN (type))
4301 type = build_variant_type_copy (type);
4302 TYPE_ALIGN (type) = boundary;
4305 /* Compute the rounded size of the type. */
4306 type_size = size_in_bytes (type);
4307 rounded_size = round_up (type_size, align);
4309 /* Reduce rounded_size so it's sharable with the postqueue. */
4310 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4312 /* Get AP. */
4313 addr = valist_tmp;
4314 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4316 /* Small args are padded downward. */
4317 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4318 rounded_size, size_int (align));
4319 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4320 size_binop (MINUS_EXPR, rounded_size, type_size));
4321 addr = fold_build_pointer_plus (addr, t);
4324 /* Compute new value for AP. */
4325 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4326 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4327 gimplify_and_add (t, pre_p);
4329 addr = fold_convert (build_pointer_type (type), addr);
4331 if (indirect)
4332 addr = build_va_arg_indirect_ref (addr);
4334 return build_va_arg_indirect_ref (addr);
4337 /* Build an indirect-ref expression over the given TREE, which represents a
4338 piece of a va_arg() expansion. */
4339 tree
4340 build_va_arg_indirect_ref (tree addr)
4342 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4344 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4345 mf_mark (addr);
4347 return addr;
4350 /* Return a dummy expression of type TYPE in order to keep going after an
4351 error. */
4353 static tree
4354 dummy_object (tree type)
4356 tree t = build_int_cst (build_pointer_type (type), 0);
4357 return build2 (MEM_REF, type, t, t);
4360 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4361 builtin function, but a very special sort of operator.  *EXPR_P is the
   VA_ARG_EXPR; gimplified statements go to PRE_P / POST_P.  */
4363 enum gimplify_status
4364 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4366 tree promoted_type, have_va_type;
4367 tree valist = TREE_OPERAND (*expr_p, 0);
4368 tree type = TREE_TYPE (*expr_p);
4369 tree t;
4370 location_t loc = EXPR_LOCATION (*expr_p);
4372 /* Verify that valist is of the proper type. */
4373 have_va_type = TREE_TYPE (valist);
4374 if (have_va_type == error_mark_node)
4375 return GS_ERROR;
4376 have_va_type = targetm.canonical_va_list_type (have_va_type);
4378 if (have_va_type == NULL_TREE)
4380 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4381 return GS_ERROR;
4384 /* Generate a diagnostic for requesting data of a type that cannot
4385 be passed through `...' due to type promotion at the call site. */
4386 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4387 != type)
4389 static bool gave_help;
4390 bool warned;
4392 /* Unfortunately, this is merely undefined, rather than a constraint
4393 violation, so we cannot make this an error. If this call is never
4394 executed, the program is still strictly conforming. */
4395 warned = warning_at (loc, 0,
4396 "%qT is promoted to %qT when passed through %<...%>",
4397 type, promoted_type);
4398 if (!gave_help && warned)
4400 gave_help = true;
4401 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4402 promoted_type, type);
4405 /* We can, however, treat "undefined" any way we please.
4406 Call abort to encourage the user to fix the program. */
4407 if (warned)
4408 inform (loc, "if this code is reached, the program will abort");
4409 /* Before the abort, allow the evaluation of the va_list
4410 expression to exit or longjmp. */
4411 gimplify_and_add (valist, pre_p);
4412 t = build_call_expr_loc (loc,
4413 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4414 gimplify_and_add (t, pre_p);
4416 /* This is dead code, but go ahead and finish so that the
4417 mode of the result comes out right. */
4418 *expr_p = dummy_object (type);
4419 return GS_ALL_DONE;
4421 else
4423 /* Make it easier for the backends by protecting the valist argument
4424 from multiple evaluations. */
4425 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4427 /* For this case, the backends will be expecting a pointer to
4428 TREE_TYPE (abi), but it's possible we've
4429 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4430 So fix it. */
4431 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4433 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4434 valist = fold_convert_loc (loc, p1,
4435 build_fold_addr_expr_loc (loc, valist));
4438 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4440 else
4441 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4443 if (!targetm.gimplify_va_arg_expr)
4444 /* FIXME: Once most targets are converted we should merely
4445 assert this is non-null. */
4446 return GS_ALL_DONE;
4448 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4449 return GS_OK;
4453 /* Expand EXP, a call to __builtin_va_end. */
4455 static rtx
4456 expand_builtin_va_end (tree exp)
4458 tree valist = CALL_EXPR_ARG (exp, 0);
4460 /* Evaluate for side effects, if needed. I hate macros that don't
4461 do that. */
4462 if (TREE_SIDE_EFFECTS (valist))
4463 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4465 return const0_rtx;
4468 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4469 builtin rather than just as an assignment in stdarg.h because of the
4470 nastiness of array-type va_list types. */
4472 static rtx
4473 expand_builtin_va_copy (tree exp)
4475 tree dst, src, t;
4476 location_t loc = EXPR_LOCATION (exp);
4478 dst = CALL_EXPR_ARG (exp, 0);
4479 src = CALL_EXPR_ARG (exp, 1);
/* The destination must be writable (lvalue); the source is only read.  */
4481 dst = stabilize_va_list_loc (loc, dst, 1);
4482 src = stabilize_va_list_loc (loc, src, 0);
4484 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar va_list: a plain assignment suffices.  */
4486 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4488 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4489 TREE_SIDE_EFFECTS (t) = 1;
4490 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4492 else
/* Array-type va_list: copy the whole object as a block move.  */
4494 rtx dstb, srcb, size;
4496 /* Evaluate to pointers. */
4497 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4498 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4499 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4500 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4502 dstb = convert_memory_address (Pmode, dstb);
4503 srcb = convert_memory_address (Pmode, srcb);
4505 /* "Dereference" to BLKmode memories. */
4506 dstb = gen_rtx_MEM (BLKmode, dstb);
4507 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4508 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4509 srcb = gen_rtx_MEM (BLKmode, srcb);
4510 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4511 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4513 /* Copy. */
4514 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4517 return const0_rtx;
4520 /* Expand a call to one of the builtin functions __builtin_frame_address or
4521 __builtin_return_address.  FNDECL distinguishes the two; EXP is the call.  */
4523 static rtx
4524 expand_builtin_frame_address (tree fndecl, tree exp)
4526 /* The argument must be a nonnegative integer constant.
4527 It counts the number of frames to scan up the stack.
4528 The value is the return address saved in that frame. */
4529 if (call_expr_nargs (exp) == 0)
4530 /* Warning about missing arg was already issued. */
4531 return const0_rtx;
4532 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4534 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4535 error ("invalid argument to %<__builtin_frame_address%>");
4536 else
4537 error ("invalid argument to %<__builtin_return_address%>");
4538 return const0_rtx;
4540 else
4542 rtx tem
4543 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4544 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4546 /* Some ports cannot access arbitrary stack frames. */
4547 if (tem == NULL)
4549 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4550 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4551 else
4552 warning (0, "unsupported argument to %<__builtin_return_address%>");
4553 return const0_rtx;
4556 /* For __builtin_frame_address, return what we've got. */
4557 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4558 return tem;
/* For __builtin_return_address, copy a non-register, non-constant
   result into a register before returning it.  */
4560 if (!REG_P (tem)
4561 && ! CONSTANT_P (tem))
4562 tem = copy_addr_to_reg (tem);
4563 return tem;
4567 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4568 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4569 is the same as for allocate_dynamic_stack_space. */
4571 static rtx
4572 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4574 rtx op0;
4575 rtx result;
4576 bool valid_arglist;
4577 unsigned int align;
/* __builtin_alloca_with_align carries the requested alignment as a
   second argument; plain alloca uses BIGGEST_ALIGNMENT below.  */
4578 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4579 == BUILT_IN_ALLOCA_WITH_ALIGN);
4581 /* Emit normal call if we use mudflap. */
4582 if (flag_mudflap)
4583 return NULL_RTX;
4585 valid_arglist
4586 = (alloca_with_align
4587 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4588 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4590 if (!valid_arglist)
4591 return NULL_RTX;
4593 /* Compute the argument. */
4594 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4596 /* Compute the alignment. */
4597 align = (alloca_with_align
4598 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4599 : BIGGEST_ALIGNMENT);
4601 /* Allocate the desired space. */
4602 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4603 result = convert_memory_address (ptr_mode, result);
4605 return result;
4608 /* Expand a call to bswap builtin in EXP.
4609 Return NULL_RTX if a normal call should be emitted rather than expanding the
4610 function in-line. If convenient, the result should be placed in TARGET.
4611 SUBTARGET may be used as the target for computing one of EXP's operands. */
4613 static rtx
4614 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4615 rtx subtarget)
4617 tree arg;
4618 rtx op0;
4620 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4621 return NULL_RTX;
4623 arg = CALL_EXPR_ARG (exp, 0);
4624 op0 = expand_expr (arg,
4625 subtarget && GET_MODE (subtarget) == target_mode
4626 ? subtarget : NULL_RTX,
4627 target_mode, EXPAND_NORMAL);
4628 if (GET_MODE (op0) != target_mode)
4629 op0 = convert_to_mode (target_mode, op0, 1);
4631 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4633 gcc_assert (target);
4635 return convert_to_mode (target_mode, target, 1);
4638 /* Expand a call to a unary builtin in EXP.
4639 Return NULL_RTX if a normal call should be emitted rather than expanding the
4640 function in-line. If convenient, the result should be placed in TARGET.
4641 SUBTARGET may be used as the target for computing one of EXP's operands. */
4643 static rtx
4644 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4645 rtx subtarget, optab op_optab)
4647 rtx op0;
4649 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4650 return NULL_RTX;
4652 /* Compute the argument; reuse SUBTARGET only if its mode matches the
   argument's mode.  */
4653 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4654 (subtarget
4655 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4656 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4657 VOIDmode, EXPAND_NORMAL);
4658 /* Compute op, into TARGET if possible.
4659 Set TARGET to wherever the result comes back.  NOTE(review): clrsb
   appears to be the one unary builtin expanded with unsignedp == 0
   here — confirm against expand_unop's contract.  */
4660 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4661 op_optab, op0, target, op_optab != clrsb_optab);
4662 gcc_assert (target);
/* Sign-extend the result to the mode the caller expects.  */
4664 return convert_to_mode (target_mode, target, 0);
4667 /* Expand a call to __builtin_expect. We just return our argument
4668 as the builtin_expect semantic should've been already executed by
4669 tree branch prediction pass. */
4671 static rtx
4672 expand_builtin_expect (tree exp, rtx target)
4674 tree arg;
4676 if (call_expr_nargs (exp) < 2)
4677 return const0_rtx;
4678 arg = CALL_EXPR_ARG (exp, 0);
4680 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4681 /* When guessing was done, the hints should be already stripped away. */
4682 gcc_assert (!flag_guess_branch_prob
4683 || optimize == 0 || seen_error ());
4684 return target;
4687 /* Expand a call to __builtin_assume_aligned. We just return our first
4688 argument as the builtin_assume_aligned semantic should've been already
4689 executed by CCP. */
4691 static rtx
4692 expand_builtin_assume_aligned (tree exp, rtx target)
4694 if (call_expr_nargs (exp) < 2)
4695 return const0_rtx;
4696 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4697 EXPAND_NORMAL);
4698 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4699 && (call_expr_nargs (exp) < 3
4700 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4701 return target;
/* Expand a call to __builtin_trap: emit the target's trap insn when one
   exists, otherwise a call to abort, then a barrier.  */
4704 void
4705 expand_builtin_trap (void)
4707 #ifdef HAVE_trap
4708 if (HAVE_trap)
4710 rtx insn = emit_insn (gen_trap ());
4711 /* For trap insns when not accumulating outgoing args force
4712 REG_ARGS_SIZE note to prevent crossjumping of calls with
4713 different args sizes. */
4714 if (!ACCUMULATE_OUTGOING_ARGS)
4715 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4717 else
4718 #endif
4719 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4720 emit_barrier ();
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4734 /* Expand EXP, a call to fabs, fabsf or fabsl.
4735 Return NULL_RTX if a normal call should be emitted rather than expanding
4736 the function inline. If convenient, the result should be placed
4737 in TARGET. SUBTARGET may be used as the target for computing
4738 the operand. */
4740 static rtx
4741 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4743 enum machine_mode mode;
4744 tree arg;
4745 rtx op0;
4747 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4748 return NULL_RTX;
4750 arg = CALL_EXPR_ARG (exp, 0);
4751 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4752 mode = TYPE_MODE (TREE_TYPE (arg));
4753 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4754 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4757 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4758 Return NULL is a normal call should be emitted rather than expanding the
4759 function inline. If convenient, the result should be placed in TARGET.
4760 SUBTARGET may be used as the target for computing the operand. */
4762 static rtx
4763 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4765 rtx op0, op1;
4766 tree arg;
4768 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4769 return NULL_RTX;
4771 arg = CALL_EXPR_ARG (exp, 0);
4772 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4774 arg = CALL_EXPR_ARG (exp, 1);
4775 op1 = expand_normal (arg);
4777 return expand_copysign (op0, op1, target);
4780 /* Create a new constant string literal and return a char* pointer to it.
4781 The STRING_CST value is the LEN characters at STR. */
4782 tree
4783 build_string_literal (int len, const char *str)
4785 tree t, elem, index, type;
4787 t = build_string (len, str);
4788 elem = build_type_variant (char_type_node, 1, 0);
4789 index = build_index_type (size_int (len - 1));
4790 type = build_array_type (elem, index);
4791 TREE_TYPE (t) = type;
4792 TREE_CONSTANT (t) = 1;
4793 TREE_READONLY (t) = 1;
4794 TREE_STATIC (t) = 1;
4796 type = build_pointer_type (elem);
4797 t = build1 (ADDR_EXPR, type,
4798 build4 (ARRAY_REF, elem,
4799 t, integer_zero_node, NULL_TREE, NULL_TREE));
4800 return t;
4803 /* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   caller must emit the library call, const0_rtx when nothing (more) is
   needed.  */
4805 static rtx
4806 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4808 #ifndef HAVE_clear_cache
4809 #ifdef CLEAR_INSN_CACHE
4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does something. Just do the default expansion to a call to
4812 __clear_cache(). */
4813 return NULL_RTX;
4814 #else
4815 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4816 does nothing. There is no need to call it. Do nothing. */
4817 return const0_rtx;
4818 #endif /* CLEAR_INSN_CACHE */
4819 #else
4820 /* We have a "clear_cache" insn, and it will handle everything. */
4821 tree begin, end;
4822 rtx begin_rtx, end_rtx;
4824 /* We must not expand to a library call. If we did, any
4825 fallback library function in libgcc that might contain a call to
4826 __builtin___clear_cache() would recurse infinitely. */
4827 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4829 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4830 return const0_rtx;
4833 if (HAVE_clear_cache)
4835 struct expand_operand ops[2];
4837 begin = CALL_EXPR_ARG (exp, 0);
4838 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4840 end = CALL_EXPR_ARG (exp, 1);
4841 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4843 create_address_operand (&ops[0], begin_rtx);
4844 create_address_operand (&ops[1], end_rtx);
4845 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4846 return const0_rtx;
4848 return const0_rtx;
4849 #endif /* HAVE_clear_cache */
4852 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4854 static rtx
4855 round_trampoline_addr (rtx tramp)
4857 rtx temp, addend, mask;
4859 /* If we don't need too much alignment, we'll have been guaranteed
4860 proper alignment by get_trampoline_type. */
4861 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4862 return tramp;
4864 /* Round address up to desired boundary. */
4865 temp = gen_reg_rtx (Pmode);
4866 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4867 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4869 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4870 temp, 0, OPTAB_LIB_WIDEN);
4871 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4872 temp, 0, OPTAB_LIB_WIDEN);
4874 return tramp;
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  EXP carries the
   trampoline address, the nested function address, and the static
   chain value.  */
4877 static rtx
4878 expand_builtin_init_trampoline (tree exp, bool onstack)
4880 tree t_tramp, t_func, t_chain;
4881 rtx m_tramp, r_tramp, r_chain, tmp;
4883 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4884 POINTER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4887 t_tramp = CALL_EXPR_ARG (exp, 0);
4888 t_func = CALL_EXPR_ARG (exp, 1);
4889 t_chain = CALL_EXPR_ARG (exp, 2);
4891 r_tramp = expand_normal (t_tramp);
4892 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4893 MEM_NOTRAP_P (m_tramp) = 1;
4895 /* If ONSTACK, the TRAMP argument should be the address of a field
4896 within the local function's FRAME decl. Either way, let's see if
4897 we can fill in the MEM_ATTRs for this memory. */
4898 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4899 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4901 /* Creator of a heap trampoline is responsible for making sure the
4902 address is aligned to at least STACK_BOUNDARY. Normally malloc
4903 will ensure this anyhow. */
4904 tmp = round_trampoline_addr (r_tramp);
4905 if (tmp != r_tramp)
4907 m_tramp = change_address (m_tramp, BLKmode, tmp);
4908 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4909 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4912 /* The FUNC argument should be the address of the nested function.
4913 Extract the actual function decl to pass to the hook. */
4914 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4915 t_func = TREE_OPERAND (t_func, 0);
4916 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4918 r_chain = expand_normal (t_chain);
4920 /* Generate insns to initialize the trampoline. */
4921 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Stack trampolines are user-visible (executable stack); warn.  */
4923 if (onstack)
4925 trampolines_created = 1;
4927 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4928 "trampoline generated for nested function %qD", t_func);
4931 return const0_rtx;
4934 static rtx
4935 expand_builtin_adjust_trampoline (tree exp)
4937 rtx tramp;
4939 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4940 return NULL_RTX;
4942 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4943 tramp = round_trampoline_addr (tramp);
4944 if (targetm.calls.trampoline_adjust_address)
4945 tramp = targetm.calls.trampoline_adjust_address (tramp);
4947 return tramp;
4950 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4951 function. The function first checks whether the back end provides
4952 an insn to implement signbit for the respective mode. If not, it
4953 checks whether the floating point format of the value is such that
4954 the sign bit can be extracted. If that is not the case, the
4955 function returns NULL_RTX to indicate that a normal call should be
4956 emitted rather than expanding the function in-line. EXP is the
4957 expression that is a call to the builtin function; if convenient,
4958 the result should be placed in TARGET. */
4959 static rtx
4960 expand_builtin_signbit (tree exp, rtx target)
4962 const struct real_format *fmt;
4963 enum machine_mode fmode, imode, rmode;
4964 tree arg;
4965 int word, bitpos;
4966 enum insn_code icode;
4967 rtx temp;
4968 location_t loc = EXPR_LOCATION (exp);
4970 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4971 return NULL_RTX;
4973 arg = CALL_EXPR_ARG (exp, 0);
4974 fmode = TYPE_MODE (TREE_TYPE (arg));
4975 rmode = TYPE_MODE (TREE_TYPE (exp));
4976 fmt = REAL_MODE_FORMAT (fmode);
/* Save the argument: it is used again in tree form by the "ARG < 0.0"
   fallback below, so it must not be evaluated twice.  */
4978 arg = builtin_save_expr (arg);
4980 /* Expand the argument yielding a RTX expression. */
4981 temp = expand_normal (arg);
4983 /* Check if the back end provides an insn that handles signbit for the
4984 argument's mode. */
4985 icode = optab_handler (signbit_optab, fmode);
4986 if (icode != CODE_FOR_nothing)
4988 rtx last = get_last_insn ();
4989 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4990 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4991 return target;
/* The insn pattern rejected the operands; discard the partial sequence
   and fall back to the manual bit-extraction path.  */
4992 delete_insns_since (last);
4995 /* For floating point formats without a sign bit, implement signbit
4996 as "ARG < 0.0". */
4997 bitpos = fmt->signbit_ro;
4998 if (bitpos < 0)
5000 /* But we can't do this if the format supports signed zero. */
5001 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5002 return NULL_RTX;
5004 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5005 build_real (TREE_TYPE (arg), dconst0));
5006 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* The value fits in a single word: reinterpret it in the equally-sized
   integer mode.  Otherwise pick out the word that holds the sign bit.  */
5009 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5011 imode = int_mode_for_mode (fmode);
5012 if (imode == BLKmode)
5013 return NULL_RTX;
5014 temp = gen_lowpart (imode, temp);
5016 else
5018 imode = word_mode;
5019 /* Handle targets with different FP word orders. */
5020 if (FLOAT_WORDS_BIG_ENDIAN)
5021 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5022 else
5023 word = bitpos / BITS_PER_WORD;
5024 temp = operand_subword_force (temp, word, fmode);
5025 bitpos = bitpos % BITS_PER_WORD;
5028 /* Force the intermediate word_mode (or narrower) result into a
5029 register. This avoids attempting to create paradoxical SUBREGs
5030 of floating point modes below. */
5031 temp = force_reg (imode, temp);
5033 /* If the bitpos is within the "result mode" lowpart, the operation
5034 can be implemented with a single bitwise AND. Otherwise, we need
5035 a right shift and an AND. */
5037 if (bitpos < GET_MODE_BITSIZE (rmode))
5039 double_int mask = double_int_zero.set_bit (bitpos);
5041 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5042 temp = gen_lowpart (rmode, temp);
5043 temp = expand_binop (rmode, and_optab, temp,
5044 immed_double_int_const (mask, rmode),
5045 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5047 else
5049 /* Perform a logical right shift to place the signbit in the least
5050 significant bit, then truncate the result to the desired mode
5051 and mask just this bit. */
5052 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5053 temp = gen_lowpart (rmode, temp);
5054 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5055 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5058 return temp;
5061 /* Expand fork or exec calls. TARGET is the desired target of the
5062 call. EXP is the call. FN is the
5063 identificator of the actual function. IGNORE is nonzero if the
5064 value is to be ignored. */
5066 static rtx
5067 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5069 tree id, decl;
5070 tree call;
5072 /* If we are not profiling, just call the function. */
5073 if (!profile_arc_flag)
5074 return NULL_RTX;
5076 /* Otherwise call the wrapper. This should be equivalent for the rest of
5077 compiler, so the code does not diverge, and the wrapper may run the
5078 code necessary for keeping the profiling sane. */
5080 switch (DECL_FUNCTION_CODE (fn))
5082 case BUILT_IN_FORK:
5083 id = get_identifier ("__gcov_fork");
5084 break;
5086 case BUILT_IN_EXECL:
5087 id = get_identifier ("__gcov_execl");
5088 break;
5090 case BUILT_IN_EXECV:
5091 id = get_identifier ("__gcov_execv");
5092 break;
5094 case BUILT_IN_EXECLP:
5095 id = get_identifier ("__gcov_execlp");
5096 break;
5098 case BUILT_IN_EXECLE:
5099 id = get_identifier ("__gcov_execle");
5100 break;
5102 case BUILT_IN_EXECVP:
5103 id = get_identifier ("__gcov_execvp");
5104 break;
5106 case BUILT_IN_EXECVE:
5107 id = get_identifier ("__gcov_execve");
5108 break;
5110 default:
5111 gcc_unreachable ();
5114 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5115 FUNCTION_DECL, id, TREE_TYPE (fn));
5116 DECL_EXTERNAL (decl) = 1;
5117 TREE_PUBLIC (decl) = 1;
5118 DECL_ARTIFICIAL (decl) = 1;
5119 TREE_NOTHROW (decl) = 1;
5120 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5121 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5122 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5123 return expand_call (call, target, ignore);
5128 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5129 the pointer in these functions is void*, the tree optimizers may remove
5130 casts. The mode computed in expand_builtin isn't reliable either, due
5131 to __sync_bool_compare_and_swap.
5133 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5134 group of builtins. This gives us log2 of the mode size. */
5136 static inline enum machine_mode
5137 get_builtin_sync_mode (int fcode_diff)
5139 /* The size is not negotiable, so ask not to get BLKmode in return
5140 if the target indicates that a smaller size would be better. */
5141 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5144 /* Expand the memory expression LOC and return the appropriate memory operand
5145 for the builtin_sync operations. */
5147 static rtx
5148 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5150 rtx addr, mem;
5152 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5153 addr = convert_memory_address (Pmode, addr);
5155 /* Note that we explicitly do not want any alias information for this
5156 memory, so that we kill all other live memories. Otherwise we don't
5157 satisfy the full barrier semantics of the intrinsic. */
5158 mem = validize_mem (gen_rtx_MEM (mode, addr));
5160 /* The alignment needs to be at least according to that of the mode. */
5161 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5162 get_pointer_alignment (loc)));
5163 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5164 MEM_VOLATILE_P (mem) = 1;
5166 return mem;
5169 /* Make sure an argument is in the right mode.
5170 EXP is the tree argument.
5171 MODE is the mode it should be in. */
5173 static rtx
5174 expand_expr_force_mode (tree exp, enum machine_mode mode)
5176 rtx val;
5177 enum machine_mode old_mode;
5179 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5180 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5181 of CONST_INTs, where we know the old_mode only from the call argument. */
5183 old_mode = GET_MODE (val);
5184 if (old_mode == VOIDmode)
5185 old_mode = TYPE_MODE (TREE_TYPE (exp));
5186 val = convert_modes (mode, old_mode, val, 1);
5187 return val;
5191 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5192 EXP is the CALL_EXPR. CODE is the rtx code
5193 that corresponds to the arithmetic or logical operation from the name;
5194 an exception here is that NOT actually means NAND. TARGET is an optional
5195 place for us to store the results; AFTER is true if this is the
5196 fetch_and_xxx form. */
5198 static rtx
5199 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5200 enum rtx_code code, bool after,
5201 rtx target)
5203 rtx val, mem;
5204 location_t loc = EXPR_LOCATION (exp);
/* NAND changed meaning in GCC 4.4; under -Wsync-nand, point that out for
   the specific builtin that was used.  */
5206 if (code == NOT && warn_sync_nand)
5208 tree fndecl = get_callee_fndecl (exp);
5209 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Function-scope statics: each of the two notes is emitted at most once
   per compilation.  */
5211 static bool warned_f_a_n, warned_n_a_f;
5213 switch (fcode)
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5217 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5218 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5219 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5220 if (warned_f_a_n)
5221 break;
5223 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5224 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5225 warned_f_a_n = true;
5226 break;
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5230 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5231 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5232 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5233 if (warned_n_a_f)
5234 break;
5236 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5237 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5238 warned_n_a_f = true;
5239 break;
5241 default:
5242 gcc_unreachable ();
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
/* All __sync operations are full barriers, hence MEMMODEL_SEQ_CST.  */
5250 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5251 after);
5254 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5255 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5256 true if this is the boolean form. TARGET is a place for us to store the
5257 results; this is NOT optional if IS_BOOL is true. */
5259 static rtx
5260 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5261 bool is_bool, rtx target)
5263 rtx old_val, new_val, mem;
5264 rtx *pbool, *poval;
5266 /* Expand the operands. */
5267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5268 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5269 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5271 pbool = poval = NULL;
5272 if (target != const0_rtx)
5274 if (is_bool)
5275 pbool = &target;
5276 else
5277 poval = &target;
5279 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5280 false, MEMMODEL_SEQ_CST,
5281 MEMMODEL_SEQ_CST))
5282 return NULL_RTX;
5284 return target;
5287 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5288 general form is actually an atomic exchange, and some targets only
5289 support a reduced form with the second argument being a constant 1.
5290 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5291 the results. */
5293 static rtx
5294 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5295 rtx target)
5297 rtx val, mem;
5299 /* Expand the operands. */
5300 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5301 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5303 return expand_sync_lock_test_and_set (target, mem, val);
5306 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5308 static void
5309 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5311 rtx mem;
5313 /* Expand the operands. */
5314 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5316 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5319 /* Given an integer representing an ``enum memmodel'', verify its
5320 correctness and return the memory model enum. */
5322 static enum memmodel
5323 get_memmodel (tree exp)
5325 rtx op;
5326 unsigned HOST_WIDE_INT val;
5328 /* If the parameter is not a constant, it's a run time value so we'll just
5329 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5330 if (TREE_CODE (exp) != INTEGER_CST)
5331 return MEMMODEL_SEQ_CST;
5333 op = expand_normal (exp);
5335 val = INTVAL (op);
5336 if (targetm.memmodel_check)
5337 val = targetm.memmodel_check (val);
5338 else if (val & ~MEMMODEL_MASK)
5340 warning (OPT_Winvalid_memory_model,
5341 "Unknown architecture specifier in memory model to builtin.");
5342 return MEMMODEL_SEQ_CST;
5345 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5347 warning (OPT_Winvalid_memory_model,
5348 "invalid memory model argument to builtin");
5349 return MEMMODEL_SEQ_CST;
5352 return (enum memmodel) val;
5355 /* Expand the __atomic_exchange intrinsic:
5356 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5357 EXP is the CALL_EXPR.
5358 TARGET is an optional place for us to store the results. */
5360 static rtx
5361 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5363 rtx val, mem;
5364 enum memmodel model;
5366 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5367 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5369 error ("invalid memory model for %<__atomic_exchange%>");
5370 return NULL_RTX;
5373 if (!flag_inline_atomics)
5374 return NULL_RTX;
5376 /* Expand the operands. */
5377 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5378 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5380 return expand_atomic_exchange (target, mem, val, model);
5383 /* Expand the __atomic_compare_exchange intrinsic:
5384 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5385 TYPE desired, BOOL weak,
5386 enum memmodel success,
5387 enum memmodel failure)
5388 EXP is the CALL_EXPR.
5389 TARGET is an optional place for us to store the results. */
5391 static rtx
5392 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5393 rtx target)
5395 rtx expect, desired, mem, oldval;
5396 enum memmodel success, failure;
5397 tree weak;
5398 bool is_weak;
5400 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5401 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
/* The failure ordering applies to a load only, so release flavours are
   invalid there.  */
5403 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5404 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5406 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5407 return NULL_RTX;
/* NOTE(review): this comparison relies on the numeric order of the
   memmodel enum approximating model "strength"; confirm it still holds
   for target-extended model bits outside MEMMODEL_MASK.  */
5410 if (failure > success)
5412 error ("failure memory model cannot be stronger than success "
5413 "memory model for %<__atomic_compare_exchange%>");
5414 return NULL_RTX;
5417 if (!flag_inline_atomics)
5418 return NULL_RTX;
5420 /* Expand the operands. */
5421 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* EXPECT is a pointer argument: build a MEM through it so the expected
   value can be read and the observed value written back.  */
5423 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5424 expect = convert_memory_address (Pmode, expect);
5425 expect = gen_rtx_MEM (mode, expect);
5426 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
/* The WEAK flag only matters when it is a nonzero compile-time constant.  */
5428 weak = CALL_EXPR_ARG (exp, 3);
5429 is_weak = false;
5430 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5431 is_weak = true;
5433 oldval = expect;
5434 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5435 &oldval, mem, oldval, desired,
5436 is_weak, success, failure))
5437 return NULL_RTX;
/* If the CAS produced the old value somewhere other than *EXPECT,
   store it back through the EXPECT pointer as the builtin requires.  */
5439 if (oldval != expect)
5440 emit_move_insn (expect, oldval);
5442 return target;
5445 /* Expand the __atomic_load intrinsic:
5446 TYPE __atomic_load (TYPE *object, enum memmodel)
5447 EXP is the CALL_EXPR.
5448 TARGET is an optional place for us to store the results. */
5450 static rtx
5451 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5453 rtx mem;
5454 enum memmodel model;
5456 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5457 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5458 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5460 error ("invalid memory model for %<__atomic_load%>");
5461 return NULL_RTX;
5464 if (!flag_inline_atomics)
5465 return NULL_RTX;
5467 /* Expand the operand. */
5468 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5470 return expand_atomic_load (target, mem, model);
5474 /* Expand the __atomic_store intrinsic:
5475 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5476 EXP is the CALL_EXPR.
5477 TARGET is an optional place for us to store the results. */
5479 static rtx
5480 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5482 rtx mem, val;
5483 enum memmodel model;
5485 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5486 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5487 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5488 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5490 error ("invalid memory model for %<__atomic_store%>");
5491 return NULL_RTX;
5494 if (!flag_inline_atomics)
5495 return NULL_RTX;
5497 /* Expand the operands. */
5498 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5499 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5501 return expand_atomic_store (mem, val, model, false);
5504 /* Expand the __atomic_fetch_XXX intrinsic:
5505 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5506 EXP is the CALL_EXPR.
5507 TARGET is an optional place for us to store the results.
5508 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5509 FETCH_AFTER is true if returning the result of the operation.
5510 FETCH_AFTER is false if returning the value before the operation.
5511 IGNORE is true if the result is not used.
5512 EXT_CALL is the correct builtin for an external call if this cannot be
5513 resolved to an instruction sequence. */
5515 static rtx
5516 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5517 enum rtx_code code, bool fetch_after,
5518 bool ignore, enum built_in_function ext_call)
5520 rtx val, mem, ret;
5521 enum memmodel model;
5522 tree fndecl;
5523 tree addr;
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5527 /* Expand the operands. */
5528 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5529 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5531 /* Only try generating instructions if inlining is turned on. */
5532 if (flag_inline_atomics)
5534 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5535 if (ret)
5536 return ret;
5539 /* Return if a different routine isn't needed for the library call. */
5540 if (ext_call == BUILT_IN_NONE)
5541 return NULL_RTX;
5543 /* Change the call to the specified function. */
/* Temporarily rewrite the callee of EXP in place to EXT_CALL (the
   library routine with the needed fetch order), expand the call, then
   restore the original callee — the tree may be shared.  */
5544 fndecl = get_callee_fndecl (exp);
5545 addr = CALL_EXPR_FN (exp);
5546 STRIP_NOPS (addr);
5548 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5549 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5551 /* Expand the call here so we can emit trailing code. */
5552 ret = expand_call (exp, target, ignore);
5554 /* Replace the original function just in case it matters. */
5555 TREE_OPERAND (addr, 0) = fndecl;
5557 /* Then issue the arithmetic correction to return the right result. */
5558 if (!ignore)
/* NAND is not self-invertible: recompute the value from the library
   call's result as NOT (ret AND val).  For the other codes, applying
   the operation once more converts fetch-before into fetch-after.  */
5560 if (code == NOT)
5562 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5563 OPTAB_LIB_WIDEN);
5564 ret = expand_simple_unop (mode, NOT, ret, target, true);
5566 else
5567 ret = expand_simple_binop (mode, code, ret, val, target, true,
5568 OPTAB_LIB_WIDEN);
5570 return ret;
5574 #ifndef HAVE_atomic_clear
5575 # define HAVE_atomic_clear 0
5576 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5577 #endif
5579 /* Expand an atomic clear operation.
5580 void _atomic_clear (BOOL *obj, enum memmodel)
5581 EXP is the call expression. */
5583 static rtx
5584 expand_builtin_atomic_clear (tree exp)
5586 enum machine_mode mode;
5587 rtx mem, ret;
5588 enum memmodel model;
5590 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5591 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5592 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5594 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5595 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5597 error ("invalid memory model for %<__atomic_store%>");
5598 return const0_rtx;
5601 if (HAVE_atomic_clear)
5603 emit_insn (gen_atomic_clear (mem, model));
5604 return const0_rtx;
5607 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5608 Failing that, a store is issued by __atomic_store. The only way this can
5609 fail is if the bool type is larger than a word size. Unlikely, but
5610 handle it anyway for completeness. Assume a single threaded model since
5611 there is no atomic support in this case, and no barriers are required. */
5612 ret = expand_atomic_store (mem, const0_rtx, model, true);
5613 if (!ret)
5614 emit_move_insn (mem, const0_rtx);
5615 return const0_rtx;
5618 /* Expand an atomic test_and_set operation.
5619 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5620 EXP is the call expression. */
5622 static rtx
5623 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5625 rtx mem;
5626 enum memmodel model;
5627 enum machine_mode mode;
5629 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5630 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5631 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5633 return expand_atomic_test_and_set (target, mem, model);
5637 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5638 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5640 static tree
5641 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5643 int size;
5644 enum machine_mode mode;
5645 unsigned int mode_align, type_align;
5647 if (TREE_CODE (arg0) != INTEGER_CST)
5648 return NULL_TREE;
5650 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5651 mode = mode_for_size (size, MODE_INT, 0);
5652 mode_align = GET_MODE_ALIGNMENT (mode);
5654 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5655 type_align = mode_align;
5656 else
5658 tree ttype = TREE_TYPE (arg1);
5660 /* This function is usually invoked and folded immediately by the front
5661 end before anything else has a chance to look at it. The pointer
5662 parameter at this point is usually cast to a void *, so check for that
5663 and look past the cast. */
5664 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5665 && VOID_TYPE_P (TREE_TYPE (ttype)))
5666 arg1 = TREE_OPERAND (arg1, 0);
5668 ttype = TREE_TYPE (arg1);
5669 gcc_assert (POINTER_TYPE_P (ttype));
5671 /* Get the underlying type of the object. */
5672 ttype = TREE_TYPE (ttype);
5673 type_align = TYPE_ALIGN (ttype);
5676 /* If the object has smaller alignment, the the lock free routines cannot
5677 be used. */
5678 if (type_align < mode_align)
5679 return boolean_false_node;
5681 /* Check if a compare_and_swap pattern exists for the mode which represents
5682 the required size. The pattern is not allowed to fail, so the existence
5683 of the pattern indicates support is present. */
5684 if (can_compare_and_swap_p (mode, true))
5685 return boolean_true_node;
5686 else
5687 return boolean_false_node;
5690 /* Return true if the parameters to call EXP represent an object which will
5691 always generate lock free instructions. The first argument represents the
5692 size of the object, and the second parameter is a pointer to the object
5693 itself. If NULL is passed for the object, then the result is based on
5694 typical alignment for an object of the specified size. Otherwise return
5695 false. */
5697 static rtx
5698 expand_builtin_atomic_always_lock_free (tree exp)
5700 tree size;
5701 tree arg0 = CALL_EXPR_ARG (exp, 0);
5702 tree arg1 = CALL_EXPR_ARG (exp, 1);
5704 if (TREE_CODE (arg0) != INTEGER_CST)
5706 error ("non-constant argument 1 to __atomic_always_lock_free");
5707 return const0_rtx;
5710 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5711 if (size == boolean_true_node)
5712 return const1_rtx;
5713 return const0_rtx;
5716 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5717 is lock free on this architecture. */
5719 static tree
5720 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5722 if (!flag_inline_atomics)
5723 return NULL_TREE;
5725 /* If it isn't always lock free, don't generate a result. */
5726 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5727 return boolean_true_node;
5729 return NULL_TREE;
5732 /* Return true if the parameters to call EXP represent an object which will
5733 always generate lock free instructions. The first argument represents the
5734 size of the object, and the second parameter is a pointer to the object
5735 itself. If NULL is passed for the object, then the result is based on
5736 typical alignment for an object of the specified size. Otherwise return
5737 NULL*/
5739 static rtx
5740 expand_builtin_atomic_is_lock_free (tree exp)
5742 tree size;
5743 tree arg0 = CALL_EXPR_ARG (exp, 0);
5744 tree arg1 = CALL_EXPR_ARG (exp, 1);
5746 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5748 error ("non-integer argument 1 to __atomic_is_lock_free");
5749 return NULL_RTX;
5752 if (!flag_inline_atomics)
5753 return NULL_RTX;
5755 /* If the value is known at compile time, return the RTX for it. */
5756 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5757 if (size == boolean_true_node)
5758 return const1_rtx;
5760 return NULL_RTX;
5763 /* Expand the __atomic_thread_fence intrinsic:
5764 void __atomic_thread_fence (enum memmodel)
5765 EXP is the CALL_EXPR. */
5767 static void
5768 expand_builtin_atomic_thread_fence (tree exp)
5770 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5771 expand_mem_thread_fence (model);
5774 /* Expand the __atomic_signal_fence intrinsic:
5775 void __atomic_signal_fence (enum memmodel)
5776 EXP is the CALL_EXPR. */
5778 static void
5779 expand_builtin_atomic_signal_fence (tree exp)
5781 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5782 expand_mem_signal_fence (model);
5785 /* Expand the __sync_synchronize intrinsic. */
5787 static void
5788 expand_builtin_sync_synchronize (void)
5790 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5793 static rtx
5794 expand_builtin_thread_pointer (tree exp, rtx target)
5796 enum insn_code icode;
5797 if (!validate_arglist (exp, VOID_TYPE))
5798 return const0_rtx;
5799 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5800 if (icode != CODE_FOR_nothing)
5802 struct expand_operand op;
5803 if (!REG_P (target) || GET_MODE (target) != Pmode)
5804 target = gen_reg_rtx (Pmode);
5805 create_output_operand (&op, target, Pmode);
5806 expand_insn (icode, 1, &op);
5807 return target;
5809 error ("__builtin_thread_pointer is not supported on this target");
5810 return const0_rtx;
5813 static void
5814 expand_builtin_set_thread_pointer (tree exp)
5816 enum insn_code icode;
5817 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5818 return;
5819 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5820 if (icode != CODE_FOR_nothing)
5822 struct expand_operand op;
5823 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5824 Pmode, EXPAND_NORMAL);
5825 create_input_operand (&op, val, Pmode);
5826 expand_insn (icode, 1, &op);
5827 return;
5829 error ("__builtin_set_thread_pointer is not supported on this target");
5833 /* Expand an expression EXP that calls a built-in function,
5834 with result going to TARGET if that's convenient
5835 (and in mode MODE if that's convenient).
5836 SUBTARGET may be used as the target for computing one of EXP's operands.
5837 IGNORE is nonzero if the value is to be ignored. */
5840 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5841 int ignore)
5843 tree fndecl = get_callee_fndecl (exp);
5844 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5846 int flags;
5848 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5849 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5851 /* When not optimizing, generate calls to library functions for a certain
5852 set of builtins. */
5853 if (!optimize
5854 && !called_as_built_in (fndecl)
5855 && fcode != BUILT_IN_FORK
5856 && fcode != BUILT_IN_EXECL
5857 && fcode != BUILT_IN_EXECV
5858 && fcode != BUILT_IN_EXECLP
5859 && fcode != BUILT_IN_EXECLE
5860 && fcode != BUILT_IN_EXECVP
5861 && fcode != BUILT_IN_EXECVE
5862 && fcode != BUILT_IN_ALLOCA
5863 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5864 && fcode != BUILT_IN_FREE)
5865 return expand_call (exp, target, ignore);
5867 /* The built-in function expanders test for target == const0_rtx
5868 to determine whether the function's result will be ignored. */
5869 if (ignore)
5870 target = const0_rtx;
5872 /* If the result of a pure or const built-in function is ignored, and
5873 none of its arguments are volatile, we can avoid expanding the
5874 built-in call and just evaluate the arguments for side-effects. */
5875 if (target == const0_rtx
5876 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5877 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5879 bool volatilep = false;
5880 tree arg;
5881 call_expr_arg_iterator iter;
5883 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5884 if (TREE_THIS_VOLATILE (arg))
5886 volatilep = true;
5887 break;
5890 if (! volatilep)
5892 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5893 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5894 return const0_rtx;
5898 switch (fcode)
5900 CASE_FLT_FN (BUILT_IN_FABS):
5901 case BUILT_IN_FABSD32:
5902 case BUILT_IN_FABSD64:
5903 case BUILT_IN_FABSD128:
5904 target = expand_builtin_fabs (exp, target, subtarget);
5905 if (target)
5906 return target;
5907 break;
5909 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5910 target = expand_builtin_copysign (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5915 /* Just do a normal library call if we were unable to fold
5916 the values. */
5917 CASE_FLT_FN (BUILT_IN_CABS):
5918 break;
5920 CASE_FLT_FN (BUILT_IN_EXP):
5921 CASE_FLT_FN (BUILT_IN_EXP10):
5922 CASE_FLT_FN (BUILT_IN_POW10):
5923 CASE_FLT_FN (BUILT_IN_EXP2):
5924 CASE_FLT_FN (BUILT_IN_EXPM1):
5925 CASE_FLT_FN (BUILT_IN_LOGB):
5926 CASE_FLT_FN (BUILT_IN_LOG):
5927 CASE_FLT_FN (BUILT_IN_LOG10):
5928 CASE_FLT_FN (BUILT_IN_LOG2):
5929 CASE_FLT_FN (BUILT_IN_LOG1P):
5930 CASE_FLT_FN (BUILT_IN_TAN):
5931 CASE_FLT_FN (BUILT_IN_ASIN):
5932 CASE_FLT_FN (BUILT_IN_ACOS):
5933 CASE_FLT_FN (BUILT_IN_ATAN):
5934 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5935 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5936 because of possible accuracy problems. */
5937 if (! flag_unsafe_math_optimizations)
5938 break;
5939 CASE_FLT_FN (BUILT_IN_SQRT):
5940 CASE_FLT_FN (BUILT_IN_FLOOR):
5941 CASE_FLT_FN (BUILT_IN_CEIL):
5942 CASE_FLT_FN (BUILT_IN_TRUNC):
5943 CASE_FLT_FN (BUILT_IN_ROUND):
5944 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5945 CASE_FLT_FN (BUILT_IN_RINT):
5946 target = expand_builtin_mathfn (exp, target, subtarget);
5947 if (target)
5948 return target;
5949 break;
5951 CASE_FLT_FN (BUILT_IN_FMA):
5952 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5957 CASE_FLT_FN (BUILT_IN_ILOGB):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 CASE_FLT_FN (BUILT_IN_ISINF):
5961 CASE_FLT_FN (BUILT_IN_FINITE):
5962 case BUILT_IN_ISFINITE:
5963 case BUILT_IN_ISNORMAL:
5964 target = expand_builtin_interclass_mathfn (exp, target);
5965 if (target)
5966 return target;
5967 break;
5969 CASE_FLT_FN (BUILT_IN_ICEIL):
5970 CASE_FLT_FN (BUILT_IN_LCEIL):
5971 CASE_FLT_FN (BUILT_IN_LLCEIL):
5972 CASE_FLT_FN (BUILT_IN_LFLOOR):
5973 CASE_FLT_FN (BUILT_IN_IFLOOR):
5974 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5975 target = expand_builtin_int_roundingfn (exp, target);
5976 if (target)
5977 return target;
5978 break;
5980 CASE_FLT_FN (BUILT_IN_IRINT):
5981 CASE_FLT_FN (BUILT_IN_LRINT):
5982 CASE_FLT_FN (BUILT_IN_LLRINT):
5983 CASE_FLT_FN (BUILT_IN_IROUND):
5984 CASE_FLT_FN (BUILT_IN_LROUND):
5985 CASE_FLT_FN (BUILT_IN_LLROUND):
5986 target = expand_builtin_int_roundingfn_2 (exp, target);
5987 if (target)
5988 return target;
5989 break;
5991 CASE_FLT_FN (BUILT_IN_POWI):
5992 target = expand_builtin_powi (exp, target);
5993 if (target)
5994 return target;
5995 break;
5997 CASE_FLT_FN (BUILT_IN_ATAN2):
5998 CASE_FLT_FN (BUILT_IN_LDEXP):
5999 CASE_FLT_FN (BUILT_IN_SCALB):
6000 CASE_FLT_FN (BUILT_IN_SCALBN):
6001 CASE_FLT_FN (BUILT_IN_SCALBLN):
6002 if (! flag_unsafe_math_optimizations)
6003 break;
6005 CASE_FLT_FN (BUILT_IN_FMOD):
6006 CASE_FLT_FN (BUILT_IN_REMAINDER):
6007 CASE_FLT_FN (BUILT_IN_DREM):
6008 CASE_FLT_FN (BUILT_IN_POW):
6009 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6010 if (target)
6011 return target;
6012 break;
6014 CASE_FLT_FN (BUILT_IN_CEXPI):
6015 target = expand_builtin_cexpi (exp, target);
6016 gcc_assert (target);
6017 return target;
6019 CASE_FLT_FN (BUILT_IN_SIN):
6020 CASE_FLT_FN (BUILT_IN_COS):
6021 if (! flag_unsafe_math_optimizations)
6022 break;
6023 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6024 if (target)
6025 return target;
6026 break;
6028 CASE_FLT_FN (BUILT_IN_SINCOS):
6029 if (! flag_unsafe_math_optimizations)
6030 break;
6031 target = expand_builtin_sincos (exp);
6032 if (target)
6033 return target;
6034 break;
6036 case BUILT_IN_APPLY_ARGS:
6037 return expand_builtin_apply_args ();
6039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6040 FUNCTION with a copy of the parameters described by
6041 ARGUMENTS, and ARGSIZE. It returns a block of memory
6042 allocated on the stack into which is stored all the registers
6043 that might possibly be used for returning the result of a
6044 function. ARGUMENTS is the value returned by
6045 __builtin_apply_args. ARGSIZE is the number of bytes of
6046 arguments that must be copied. ??? How should this value be
6047 computed? We'll also need a safe worst case value for varargs
6048 functions. */
6049 case BUILT_IN_APPLY:
6050 if (!validate_arglist (exp, POINTER_TYPE,
6051 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6052 && !validate_arglist (exp, REFERENCE_TYPE,
6053 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6054 return const0_rtx;
6055 else
6057 rtx ops[3];
6059 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6060 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6061 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6063 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6066 /* __builtin_return (RESULT) causes the function to return the
6067 value described by RESULT. RESULT is address of the block of
6068 memory returned by __builtin_apply. */
6069 case BUILT_IN_RETURN:
6070 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6071 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6072 return const0_rtx;
6074 case BUILT_IN_SAVEREGS:
6075 return expand_builtin_saveregs ();
6077 case BUILT_IN_VA_ARG_PACK:
6078 /* All valid uses of __builtin_va_arg_pack () are removed during
6079 inlining. */
6080 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6081 return const0_rtx;
6083 case BUILT_IN_VA_ARG_PACK_LEN:
6084 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6085 inlining. */
6086 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6087 return const0_rtx;
6089 /* Return the address of the first anonymous stack arg. */
6090 case BUILT_IN_NEXT_ARG:
6091 if (fold_builtin_next_arg (exp, false))
6092 return const0_rtx;
6093 return expand_builtin_next_arg ();
6095 case BUILT_IN_CLEAR_CACHE:
6096 target = expand_builtin___clear_cache (exp);
6097 if (target)
6098 return target;
6099 break;
6101 case BUILT_IN_CLASSIFY_TYPE:
6102 return expand_builtin_classify_type (exp);
6104 case BUILT_IN_CONSTANT_P:
6105 return const0_rtx;
6107 case BUILT_IN_FRAME_ADDRESS:
6108 case BUILT_IN_RETURN_ADDRESS:
6109 return expand_builtin_frame_address (fndecl, exp);
6111 /* Returns the address of the area where the structure is returned.
6112 0 otherwise. */
6113 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6114 if (call_expr_nargs (exp) != 0
6115 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6116 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6117 return const0_rtx;
6118 else
6119 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6121 case BUILT_IN_ALLOCA:
6122 case BUILT_IN_ALLOCA_WITH_ALIGN:
6123 /* If the allocation stems from the declaration of a variable-sized
6124 object, it cannot accumulate. */
6125 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6126 if (target)
6127 return target;
6128 break;
6130 case BUILT_IN_STACK_SAVE:
6131 return expand_stack_save ();
6133 case BUILT_IN_STACK_RESTORE:
6134 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6135 return const0_rtx;
6137 case BUILT_IN_BSWAP16:
6138 case BUILT_IN_BSWAP32:
6139 case BUILT_IN_BSWAP64:
6140 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6141 if (target)
6142 return target;
6143 break;
6145 CASE_INT_FN (BUILT_IN_FFS):
6146 target = expand_builtin_unop (target_mode, exp, target,
6147 subtarget, ffs_optab);
6148 if (target)
6149 return target;
6150 break;
6152 CASE_INT_FN (BUILT_IN_CLZ):
6153 target = expand_builtin_unop (target_mode, exp, target,
6154 subtarget, clz_optab);
6155 if (target)
6156 return target;
6157 break;
6159 CASE_INT_FN (BUILT_IN_CTZ):
6160 target = expand_builtin_unop (target_mode, exp, target,
6161 subtarget, ctz_optab);
6162 if (target)
6163 return target;
6164 break;
6166 CASE_INT_FN (BUILT_IN_CLRSB):
6167 target = expand_builtin_unop (target_mode, exp, target,
6168 subtarget, clrsb_optab);
6169 if (target)
6170 return target;
6171 break;
6173 CASE_INT_FN (BUILT_IN_POPCOUNT):
6174 target = expand_builtin_unop (target_mode, exp, target,
6175 subtarget, popcount_optab);
6176 if (target)
6177 return target;
6178 break;
6180 CASE_INT_FN (BUILT_IN_PARITY):
6181 target = expand_builtin_unop (target_mode, exp, target,
6182 subtarget, parity_optab);
6183 if (target)
6184 return target;
6185 break;
6187 case BUILT_IN_STRLEN:
6188 target = expand_builtin_strlen (exp, target, target_mode);
6189 if (target)
6190 return target;
6191 break;
6193 case BUILT_IN_STRCPY:
6194 target = expand_builtin_strcpy (exp, target);
6195 if (target)
6196 return target;
6197 break;
6199 case BUILT_IN_STRNCPY:
6200 target = expand_builtin_strncpy (exp, target);
6201 if (target)
6202 return target;
6203 break;
6205 case BUILT_IN_STPCPY:
6206 target = expand_builtin_stpcpy (exp, target, mode);
6207 if (target)
6208 return target;
6209 break;
6211 case BUILT_IN_MEMCPY:
6212 target = expand_builtin_memcpy (exp, target);
6213 if (target)
6214 return target;
6215 break;
6217 case BUILT_IN_MEMPCPY:
6218 target = expand_builtin_mempcpy (exp, target, mode);
6219 if (target)
6220 return target;
6221 break;
6223 case BUILT_IN_MEMSET:
6224 target = expand_builtin_memset (exp, target, mode);
6225 if (target)
6226 return target;
6227 break;
6229 case BUILT_IN_BZERO:
6230 target = expand_builtin_bzero (exp);
6231 if (target)
6232 return target;
6233 break;
6235 case BUILT_IN_STRCMP:
6236 target = expand_builtin_strcmp (exp, target);
6237 if (target)
6238 return target;
6239 break;
6241 case BUILT_IN_STRNCMP:
6242 target = expand_builtin_strncmp (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6247 case BUILT_IN_BCMP:
6248 case BUILT_IN_MEMCMP:
6249 target = expand_builtin_memcmp (exp, target, mode);
6250 if (target)
6251 return target;
6252 break;
6254 case BUILT_IN_SETJMP:
6255 /* This should have been lowered to the builtins below. */
6256 gcc_unreachable ();
6258 case BUILT_IN_SETJMP_SETUP:
6259 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6260 and the receiver label. */
6261 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6263 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6264 VOIDmode, EXPAND_NORMAL);
6265 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6266 rtx label_r = label_rtx (label);
6268 /* This is copied from the handling of non-local gotos. */
6269 expand_builtin_setjmp_setup (buf_addr, label_r);
6270 nonlocal_goto_handler_labels
6271 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6272 nonlocal_goto_handler_labels);
6273 /* ??? Do not let expand_label treat us as such since we would
6274 not want to be both on the list of non-local labels and on
6275 the list of forced labels. */
6276 FORCED_LABEL (label) = 0;
6277 return const0_rtx;
6279 break;
6281 case BUILT_IN_SETJMP_DISPATCHER:
6282 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6283 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6285 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6286 rtx label_r = label_rtx (label);
6288 /* Remove the dispatcher label from the list of non-local labels
6289 since the receiver labels have been added to it above. */
6290 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6291 return const0_rtx;
6293 break;
6295 case BUILT_IN_SETJMP_RECEIVER:
6296 /* __builtin_setjmp_receiver is passed the receiver label. */
6297 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6299 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6300 rtx label_r = label_rtx (label);
6302 expand_builtin_setjmp_receiver (label_r);
6303 return const0_rtx;
6305 break;
6307 /* __builtin_longjmp is passed a pointer to an array of five words.
6308 It's similar to the C library longjmp function but works with
6309 __builtin_setjmp above. */
6310 case BUILT_IN_LONGJMP:
6311 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6313 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6314 VOIDmode, EXPAND_NORMAL);
6315 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6317 if (value != const1_rtx)
6319 error ("%<__builtin_longjmp%> second argument must be 1");
6320 return const0_rtx;
6323 expand_builtin_longjmp (buf_addr, value);
6324 return const0_rtx;
6326 break;
6328 case BUILT_IN_NONLOCAL_GOTO:
6329 target = expand_builtin_nonlocal_goto (exp);
6330 if (target)
6331 return target;
6332 break;
6334 /* This updates the setjmp buffer that is its argument with the value
6335 of the current stack pointer. */
6336 case BUILT_IN_UPDATE_SETJMP_BUF:
6337 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6339 rtx buf_addr
6340 = expand_normal (CALL_EXPR_ARG (exp, 0));
6342 expand_builtin_update_setjmp_buf (buf_addr);
6343 return const0_rtx;
6345 break;
6347 case BUILT_IN_TRAP:
6348 expand_builtin_trap ();
6349 return const0_rtx;
6351 case BUILT_IN_UNREACHABLE:
6352 expand_builtin_unreachable ();
6353 return const0_rtx;
6355 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6356 case BUILT_IN_SIGNBITD32:
6357 case BUILT_IN_SIGNBITD64:
6358 case BUILT_IN_SIGNBITD128:
6359 target = expand_builtin_signbit (exp, target);
6360 if (target)
6361 return target;
6362 break;
6364 /* Various hooks for the DWARF 2 __throw routine. */
6365 case BUILT_IN_UNWIND_INIT:
6366 expand_builtin_unwind_init ();
6367 return const0_rtx;
6368 case BUILT_IN_DWARF_CFA:
6369 return virtual_cfa_rtx;
6370 #ifdef DWARF2_UNWIND_INFO
6371 case BUILT_IN_DWARF_SP_COLUMN:
6372 return expand_builtin_dwarf_sp_column ();
6373 case BUILT_IN_INIT_DWARF_REG_SIZES:
6374 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6375 return const0_rtx;
6376 #endif
6377 case BUILT_IN_FROB_RETURN_ADDR:
6378 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6379 case BUILT_IN_EXTRACT_RETURN_ADDR:
6380 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6381 case BUILT_IN_EH_RETURN:
6382 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6383 CALL_EXPR_ARG (exp, 1));
6384 return const0_rtx;
6385 #ifdef EH_RETURN_DATA_REGNO
6386 case BUILT_IN_EH_RETURN_DATA_REGNO:
6387 return expand_builtin_eh_return_data_regno (exp);
6388 #endif
6389 case BUILT_IN_EXTEND_POINTER:
6390 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_POINTER:
6392 return expand_builtin_eh_pointer (exp);
6393 case BUILT_IN_EH_FILTER:
6394 return expand_builtin_eh_filter (exp);
6395 case BUILT_IN_EH_COPY_VALUES:
6396 return expand_builtin_eh_copy_values (exp);
6398 case BUILT_IN_VA_START:
6399 return expand_builtin_va_start (exp);
6400 case BUILT_IN_VA_END:
6401 return expand_builtin_va_end (exp);
6402 case BUILT_IN_VA_COPY:
6403 return expand_builtin_va_copy (exp);
6404 case BUILT_IN_EXPECT:
6405 return expand_builtin_expect (exp, target);
6406 case BUILT_IN_ASSUME_ALIGNED:
6407 return expand_builtin_assume_aligned (exp, target);
6408 case BUILT_IN_PREFETCH:
6409 expand_builtin_prefetch (exp);
6410 return const0_rtx;
6412 case BUILT_IN_INIT_TRAMPOLINE:
6413 return expand_builtin_init_trampoline (exp, true);
6414 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6415 return expand_builtin_init_trampoline (exp, false);
6416 case BUILT_IN_ADJUST_TRAMPOLINE:
6417 return expand_builtin_adjust_trampoline (exp);
6419 case BUILT_IN_FORK:
6420 case BUILT_IN_EXECL:
6421 case BUILT_IN_EXECV:
6422 case BUILT_IN_EXECLP:
6423 case BUILT_IN_EXECLE:
6424 case BUILT_IN_EXECVP:
6425 case BUILT_IN_EXECVE:
6426 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6427 if (target)
6428 return target;
6429 break;
6431 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6432 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6433 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6434 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6435 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6436 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6437 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6438 if (target)
6439 return target;
6440 break;
6442 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6443 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6444 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6445 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6446 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6447 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6448 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6449 if (target)
6450 return target;
6451 break;
6453 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6454 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6455 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6456 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6457 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6458 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6459 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6460 if (target)
6461 return target;
6462 break;
6464 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6465 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6466 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6467 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6468 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6470 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6471 if (target)
6472 return target;
6473 break;
6475 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6476 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6477 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6478 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6479 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6481 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6487 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6488 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6489 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6490 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6492 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6493 if (target)
6494 return target;
6495 break;
6497 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6498 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6499 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6500 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6501 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6503 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6504 if (target)
6505 return target;
6506 break;
6508 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6509 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6510 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6511 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6512 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6514 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6515 if (target)
6516 return target;
6517 break;
6519 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6520 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6521 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6522 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6523 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6525 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6526 if (target)
6527 return target;
6528 break;
6530 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6531 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6532 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6533 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6534 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6536 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6537 if (target)
6538 return target;
6539 break;
6541 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6542 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6543 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6544 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6545 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6546 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6547 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6548 if (target)
6549 return target;
6550 break;
6552 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6553 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6554 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6555 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6556 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6558 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6559 if (target)
6560 return target;
6561 break;
6563 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6567 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6568 if (mode == VOIDmode)
6569 mode = TYPE_MODE (boolean_type_node);
6570 if (!target || !register_operand (target, mode))
6571 target = gen_reg_rtx (mode);
6573 mode = get_builtin_sync_mode
6574 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6575 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6576 if (target)
6577 return target;
6578 break;
6580 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6584 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6585 mode = get_builtin_sync_mode
6586 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6587 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6596 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6598 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6599 if (target)
6600 return target;
6601 break;
6603 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6604 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6605 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6606 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6607 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6609 expand_builtin_sync_lock_release (mode, exp);
6610 return const0_rtx;
6612 case BUILT_IN_SYNC_SYNCHRONIZE:
6613 expand_builtin_sync_synchronize ();
6614 return const0_rtx;
6616 case BUILT_IN_ATOMIC_EXCHANGE_1:
6617 case BUILT_IN_ATOMIC_EXCHANGE_2:
6618 case BUILT_IN_ATOMIC_EXCHANGE_4:
6619 case BUILT_IN_ATOMIC_EXCHANGE_8:
6620 case BUILT_IN_ATOMIC_EXCHANGE_16:
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6622 target = expand_builtin_atomic_exchange (mode, exp, target);
6623 if (target)
6624 return target;
6625 break;
6627 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6633 unsigned int nargs, z;
6634 vec<tree, va_gc> *vec;
6636 mode =
6637 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6638 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6639 if (target)
6640 return target;
6642 /* If this is turned into an external library call, the weak parameter
6643 must be dropped to match the expected parameter list. */
6644 nargs = call_expr_nargs (exp);
6645 vec_alloc (vec, nargs - 1);
6646 for (z = 0; z < 3; z++)
6647 vec->quick_push (CALL_EXPR_ARG (exp, z));
6648 /* Skip the boolean weak parameter. */
6649 for (z = 4; z < 6; z++)
6650 vec->quick_push (CALL_EXPR_ARG (exp, z));
6651 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6652 break;
6655 case BUILT_IN_ATOMIC_LOAD_1:
6656 case BUILT_IN_ATOMIC_LOAD_2:
6657 case BUILT_IN_ATOMIC_LOAD_4:
6658 case BUILT_IN_ATOMIC_LOAD_8:
6659 case BUILT_IN_ATOMIC_LOAD_16:
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6661 target = expand_builtin_atomic_load (mode, exp, target);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_ATOMIC_STORE_1:
6667 case BUILT_IN_ATOMIC_STORE_2:
6668 case BUILT_IN_ATOMIC_STORE_4:
6669 case BUILT_IN_ATOMIC_STORE_8:
6670 case BUILT_IN_ATOMIC_STORE_16:
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6672 target = expand_builtin_atomic_store (mode, exp);
6673 if (target)
6674 return const0_rtx;
6675 break;
6677 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6678 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6679 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6680 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6681 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6683 enum built_in_function lib;
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6685 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6686 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6687 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6688 ignore, lib);
6689 if (target)
6690 return target;
6691 break;
6693 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6694 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6695 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6696 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6697 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6699 enum built_in_function lib;
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6701 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6702 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6703 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6704 ignore, lib);
6705 if (target)
6706 return target;
6707 break;
6709 case BUILT_IN_ATOMIC_AND_FETCH_1:
6710 case BUILT_IN_ATOMIC_AND_FETCH_2:
6711 case BUILT_IN_ATOMIC_AND_FETCH_4:
6712 case BUILT_IN_ATOMIC_AND_FETCH_8:
6713 case BUILT_IN_ATOMIC_AND_FETCH_16:
6715 enum built_in_function lib;
6716 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6717 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6718 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6720 ignore, lib);
6721 if (target)
6722 return target;
6723 break;
6725 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6726 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6727 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6728 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6729 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6731 enum built_in_function lib;
6732 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6733 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6734 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6735 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6736 ignore, lib);
6737 if (target)
6738 return target;
6739 break;
6741 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6742 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6743 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6744 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6745 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6747 enum built_in_function lib;
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6749 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6750 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6751 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6752 ignore, lib);
6753 if (target)
6754 return target;
6755 break;
6757 case BUILT_IN_ATOMIC_OR_FETCH_1:
6758 case BUILT_IN_ATOMIC_OR_FETCH_2:
6759 case BUILT_IN_ATOMIC_OR_FETCH_4:
6760 case BUILT_IN_ATOMIC_OR_FETCH_8:
6761 case BUILT_IN_ATOMIC_OR_FETCH_16:
6763 enum built_in_function lib;
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6765 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6766 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6768 ignore, lib);
6769 if (target)
6770 return target;
6771 break;
6773 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6774 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6775 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6776 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6777 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6780 ignore, BUILT_IN_NONE);
6781 if (target)
6782 return target;
6783 break;
6785 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6786 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6787 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6788 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6789 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6792 ignore, BUILT_IN_NONE);
6793 if (target)
6794 return target;
6795 break;
6797 case BUILT_IN_ATOMIC_FETCH_AND_1:
6798 case BUILT_IN_ATOMIC_FETCH_AND_2:
6799 case BUILT_IN_ATOMIC_FETCH_AND_4:
6800 case BUILT_IN_ATOMIC_FETCH_AND_8:
6801 case BUILT_IN_ATOMIC_FETCH_AND_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6804 ignore, BUILT_IN_NONE);
6805 if (target)
6806 return target;
6807 break;
6809 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6810 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6811 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6812 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6813 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6815 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6816 ignore, BUILT_IN_NONE);
6817 if (target)
6818 return target;
6819 break;
6821 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6822 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6823 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6824 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6825 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6828 ignore, BUILT_IN_NONE);
6829 if (target)
6830 return target;
6831 break;
6833 case BUILT_IN_ATOMIC_FETCH_OR_1:
6834 case BUILT_IN_ATOMIC_FETCH_OR_2:
6835 case BUILT_IN_ATOMIC_FETCH_OR_4:
6836 case BUILT_IN_ATOMIC_FETCH_OR_8:
6837 case BUILT_IN_ATOMIC_FETCH_OR_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6840 ignore, BUILT_IN_NONE);
6841 if (target)
6842 return target;
6843 break;
6845 case BUILT_IN_ATOMIC_TEST_AND_SET:
6846 return expand_builtin_atomic_test_and_set (exp, target);
6848 case BUILT_IN_ATOMIC_CLEAR:
6849 return expand_builtin_atomic_clear (exp);
6851 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6852 return expand_builtin_atomic_always_lock_free (exp);
6854 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6855 target = expand_builtin_atomic_is_lock_free (exp);
6856 if (target)
6857 return target;
6858 break;
6860 case BUILT_IN_ATOMIC_THREAD_FENCE:
6861 expand_builtin_atomic_thread_fence (exp);
6862 return const0_rtx;
6864 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6865 expand_builtin_atomic_signal_fence (exp);
6866 return const0_rtx;
6868 case BUILT_IN_OBJECT_SIZE:
6869 return expand_builtin_object_size (exp);
6871 case BUILT_IN_MEMCPY_CHK:
6872 case BUILT_IN_MEMPCPY_CHK:
6873 case BUILT_IN_MEMMOVE_CHK:
6874 case BUILT_IN_MEMSET_CHK:
6875 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6876 if (target)
6877 return target;
6878 break;
6880 case BUILT_IN_STRCPY_CHK:
6881 case BUILT_IN_STPCPY_CHK:
6882 case BUILT_IN_STRNCPY_CHK:
6883 case BUILT_IN_STPNCPY_CHK:
6884 case BUILT_IN_STRCAT_CHK:
6885 case BUILT_IN_STRNCAT_CHK:
6886 case BUILT_IN_SNPRINTF_CHK:
6887 case BUILT_IN_VSNPRINTF_CHK:
6888 maybe_emit_chk_warning (exp, fcode);
6889 break;
6891 case BUILT_IN_SPRINTF_CHK:
6892 case BUILT_IN_VSPRINTF_CHK:
6893 maybe_emit_sprintf_chk_warning (exp, fcode);
6894 break;
6896 case BUILT_IN_FREE:
6897 if (warn_free_nonheap_object)
6898 maybe_emit_free_warning (exp);
6899 break;
6901 case BUILT_IN_THREAD_POINTER:
6902 return expand_builtin_thread_pointer (exp, target);
6904 case BUILT_IN_SET_THREAD_POINTER:
6905 expand_builtin_set_thread_pointer (exp);
6906 return const0_rtx;
6908 default: /* just do library call, if unknown builtin */
6909 break;
6912 /* The switch statement above can drop through to cause the function
6913 to be called normally. */
6914 return expand_call (exp, target, ignore);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.

   The argument types are checked only by broad class (scalar float,
   complex float, pointer, integral) against the builtin's declared
   parameter list, not for exact type equality.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (address of a FUNCTION_DECL) are recognized.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Machine-dependent builtins (BUILT_IN_MD) do not have math
     function codes, so reject them along with non-builtins.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter list and the actual arguments in
     lockstep, checking that each argument's type class matches.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Trailing `void' means the parameter list is complete: any
	     leftover actual argument makes the call malformed.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments for the declared parameters.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node if ARG is known
   constant, integer_zero_node if it is known non-constant (or we must
   decide now), and NULL_TREE to defer the decision to later folding.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      /* The address of a string literal (or of its first element via a
	 zero-index ARRAY_REF) is also considered constant.  */
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Undecided: leave the call in place for later passes.  */
  return NULL_TREE;
}
7033 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7034 return it as a truthvalue. */
7036 static tree
7037 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7039 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7041 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7042 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7043 ret_type = TREE_TYPE (TREE_TYPE (fn));
7044 pred_type = TREE_VALUE (arg_types);
7045 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7047 pred = fold_convert_loc (loc, pred_type, pred);
7048 expected = fold_convert_loc (loc, expected_type, expected);
7049 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7051 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7052 build_int_cst (ret_type, 0));
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Rewrite expect (a && b, v) as expect (a, v) && expect (b, v)
     (likewise for ||) so each operand carries the prediction.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Peel component/array references to reach the underlying decl;
	 a weak symbol's address is not a compile-time constant.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7128 /* Fold a call to __builtin_classify_type with argument ARG. */
7130 static tree
7131 fold_builtin_classify_type (tree arg)
7133 if (arg == 0)
7134 return build_int_cst (integer_type_node, no_type_class);
7136 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7139 /* Fold a call to __builtin_strlen with argument ARG. */
7141 static tree
7142 fold_builtin_strlen (location_t loc, tree type, tree arg)
7144 if (!validate_arg (arg, POINTER_TYPE))
7145 return NULL_TREE;
7146 else
7148 tree len = c_strlen (arg, 0);
7150 if (len)
7151 return fold_convert_loc (loc, type, len);
7153 return NULL_TREE;
7157 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7159 static tree
7160 fold_builtin_inf (location_t loc, tree type, int warn)
7162 REAL_VALUE_TYPE real;
7164 /* __builtin_inff is intended to be usable to define INFINITY on all
7165 targets. If an infinity is not available, INFINITY expands "to a
7166 positive constant of type float that overflows at translation
7167 time", footnote "In this case, using INFINITY will violate the
7168 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7169 Thus we pedwarn to ensure this constraint violation is
7170 diagnosed. */
7171 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7172 pedwarn (loc, 0, "target format does not support infinity");
7174 real_inf (&real);
7175 return build_real (type, real);
7178 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7180 static tree
7181 fold_builtin_nan (tree arg, tree type, int quiet)
7183 REAL_VALUE_TYPE real;
7184 const char *str;
7186 if (!validate_arg (arg, POINTER_TYPE))
7187 return NULL_TREE;
7188 str = c_getstr (arg);
7189 if (!str)
7190 return NULL_TREE;
7192 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7193 return NULL_TREE;
7195 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   This is a conservative predicate: a false return only means the
   property could not be proven.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from an integer type is integer-valued by
	 construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* Only the value operand matters for these.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* Closed over integers when both operands are integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both selected values must be integral; the condition is
	 irrelevant.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* Widening from an integer type, or a float-to-float
	   conversion of an integral value, preserves integrality.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins produce integral values by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Return the
   simplified tree or NULL_TREE.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. floor ((double) f) to (double) floorf (f) when a
	 lower-precision variant of the builtin exists.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Also
   canonicalizes the int/long-long variants to the long variant when
   the integer types have identical precision.  Return the simplified
   tree or NULL_TREE.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow e.g. lround ((double) f) to lroundf (f) when a
	 lower-precision variant of the builtin exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The replacement returns long, so convert back to the
	     original (int) return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  /* Strip conversions only after the equality test above.  */
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs (z) as sqrt (re*re + im*im), which avoids the library
     call but loses hypot's handling, hence unsafe-math only.
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* ARG and both of its parts are reused, so protect against
	     duplicated side effects.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7493 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7494 complex tree type of the result. If NEG is true, the imaginary
7495 zero is negative. */
7497 static tree
7498 build_complex_cproj (tree type, bool neg)
7500 REAL_VALUE_TYPE rinf, rzero = dconst0;
7502 real_inf (&rinf);
7503 rzero.sign = neg;
7504 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7505 build_real (TREE_TYPE (type), rzero));
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps any infinite input to (inf + I*copysign(0, imag));
	 finite inputs are returned unchanged.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is provably nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* 1/6 is obtained as (1/3) with its binary exponent
		 decremented, i.e. halved.  */
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 = (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7729 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7730 TYPE is the type of the return value. Return NULL_TREE if no
7731 simplification can be made. */
7733 static tree
7734 fold_builtin_cos (location_t loc,
7735 tree arg, tree type, tree fndecl)
7737 tree res, narg;
7739 if (!validate_arg (arg, REAL_TYPE))
7740 return NULL_TREE;
7742 /* Calculate the result when the argument is a constant. */
7743 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7744 return res;
7746 /* Optimize cos(-x) into cos (x). */
7747 if ((narg = fold_strip_sign_ops (arg)))
7748 return build_call_expr_loc (loc, fndecl, 1, narg);
7750 return NULL_TREE;
7753 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7754 Return NULL_TREE if no simplification can be made. */
7756 static tree
7757 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7759 if (validate_arg (arg, REAL_TYPE))
7761 tree res, narg;
7763 /* Calculate the result when the argument is a constant. */
7764 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7765 return res;
7767 /* Optimize cosh(-x) into cosh (x). */
7768 if ((narg = fold_strip_sign_ops (arg)))
7769 return build_call_expr_loc (loc, fndecl, 1, narg);
7772 return NULL_TREE;
7775 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7776 argument ARG. TYPE is the type of the return value. Return
7777 NULL_TREE if no simplification can be made. */
7779 static tree
7780 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7781 bool hyper)
7783 if (validate_arg (arg, COMPLEX_TYPE)
7784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7786 tree tmp;
7788 /* Calculate the result when the argument is a constant. */
7789 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7790 return tmp;
7792 /* Optimize fn(-x) into fn(x). */
7793 if ((tmp = fold_strip_sign_ops (arg)))
7794 return build_call_expr_loc (loc, fndecl, 1, tmp);
7797 return NULL_TREE;
7800 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7801 Return NULL_TREE if no simplification can be made. */
7803 static tree
7804 fold_builtin_tan (tree arg, tree type)
7806 enum built_in_function fcode;
7807 tree res;
7809 if (!validate_arg (arg, REAL_TYPE))
7810 return NULL_TREE;
7812 /* Calculate the result when the argument is a constant. */
7813 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7814 return res;
7816 /* Optimize tan(atan(x)) = x. */
7817 fcode = builtin_mathfn_code (arg);
7818 if (flag_unsafe_math_optimizations
7819 && (fcode == BUILT_IN_ATAN
7820 || fcode == BUILT_IN_ATANF
7821 || fcode == BUILT_IN_ATANL))
7822 return CALL_EXPR_ARG (arg, 0);
7824 return NULL_TREE;
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the destination pointers for sin and
   cos respectively.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  Requires a C99 complex-math
     capable libc providing cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* The cexpi result is used twice; save it to avoid duplicating the
     call.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* *arg1 = imag (cexpi (arg0)); *arg2 = real (cexpi (arg0));  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument, TYPE the return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* The scalar element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      /* cexp (0 + I*i) == cexpi (i).  */
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Both calls are used twice below; save them to avoid
	 duplicated evaluation.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is complex (exp(r)*real(cexpi(i)),
			    exp(r)*imag(cexpi(i))).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7935 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7936 Return NULL_TREE if no simplification can be made. */
7938 static tree
7939 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7941 if (!validate_arg (arg, REAL_TYPE))
7942 return NULL_TREE;
7944 /* Optimize trunc of constant value. */
7945 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7947 REAL_VALUE_TYPE r, x;
7948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7950 x = TREE_REAL_CST (arg);
7951 real_trunc (&r, TYPE_MODE (type), &x);
7952 return build_real (type, r);
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7958 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7961 static tree
7962 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7964 if (!validate_arg (arg, REAL_TYPE))
7965 return NULL_TREE;
7967 /* Optimize floor of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7970 REAL_VALUE_TYPE x;
7972 x = TREE_REAL_CST (arg);
7973 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7976 REAL_VALUE_TYPE r;
7978 real_floor (&r, TYPE_MODE (type), &x);
7979 return build_real (type, r);
7983 /* Fold floor (x) where x is nonnegative to trunc (x). */
7984 if (tree_expr_nonnegative_p (arg))
7986 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7987 if (truncfn)
7988 return build_call_expr_loc (loc, truncfn, 1, arg);
7991 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7994 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7997 static tree
7998 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8000 if (!validate_arg (arg, REAL_TYPE))
8001 return NULL_TREE;
8003 /* Optimize ceil of constant value. */
8004 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8006 REAL_VALUE_TYPE x;
8008 x = TREE_REAL_CST (arg);
8009 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8012 REAL_VALUE_TYPE r;
8014 real_ceil (&r, TYPE_MODE (type), &x);
8015 return build_real (type, r);
8019 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8022 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8023 Return NULL_TREE if no simplification can be made. */
8025 static tree
8026 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8028 if (!validate_arg (arg, REAL_TYPE))
8029 return NULL_TREE;
8031 /* Optimize round of constant value. */
8032 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8034 REAL_VALUE_TYPE x;
8036 x = TREE_REAL_CST (arg);
8037 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8040 REAL_VALUE_TYPE r;
8042 real_round (&r, TYPE_MODE (type), &x);
8043 return build_real (type, r);
8047 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Infinities and NaNs have no meaningful integer result.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* First apply the rounding mode implied by the builtin.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Then convert to the integer return type, but only when
	     the value is representable in it.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  /* Try the generic argument-narrowing and canonicalizations.  */
  return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      /* The constant's value is held as a LO/HI pair of host words.  */
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < HOST_BITS_PER_DOUBLE_INT)
	    hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~(HOST_WIDE_INT_M1U << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs counts bit positions from 1; a zero input yields 0.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  /* For a zero argument, use the target-defined value at zero
	     when there is one (the macro stores it into RESULT);
	     otherwise fall back to the precision.  */
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  /* Likewise for ctz of zero.  */
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* Types wider than two host words are not handled here.  */
	  if (width > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  /* Complement negative values so that counting leading zero
	     bits below yields the number of redundant sign bits.  */
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~(HOST_WIDE_INT_M1U
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = 0;
	  /* Each `x &= x - 1' step clears the lowest set bit.  */
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  /* Parity is the popcount modulo 2.  */
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      /* The constant's value is held as a LO/HI pair of host words.  */
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP16:
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move each byte at bit position S to the mirrored
		 position D, reading from LO or HI and writing into
		 R_LO or R_HI depending on which host word holds it.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      /* Narrow results fit in a single host word.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function, which also
   identifies whether we are folding log, log2 or log10.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      /* Identify which math builtin, if any, computes ARG.  */
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8384 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8385 NULL_TREE if no simplification can be made. */
8387 static tree
8388 fold_builtin_hypot (location_t loc, tree fndecl,
8389 tree arg0, tree arg1, tree type)
8391 tree res, narg0, narg1;
8393 if (!validate_arg (arg0, REAL_TYPE)
8394 || !validate_arg (arg1, REAL_TYPE))
8395 return NULL_TREE;
8397 /* Calculate the result when the argument is a constant. */
8398 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8399 return res;
8401 /* If either argument to hypot has a negate or abs, strip that off.
8402 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8403 narg0 = fold_strip_sign_ops (arg0);
8404 narg1 = fold_strip_sign_ops (arg1);
8405 if (narg0 || narg1)
8407 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8408 narg1 ? narg1 : arg1);
8411 /* If either argument is zero, hypot is fabs of the other. */
8412 if (real_zerop (arg0))
8413 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8414 else if (real_zerop (arg1))
8415 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8417 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8418 if (flag_unsafe_math_optimizations
8419 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8421 const REAL_VALUE_TYPE sqrt2_trunc
8422 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8423 return fold_build2_loc (loc, MULT_EXPR, type,
8424 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8425 build_real (type, sqrt2_trunc));
8428 return NULL_TREE;
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare against the original value.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  pow(0, negative) may trap or set errno,
	     hence the extra guards for a zero base.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only usable when unsafe math
		 optimizations permit rounding differences.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8584 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8585 Return NULL_TREE if no simplification can be made. */
8586 static tree
8587 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8588 tree arg0, tree arg1, tree type)
8590 if (!validate_arg (arg0, REAL_TYPE)
8591 || !validate_arg (arg1, INTEGER_TYPE))
8592 return NULL_TREE;
8594 /* Optimize pow(1.0,y) = 1.0. */
8595 if (real_onep (arg0))
8596 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8598 if (host_integerp (arg1, 0))
8600 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8602 /* Evaluate powi at compile-time. */
8603 if (TREE_CODE (arg0) == REAL_CST
8604 && !TREE_OVERFLOW (arg0))
8606 REAL_VALUE_TYPE x;
8607 x = TREE_REAL_CST (arg0);
8608 real_powi (&x, TYPE_MODE (type), &x, c);
8609 return build_real (type, x);
8612 /* Optimize pow(x,0) = 1.0. */
8613 if (c == 0)
8614 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8615 arg0);
8617 /* Optimize pow(x,1) = x. */
8618 if (c == 1)
8619 return arg0;
8621 /* Optimize pow(x,-1) = 1.0/x. */
8622 if (c == -1)
8623 return fold_build2_loc (loc, RDIV_EXPR, type,
8624 build_real (type, dconst1), arg0);
8627 return NULL_TREE;
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function, which also
   identifies whether we are folding exp, exp2 or exp10.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  Only done with unsafe math
	 optimizations since it ignores rounding and logN's domain.  */
      if (flag_unsafe_math_optimizations)
	{
	  /* Identify which math builtin, if any, computes ARG.  */
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  /* Match the expN being folded (via FUNC) against the
	     corresponding logN in the argument.  */
	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8671 /* Return true if VAR is a VAR_DECL or a component thereof. */
8673 static bool
8674 var_decl_component_p (tree var)
8676 tree inner = var;
8677 while (handled_component_p (inner))
8678 inner = TREE_OPERAND (inner, 0);
8679 return SSA_VAR_P (inner);
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  The fold replaces the
   call with a single scalar store when the length covers exactly one
   suitably aligned integral or pointer object.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* Only fold stores through a direct, non-volatile address of a
     variable (or component thereof).  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the object exactly and DEST must be aligned
     enough for a single assignment of type ETYPE.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the fill byte across the whole host word.  The last
	 step is split as (cval << 31) << 1 so it is well-defined even
	 when HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *)DEST = CVAL as a single scalar assignment.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8761 /* Fold function call to builtin memset. Return
8762 NULL_TREE if no simplification can be made. */
8764 static tree
8765 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8767 if (! validate_arg (dest, POINTER_TYPE)
8768 || ! validate_arg (size, INTEGER_TYPE))
8769 return NULL_TREE;
8771 if (!ignore)
8772 return NULL_TREE;
8774 /* New argument list transforming bzero(ptr x, int y) to
8775 memset(ptr x, int 0, size_t y). This is done this way
8776 so that if it isn't expanded inline, we fallback to
8777 calling bzero instead of memset. */
8779 return fold_builtin_memset (loc, dest, integer_zero_node,
8780 fold_convert_loc (loc, size_type_node, size),
8781 void_type_node, ignore);
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      if (endp == 3)
	{
	  /* memmove: try to prove SRC and DEST cannot overlap, in
	     which case the call can become a memcpy.  */
	  src_align = get_pointer_alignment (src);
	  dest_align = get_pointer_alignment (dest);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Use the copy length as the access size when known.  */
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Same decl with overlapping ranges: cannot prove
		     disjointness, keep the memmove.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  /* Fold the MEM_REF offsets into the byte offsets,
		     giving up on overflow.  */
		  off = mem_ref_offset (src_base) +
					double_int::from_shwi (src_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  src_offset = off.low;
		  off = mem_ref_offset (dest_base) +
					double_int::from_shwi (dest_offset);
		  if (!off.fits_shwi ())
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      /* memcpy/mempcpy/stpcpy: try to turn the copy into a single
	 scalar load/store pair when LEN covers exactly one object.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return NULL_TREE;
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For array-typed pointees whose size does not match LEN, copy
	 as the element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is used again for the return value below; protect it
	 from being expanded twice.  */
      if (!ignore)
        dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  /* Prefer the destination's type for the access when the
	     source is aligned enough for it.  */
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* If only one side produced a usable access, synthesize the
	 other from the raw pointer with a matching type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  /* EXPR == LEN means the src == dest case above: nothing to emit.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy returns a pointer to the last stored byte, not one past.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Transforming strcpy to memcpy can enlarge code, so skip it when
     optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* Without an explicit length, compute it from SRC; give up unless it
     is known and free of side effects.  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* Only fold when the source length is a known constant.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* stpcpy returns DEST + LEN (a pointer to the terminating NUL),
     unlike strcpy which returns DEST itself.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL of the source.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  The whole LEN bytes are
     copied; SLEN >= LEN guarantees they are all defined.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  len = fold_convert_loc (loc, size_type_node, len);
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* Both the searched-for character and the length must be
	 compile-time constants (LEN additionally a host integer).  */
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !host_integerp (len, 1))
	return NULL_TREE;

      /* Only fold when ARG1 is a constant string and LEN does not read
	 past its NUL terminator.  */
      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert ARG2 to the target character set; fail if it does
	     not fit in a target char.  */
	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_low_cst (len, 1));

	  /* Not found: the result is a null pointer of ARG1's type.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  /* Found: return ARG1 advanced by the match offset.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still evaluated for side effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, normalizing the
     host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still evaluated for side effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length constant: evaluate at compile time,
     normalizing the host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  TYPE is the (integer) return type of the call.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0";
     with signed zeros this would be wrong for -0.0, whose sign bit
     is set but which compares equal to 0.0.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  TYPE is the return type of the call.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides its sign anyway; rebuild the call if anything
     was stripped.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9542 /* Fold a call to builtin isascii with argument ARG. */
9544 static tree
9545 fold_builtin_isascii (location_t loc, tree arg)
9547 if (!validate_arg (arg, INTEGER_TYPE))
9548 return NULL_TREE;
9549 else
9551 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9552 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9553 build_int_cst (integer_type_node,
9554 ~ (unsigned HOST_WIDE_INT) 0x7f));
9555 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9556 arg, integer_zero_node);
9560 /* Fold a call to builtin toascii with argument ARG. */
9562 static tree
9563 fold_builtin_toascii (location_t loc, tree arg)
9565 if (!validate_arg (arg, INTEGER_TYPE))
9566 return NULL_TREE;
9568 /* Transform toascii(c) -> (c & 0x7f). */
9569 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9570 build_int_cst (integer_type_node, 0x7f));
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      /* A zero result means the front end could not map '0' into the
	 target character set, so we cannot fold.  */
      if (target_digit0 == 0)
	return NULL_TREE;

      /* The unsigned subtraction makes the single comparison cover
	 both the c < '0' and c > '9' cases.  */
      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
9599 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9601 static tree
9602 fold_builtin_fabs (location_t loc, tree arg, tree type)
9604 if (!validate_arg (arg, REAL_TYPE))
9605 return NULL_TREE;
9607 arg = fold_convert_loc (loc, type, arg);
9608 if (TREE_CODE (arg) == REAL_CST)
9609 return fold_abs_const (arg, type);
9610 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9613 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9615 static tree
9616 fold_builtin_abs (location_t loc, tree arg, tree type)
9618 if (!validate_arg (arg, INTEGER_TYPE))
9619 return NULL_TREE;
9621 arg = fold_convert_loc (loc, type, arg);
9622 if (TREE_CODE (arg) == INTEGER_CST)
9623 return fold_abs_const (arg, type);
9624 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9627 /* Fold a fma operation with arguments ARG[012]. */
9629 tree
9630 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9631 tree type, tree arg0, tree arg1, tree arg2)
9633 if (TREE_CODE (arg0) == REAL_CST
9634 && TREE_CODE (arg1) == REAL_CST
9635 && TREE_CODE (arg2) == REAL_CST)
9636 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9638 return NULL_TREE;
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg(arg1, REAL_TYPE)
      && validate_arg(arg2, REAL_TYPE))
    {
      /* Constant arguments: evaluate exactly via MPFR.  */
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  MAX selects fmax semantics;
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME also allows
	 matching calls to pure functions here.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  TYPE is the real
   return type of the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  /* Save ARG so the real and imaginary parts do not evaluate
	     its side effects twice.  */
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  /* carg takes the imaginary part first: atan2 (b, a).  */
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes them:
   a REAL_TYPE return means logb, an integer return means ilogb.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  RETTYPE is the
   return type of the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, RETTYPE the real
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing first argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the format's
	 radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
   receiving the integral part, RETTYPE the real return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing first argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has a direct instruction for this classification,
     prefer RTL expansion over the generic folding below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the mode's largest finite value as a hex-float
	   string, parsed back into R.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMIN is the smallest normalized value, 0x1p(emin-1).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated only once across both
	   comparisons.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which of
   the classification builtins is being folded.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode, the answer is statically 0;
	 ARG is still evaluated for side effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 before combining them.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) -> x unordered x; save ARG so it is evaluated once.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All comparisons below are against fabs(arg); save it so it is
     evaluated only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* The chain is built inside out, starting from the innermost
     zero/subnormal test.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of the mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR is false exactly when ARG is a NaN.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10242 /* Fold a call to an unordered comparison function such as
10243 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10244 being called and ARG0 and ARG1 are the arguments for the call.
10245 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10246 the opposite of the desired result. UNORDERED_CODE is used
10247 for modes that can hold NaNs and ORDERED_CODE is used for
10248 the rest. */
10250 static tree
10251 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10252 enum tree_code unordered_code,
10253 enum tree_code ordered_code)
10255 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10256 enum tree_code code;
10257 tree type0, type1;
10258 enum tree_code code0, code1;
10259 tree cmp_type = NULL_TREE;
10261 type0 = TREE_TYPE (arg0);
10262 type1 = TREE_TYPE (arg1);
10264 code0 = TREE_CODE (type0);
10265 code1 = TREE_CODE (type1);
10267 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10268 /* Choose the wider of two real types. */
10269 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10270 ? type0 : type1;
10271 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10272 cmp_type = type0;
10273 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10274 cmp_type = type1;
10276 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10277 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10279 if (unordered_code == UNORDERED_EXPR)
10281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10282 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10283 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10286 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10287 : ordered_code;
10288 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10289 fold_build2_loc (loc, code, type, arg0, arg1));
10292 /* Fold a call to built-in function FNDECL with 0 arguments.
10293 IGNORE is true if the result of the function call is ignored. This
10294 function returns NULL_TREE if no simplification was possible. */
static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* __builtin_inf and the decimal variants fold to a constant
       infinity (second argument true warns when the target has no
       infinity -- presumably; confirm against fold_builtin_inf).  */
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      /* Under -fsanitize=unreachable, replace the builtin with a
	 runtime diagnostic instead of plain undefined behavior.  */
      if (flag_sanitize & SANITIZE_UNREACHABLE)
	return ubsan_instrument_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10326 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10327 IGNORE is true if the result of the function call is ignored. This
10328 function returns NULL_TREE if no simplification was possible. */
10330 static tree
10331 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10333 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10334 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10335 switch (fcode)
10337 case BUILT_IN_CONSTANT_P:
10339 tree val = fold_builtin_constant_p (arg0);
10341 /* Gimplification will pull the CALL_EXPR for the builtin out of
10342 an if condition. When not optimizing, we'll not CSE it back.
10343 To avoid link error types of regressions, return false now. */
10344 if (!val && !optimize)
10345 val = integer_zero_node;
10347 return val;
10350 case BUILT_IN_CLASSIFY_TYPE:
10351 return fold_builtin_classify_type (arg0);
10353 case BUILT_IN_STRLEN:
10354 return fold_builtin_strlen (loc, type, arg0);
10356 CASE_FLT_FN (BUILT_IN_FABS):
10357 case BUILT_IN_FABSD32:
10358 case BUILT_IN_FABSD64:
10359 case BUILT_IN_FABSD128:
10360 return fold_builtin_fabs (loc, arg0, type);
10362 case BUILT_IN_ABS:
10363 case BUILT_IN_LABS:
10364 case BUILT_IN_LLABS:
10365 case BUILT_IN_IMAXABS:
10366 return fold_builtin_abs (loc, arg0, type);
10368 CASE_FLT_FN (BUILT_IN_CONJ):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10372 break;
10374 CASE_FLT_FN (BUILT_IN_CREAL):
10375 if (validate_arg (arg0, COMPLEX_TYPE)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10377 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10378 break;
10380 CASE_FLT_FN (BUILT_IN_CIMAG):
10381 if (validate_arg (arg0, COMPLEX_TYPE)
10382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10383 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10384 break;
10386 CASE_FLT_FN (BUILT_IN_CCOS):
10387 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10389 CASE_FLT_FN (BUILT_IN_CCOSH):
10390 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10392 CASE_FLT_FN (BUILT_IN_CPROJ):
10393 return fold_builtin_cproj(loc, arg0, type);
10395 CASE_FLT_FN (BUILT_IN_CSIN):
10396 if (validate_arg (arg0, COMPLEX_TYPE)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10398 return do_mpc_arg1 (arg0, type, mpc_sin);
10399 break;
10401 CASE_FLT_FN (BUILT_IN_CSINH):
10402 if (validate_arg (arg0, COMPLEX_TYPE)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10404 return do_mpc_arg1 (arg0, type, mpc_sinh);
10405 break;
10407 CASE_FLT_FN (BUILT_IN_CTAN):
10408 if (validate_arg (arg0, COMPLEX_TYPE)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10410 return do_mpc_arg1 (arg0, type, mpc_tan);
10411 break;
10413 CASE_FLT_FN (BUILT_IN_CTANH):
10414 if (validate_arg (arg0, COMPLEX_TYPE)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10416 return do_mpc_arg1 (arg0, type, mpc_tanh);
10417 break;
10419 CASE_FLT_FN (BUILT_IN_CLOG):
10420 if (validate_arg (arg0, COMPLEX_TYPE)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10422 return do_mpc_arg1 (arg0, type, mpc_log);
10423 break;
10425 CASE_FLT_FN (BUILT_IN_CSQRT):
10426 if (validate_arg (arg0, COMPLEX_TYPE)
10427 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10428 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10429 break;
10431 CASE_FLT_FN (BUILT_IN_CASIN):
10432 if (validate_arg (arg0, COMPLEX_TYPE)
10433 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10434 return do_mpc_arg1 (arg0, type, mpc_asin);
10435 break;
10437 CASE_FLT_FN (BUILT_IN_CACOS):
10438 if (validate_arg (arg0, COMPLEX_TYPE)
10439 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10440 return do_mpc_arg1 (arg0, type, mpc_acos);
10441 break;
10443 CASE_FLT_FN (BUILT_IN_CATAN):
10444 if (validate_arg (arg0, COMPLEX_TYPE)
10445 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10446 return do_mpc_arg1 (arg0, type, mpc_atan);
10447 break;
10449 CASE_FLT_FN (BUILT_IN_CASINH):
10450 if (validate_arg (arg0, COMPLEX_TYPE)
10451 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10452 return do_mpc_arg1 (arg0, type, mpc_asinh);
10453 break;
10455 CASE_FLT_FN (BUILT_IN_CACOSH):
10456 if (validate_arg (arg0, COMPLEX_TYPE)
10457 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10458 return do_mpc_arg1 (arg0, type, mpc_acosh);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_CATANH):
10462 if (validate_arg (arg0, COMPLEX_TYPE)
10463 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10464 return do_mpc_arg1 (arg0, type, mpc_atanh);
10465 break;
10467 CASE_FLT_FN (BUILT_IN_CABS):
10468 return fold_builtin_cabs (loc, arg0, type, fndecl);
10470 CASE_FLT_FN (BUILT_IN_CARG):
10471 return fold_builtin_carg (loc, arg0, type);
10473 CASE_FLT_FN (BUILT_IN_SQRT):
10474 return fold_builtin_sqrt (loc, arg0, type);
10476 CASE_FLT_FN (BUILT_IN_CBRT):
10477 return fold_builtin_cbrt (loc, arg0, type);
10479 CASE_FLT_FN (BUILT_IN_ASIN):
10480 if (validate_arg (arg0, REAL_TYPE))
10481 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10482 &dconstm1, &dconst1, true);
10483 break;
10485 CASE_FLT_FN (BUILT_IN_ACOS):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10488 &dconstm1, &dconst1, true);
10489 break;
10491 CASE_FLT_FN (BUILT_IN_ATAN):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10494 break;
10496 CASE_FLT_FN (BUILT_IN_ASINH):
10497 if (validate_arg (arg0, REAL_TYPE))
10498 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10499 break;
10501 CASE_FLT_FN (BUILT_IN_ACOSH):
10502 if (validate_arg (arg0, REAL_TYPE))
10503 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10504 &dconst1, NULL, true);
10505 break;
10507 CASE_FLT_FN (BUILT_IN_ATANH):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10510 &dconstm1, &dconst1, false);
10511 break;
10513 CASE_FLT_FN (BUILT_IN_SIN):
10514 if (validate_arg (arg0, REAL_TYPE))
10515 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10516 break;
10518 CASE_FLT_FN (BUILT_IN_COS):
10519 return fold_builtin_cos (loc, arg0, type, fndecl);
10521 CASE_FLT_FN (BUILT_IN_TAN):
10522 return fold_builtin_tan (arg0, type);
10524 CASE_FLT_FN (BUILT_IN_CEXP):
10525 return fold_builtin_cexp (loc, arg0, type);
10527 CASE_FLT_FN (BUILT_IN_CEXPI):
10528 if (validate_arg (arg0, REAL_TYPE))
10529 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10530 break;
10532 CASE_FLT_FN (BUILT_IN_SINH):
10533 if (validate_arg (arg0, REAL_TYPE))
10534 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10535 break;
10537 CASE_FLT_FN (BUILT_IN_COSH):
10538 return fold_builtin_cosh (loc, arg0, type, fndecl);
10540 CASE_FLT_FN (BUILT_IN_TANH):
10541 if (validate_arg (arg0, REAL_TYPE))
10542 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10543 break;
10545 CASE_FLT_FN (BUILT_IN_ERF):
10546 if (validate_arg (arg0, REAL_TYPE))
10547 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10548 break;
10550 CASE_FLT_FN (BUILT_IN_ERFC):
10551 if (validate_arg (arg0, REAL_TYPE))
10552 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10553 break;
10555 CASE_FLT_FN (BUILT_IN_TGAMMA):
10556 if (validate_arg (arg0, REAL_TYPE))
10557 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10558 break;
10560 CASE_FLT_FN (BUILT_IN_EXP):
10561 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10563 CASE_FLT_FN (BUILT_IN_EXP2):
10564 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10566 CASE_FLT_FN (BUILT_IN_EXP10):
10567 CASE_FLT_FN (BUILT_IN_POW10):
10568 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10570 CASE_FLT_FN (BUILT_IN_EXPM1):
10571 if (validate_arg (arg0, REAL_TYPE))
10572 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10573 break;
10575 CASE_FLT_FN (BUILT_IN_LOG):
10576 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10578 CASE_FLT_FN (BUILT_IN_LOG2):
10579 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10581 CASE_FLT_FN (BUILT_IN_LOG10):
10582 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10584 CASE_FLT_FN (BUILT_IN_LOG1P):
10585 if (validate_arg (arg0, REAL_TYPE))
10586 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10587 &dconstm1, NULL, false);
10588 break;
10590 CASE_FLT_FN (BUILT_IN_J0):
10591 if (validate_arg (arg0, REAL_TYPE))
10592 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10593 NULL, NULL, 0);
10594 break;
10596 CASE_FLT_FN (BUILT_IN_J1):
10597 if (validate_arg (arg0, REAL_TYPE))
10598 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10599 NULL, NULL, 0);
10600 break;
10602 CASE_FLT_FN (BUILT_IN_Y0):
10603 if (validate_arg (arg0, REAL_TYPE))
10604 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10605 &dconst0, NULL, false);
10606 break;
10608 CASE_FLT_FN (BUILT_IN_Y1):
10609 if (validate_arg (arg0, REAL_TYPE))
10610 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10611 &dconst0, NULL, false);
10612 break;
10614 CASE_FLT_FN (BUILT_IN_NAN):
10615 case BUILT_IN_NAND32:
10616 case BUILT_IN_NAND64:
10617 case BUILT_IN_NAND128:
10618 return fold_builtin_nan (arg0, type, true);
10620 CASE_FLT_FN (BUILT_IN_NANS):
10621 return fold_builtin_nan (arg0, type, false);
10623 CASE_FLT_FN (BUILT_IN_FLOOR):
10624 return fold_builtin_floor (loc, fndecl, arg0);
10626 CASE_FLT_FN (BUILT_IN_CEIL):
10627 return fold_builtin_ceil (loc, fndecl, arg0);
10629 CASE_FLT_FN (BUILT_IN_TRUNC):
10630 return fold_builtin_trunc (loc, fndecl, arg0);
10632 CASE_FLT_FN (BUILT_IN_ROUND):
10633 return fold_builtin_round (loc, fndecl, arg0);
10635 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10636 CASE_FLT_FN (BUILT_IN_RINT):
10637 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10639 CASE_FLT_FN (BUILT_IN_ICEIL):
10640 CASE_FLT_FN (BUILT_IN_LCEIL):
10641 CASE_FLT_FN (BUILT_IN_LLCEIL):
10642 CASE_FLT_FN (BUILT_IN_LFLOOR):
10643 CASE_FLT_FN (BUILT_IN_IFLOOR):
10644 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10645 CASE_FLT_FN (BUILT_IN_IROUND):
10646 CASE_FLT_FN (BUILT_IN_LROUND):
10647 CASE_FLT_FN (BUILT_IN_LLROUND):
10648 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10650 CASE_FLT_FN (BUILT_IN_IRINT):
10651 CASE_FLT_FN (BUILT_IN_LRINT):
10652 CASE_FLT_FN (BUILT_IN_LLRINT):
10653 return fold_fixed_mathfn (loc, fndecl, arg0);
10655 case BUILT_IN_BSWAP16:
10656 case BUILT_IN_BSWAP32:
10657 case BUILT_IN_BSWAP64:
10658 return fold_builtin_bswap (fndecl, arg0);
10660 CASE_INT_FN (BUILT_IN_FFS):
10661 CASE_INT_FN (BUILT_IN_CLZ):
10662 CASE_INT_FN (BUILT_IN_CTZ):
10663 CASE_INT_FN (BUILT_IN_CLRSB):
10664 CASE_INT_FN (BUILT_IN_POPCOUNT):
10665 CASE_INT_FN (BUILT_IN_PARITY):
10666 return fold_builtin_bitop (fndecl, arg0);
10668 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10669 return fold_builtin_signbit (loc, arg0, type);
10671 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10672 return fold_builtin_significand (loc, arg0, type);
10674 CASE_FLT_FN (BUILT_IN_ILOGB):
10675 CASE_FLT_FN (BUILT_IN_LOGB):
10676 return fold_builtin_logb (loc, arg0, type);
10678 case BUILT_IN_ISASCII:
10679 return fold_builtin_isascii (loc, arg0);
10681 case BUILT_IN_TOASCII:
10682 return fold_builtin_toascii (loc, arg0);
10684 case BUILT_IN_ISDIGIT:
10685 return fold_builtin_isdigit (loc, arg0);
10687 CASE_FLT_FN (BUILT_IN_FINITE):
10688 case BUILT_IN_FINITED32:
10689 case BUILT_IN_FINITED64:
10690 case BUILT_IN_FINITED128:
10691 case BUILT_IN_ISFINITE:
10693 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10694 if (ret)
10695 return ret;
10696 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10699 CASE_FLT_FN (BUILT_IN_ISINF):
10700 case BUILT_IN_ISINFD32:
10701 case BUILT_IN_ISINFD64:
10702 case BUILT_IN_ISINFD128:
10704 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10705 if (ret)
10706 return ret;
10707 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10710 case BUILT_IN_ISNORMAL:
10711 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10713 case BUILT_IN_ISINF_SIGN:
10714 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10716 CASE_FLT_FN (BUILT_IN_ISNAN):
10717 case BUILT_IN_ISNAND32:
10718 case BUILT_IN_ISNAND64:
10719 case BUILT_IN_ISNAND128:
10720 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10722 case BUILT_IN_PRINTF:
10723 case BUILT_IN_PRINTF_UNLOCKED:
10724 case BUILT_IN_VPRINTF:
10725 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10727 case BUILT_IN_FREE:
10728 if (integer_zerop (arg0))
10729 return build_empty_stmt (loc);
10730 break;
10732 default:
10733 break;
10736 return NULL_TREE;
10740 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10741 IGNORE is true if the result of the function call is ignored. This
10742 function returns NULL_TREE if no simplification was possible. */
static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integer order: fold constant arguments
       via MPFR.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy degrades to strcpy.  */
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparisons are folded to the negated opposite
       comparison; see fold_builtin_unordered_cmp.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument is the checking flag; only fold when it is a
	 side-effect-free integer constant we can drop.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10947 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10948 and ARG2. IGNORE is true if the result of the function call is ignored.
10949 This function returns NULL_TREE if no simplification was possible. */
10951 static tree
10952 fold_builtin_3 (location_t loc, tree fndecl,
10953 tree arg0, tree arg1, tree arg2, bool ignore)
10955 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10956 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10957 switch (fcode)
10960 CASE_FLT_FN (BUILT_IN_SINCOS):
10961 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10963 CASE_FLT_FN (BUILT_IN_FMA):
10964 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10965 break;
10967 CASE_FLT_FN (BUILT_IN_REMQUO):
10968 if (validate_arg (arg0, REAL_TYPE)
10969 && validate_arg(arg1, REAL_TYPE)
10970 && validate_arg(arg2, POINTER_TYPE))
10971 return do_mpfr_remquo (arg0, arg1, arg2);
10972 break;
10974 case BUILT_IN_MEMSET:
10975 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10977 case BUILT_IN_BCOPY:
10978 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10979 void_type_node, true, /*endp=*/3);
10981 case BUILT_IN_MEMCPY:
10982 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10983 type, ignore, /*endp=*/0);
10985 case BUILT_IN_MEMPCPY:
10986 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10987 type, ignore, /*endp=*/1);
10989 case BUILT_IN_MEMMOVE:
10990 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10991 type, ignore, /*endp=*/3);
10993 case BUILT_IN_STRNCAT:
10994 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10996 case BUILT_IN_STRNCPY:
10997 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10999 case BUILT_IN_STRNCMP:
11000 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
11002 case BUILT_IN_MEMCHR:
11003 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
11005 case BUILT_IN_BCMP:
11006 case BUILT_IN_MEMCMP:
11007 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
11009 case BUILT_IN_SPRINTF:
11010 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11012 case BUILT_IN_SNPRINTF:
11013 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11015 case BUILT_IN_STRCPY_CHK:
11016 case BUILT_IN_STPCPY_CHK:
11017 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11018 ignore, fcode);
11020 case BUILT_IN_STRCAT_CHK:
11021 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11023 case BUILT_IN_PRINTF_CHK:
11024 case BUILT_IN_VPRINTF_CHK:
11025 if (!validate_arg (arg0, INTEGER_TYPE)
11026 || TREE_SIDE_EFFECTS (arg0))
11027 return NULL_TREE;
11028 else
11029 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11030 break;
11032 case BUILT_IN_FPRINTF:
11033 case BUILT_IN_FPRINTF_UNLOCKED:
11034 case BUILT_IN_VFPRINTF:
11035 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11036 ignore, fcode);
11038 case BUILT_IN_FPRINTF_CHK:
11039 case BUILT_IN_VFPRINTF_CHK:
11040 if (!validate_arg (arg1, INTEGER_TYPE)
11041 || TREE_SIDE_EFFECTS (arg1))
11042 return NULL_TREE;
11043 else
11044 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11045 ignore, fcode);
11047 default:
11048 break;
11050 return NULL_TREE;
11053 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11054 ARG2, and ARG3. IGNORE is true if the result of the function call is
11055 ignored. This function returns NULL_TREE if no simplification was
11056 possible. */
static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
				       ignore, fcode);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The second argument is the checking flag; only fold when it is
	 a side-effect-free integer constant we can drop.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
11101 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11102 arguments, where NARGS <= 4. IGNORE is true if the result of the
11103 function call is ignored. This function returns NULL_TREE if no
11104 simplification was possible. Note that this only folds builtins with
11105 fixed argument patterns. Foldings that do varargs-to-varargs
11106 transformations, or that match calls with more than 4 arguments,
11107 need to be handled with fold_builtin_varargs instead. */
11109 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11111 static tree
11112 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11114 tree ret = NULL_TREE;
11116 switch (nargs)
11118 case 0:
11119 ret = fold_builtin_0 (loc, fndecl, ignore);
11120 break;
11121 case 1:
11122 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11123 break;
11124 case 2:
11125 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11126 break;
11127 case 3:
11128 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11129 break;
11130 case 4:
11131 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11132 ignore);
11133 break;
11134 default:
11135 break;
11137 if (ret)
11139 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11140 SET_EXPR_LOCATION (ret, loc);
11141 TREE_NO_WARNING (ret) = 1;
11142 return ret;
11144 return NULL_TREE;
11147 /* Builtins with folding operations that operate on "..." arguments
11148 need special handling; we need to store the arguments in a convenient
11149 data structure before attempting any folding. Fortunately there are
11150 only a few builtins that fall into this category. FNDECL is the
11151 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11152 result of the function call is ignored. */
static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same post-processing as fold_builtin_n: wrap the result in a
	 NOP_EXPR and suppress warnings on the synthesized expression.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
11190 /* Return true if FNDECL shouldn't be folded right now.
11191 If a built-in function has an inline attribute always_inline
11192 wrapper, defer folding it after always_inline functions have
11193 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11194 might not be performed. */
11196 bool
11197 avoid_folding_inline_builtin (tree fndecl)
11199 return (DECL_DECLARED_INLINE_P (fndecl)
11200 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11201 && cfun
11202 && !cfun->always_inline_functions_inlined
11203 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11206 /* A wrapper function for builtin folding that prevents warnings for
11207 "statement without effect" and the like, caused by removing the
11208 call node earlier than the warning is generated. */
tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Defer always_inline builtin wrappers until after inlining;
	 see avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins go to the target hook; everything
	 else through the fixed-arity folders, then the varargs ones.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11260 /* Conveniently construct a function call expression. FNDECL names the
11261 function to be called and N arguments are passed in the array
11262 ARGARRAY. */
11264 tree
11265 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11267 tree fntype = TREE_TYPE (fndecl);
11268 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11270 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11273 /* Conveniently construct a function call expression. FNDECL names the
11274 function to be called and the arguments are passed in the vector
11275 VEC. */
11277 tree
11278 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11280 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11281 vec_safe_address (vec));
11285 /* Conveniently construct a function call expression. FNDECL names the
11286 function to be called, N is the number of arguments, and the "..."
11287 parameters are the argument expressions. */
11289 tree
11290 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11292 va_list ap;
11293 tree *argarray = XALLOCAVEC (tree, n);
11294 int i;
11296 va_start (ap, n);
11297 for (i = 0; i < n; i++)
11298 argarray[i] = va_arg (ap, tree);
11299 va_end (ap);
11300 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11303 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11304 varargs macros aren't supported by all bootstrap compilers. */
11306 tree
11307 build_call_expr (tree fndecl, int n, ...)
11309 va_list ap;
11310 tree *argarray = XALLOCAVEC (tree, n);
11311 int i;
11313 va_start (ap, n);
11314 for (i = 0; i < n; i++)
11315 argarray[i] = va_arg (ap, tree);
11316 va_end (ap);
11317 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  If FN is the address of
   a foldable builtin, attempt to fold the call first; otherwise (or when
   folding fails) build and return a plain CALL_EXPR at LOC.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* always_inline wrappers must be inlined before folding.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins go through the target hook.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a builtin (or not foldable): just build the call.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* New arguments come first, followed by the retained tail of the
	 original argument list.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* No new arguments: reuse the original array in place (aliasing
       ARGS), avoiding a copy.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
11406 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11407 list ARGS along with N new arguments specified as the "..."
11408 parameters. SKIP is the number of arguments in ARGS to be omitted.
11409 OLDNARGS is the number of elements in ARGS. */
11411 static tree
11412 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11413 int skip, tree fndecl, int n, ...)
11415 va_list ap;
11416 tree t;
11418 va_start (ap, n);
11419 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11420 va_end (ap);
11422 return t;
11425 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11426 along with N new arguments specified as the "..." parameters. SKIP
11427 is the number of arguments in EXP to be omitted. This function is used
11428 to do varargs-to-varargs transformations. */
11430 static tree
11431 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11433 va_list ap;
11434 tree t;
11436 va_start (ap, n);
11437 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11438 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11439 va_end (ap);
11441 return t;
11444 /* Validate a single argument ARG against a tree code CODE representing
11445 a type. */
11447 static bool
11448 validate_arg (const_tree arg, enum tree_code code)
11450 if (!arg)
11451 return false;
11452 else if (code == POINTER_TYPE)
11453 return POINTER_TYPE_P (TREE_TYPE (arg));
11454 else if (code == INTEGER_TYPE)
11455 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11456 return code == TREE_CODE (TREE_TYPE (arg));
11459 /* This function validates the types of a function call argument list
11460 against a specified list of tree_codes. If the last specifier is a 0,
11461 that represents an ellipses, otherwise the last specifier must be a
11462 VOID_TYPE.
11464 This is the GIMPLE version of validate_arglist. Eventually we want to
11465 completely convert builtins.c to work from GIMPLEs and the tree based
11466 validate_arglist will then be removed. */
11468 bool
11469 validate_gimple_arglist (const_gimple call, ...)
11471 enum tree_code code;
11472 bool res = 0;
11473 va_list ap;
11474 const_tree arg;
11475 size_t i;
11477 va_start (ap, call);
11478 i = 0;
11482 code = (enum tree_code) va_arg (ap, int);
11483 switch (code)
11485 case 0:
11486 /* This signifies an ellipses, any further arguments are all ok. */
11487 res = true;
11488 goto end;
11489 case VOID_TYPE:
11490 /* This signifies an endlink, if no arguments remain, return
11491 true, otherwise return false. */
11492 res = (i == gimple_call_num_args (call));
11493 goto end;
11494 default:
11495 /* If no parameters remain or the parameter's code does not
11496 match the specified code, return false. Otherwise continue
11497 checking any remaining arguments. */
11498 arg = gimple_call_arg (call, i++);
11499 if (!validate_arg (arg, code))
11500 goto end;
11501 break;
11504 while (1);
11506 /* We need gotos here since we can only have one VA_CLOSE in a
11507 function. */
11508 end: ;
11509 va_end (ap);
11511 return res;
11514 /* This function validates the types of a function call argument list
11515 against a specified list of tree_codes. If the last specifier is a 0,
11516 that represents an ellipses, otherwise the last specifier must be a
11517 VOID_TYPE. */
11519 bool
11520 validate_arglist (const_tree callexpr, ...)
11522 enum tree_code code;
11523 bool res = 0;
11524 va_list ap;
11525 const_call_expr_arg_iterator iter;
11526 const_tree arg;
11528 va_start (ap, callexpr);
11529 init_const_call_expr_arg_iterator (callexpr, &iter);
11533 code = (enum tree_code) va_arg (ap, int);
11534 switch (code)
11536 case 0:
11537 /* This signifies an ellipses, any further arguments are all ok. */
11538 res = true;
11539 goto end;
11540 case VOID_TYPE:
11541 /* This signifies an endlink, if no arguments remain, return
11542 true, otherwise return false. */
11543 res = !more_const_call_expr_args_p (&iter);
11544 goto end;
11545 default:
11546 /* If no parameters remain or the parameter's code does not
11547 match the specified code, return false. Otherwise continue
11548 checking any remaining arguments. */
11549 arg = next_const_call_expr_arg (&iter);
11550 if (!validate_arg (arg, code))
11551 goto end;
11552 break;
11555 while (1);
11557 /* We need gotos here since we can only have one VA_CLOSE in a
11558 function. */
11559 end: ;
11560 va_end (ap);
11562 return res;
/* Default target-specific builtin expander that does nothing.  Targets
   that expand builtins install their own hook; this fallback simply
   declines by returning NULL_RTX so generic expansion takes over.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
11577 /* Returns true is EXP represents data that would potentially reside
11578 in a readonly section. */
11580 static bool
11581 readonly_data_expr (tree exp)
11583 STRIP_NOPS (exp);
11585 if (TREE_CODE (exp) != ADDR_EXPR)
11586 return false;
11588 exp = get_base_address (TREE_OPERAND (exp, 0));
11589 if (!exp)
11590 return false;
11592 /* Make sure we call decl_readonly_section only for trees it
11593 can handle (since it returns true for everything it doesn't
11594 understand). */
11595 if (TREE_CODE (exp) == STRING_CST
11596 || TREE_CODE (exp) == CONSTRUCTOR
11597 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11598 return decl_readonly_section (exp, 0);
11599 else
11600 return false;
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The needle must be a known constant string for any folding.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: evaluate strstr at compile time.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      /* Only a single-character needle can become strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The searched-for character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert S2 to a host char honoring the target character
	     set; punt if that fails.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  /* Both operands constant: evaluate strchr at compile time.  */
	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The searched-for character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert S2 to a host char honoring the target character
	     set; punt if that fails.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  /* Both operands constant: evaluate strrchr at compile time.  */
	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Searching for '\0': first and last occurrence coincide, so
	 strchr can be used instead.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept-set must be a known constant string for any folding.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: evaluate strpbrk at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Emit strcpy (dst + strlen (dst), src), then yield DST as the
	     value of the whole expression via a COMPOUND_EXPR.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
11976 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11977 to the call.
11979 Return NULL_TREE if no simplification was possible, otherwise return the
11980 simplified form of the call as a tree.
11982 The simplified form may be a constant or other expression which
11983 computes the same value, but in a more efficient manner (including
11984 calls to other builtin functions).
11986 The call may contain arguments which need to be evaluated, but
11987 which are not useful to determine the result of the call. In
11988 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11989 COMPOUND_EXPR will be an argument which must be evaluated.
11990 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11991 COMPOUND_EXPR in the chain will contain the tree for the simplified
11992 form of the builtin function call. */
11994 static tree
11995 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11997 if (!validate_arg (s1, POINTER_TYPE)
11998 || !validate_arg (s2, POINTER_TYPE))
11999 return NULL_TREE;
12000 else
12002 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12004 /* If both arguments are constants, evaluate at compile-time. */
12005 if (p1 && p2)
12007 const size_t r = strspn (p1, p2);
12008 return build_int_cst (size_type_node, r);
12011 /* If either argument is "", return NULL_TREE. */
12012 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12013 /* Evaluate and ignore both arguments in case either one has
12014 side-effects. */
12015 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12016 s1, s2);
12017 return NULL_TREE;
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
12081 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12082 to the call. IGNORE is true if the value returned
12083 by the builtin will be ignored. UNLOCKED is true is true if this
12084 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12085 the known length of the string. Return NULL_TREE if no simplification
12086 was possible. */
12088 tree
12089 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12090 bool ignore, bool unlocked, tree len)
12092 /* If we're using an unlocked function, assume the other unlocked
12093 functions exist explicitly. */
12094 tree const fn_fputc = (unlocked
12095 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12096 : builtin_decl_implicit (BUILT_IN_FPUTC));
12097 tree const fn_fwrite = (unlocked
12098 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12099 : builtin_decl_implicit (BUILT_IN_FWRITE));
12101 /* If the return value is used, don't do the transformation. */
12102 if (!ignore)
12103 return NULL_TREE;
12105 /* Verify the arguments in the original call. */
12106 if (!validate_arg (arg0, POINTER_TYPE)
12107 || !validate_arg (arg1, POINTER_TYPE))
12108 return NULL_TREE;
12110 if (! len)
12111 len = c_strlen (arg0, 0);
12113 /* Get the length of the string passed to fputs. If the length
12114 can't be determined, punt. */
12115 if (!len
12116 || TREE_CODE (len) != INTEGER_CST)
12117 return NULL_TREE;
12119 switch (compare_tree_int (len, 1))
12121 case -1: /* length is 0, delete the call entirely . */
12122 return omit_one_operand_loc (loc, integer_type_node,
12123 integer_zero_node, arg1);;
12125 case 0: /* length is 1, call fputc. */
12127 const char *p = c_getstr (arg0);
12129 if (p != NULL)
12131 if (fn_fputc)
12132 return build_call_expr_loc (loc, fn_fputc, 2,
12133 build_int_cst
12134 (integer_type_node, p[0]), arg1);
12135 else
12136 return NULL_TREE;
12139 /* FALLTHROUGH */
12140 case 1: /* length is greater than 1, call fwrite. */
12142 /* If optimizing for size keep fputs. */
12143 if (optimize_function_for_size_p (cfun))
12144 return NULL_TREE;
12145 /* New argument list transforming fputs(string, stream) to
12146 fwrite(string, 1, len, stream). */
12147 if (fn_fwrite)
12148 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12149 size_one_node, len, arg1);
12150 else
12151 return NULL_TREE;
12153 default:
12154 gcc_unreachable ();
12156 return NULL_TREE;
12159 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12160 produced. False otherwise. This is done so that we don't output the error
12161 or warning twice or three times. */
12163 bool
12164 fold_builtin_next_arg (tree exp, bool va_start_p)
12166 tree fntype = TREE_TYPE (current_function_decl);
12167 int nargs = call_expr_nargs (exp);
12168 tree arg;
12169 /* There is good chance the current input_location points inside the
12170 definition of the va_start macro (perhaps on the token for
12171 builtin) in a system header, so warnings will not be emitted.
12172 Use the location in real source code. */
12173 source_location current_location =
12174 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12175 NULL);
12177 if (!stdarg_p (fntype))
12179 error ("%<va_start%> used in function with fixed args");
12180 return true;
12183 if (va_start_p)
12185 if (va_start_p && (nargs != 2))
12187 error ("wrong number of arguments to function %<va_start%>");
12188 return true;
12190 arg = CALL_EXPR_ARG (exp, 1);
12192 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12193 when we checked the arguments and if needed issued a warning. */
12194 else
12196 if (nargs == 0)
12198 /* Evidently an out of date version of <stdarg.h>; can't validate
12199 va_start's second argument, but can still work as intended. */
12200 warning_at (current_location,
12201 OPT_Wvarargs,
12202 "%<__builtin_next_arg%> called without an argument");
12203 return true;
12205 else if (nargs > 1)
12207 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12208 return true;
12210 arg = CALL_EXPR_ARG (exp, 0);
12213 if (TREE_CODE (arg) == SSA_NAME)
12214 arg = SSA_NAME_VAR (arg);
12216 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12217 or __builtin_next_arg (0) the first time we see it, after checking
12218 the arguments and if needed issuing a warning. */
12219 if (!integer_zerop (arg))
12221 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12223 /* Strip off all nops for the sake of the comparison. This
12224 is not quite the same as STRIP_NOPS. It does more.
12225 We must also strip off INDIRECT_EXPR for C++ reference
12226 parameters. */
12227 while (CONVERT_EXPR_P (arg)
12228 || TREE_CODE (arg) == INDIRECT_REF)
12229 arg = TREE_OPERAND (arg, 0);
12230 if (arg != last_parm)
12232 /* FIXME: Sometimes with the tree optimizers we can get the
12233 not the last argument even though the user used the last
12234 argument. We just warn and set the arg to be the last
12235 argument so that we will get wrong-code because of
12236 it. */
12237 warning_at (current_location,
12238 OPT_Wvarargs,
12239 "second parameter of %<va_start%> not last named argument");
12242 /* Undefined by C99 7.15.1.4p4 (va_start):
12243 "If the parameter parmN is declared with the register storage
12244 class, with a function or array type, or with a type that is
12245 not compatible with the type that results after application of
12246 the default argument promotions, the behavior is undefined."
12248 else if (DECL_REGISTER (arg))
12250 warning_at (current_location,
12251 OPT_Wvarargs,
12252 "undefined behaviour when second parameter of "
12253 "%<va_start%> is declared with %<register%> storage");
12256 /* We want to verify the second parameter just once before the tree
12257 optimizers are run and then avoid keeping it in the tree,
12258 as otherwise we could warn even for correct code like:
12259 void foo (int i, ...)
12260 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12261 if (va_start_p)
12262 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12263 else
12264 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12266 return false;
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  Only then
     can we reason about the '%' directives it contains.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  sprintf returns the
	 number of characters written, which here is just strlen (fmt).  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).
	 When the return value is used we must be able to compute the
	 length of ORIG as a compile-time constant.  */
      if (!ignored)
	{
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Return (strcpy (...), retval), converting RETVAL to the
	 declared return type of sprintf.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* The destination size must be a compile-time constant; otherwise we
     cannot prove the output fits and snprintf would truncate.  */
  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
	return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      /* Return (strcpy (...), retval) with RETVAL converted to the
	 declared return type of snprintf.  */
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
/* Expand a call EXP to __builtin_object_size.  Diagnoses malformed calls
   (wrong argument types, or a type argument outside 0..3) by emitting an
   error and a trap; otherwise returns the "unknown size" fallback value
   for the requested object-size type.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument selects one of the four object-size types and
     must be a constant in 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_low_cst (ost, 0);

  /* Types 0 and 1 default to (size_t) -1 when the size is unknown,
     types 2 and 3 default to 0.  */
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* For memset_chk the second argument is the fill byte (an integer),
     for the others it is a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE is the object size computed by __builtin_object_size; without a
     constant we cannot reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than the known object SIZE always
	 overflows ((size_t) -1 means "size unknown").  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand the unchecked variant, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is a call to one of the _chk string builtins named by FCODE;
   LEN/SIZE extraction below depends on each builtin's argument layout.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* (size_t) -1 means the object size is unknown; nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is a source string; the overflow test compares its
	 compile-time strlen against the destination size.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: the bound alone already allows
	     overflow, so only a "might overflow" warning is safe.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.
     Arguments are (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* (size_t) -1 means the destination size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Warn when the output (LEN chars plus the terminating NUL) cannot
     fit in SIZE bytes.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12759 /* Emit warning if a free is called with address of a variable. */
12761 static void
12762 maybe_emit_free_warning (tree exp)
12764 tree arg = CALL_EXPR_ARG (exp, 0);
12766 STRIP_NOPS (arg);
12767 if (TREE_CODE (arg) != ADDR_EXPR)
12768 return;
12770 arg = get_base_address (TREE_OPERAND (arg, 0));
12771 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12772 return;
12774 if (SSA_VAR_P (arg))
12775 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12776 "%Kattempt to free a non-heap object %qD", exp, arg);
12777 else
12778 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12779 "%Kattempt to free a non-heap object", exp);
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  Returns a size_type_node constant, or NULL_TREE to
   leave the call for later passes.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					double_int::from_uhwi (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	/* omit_one_operand still evaluates LEN for side effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Punt when the copy may not fit in the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The string (plus NUL) must fit: require MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

tree
fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
			  tree len, tree size, tree maxlen, bool ignore,
			  enum built_in_function fcode)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Punt when up to MAXLEN bytes might not fit in the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
13066 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13067 are the arguments to the call. */
13069 static tree
13070 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13071 tree src, tree size)
13073 tree fn;
13074 const char *p;
13076 if (!validate_arg (dest, POINTER_TYPE)
13077 || !validate_arg (src, POINTER_TYPE)
13078 || !validate_arg (size, INTEGER_TYPE))
13079 return NULL_TREE;
13081 p = c_getstr (src);
13082 /* If the SRC parameter is "", return DEST. */
13083 if (p && *p == '\0')
13084 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13086 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13087 return NULL_TREE;
13089 /* If __builtin_strcat_chk is used, assume strcat is available. */
13090 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13091 if (!fn)
13092 return NULL_TREE;
13094 return build_call_expr_loc (loc, fn, 2, dest, src);
13097 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13098 LEN, and SIZE. */
13100 static tree
13101 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13102 tree dest, tree src, tree len, tree size)
13104 tree fn;
13105 const char *p;
13107 if (!validate_arg (dest, POINTER_TYPE)
13108 || !validate_arg (src, POINTER_TYPE)
13109 || !validate_arg (size, INTEGER_TYPE)
13110 || !validate_arg (size, INTEGER_TYPE))
13111 return NULL_TREE;
13113 p = c_getstr (src);
13114 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13115 if (p && *p == '\0')
13116 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13117 else if (integer_zerop (len))
13118 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13120 if (! host_integerp (size, 1))
13121 return NULL_TREE;
13123 if (! integer_all_onesp (size))
13125 tree src_len = c_strlen (src, 1);
13126 if (src_len
13127 && host_integerp (src_len, 1)
13128 && host_integerp (len, 1)
13129 && ! tree_int_cst_lt (len, src_len))
13131 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13132 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13133 if (!fn)
13134 return NULL_TREE;
13136 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13138 return NULL_TREE;
13141 /* If __builtin_strncat_chk is used, assume strncat is available. */
13142 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13143 if (!fn)
13144 return NULL_TREE;
13146 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* With a known object size, only fold when the output provably
     fits ((size_t) -1 means size unknown).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments, keeping DEST, FMT and the rest.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
13241 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13242 a normal call should be emitted rather than expanding the function
13243 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13245 static tree
13246 fold_builtin_sprintf_chk (location_t loc, tree exp,
13247 enum built_in_function fcode)
13249 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13250 CALL_EXPR_ARGP (exp), fcode);
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Punt when the bound may exceed the known object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments, keeping DEST, LEN, FMT and the rest.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
13330 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13331 a normal call should be emitted rather than expanding the function
13332 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13333 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13334 passed as second argument. */
13336 tree
13337 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13338 enum built_in_function fcode)
13340 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13341 CALL_EXPR_ARGP (exp), maxlen, fcode);
13344 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13345 FMT and ARG are the arguments to the call; we don't fold cases with
13346 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13348 Return NULL_TREE if no simplification was possible, otherwise return the
13349 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13350 code of the function to be simplified. */
13352 static tree
13353 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13354 tree arg, bool ignore,
13355 enum built_in_function fcode)
13357 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13358 const char *fmt_str = NULL;
13360 /* If the return value is used, don't do the transformation. */
13361 if (! ignore)
13362 return NULL_TREE;
13364 /* Verify the required arguments in the original call. */
13365 if (!validate_arg (fmt, POINTER_TYPE))
13366 return NULL_TREE;
13368 /* Check whether the format is a literal string constant. */
13369 fmt_str = c_getstr (fmt);
13370 if (fmt_str == NULL)
13371 return NULL_TREE;
13373 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13375 /* If we're using an unlocked function, assume the other
13376 unlocked functions exist explicitly. */
13377 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13378 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13380 else
13382 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13383 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13386 if (!init_target_chars ())
13387 return NULL_TREE;
13389 if (strcmp (fmt_str, target_percent_s) == 0
13390 || strchr (fmt_str, target_percent) == NULL)
13392 const char *str;
13394 if (strcmp (fmt_str, target_percent_s) == 0)
13396 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13397 return NULL_TREE;
13399 if (!arg || !validate_arg (arg, POINTER_TYPE))
13400 return NULL_TREE;
13402 str = c_getstr (arg);
13403 if (str == NULL)
13404 return NULL_TREE;
13406 else
13408 /* The format specifier doesn't contain any '%' characters. */
13409 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13410 && arg)
13411 return NULL_TREE;
13412 str = fmt_str;
13415 /* If the string was "", printf does nothing. */
13416 if (str[0] == '\0')
13417 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13419 /* If the string has length of 1, call putchar. */
13420 if (str[1] == '\0')
13422 /* Given printf("c"), (where c is any one character,)
13423 convert "c"[0] to an int and pass that to the replacement
13424 function. */
13425 newarg = build_int_cst (integer_type_node, str[0]);
13426 if (fn_putchar)
13427 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13429 else
13431 /* If the string was "string\n", call puts("string"). */
13432 size_t len = strlen (str);
13433 if ((unsigned char)str[len - 1] == target_newline
13434 && (size_t) (int) len == len
13435 && (int) len > 0)
13437 char *newstr;
13438 tree offset_node, string_cst;
13440 /* Create a NUL-terminated string that's one char shorter
13441 than the original, stripping off the trailing '\n'. */
13442 newarg = build_string_literal (len, str);
13443 string_cst = string_constant (newarg, &offset_node);
13444 gcc_checking_assert (string_cst
13445 && (TREE_STRING_LENGTH (string_cst)
13446 == (int) len)
13447 && integer_zerop (offset_node)
13448 && (unsigned char)
13449 TREE_STRING_POINTER (string_cst)[len - 1]
13450 == target_newline);
13451 /* build_string_literal creates a new STRING_CST,
13452 modify it in place to avoid double copying. */
13453 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13454 newstr[len - 1] = '\0';
13455 if (fn_puts)
13456 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13458 else
13459 /* We'd like to arrange to call fputs(string,stdout) here,
13460 but we need stdout and don't have a way to get it yet. */
13461 return NULL_TREE;
13465 /* The other optimizations can be done only on the non-va_list variants. */
13466 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13467 return NULL_TREE;
13469 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13470 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13472 if (!arg || !validate_arg (arg, POINTER_TYPE))
13473 return NULL_TREE;
13474 if (fn_puts)
13475 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13478 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13479 else if (strcmp (fmt_str, target_percent_c) == 0)
13481 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13482 return NULL_TREE;
13483 if (fn_putchar)
13484 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13487 if (!call)
13488 return NULL_TREE;
13490 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13493 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13494 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13495 more than 3 arguments, and ARG may be null in the 2-argument case.
13497 Return NULL_TREE if no simplification was possible, otherwise return the
13498 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13499 code of the function to be simplified. */
13501 static tree
13502 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13503 tree fmt, tree arg, bool ignore,
13504 enum built_in_function fcode)
13506 tree fn_fputc, fn_fputs, call = NULL_TREE;
13507 const char *fmt_str = NULL;
13509 /* If the return value is used, don't do the transformation. */
13510 if (! ignore)
13511 return NULL_TREE;
13513 /* Verify the required arguments in the original call. */
13514 if (!validate_arg (fp, POINTER_TYPE))
13515 return NULL_TREE;
13516 if (!validate_arg (fmt, POINTER_TYPE))
13517 return NULL_TREE;
13519 /* Check whether the format is a literal string constant. */
13520 fmt_str = c_getstr (fmt);
13521 if (fmt_str == NULL)
13522 return NULL_TREE;
13524 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13526 /* If we're using an unlocked function, assume the other
13527 unlocked functions exist explicitly. */
13528 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13529 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13531 else
13533 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13534 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13537 if (!init_target_chars ())
13538 return NULL_TREE;
13540 /* If the format doesn't contain % args or %%, use strcpy. */
13541 if (strchr (fmt_str, target_percent) == NULL)
13543 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13544 && arg)
13545 return NULL_TREE;
13547 /* If the format specifier was "", fprintf does nothing. */
13548 if (fmt_str[0] == '\0')
13550 /* If FP has side-effects, just wait until gimplification is
13551 done. */
13552 if (TREE_SIDE_EFFECTS (fp))
13553 return NULL_TREE;
13555 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13558 /* When "string" doesn't contain %, replace all cases of
13559 fprintf (fp, string) with fputs (string, fp). The fputs
13560 builtin will take care of special cases like length == 1. */
13561 if (fn_fputs)
13562 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13565 /* The other optimizations can be done only on the non-va_list variants. */
13566 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13567 return NULL_TREE;
13569 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13570 else if (strcmp (fmt_str, target_percent_s) == 0)
13572 if (!arg || !validate_arg (arg, POINTER_TYPE))
13573 return NULL_TREE;
13574 if (fn_fputs)
13575 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13578 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13579 else if (strcmp (fmt_str, target_percent_c) == 0)
13581 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13582 return NULL_TREE;
13583 if (fn_fputc)
13584 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13587 if (!call)
13588 return NULL_TREE;
13589 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13592 /* Initialize format string characters in the target charset. */
13594 static bool
13595 init_target_chars (void)
13597 static bool init;
13598 if (!init)
13600 target_newline = lang_hooks.to_target_charset ('\n');
13601 target_percent = lang_hooks.to_target_charset ('%');
13602 target_c = lang_hooks.to_target_charset ('c');
13603 target_s = lang_hooks.to_target_charset ('s');
13604 if (target_newline == 0 || target_percent == 0 || target_c == 0
13605 || target_s == 0)
13606 return false;
13608 target_percent_c[0] = target_percent;
13609 target_percent_c[1] = target_c;
13610 target_percent_c[2] = '\0';
13612 target_percent_s[0] = target_percent;
13613 target_percent_s[1] = target_s;
13614 target_percent_s[2] = '\0';
13616 target_percent_s_newline[0] = target_percent;
13617 target_percent_s_newline[1] = target_s;
13618 target_percent_s_newline[2] = target_newline;
13619 target_percent_s_newline[3] = '\0';
13621 init = true;
13623 return true;
13626 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13627 and no overflow/underflow occurred. INEXACT is true if M was not
13628 exactly calculated. TYPE is the tree type for the result. This
13629 function assumes that you cleared the MPFR flags and then
13630 calculated M to see if anything subsequently set a flag prior to
13631 entering this function. Return NULL_TREE if any checks fail. */
13633 static tree
13634 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13636 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13637 overflow/underflow occurred. If -frounding-math, proceed iff the
13638 result of calling FUNC was exact. */
13639 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13640 && (!flag_rounding_math || !inexact))
13642 REAL_VALUE_TYPE rr;
13644 real_from_mpfr (&rr, m, type, GMP_RNDN);
13645 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13646 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13647 but the mpft_t is not, then we underflowed in the
13648 conversion. */
13649 if (real_isfinite (&rr)
13650 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13652 REAL_VALUE_TYPE rmode;
13654 real_convert (&rmode, TYPE_MODE (type), &rr);
13655 /* Proceed iff the specified mode can hold the value. */
13656 if (real_identical (&rmode, &rr))
13657 return build_real (type, rmode);
13660 return NULL_TREE;
13663 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13664 number and no overflow/underflow occurred. INEXACT is true if M
13665 was not exactly calculated. TYPE is the tree type for the result.
13666 This function assumes that you cleared the MPFR flags and then
13667 calculated M to see if anything subsequently set a flag prior to
13668 entering this function. Return NULL_TREE if any checks fail, if
13669 FORCE_CONVERT is true, then bypass the checks. */
13671 static tree
13672 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13674 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13675 overflow/underflow occurred. If -frounding-math, proceed iff the
13676 result of calling FUNC was exact. */
13677 if (force_convert
13678 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13679 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13680 && (!flag_rounding_math || !inexact)))
13682 REAL_VALUE_TYPE re, im;
13684 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13685 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13686 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13687 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13688 but the mpft_t is not, then we underflowed in the
13689 conversion. */
13690 if (force_convert
13691 || (real_isfinite (&re) && real_isfinite (&im)
13692 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13693 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13695 REAL_VALUE_TYPE re_mode, im_mode;
13697 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13698 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13699 /* Proceed iff the specified mode can hold the value. */
13700 if (force_convert
13701 || (real_identical (&re_mode, &re)
13702 && real_identical (&im_mode, &im)))
13703 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13704 build_real (TREE_TYPE (type), im_mode));
13707 return NULL_TREE;
13710 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13711 FUNC on it and return the resulting value as a tree with type TYPE.
13712 If MIN and/or MAX are not NULL, then the supplied ARG must be
13713 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13714 acceptable values, otherwise they are not. The mpfr precision is
13715 set to the precision of TYPE. We assume that function FUNC returns
13716 zero if the result could be calculated exactly within the requested
13717 precision. */
13719 static tree
13720 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13721 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13722 bool inclusive)
13724 tree result = NULL_TREE;
13726 STRIP_NOPS (arg);
13728 /* To proceed, MPFR must exactly represent the target floating point
13729 format, which only happens when the target base equals two. */
13730 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13731 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13733 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13735 if (real_isfinite (ra)
13736 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13737 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13739 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13740 const int prec = fmt->p;
13741 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13742 int inexact;
13743 mpfr_t m;
13745 mpfr_init2 (m, prec);
13746 mpfr_from_real (m, ra, GMP_RNDN);
13747 mpfr_clear_flags ();
13748 inexact = func (m, m, rnd);
13749 result = do_mpfr_ckconv (m, type, inexact);
13750 mpfr_clear (m);
13754 return result;
13757 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13758 FUNC on it and return the resulting value as a tree with type TYPE.
13759 The mpfr precision is set to the precision of TYPE. We assume that
13760 function FUNC returns zero if the result could be calculated
13761 exactly within the requested precision. */
13763 static tree
13764 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13765 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13767 tree result = NULL_TREE;
13769 STRIP_NOPS (arg1);
13770 STRIP_NOPS (arg2);
13772 /* To proceed, MPFR must exactly represent the target floating point
13773 format, which only happens when the target base equals two. */
13774 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13775 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13776 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13778 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13779 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13781 if (real_isfinite (ra1) && real_isfinite (ra2))
13783 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13784 const int prec = fmt->p;
13785 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13786 int inexact;
13787 mpfr_t m1, m2;
13789 mpfr_inits2 (prec, m1, m2, NULL);
13790 mpfr_from_real (m1, ra1, GMP_RNDN);
13791 mpfr_from_real (m2, ra2, GMP_RNDN);
13792 mpfr_clear_flags ();
13793 inexact = func (m1, m1, m2, rnd);
13794 result = do_mpfr_ckconv (m1, type, inexact);
13795 mpfr_clears (m1, m2, NULL);
13799 return result;
13802 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13803 FUNC on it and return the resulting value as a tree with type TYPE.
13804 The mpfr precision is set to the precision of TYPE. We assume that
13805 function FUNC returns zero if the result could be calculated
13806 exactly within the requested precision. */
13808 static tree
13809 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13810 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13812 tree result = NULL_TREE;
13814 STRIP_NOPS (arg1);
13815 STRIP_NOPS (arg2);
13816 STRIP_NOPS (arg3);
13818 /* To proceed, MPFR must exactly represent the target floating point
13819 format, which only happens when the target base equals two. */
13820 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13821 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13822 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13823 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13825 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13826 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13827 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13829 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13831 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13832 const int prec = fmt->p;
13833 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13834 int inexact;
13835 mpfr_t m1, m2, m3;
13837 mpfr_inits2 (prec, m1, m2, m3, NULL);
13838 mpfr_from_real (m1, ra1, GMP_RNDN);
13839 mpfr_from_real (m2, ra2, GMP_RNDN);
13840 mpfr_from_real (m3, ra3, GMP_RNDN);
13841 mpfr_clear_flags ();
13842 inexact = func (m1, m1, m2, m3, rnd);
13843 result = do_mpfr_ckconv (m1, type, inexact);
13844 mpfr_clears (m1, m2, m3, NULL);
13848 return result;
13851 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13852 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13853 If ARG_SINP and ARG_COSP are NULL then the result is returned
13854 as a complex value.
13855 The type is taken from the type of ARG and is used for setting the
13856 precision of the calculation and results. */
13858 static tree
13859 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13861 tree const type = TREE_TYPE (arg);
13862 tree result = NULL_TREE;
13864 STRIP_NOPS (arg);
13866 /* To proceed, MPFR must exactly represent the target floating point
13867 format, which only happens when the target base equals two. */
13868 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13869 && TREE_CODE (arg) == REAL_CST
13870 && !TREE_OVERFLOW (arg))
13872 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13874 if (real_isfinite (ra))
13876 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13877 const int prec = fmt->p;
13878 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13879 tree result_s, result_c;
13880 int inexact;
13881 mpfr_t m, ms, mc;
13883 mpfr_inits2 (prec, m, ms, mc, NULL);
13884 mpfr_from_real (m, ra, GMP_RNDN);
13885 mpfr_clear_flags ();
13886 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13887 result_s = do_mpfr_ckconv (ms, type, inexact);
13888 result_c = do_mpfr_ckconv (mc, type, inexact);
13889 mpfr_clears (m, ms, mc, NULL);
13890 if (result_s && result_c)
13892 /* If we are to return in a complex value do so. */
13893 if (!arg_sinp && !arg_cosp)
13894 return build_complex (build_complex_type (type),
13895 result_c, result_s);
13897 /* Dereference the sin/cos pointer arguments. */
13898 arg_sinp = build_fold_indirect_ref (arg_sinp);
13899 arg_cosp = build_fold_indirect_ref (arg_cosp);
13900 /* Proceed if valid pointer type were passed in. */
13901 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13902 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13904 /* Set the values. */
13905 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13906 result_s);
13907 TREE_SIDE_EFFECTS (result_s) = 1;
13908 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13909 result_c);
13910 TREE_SIDE_EFFECTS (result_c) = 1;
13911 /* Combine the assignments into a compound expr. */
13912 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13913 result_s, result_c));
13918 return result;
13921 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13922 two-argument mpfr order N Bessel function FUNC on them and return
13923 the resulting value as a tree with type TYPE. The mpfr precision
13924 is set to the precision of TYPE. We assume that function FUNC
13925 returns zero if the result could be calculated exactly within the
13926 requested precision. */
13927 static tree
13928 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13929 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13930 const REAL_VALUE_TYPE *min, bool inclusive)
13932 tree result = NULL_TREE;
13934 STRIP_NOPS (arg1);
13935 STRIP_NOPS (arg2);
13937 /* To proceed, MPFR must exactly represent the target floating point
13938 format, which only happens when the target base equals two. */
13939 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13940 && host_integerp (arg1, 0)
13941 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13943 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13944 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13946 if (n == (long)n
13947 && real_isfinite (ra)
13948 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13950 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13951 const int prec = fmt->p;
13952 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13953 int inexact;
13954 mpfr_t m;
13956 mpfr_init2 (m, prec);
13957 mpfr_from_real (m, ra, GMP_RNDN);
13958 mpfr_clear_flags ();
13959 inexact = func (m, n, m, rnd);
13960 result = do_mpfr_ckconv (m, type, inexact);
13961 mpfr_clear (m);
13965 return result;
13968 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13969 the pointer *(ARG_QUO) and return the result. The type is taken
13970 from the type of ARG0 and is used for setting the precision of the
13971 calculation and results. */
13973 static tree
13974 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13976 tree const type = TREE_TYPE (arg0);
13977 tree result = NULL_TREE;
13979 STRIP_NOPS (arg0);
13980 STRIP_NOPS (arg1);
13982 /* To proceed, MPFR must exactly represent the target floating point
13983 format, which only happens when the target base equals two. */
13984 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13985 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13986 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13988 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13989 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13991 if (real_isfinite (ra0) && real_isfinite (ra1))
13993 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13994 const int prec = fmt->p;
13995 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13996 tree result_rem;
13997 long integer_quo;
13998 mpfr_t m0, m1;
14000 mpfr_inits2 (prec, m0, m1, NULL);
14001 mpfr_from_real (m0, ra0, GMP_RNDN);
14002 mpfr_from_real (m1, ra1, GMP_RNDN);
14003 mpfr_clear_flags ();
14004 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
14005 /* Remquo is independent of the rounding mode, so pass
14006 inexact=0 to do_mpfr_ckconv(). */
14007 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
14008 mpfr_clears (m0, m1, NULL);
14009 if (result_rem)
14011 /* MPFR calculates quo in the host's long so it may
14012 return more bits in quo than the target int can hold
14013 if sizeof(host long) > sizeof(target int). This can
14014 happen even for native compilers in LP64 mode. In
14015 these cases, modulo the quo value with the largest
14016 number that the target int can hold while leaving one
14017 bit for the sign. */
14018 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14019 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14021 /* Dereference the quo pointer argument. */
14022 arg_quo = build_fold_indirect_ref (arg_quo);
14023 /* Proceed iff a valid pointer type was passed in. */
14024 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14026 /* Set the value. */
14027 tree result_quo
14028 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14029 build_int_cst (TREE_TYPE (arg_quo),
14030 integer_quo));
14031 TREE_SIDE_EFFECTS (result_quo) = 1;
14032 /* Combine the quo assignment with the rem. */
14033 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14034 result_quo, result_rem));
14039 return result;
14042 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14043 resulting value as a tree with type TYPE. The mpfr precision is
14044 set to the precision of TYPE. We assume that this mpfr function
14045 returns zero if the result could be calculated exactly within the
14046 requested precision. In addition, the integer pointer represented
14047 by ARG_SG will be dereferenced and set to the appropriate signgam
14048 (-1,1) value. */
14050 static tree
14051 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14053 tree result = NULL_TREE;
14055 STRIP_NOPS (arg);
14057 /* To proceed, MPFR must exactly represent the target floating point
14058 format, which only happens when the target base equals two. Also
14059 verify ARG is a constant and that ARG_SG is an int pointer. */
14060 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14061 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14062 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14063 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14065 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14067 /* In addition to NaN and Inf, the argument cannot be zero or a
14068 negative integer. */
14069 if (real_isfinite (ra)
14070 && ra->cl != rvc_zero
14071 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
14073 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14074 const int prec = fmt->p;
14075 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14076 int inexact, sg;
14077 mpfr_t m;
14078 tree result_lg;
14080 mpfr_init2 (m, prec);
14081 mpfr_from_real (m, ra, GMP_RNDN);
14082 mpfr_clear_flags ();
14083 inexact = mpfr_lgamma (m, &sg, m, rnd);
14084 result_lg = do_mpfr_ckconv (m, type, inexact);
14085 mpfr_clear (m);
14086 if (result_lg)
14088 tree result_sg;
14090 /* Dereference the arg_sg pointer argument. */
14091 arg_sg = build_fold_indirect_ref (arg_sg);
14092 /* Assign the signgam value into *arg_sg. */
14093 result_sg = fold_build2 (MODIFY_EXPR,
14094 TREE_TYPE (arg_sg), arg_sg,
14095 build_int_cst (TREE_TYPE (arg_sg), sg));
14096 TREE_SIDE_EFFECTS (result_sg) = 1;
14097 /* Combine the signgam assignment with the lgamma result. */
14098 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14099 result_sg, result_lg));
14104 return result;
14107 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14108 function FUNC on it and return the resulting value as a tree with
14109 type TYPE. The mpfr precision is set to the precision of TYPE. We
14110 assume that function FUNC returns zero if the result could be
14111 calculated exactly within the requested precision. */
14113 static tree
14114 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14116 tree result = NULL_TREE;
14118 STRIP_NOPS (arg);
14120 /* To proceed, MPFR must exactly represent the target floating point
14121 format, which only happens when the target base equals two. */
14122 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14123 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14124 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14126 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14127 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14129 if (real_isfinite (re) && real_isfinite (im))
14131 const struct real_format *const fmt =
14132 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14133 const int prec = fmt->p;
14134 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14135 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14136 int inexact;
14137 mpc_t m;
14139 mpc_init2 (m, prec);
14140 mpfr_from_real (mpc_realref(m), re, rnd);
14141 mpfr_from_real (mpc_imagref(m), im, rnd);
14142 mpfr_clear_flags ();
14143 inexact = func (m, m, crnd);
14144 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14145 mpc_clear (m);
14149 return result;
14152 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14153 mpc function FUNC on it and return the resulting value as a tree
14154 with type TYPE. The mpfr precision is set to the precision of
14155 TYPE. We assume that function FUNC returns zero if the result
14156 could be calculated exactly within the requested precision. If
14157 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14158 in the arguments and/or results. */
14160 tree
14161 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14162 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14164 tree result = NULL_TREE;
14166 STRIP_NOPS (arg0);
14167 STRIP_NOPS (arg1);
14169 /* To proceed, MPFR must exactly represent the target floating point
14170 format, which only happens when the target base equals two. */
14171 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14173 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14175 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14177 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14178 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14179 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14180 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14182 if (do_nonfinite
14183 || (real_isfinite (re0) && real_isfinite (im0)
14184 && real_isfinite (re1) && real_isfinite (im1)))
14186 const struct real_format *const fmt =
14187 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14188 const int prec = fmt->p;
14189 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14190 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14191 int inexact;
14192 mpc_t m0, m1;
14194 mpc_init2 (m0, prec);
14195 mpc_init2 (m1, prec);
14196 mpfr_from_real (mpc_realref(m0), re0, rnd);
14197 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14198 mpfr_from_real (mpc_realref(m1), re1, rnd);
14199 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14200 mpfr_clear_flags ();
14201 inexact = func (m0, m0, m1, crnd);
14202 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14203 mpc_clear (m0);
14204 mpc_clear (m1);
14208 return result;
14211 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14212 a normal call should be emitted rather than expanding the function
14213 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14215 static tree
14216 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14218 int nargs = gimple_call_num_args (stmt);
14220 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14221 (nargs > 0
14222 ? gimple_call_arg_ptr (stmt, 0)
14223 : &error_mark_node), fcode);
14226 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14227 a normal call should be emitted rather than expanding the function
14228 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14229 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14230 passed as second argument. */
14232 tree
14233 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14234 enum built_in_function fcode)
14236 int nargs = gimple_call_num_args (stmt);
14238 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14239 (nargs > 0
14240 ? gimple_call_arg_ptr (stmt, 0)
14241 : &error_mark_node), maxlen, fcode);
14244 /* Builtins with folding operations that operate on "..." arguments
14245 need special handling; we need to store the arguments in a convenient
14246 data structure before attempting any folding. Fortunately there are
14247 only a few builtins that fall into this category. FNDECL is the
14248 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14249 result of the function call is ignored. */
14251 static tree
14252 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14253 bool ignore ATTRIBUTE_UNUSED)
14255 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14256 tree ret = NULL_TREE;
14258 switch (fcode)
14260 case BUILT_IN_SPRINTF_CHK:
14261 case BUILT_IN_VSPRINTF_CHK:
14262 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14263 break;
14265 case BUILT_IN_SNPRINTF_CHK:
14266 case BUILT_IN_VSNPRINTF_CHK:
14267 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14269 default:
14270 break;
14272 if (ret)
14274 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14275 TREE_NO_WARNING (ret) = 1;
14276 return ret;
14278 return NULL_TREE;
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   STMT is the GIMPLE call to fold; IGNORE is true if the call's result
   is unused.  Returns the folded replacement tree, or NULL_TREE when a
   normal call should be emitted instead.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only attempt folding for direct calls to builtins, and never for
     calls carrying a __builtin_va_arg_pack expansion.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* When there are no arguments, pass a dummy pointer rather than
	 a pointer one past the statement's operands.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  /* Try the fixed-arity folders first, then the varargs ones.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the no-warning NOP_EXPR wrapper added
		     by gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the explicit builtin decl itself.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  /* For a handful of builtins, RTL expansion emits calls through
     libfunc globals rather than through the decl, so those must be
     redirected to the new assembler name as well.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* When int is narrower than a word, ffs also has an optab-level
	 libfunc that must track the renaming.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14382 /* Return true if DECL is a builtin that expands to a constant or similarly
14383 simple code. */
14384 bool
14385 is_simple_builtin (tree decl)
14387 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14388 switch (DECL_FUNCTION_CODE (decl))
14390 /* Builtins that expand to constants. */
14391 case BUILT_IN_CONSTANT_P:
14392 case BUILT_IN_EXPECT:
14393 case BUILT_IN_OBJECT_SIZE:
14394 case BUILT_IN_UNREACHABLE:
14395 /* Simple register moves or loads from stack. */
14396 case BUILT_IN_ASSUME_ALIGNED:
14397 case BUILT_IN_RETURN_ADDRESS:
14398 case BUILT_IN_EXTRACT_RETURN_ADDR:
14399 case BUILT_IN_FROB_RETURN_ADDR:
14400 case BUILT_IN_RETURN:
14401 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14402 case BUILT_IN_FRAME_ADDRESS:
14403 case BUILT_IN_VA_END:
14404 case BUILT_IN_STACK_SAVE:
14405 case BUILT_IN_STACK_RESTORE:
14406 /* Exception state returns or moves registers around. */
14407 case BUILT_IN_EH_FILTER:
14408 case BUILT_IN_EH_POINTER:
14409 case BUILT_IN_EH_COPY_VALUES:
14410 return true;
14412 default:
14413 return false;
14416 return false;
14419 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14420 most probably expanded inline into reasonably simple code. This is a
14421 superset of is_simple_builtin. */
14422 bool
14423 is_inexpensive_builtin (tree decl)
14425 if (!decl)
14426 return false;
14427 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14428 return true;
14429 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14430 switch (DECL_FUNCTION_CODE (decl))
14432 case BUILT_IN_ABS:
14433 case BUILT_IN_ALLOCA:
14434 case BUILT_IN_ALLOCA_WITH_ALIGN:
14435 case BUILT_IN_BSWAP16:
14436 case BUILT_IN_BSWAP32:
14437 case BUILT_IN_BSWAP64:
14438 case BUILT_IN_CLZ:
14439 case BUILT_IN_CLZIMAX:
14440 case BUILT_IN_CLZL:
14441 case BUILT_IN_CLZLL:
14442 case BUILT_IN_CTZ:
14443 case BUILT_IN_CTZIMAX:
14444 case BUILT_IN_CTZL:
14445 case BUILT_IN_CTZLL:
14446 case BUILT_IN_FFS:
14447 case BUILT_IN_FFSIMAX:
14448 case BUILT_IN_FFSL:
14449 case BUILT_IN_FFSLL:
14450 case BUILT_IN_IMAXABS:
14451 case BUILT_IN_FINITE:
14452 case BUILT_IN_FINITEF:
14453 case BUILT_IN_FINITEL:
14454 case BUILT_IN_FINITED32:
14455 case BUILT_IN_FINITED64:
14456 case BUILT_IN_FINITED128:
14457 case BUILT_IN_FPCLASSIFY:
14458 case BUILT_IN_ISFINITE:
14459 case BUILT_IN_ISINF_SIGN:
14460 case BUILT_IN_ISINF:
14461 case BUILT_IN_ISINFF:
14462 case BUILT_IN_ISINFL:
14463 case BUILT_IN_ISINFD32:
14464 case BUILT_IN_ISINFD64:
14465 case BUILT_IN_ISINFD128:
14466 case BUILT_IN_ISNAN:
14467 case BUILT_IN_ISNANF:
14468 case BUILT_IN_ISNANL:
14469 case BUILT_IN_ISNAND32:
14470 case BUILT_IN_ISNAND64:
14471 case BUILT_IN_ISNAND128:
14472 case BUILT_IN_ISNORMAL:
14473 case BUILT_IN_ISGREATER:
14474 case BUILT_IN_ISGREATEREQUAL:
14475 case BUILT_IN_ISLESS:
14476 case BUILT_IN_ISLESSEQUAL:
14477 case BUILT_IN_ISLESSGREATER:
14478 case BUILT_IN_ISUNORDERED:
14479 case BUILT_IN_VA_ARG_PACK:
14480 case BUILT_IN_VA_ARG_PACK_LEN:
14481 case BUILT_IN_VA_COPY:
14482 case BUILT_IN_TRAP:
14483 case BUILT_IN_SAVEREGS:
14484 case BUILT_IN_POPCOUNTL:
14485 case BUILT_IN_POPCOUNTLL:
14486 case BUILT_IN_POPCOUNTIMAX:
14487 case BUILT_IN_POPCOUNT:
14488 case BUILT_IN_PARITYL:
14489 case BUILT_IN_PARITYLL:
14490 case BUILT_IN_PARITYIMAX:
14491 case BUILT_IN_PARITY:
14492 case BUILT_IN_LABS:
14493 case BUILT_IN_LLABS:
14494 case BUILT_IN_PREFETCH:
14495 return true;
14497 default:
14498 return is_simple_builtin (decl);
14501 return false;