ChangeLog entry:
[official-gcc.git] / gcc / builtins.c
blob72e259194a72242557d941655b57fdd7b00409c7
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringized identifiers of all built-in functions, indexed by
   enum built_in_function.  builtins.def expands DEF_BUILTIN once per
   built-in, and the #X stringization yields one literal per entry.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.
   (The comment previously omitted __atomic_, which the code has
   checked for since the C11 atomic built-ins were added.)  */

static bool
is_builtin_name (const char *name)
{
  /* These prefixes mark the "internal" names of built-in functions;
     a call spelled with such a name is expanded inline regardless of
     optimization level (see called_as_built_in).  */
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.
   Only FUNCTION_DECL nodes can qualify; any other kind of declaration
   is rejected before DECL_BUILT_IN is consulted.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  Store N in *BITPOSP and return M.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.  */

unsigned int
get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    {
      if (TREE_CODE (exp) == FUNCTION_DECL)
	{
	  /* Function addresses can encode extra information besides their
	     alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	     allows the low bit to be used as a virtual bit, we know
	     that the address itself must be 2-byte aligned.  */
	  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	    align = 2 * BITS_PER_UNIT;
	  else
	    align = BITS_PER_UNIT;
	}
      else
	align = DECL_ALIGN (exp);
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* The target may promise a larger alignment for constants.  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* (mask & -mask) isolates the lowest set bit of the AND mask,
	     i.e. the byte alignment the masking guarantees.  */
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  /* Fold the SSA pointer-info misalignment into bitpos, keeping
	     only the part not already implied by ALIGN.  */
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  /* A constant-step index contributes at most the alignment of
	     the step's lowest set bit.  */
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
      /* An INDEX2 part has unknown scale, so nothing can be assumed.  */
      if (TMR_INDEX2 (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* Fully unknown offset: fall back to byte alignment.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  *bitposp = bitpos;
  return align;
}
436 /* Return the alignment in bits of EXP, an object. */
438 unsigned int
439 get_object_alignment (tree exp)
441 unsigned HOST_WIDE_INT bitpos = 0;
442 unsigned int align;
444 align = get_object_alignment_1 (exp, &bitpos);
446 /* align and bitpos now specify known low bits of the pointer.
447 ptr & (align - 1) == bitpos. */
449 if (bitpos != 0)
450 align = (bitpos & -bitpos);
452 return align;
455 /* Return the alignment of object EXP, also considering its type when we do
456 not know of explicit misalignment. Only handle MEM_REF and TARGET_MEM_REF.
458 ??? Note that, in the general case, the type of an expression is not kept
459 consistent with misalignment information by the front-end, for example when
460 taking the address of a member of a packed structure. However, in most of
461 the cases, expressions have the alignment of their type so we optimistically
462 fall back to this alignment when we cannot compute a misalignment. */
464 unsigned int
465 get_object_or_type_alignment (tree exp)
467 unsigned HOST_WIDE_INT misalign;
468 unsigned int align = get_object_alignment_1 (exp, &misalign);
470 gcc_assert (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF);
472 if (misalign != 0)
473 align = (misalign & -misalign);
474 else
475 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)), align);
477 return align;
/* For a pointer valued expression EXP compute values M and N such that
   M divides (EXP - N) and such that N < M.  Store N in *BITPOSP and return M.

   If EXP is not a pointer, 0 is returned.  */

unsigned int
get_pointer_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  /* &object: delegate to the object-alignment machinery.  */
  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_1 (TREE_OPERAND (exp, 0), bitposp);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Use alignment recorded by points-to analysis, if any.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
      if (!pi)
	{
	  *bitposp = 0;
	  return BITS_PER_UNIT;
	}
      *bitposp = pi->misalign * BITS_PER_UNIT;
      return pi->align * BITS_PER_UNIT;
    }

  /* Fall back: a pointer is at least byte-aligned; a non-pointer
     yields 0 per the contract above.  */
  *bitposp = 0;
  return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
}
509 /* Return the alignment in bits of EXP, a pointer valued expression.
510 The alignment returned is, by default, the alignment of the thing that
511 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
513 Otherwise, look at the expression to see if we can do better, i.e., if the
514 expression is actually pointing at an object whose alignment is tighter. */
516 unsigned int
517 get_pointer_alignment (tree exp)
519 unsigned HOST_WIDE_INT bitpos = 0;
520 unsigned int align;
522 align = get_pointer_alignment_1 (exp, &bitpos);
524 /* align and bitpos now specify known low bits of the pointer.
525 ptr & (align - 1) == bitpos. */
527 if (bitpos != 0)
528 align = (bitpos & -bitpos);
530 return align;
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* cond ? a : b has a known length if both arms agree (and the
     condition is side-effect free, or we only need the value).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (a, b): the length is that of b.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
635 /* Return a char pointer for a C string if it is a string constant
636 or sum of string constant and integer constant. */
638 static const char *
639 c_getstr (tree src)
641 tree offset_node;
643 src = string_constant (src, &offset_node);
644 if (src == 0)
645 return 0;
647 if (offset_node == 0)
648 return TREE_STRING_POINTER (src);
649 else if (!host_integerp (offset_node, 1)
650 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
651 return 0;
653 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit offset J, honoring the
	 target's word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      /* CH becomes 0 once the string's NUL terminator has been read;
	 after that we keep OR-ing in zero bytes (zero padding).  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  return immed_double_const (c[0], c[1], mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return 1 (failure) otherwise.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate to the target's char width.  */
  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two truncations
     disagree, the value does not fit in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
717 /* Similar to save_expr, but assumes that arbitrary code is not executed
718 in between the multiple evaluations. In particular, we assume that a
719 non-addressable local variable will not be modified. */
721 static tree
722 builtin_save_expr (tree exp)
724 if (TREE_CODE (exp) == SSA_NAME
725 || (TREE_ADDRESSABLE (exp) == 0
726 && (TREE_CODE (exp) == PARM_DECL
727 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
728 return exp;
730 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
817 /* Alias set used for setjmp buffer. */
818 static alias_set_type setjmp_alias_set = -1;
820 /* Construct the leading half of a __builtin_setjmp call. Control will
821 return to RECEIVER_LABEL. This is also called directly by the SJLJ
822 exception handling code. */
824 void
825 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
827 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
828 rtx stack_save;
829 rtx mem;
831 if (setjmp_alias_set == -1)
832 setjmp_alias_set = new_alias_set ();
834 buf_addr = convert_memory_address (Pmode, buf_addr);
836 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
838 /* We store the frame pointer and the address of receiver_label in
839 the buffer and use the rest of it for the stack save area, which
840 is machine-dependent. */
842 mem = gen_rtx_MEM (Pmode, buf_addr);
843 set_mem_alias_set (mem, setjmp_alias_set);
844 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
846 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
847 set_mem_alias_set (mem, setjmp_alias_set);
849 emit_move_insn (validize_mem (mem),
850 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
852 stack_save = gen_rtx_MEM (sa_mode,
853 plus_constant (buf_addr,
854 2 * GET_MODE_SIZE (Pmode)));
855 set_mem_alias_set (stack_save, setjmp_alias_set);
856 emit_stack_save (SAVE_NONLOCAL, &stack_save);
858 /* If there is further processing to do, do it. */
859 #ifdef HAVE_builtin_setjmp_setup
860 if (HAVE_builtin_setjmp_setup)
861 emit_insn (gen_builtin_setjmp_setup (buf_addr));
862 #endif
864 /* We have a nonlocal label. */
865 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.
	 NOTE(review): the preceding rationale is inferred from the
	 elim_regs scan below - confirm against ELIMINABLE_REGS docs.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
939 /* __builtin_longjmp is passed a pointer to an array of five words (not
940 all will be used on all machines). It operates similarly to the C
941 library function of the same name, but is more efficient. Much of
942 the code below is copied from the handling of non-local gotos. */
944 static void
945 expand_builtin_longjmp (rtx buf_addr, rtx value)
947 rtx fp, lab, stack, insn, last;
948 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
950 /* DRAP is needed for stack realign if longjmp is expanded to current
951 function */
952 if (SUPPORTS_STACK_ALIGNMENT)
953 crtl->need_drap = true;
955 if (setjmp_alias_set == -1)
956 setjmp_alias_set = new_alias_set ();
958 buf_addr = convert_memory_address (Pmode, buf_addr);
960 buf_addr = force_reg (Pmode, buf_addr);
962 /* We require that the user must pass a second argument of 1, because
963 that is what builtin_setjmp will return. */
964 gcc_assert (value == const1_rtx);
966 last = get_last_insn ();
967 #ifdef HAVE_builtin_longjmp
968 if (HAVE_builtin_longjmp)
969 emit_insn (gen_builtin_longjmp (buf_addr));
970 else
971 #endif
973 fp = gen_rtx_MEM (Pmode, buf_addr);
974 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
975 GET_MODE_SIZE (Pmode)));
977 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
978 2 * GET_MODE_SIZE (Pmode)));
979 set_mem_alias_set (fp, setjmp_alias_set);
980 set_mem_alias_set (lab, setjmp_alias_set);
981 set_mem_alias_set (stack, setjmp_alias_set);
983 /* Pick up FP, label, and SP from the block and jump. This code is
984 from expand_goto in stmt.c; see there for detailed comments. */
985 #ifdef HAVE_nonlocal_goto
986 if (HAVE_nonlocal_goto)
987 /* We have to pass a value to the nonlocal_goto pattern that will
988 get copied into the static_chain pointer, but it does not matter
989 what that value is, because builtin_setjmp does not use it. */
990 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
991 else
992 #endif
994 lab = copy_to_reg (lab);
996 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
997 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
999 emit_move_insn (hard_frame_pointer_rtx, fp);
1000 emit_stack_restore (SAVE_NONLOCAL, stack);
1002 emit_use (hard_frame_pointer_rtx);
1003 emit_use (stack_pointer_rtx);
1004 emit_indirect_jump (lab);
1008 /* Search backwards and mark the jump insn as a non-local goto.
1009 Note that this precludes the use of __builtin_longjmp to a
1010 __builtin_setjmp target in the same function. However, we've
1011 already cautioned the user that these functions are for
1012 internal exception handling use only. */
1013 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1015 gcc_assert (insn != last);
1017 if (JUMP_P (insn))
1019 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1020 break;
1022 else if (CALL_P (insn))
1023 break;
1027 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1028 and the address of the save area. */
1030 static rtx
1031 expand_builtin_nonlocal_goto (tree exp)
1033 tree t_label, t_save_area;
1034 rtx r_label, r_save_area, r_fp, r_sp, insn;
1036 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1037 return NULL_RTX;
1039 t_label = CALL_EXPR_ARG (exp, 0);
1040 t_save_area = CALL_EXPR_ARG (exp, 1);
1042 r_label = expand_normal (t_label);
1043 r_label = convert_memory_address (Pmode, r_label);
1044 r_save_area = expand_normal (t_save_area);
1045 r_save_area = convert_memory_address (Pmode, r_save_area);
1046 /* Copy the address of the save location to a register just in case it was
1047 based on the frame pointer. */
1048 r_save_area = copy_to_reg (r_save_area);
1049 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1050 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1051 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1053 crtl->has_nonlocal_goto = 1;
1055 #ifdef HAVE_nonlocal_goto
1056 /* ??? We no longer need to pass the static chain value, afaik. */
1057 if (HAVE_nonlocal_goto)
1058 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1059 else
1060 #endif
1062 r_label = copy_to_reg (r_label);
1064 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1065 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1067 /* Restore frame pointer for containing function. */
1068 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1069 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1071 /* USE of hard_frame_pointer_rtx added for consistency;
1072 not clear if really needed. */
1073 emit_use (hard_frame_pointer_rtx);
1074 emit_use (stack_pointer_rtx);
1076 /* If the architecture is using a GP register, we must
1077 conservatively assume that the target function makes use of it.
1078 The prologue of functions with nonlocal gotos must therefore
1079 initialize the GP register to the appropriate value, and we
1080 must then make sure that this value is live at the point
1081 of the jump. (Note that this doesn't necessarily apply
1082 to targets with a nonlocal_goto pattern; they are free
1083 to implement it in their own way. Note also that this is
1084 a no-op if the GP register is a global invariant.) */
1085 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1086 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1087 emit_use (pic_offset_table_rtx);
1089 emit_indirect_jump (r_label);
1092 /* Search backwards to the jump insn and mark it as a
1093 non-local goto. */
1094 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1096 if (JUMP_P (insn))
1098 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1099 break;
1101 else if (CALL_P (insn))
1102 break;
1105 return const0_rtx;
1108 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1109 (not all will be used on all machines) that was passed to __builtin_setjmp.
1110 It updates the stack pointer in that block to correspond to the current
1111 stack pointer. */
1113 static void
1114 expand_builtin_update_setjmp_buf (rtx buf_addr)
1116 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1117 rtx stack_save
1118 = gen_rtx_MEM (sa_mode,
1119 memory_address
1120 (sa_mode,
1121 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1123 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1126 /* Expand a call to __builtin_prefetch. For a target that does not support
1127 data prefetch, evaluate the memory address argument in case it has side
1128 effects. */
1130 static void
1131 expand_builtin_prefetch (tree exp)
1133 tree arg0, arg1, arg2;
1134 int nargs;
1135 rtx op0, op1, op2;
1137 if (!validate_arglist (exp, POINTER_TYPE, 0))
1138 return;
1140 arg0 = CALL_EXPR_ARG (exp, 0);
1142 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1143 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1144 locality). */
1145 nargs = call_expr_nargs (exp);
1146 if (nargs > 1)
1147 arg1 = CALL_EXPR_ARG (exp, 1);
1148 else
1149 arg1 = integer_zero_node;
1150 if (nargs > 2)
1151 arg2 = CALL_EXPR_ARG (exp, 2);
1152 else
1153 arg2 = integer_three_node;
1155 /* Argument 0 is an address. */
1156 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1158 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1159 if (TREE_CODE (arg1) != INTEGER_CST)
1161 error ("second argument to %<__builtin_prefetch%> must be a constant");
1162 arg1 = integer_zero_node;
1164 op1 = expand_normal (arg1);
1165 /* Argument 1 must be either zero or one. */
1166 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1168 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1169 " using zero");
1170 op1 = const0_rtx;
1173 /* Argument 2 (locality) must be a compile-time constant int. */
1174 if (TREE_CODE (arg2) != INTEGER_CST)
1176 error ("third argument to %<__builtin_prefetch%> must be a constant");
1177 arg2 = integer_zero_node;
1179 op2 = expand_normal (arg2);
1180 /* Argument 2 must be 0, 1, 2, or 3. */
1181 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1183 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1184 op2 = const0_rtx;
1187 #ifdef HAVE_prefetch
1188 if (HAVE_prefetch)
1190 struct expand_operand ops[3];
1192 create_address_operand (&ops[0], op0);
1193 create_integer_operand (&ops[1], INTVAL (op1));
1194 create_integer_operand (&ops[2], INTVAL (op2));
1195 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1196 return;
1198 #endif
1200 /* Don't do anything with direct references to volatile memory, but
1201 generate code to handle other side effects. */
1202 if (!MEM_P (op0) && side_effects_p (op0))
1203 emit_insn (op0);
1206 /* Get a MEM rtx for expression EXP which is the address of an operand
1207 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1208 the maximum length of the block of memory that might be accessed or
1209 NULL if unknown. */
1211 static rtx
1212 get_memory_rtx (tree exp, tree len)
1214 tree orig_exp = exp;
1215 rtx addr, mem;
1216 HOST_WIDE_INT off;
1218 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1219 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1220 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1221 exp = TREE_OPERAND (exp, 0);
1223 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1224 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1226 /* Get an expression we can use to find the attributes to assign to MEM.
1227 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1228 we can. First remove any nops. */
1229 while (CONVERT_EXPR_P (exp)
1230 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1231 exp = TREE_OPERAND (exp, 0);
1233 off = 0;
1234 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1235 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1236 && host_integerp (TREE_OPERAND (exp, 1), 0)
1237 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1238 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1239 else if (TREE_CODE (exp) == ADDR_EXPR)
1240 exp = TREE_OPERAND (exp, 0);
1241 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1242 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1243 else
1244 exp = NULL;
1246 /* Honor attributes derived from exp, except for the alias set
1247 (as builtin stringops may alias with anything) and the size
1248 (as stringops may access multiple array elements). */
1249 if (exp)
1251 set_mem_attributes (mem, exp, 0);
1253 if (off)
1254 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1256 /* Allow the string and memory builtins to overflow from one
1257 field into another, see http://gcc.gnu.org/PR23561.
1258 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1259 memory accessed by the string or memory builtin will fit
1260 within the field. */
1261 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1263 tree mem_expr = MEM_EXPR (mem);
1264 HOST_WIDE_INT offset = -1, length = -1;
1265 tree inner = exp;
1267 while (TREE_CODE (inner) == ARRAY_REF
1268 || CONVERT_EXPR_P (inner)
1269 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1270 || TREE_CODE (inner) == SAVE_EXPR)
1271 inner = TREE_OPERAND (inner, 0);
1273 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1275 if (MEM_OFFSET_KNOWN_P (mem))
1276 offset = MEM_OFFSET (mem);
1278 if (offset >= 0 && len && host_integerp (len, 0))
1279 length = tree_low_cst (len, 0);
1281 while (TREE_CODE (inner) == COMPONENT_REF)
1283 tree field = TREE_OPERAND (inner, 1);
1284 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1285 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1287 /* Bitfields are generally not byte-addressable. */
1288 gcc_assert (!DECL_BIT_FIELD (field)
1289 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1290 % BITS_PER_UNIT) == 0
1291 && host_integerp (DECL_SIZE (field), 0)
1292 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1293 % BITS_PER_UNIT) == 0));
1295 /* If we can prove that the memory starting at XEXP (mem, 0) and
1296 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1297 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1298 fields without DECL_SIZE_UNIT like flexible array members. */
1299 if (length >= 0
1300 && DECL_SIZE_UNIT (field)
1301 && host_integerp (DECL_SIZE_UNIT (field), 0))
1303 HOST_WIDE_INT size
1304 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1305 if (offset <= size
1306 && length <= size
1307 && offset + length <= size)
1308 break;
1311 if (offset >= 0
1312 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1313 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1314 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1315 / BITS_PER_UNIT;
1316 else
1318 offset = -1;
1319 length = -1;
1322 mem_expr = TREE_OPERAND (mem_expr, 0);
1323 inner = TREE_OPERAND (inner, 0);
1326 if (mem_expr == NULL)
1327 offset = -1;
1328 if (mem_expr != MEM_EXPR (mem))
1330 set_mem_expr (mem, mem_expr);
1331 if (offset >= 0)
1332 set_mem_offset (mem, offset);
1333 else
1334 clear_mem_offset (mem);
1337 set_mem_alias_set (mem, 0);
1338 clear_mem_size (mem);
1341 return mem;
1344 /* Built-in functions to perform an untyped call and return. */
1346 #define apply_args_mode \
1347 (this_target_builtins->x_apply_args_mode)
1348 #define apply_result_mode \
1349 (this_target_builtins->x_apply_result_mode)
1351 /* Return the size required for the block returned by __builtin_apply_args,
1352 and initialize apply_args_mode. */
1354 static int
1355 apply_args_size (void)
1357 static int size = -1;
1358 int align;
1359 unsigned int regno;
1360 enum machine_mode mode;
1362 /* The values computed by this function never change. */
1363 if (size < 0)
1365 /* The first value is the incoming arg-pointer. */
1366 size = GET_MODE_SIZE (Pmode);
1368 /* The second value is the structure value address unless this is
1369 passed as an "invisible" first argument. */
1370 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1371 size += GET_MODE_SIZE (Pmode);
1373 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1374 if (FUNCTION_ARG_REGNO_P (regno))
1376 mode = targetm.calls.get_raw_arg_mode (regno);
1378 gcc_assert (mode != VOIDmode);
1380 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1381 if (size % align != 0)
1382 size = CEIL (size, align) * align;
1383 size += GET_MODE_SIZE (mode);
1384 apply_args_mode[regno] = mode;
1386 else
1388 apply_args_mode[regno] = VOIDmode;
1391 return size;
1394 /* Return the size required for the block returned by __builtin_apply,
1395 and initialize apply_result_mode. */
1397 static int
1398 apply_result_size (void)
1400 static int size = -1;
1401 int align, regno;
1402 enum machine_mode mode;
1404 /* The values computed by this function never change. */
1405 if (size < 0)
1407 size = 0;
1409 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1410 if (targetm.calls.function_value_regno_p (regno))
1412 mode = targetm.calls.get_raw_result_mode (regno);
1414 gcc_assert (mode != VOIDmode);
1416 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1417 if (size % align != 0)
1418 size = CEIL (size, align) * align;
1419 size += GET_MODE_SIZE (mode);
1420 apply_result_mode[regno] = mode;
1422 else
1423 apply_result_mode[regno] = VOIDmode;
1425 /* Allow targets that use untyped_call and untyped_return to override
1426 the size so that machine-specific information can be stored here. */
1427 #ifdef APPLY_RESULT_SIZE
1428 size = APPLY_RESULT_SIZE;
1429 #endif
1431 return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per register that
   apply_result_mode marks as carrying a return value.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1465 /* Save the state required to perform an untyped call with the same
1466 arguments as were passed to the current function. */
1468 static rtx
1469 expand_builtin_apply_args_1 (void)
1471 rtx registers, tem;
1472 int size, align, regno;
1473 enum machine_mode mode;
1474 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1476 /* Create a block where the arg-pointer, structure value address,
1477 and argument registers can be saved. */
1478 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1480 /* Walk past the arg-pointer and structure value address. */
1481 size = GET_MODE_SIZE (Pmode);
1482 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1483 size += GET_MODE_SIZE (Pmode);
1485 /* Save each register used in calling a function to the block. */
1486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1487 if ((mode = apply_args_mode[regno]) != VOIDmode)
1489 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1490 if (size % align != 0)
1491 size = CEIL (size, align) * align;
1493 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1495 emit_move_insn (adjust_address (registers, mode, size), tem);
1496 size += GET_MODE_SIZE (mode);
1499 /* Save the arg pointer to the block. */
1500 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1501 #ifdef STACK_GROWS_DOWNWARD
1502 /* We need the pointer as the caller actually passed them to us, not
1503 as we might have pretended they were passed. Make sure it's a valid
1504 operand, as emit_move_insn isn't expected to handle a PLUS. */
1506 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1507 NULL_RTX);
1508 #endif
1509 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1511 size = GET_MODE_SIZE (Pmode);
1513 /* Save the structure value address unless this is passed as an
1514 "invisible" first argument. */
1515 if (struct_incoming_value)
1517 emit_move_insn (adjust_address (registers, Pmode, size),
1518 copy_to_reg (struct_incoming_value));
1519 size += GET_MODE_SIZE (Pmode);
1522 /* Return the address of the block. */
1523 return copy_addr_to_reg (XEXP (registers, 0));
1526 /* __builtin_apply_args returns block of memory allocated on
1527 the stack into which is stored the arg pointer, structure
1528 value address, static chain, and all the registers that might
1529 possibly be used in performing a function call. The code is
1530 moved to the start of the function so the incoming values are
1531 saved. */
1533 static rtx
1534 expand_builtin_apply_args (void)
1536 /* Don't do __builtin_apply_args more than once in a function.
1537 Save the result of the first call and reuse it. */
1538 if (apply_args_value != 0)
1539 return apply_args_value;
1541 /* When this function is called, it means that registers must be
1542 saved on entry to this function. So we migrate the
1543 call to the first insn of this function. */
1544 rtx temp;
1545 rtx seq;
1547 start_sequence ();
1548 temp = expand_builtin_apply_args_1 ();
1549 seq = get_insns ();
1550 end_sequence ();
1552 apply_args_value = temp;
1554 /* Put the insns after the NOTE that starts the function.
1555 If this is inside a start_sequence, make the outer-level insn
1556 chain current, so the code is placed at the start of the
1557 function. If internal_arg_pointer is a non-virtual pseudo,
1558 it needs to be placed after the function that initializes
1559 that pseudo. */
1560 push_topmost_sequence ();
1561 if (REG_P (crtl->args.internal_arg_pointer)
1562 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1563 emit_insn_before (seq, parm_birth_insn);
1564 else
1565 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1566 pop_topmost_sequence ();
1567 return temp;
1571 /* Perform an untyped call and save the state required to perform an
1572 untyped return of whatever value was returned by the given function. */
1574 static rtx
1575 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1577 int size, align, regno;
1578 enum machine_mode mode;
1579 rtx incoming_args, result, reg, dest, src, call_insn;
1580 rtx old_stack_level = 0;
1581 rtx call_fusage = 0;
1582 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1584 arguments = convert_memory_address (Pmode, arguments);
1586 /* Create a block where the return registers can be saved. */
1587 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1589 /* Fetch the arg pointer from the ARGUMENTS block. */
1590 incoming_args = gen_reg_rtx (Pmode);
1591 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1592 #ifndef STACK_GROWS_DOWNWARD
1593 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1594 incoming_args, 0, OPTAB_LIB_WIDEN);
1595 #endif
1597 /* Push a new argument block and copy the arguments. Do not allow
1598 the (potential) memcpy call below to interfere with our stack
1599 manipulations. */
1600 do_pending_stack_adjust ();
1601 NO_DEFER_POP;
1603 /* Save the stack with nonlocal if available. */
1604 #ifdef HAVE_save_stack_nonlocal
1605 if (HAVE_save_stack_nonlocal)
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608 #endif
1609 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1611 /* Allocate a block of memory onto the stack and copy the memory
1612 arguments to the outgoing arguments address. We can pass TRUE
1613 as the 4th argument because we just saved the stack pointer
1614 and will restore it right after the call. */
1615 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1617 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1618 may have already set current_function_calls_alloca to true.
1619 current_function_calls_alloca won't be set if argsize is zero,
1620 so we have to guarantee need_drap is true here. */
1621 if (SUPPORTS_STACK_ALIGNMENT)
1622 crtl->need_drap = true;
1624 dest = virtual_outgoing_args_rtx;
1625 #ifndef STACK_GROWS_DOWNWARD
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 #endif
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1677 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1678 and we don't want to load it into a register as an optimization,
1679 because prepare_call_address already did it if it should be done. */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1683 /* Generate the actual call instruction and save the return value. */
1684 #ifdef HAVE_untyped_call
1685 if (HAVE_untyped_call)
1686 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1687 result, result_vector (1, result)));
1688 else
1689 #endif
1690 #ifdef HAVE_call_value
1691 if (HAVE_call_value)
1693 rtx valreg = 0;
1695 /* Locate the unique return register. It is not possible to
1696 express a call that sets more than one return register using
1697 call_value; use untyped_call for that. In fact, untyped_call
1698 only needs to save the return registers in the given block. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1704 valreg = gen_rtx_REG (mode, regno);
1707 emit_call_insn (GEN_CALL_VALUE (valreg,
1708 gen_rtx_MEM (FUNCTION_MODE, function),
1709 const0_rtx, NULL_RTX, const0_rtx));
1711 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 else
1714 #endif
1715 gcc_unreachable ();
1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
1722 /* Restore the stack. */
1723 #ifdef HAVE_save_stack_nonlocal
1724 if (HAVE_save_stack_nonlocal)
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727 #endif
1728 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1729 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1731 OK_DEFER_POP;
1733 /* Return the address of the result block. */
1734 result = copy_addr_to_reg (XEXP (result, 0));
1735 return convert_memory_address (ptr_mode, result);
1738 /* Perform an untyped return. */
1740 static void
1741 expand_builtin_return (rtx result)
1743 int size, align, regno;
1744 enum machine_mode mode;
1745 rtx reg;
1746 rtx call_fusage = 0;
1748 result = convert_memory_address (Pmode, result);
1750 apply_result_size ();
1751 result = gen_rtx_MEM (BLKmode, result);
1753 #ifdef HAVE_untyped_return
1754 if (HAVE_untyped_return)
1756 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1757 emit_barrier ();
1758 return;
1760 #endif
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1783 /* Return whatever values was restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
1788 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790 static enum type_class
1791 type_to_class (tree type)
1793 switch (TREE_CODE (type))
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1816 /* Expand a call EXP to __builtin_classify_type. */
1818 static rtx
1819 expand_builtin_classify_type (tree exp)
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
1826 /* This helper macro, meant to be used in mathfn_built_in below,
1827 determines which among a set of three builtin math functions is
1828 appropriate for a given type mode. The `F' and `L' cases are
1829 automatically generated from the `double' case. */
1830 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1832 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1833 fcodel = BUILT_IN_MATHFN##L ; break;
1834 /* Similar to above, but appends _R after any F/L suffix. */
1835 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1836 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1837 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1838 fcodel = BUILT_IN_MATHFN##L_R ; break;
1840 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1841 if available. If IMPLICIT is true use the implicit builtin declaration,
1842 otherwise use the explicit declaration. If we can't do the conversion,
1843 return zero. */
1845 static tree
1846 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1848 enum built_in_function fcode, fcodef, fcodel, fcode2;
1850 switch (fn)
1852 CASE_MATHFN (BUILT_IN_ACOS)
1853 CASE_MATHFN (BUILT_IN_ACOSH)
1854 CASE_MATHFN (BUILT_IN_ASIN)
1855 CASE_MATHFN (BUILT_IN_ASINH)
1856 CASE_MATHFN (BUILT_IN_ATAN)
1857 CASE_MATHFN (BUILT_IN_ATAN2)
1858 CASE_MATHFN (BUILT_IN_ATANH)
1859 CASE_MATHFN (BUILT_IN_CBRT)
1860 CASE_MATHFN (BUILT_IN_CEIL)
1861 CASE_MATHFN (BUILT_IN_CEXPI)
1862 CASE_MATHFN (BUILT_IN_COPYSIGN)
1863 CASE_MATHFN (BUILT_IN_COS)
1864 CASE_MATHFN (BUILT_IN_COSH)
1865 CASE_MATHFN (BUILT_IN_DREM)
1866 CASE_MATHFN (BUILT_IN_ERF)
1867 CASE_MATHFN (BUILT_IN_ERFC)
1868 CASE_MATHFN (BUILT_IN_EXP)
1869 CASE_MATHFN (BUILT_IN_EXP10)
1870 CASE_MATHFN (BUILT_IN_EXP2)
1871 CASE_MATHFN (BUILT_IN_EXPM1)
1872 CASE_MATHFN (BUILT_IN_FABS)
1873 CASE_MATHFN (BUILT_IN_FDIM)
1874 CASE_MATHFN (BUILT_IN_FLOOR)
1875 CASE_MATHFN (BUILT_IN_FMA)
1876 CASE_MATHFN (BUILT_IN_FMAX)
1877 CASE_MATHFN (BUILT_IN_FMIN)
1878 CASE_MATHFN (BUILT_IN_FMOD)
1879 CASE_MATHFN (BUILT_IN_FREXP)
1880 CASE_MATHFN (BUILT_IN_GAMMA)
1881 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1882 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1883 CASE_MATHFN (BUILT_IN_HYPOT)
1884 CASE_MATHFN (BUILT_IN_ILOGB)
1885 CASE_MATHFN (BUILT_IN_ICEIL)
1886 CASE_MATHFN (BUILT_IN_IFLOOR)
1887 CASE_MATHFN (BUILT_IN_INF)
1888 CASE_MATHFN (BUILT_IN_IRINT)
1889 CASE_MATHFN (BUILT_IN_IROUND)
1890 CASE_MATHFN (BUILT_IN_ISINF)
1891 CASE_MATHFN (BUILT_IN_J0)
1892 CASE_MATHFN (BUILT_IN_J1)
1893 CASE_MATHFN (BUILT_IN_JN)
1894 CASE_MATHFN (BUILT_IN_LCEIL)
1895 CASE_MATHFN (BUILT_IN_LDEXP)
1896 CASE_MATHFN (BUILT_IN_LFLOOR)
1897 CASE_MATHFN (BUILT_IN_LGAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1899 CASE_MATHFN (BUILT_IN_LLCEIL)
1900 CASE_MATHFN (BUILT_IN_LLFLOOR)
1901 CASE_MATHFN (BUILT_IN_LLRINT)
1902 CASE_MATHFN (BUILT_IN_LLROUND)
1903 CASE_MATHFN (BUILT_IN_LOG)
1904 CASE_MATHFN (BUILT_IN_LOG10)
1905 CASE_MATHFN (BUILT_IN_LOG1P)
1906 CASE_MATHFN (BUILT_IN_LOG2)
1907 CASE_MATHFN (BUILT_IN_LOGB)
1908 CASE_MATHFN (BUILT_IN_LRINT)
1909 CASE_MATHFN (BUILT_IN_LROUND)
1910 CASE_MATHFN (BUILT_IN_MODF)
1911 CASE_MATHFN (BUILT_IN_NAN)
1912 CASE_MATHFN (BUILT_IN_NANS)
1913 CASE_MATHFN (BUILT_IN_NEARBYINT)
1914 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1915 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1916 CASE_MATHFN (BUILT_IN_POW)
1917 CASE_MATHFN (BUILT_IN_POWI)
1918 CASE_MATHFN (BUILT_IN_POW10)
1919 CASE_MATHFN (BUILT_IN_REMAINDER)
1920 CASE_MATHFN (BUILT_IN_REMQUO)
1921 CASE_MATHFN (BUILT_IN_RINT)
1922 CASE_MATHFN (BUILT_IN_ROUND)
1923 CASE_MATHFN (BUILT_IN_SCALB)
1924 CASE_MATHFN (BUILT_IN_SCALBLN)
1925 CASE_MATHFN (BUILT_IN_SCALBN)
1926 CASE_MATHFN (BUILT_IN_SIGNBIT)
1927 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1928 CASE_MATHFN (BUILT_IN_SIN)
1929 CASE_MATHFN (BUILT_IN_SINCOS)
1930 CASE_MATHFN (BUILT_IN_SINH)
1931 CASE_MATHFN (BUILT_IN_SQRT)
1932 CASE_MATHFN (BUILT_IN_TAN)
1933 CASE_MATHFN (BUILT_IN_TANH)
1934 CASE_MATHFN (BUILT_IN_TGAMMA)
1935 CASE_MATHFN (BUILT_IN_TRUNC)
1936 CASE_MATHFN (BUILT_IN_Y0)
1937 CASE_MATHFN (BUILT_IN_Y1)
1938 CASE_MATHFN (BUILT_IN_YN)
1940 default:
1941 return NULL_TREE;
1944 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1945 fcode2 = fcode;
1946 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1947 fcode2 = fcodef;
1948 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1949 fcode2 = fcodel;
1950 else
1951 return NULL_TREE;
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1956 return builtin_decl_explicit (fcode2);
1959 /* Like mathfn_built_in_1(), but always use the implicit array. */
1961 tree
1962 mathfn_built_in (tree type, enum built_in_function fn)
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1967 /* If errno must be maintained, expand the RTL to check if the result,
1968 TARGET, of a built-in function call, EXP, is NaN, and if so set
1969 errno to EDOM. */
1971 static void
1972 expand_errno_check (tree exp, rtx target)
1974 rtx lab = gen_label_rtx ();
1976 /* Test the result; if it is NaN, set errno=EDOM because
1977 the argument was not in the domain. */
1978 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1979 NULL_RTX, NULL_RTX, lab,
1980 /* The jump is very likely. */
1981 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1983 #ifdef TARGET_EDOM
1984 /* If this built-in doesn't throw an exception, set errno directly. */
1985 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1987 #ifdef GEN_ERRNO_RTX
1988 rtx errno_rtx = GEN_ERRNO_RTX;
1989 #else
1990 rtx errno_rtx
1991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1992 #endif
1993 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1994 emit_label (lab);
1995 return;
1997 #endif
1999 /* Make sure the library call isn't expanded as a tail call. */
2000 CALL_EXPR_TAILCALL (exp) = 0;
2002 /* We can't set errno=EDOM directly; let the library call do it.
2003 Pop the arguments right away in case the call gets deleted. */
2004 NO_DEFER_POP;
2005 expand_call (exp, target, 0);
2006 OK_DEFER_POP;
2007 emit_label (lab);
2010 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2011 Return NULL_RTX if a normal call should be emitted rather than expanding
2012 the function in-line. EXP is the expression that is a call to the builtin
2013 function; if convenient, the result should be placed in TARGET.
2014 SUBTARGET may be used as the target for computing one of EXP's operands. */
2016 static rtx
2017 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2019 optab builtin_optab;
2020 rtx op0, insns;
2021 tree fndecl = get_callee_fndecl (exp);
2022 enum machine_mode mode;
2023 bool errno_set = false;
2024 tree arg;
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2029 arg = CALL_EXPR_ARG (exp, 0);
2031 switch (DECL_FUNCTION_CODE (fndecl))
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 builtin_optab = sqrt_optab;
2036 break;
2037 CASE_FLT_FN (BUILT_IN_EXP):
2038 errno_set = true; builtin_optab = exp_optab; break;
2039 CASE_FLT_FN (BUILT_IN_EXP10):
2040 CASE_FLT_FN (BUILT_IN_POW10):
2041 errno_set = true; builtin_optab = exp10_optab; break;
2042 CASE_FLT_FN (BUILT_IN_EXP2):
2043 errno_set = true; builtin_optab = exp2_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXPM1):
2045 errno_set = true; builtin_optab = expm1_optab; break;
2046 CASE_FLT_FN (BUILT_IN_LOGB):
2047 errno_set = true; builtin_optab = logb_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOG):
2049 errno_set = true; builtin_optab = log_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG10):
2051 errno_set = true; builtin_optab = log10_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG2):
2053 errno_set = true; builtin_optab = log2_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG1P):
2055 errno_set = true; builtin_optab = log1p_optab; break;
2056 CASE_FLT_FN (BUILT_IN_ASIN):
2057 builtin_optab = asin_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ACOS):
2059 builtin_optab = acos_optab; break;
2060 CASE_FLT_FN (BUILT_IN_TAN):
2061 builtin_optab = tan_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ATAN):
2063 builtin_optab = atan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_FLOOR):
2065 builtin_optab = floor_optab; break;
2066 CASE_FLT_FN (BUILT_IN_CEIL):
2067 builtin_optab = ceil_optab; break;
2068 CASE_FLT_FN (BUILT_IN_TRUNC):
2069 builtin_optab = btrunc_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ROUND):
2071 builtin_optab = round_optab; break;
2072 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2073 builtin_optab = nearbyint_optab;
2074 if (flag_trapping_math)
2075 break;
2076 /* Else fallthrough and expand as rint. */
2077 CASE_FLT_FN (BUILT_IN_RINT):
2078 builtin_optab = rint_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2080 builtin_optab = significand_optab; break;
2081 default:
2082 gcc_unreachable ();
2085 /* Make a suitable register to place result in. */
2086 mode = TYPE_MODE (TREE_TYPE (exp));
2088 if (! flag_errno_math || ! HONOR_NANS (mode))
2089 errno_set = false;
2091 /* Before working hard, check whether the instruction is available. */
2092 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2093 && (!errno_set || !optimize_insn_for_size_p ()))
2095 target = gen_reg_rtx (mode);
2097 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2098 need to expand the argument again. This way, we will not perform
2099 side-effects more the once. */
2100 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2102 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2104 start_sequence ();
2106 /* Compute into TARGET.
2107 Set TARGET to wherever the result comes back. */
2108 target = expand_unop (mode, builtin_optab, op0, target, 0);
2110 if (target != 0)
2112 if (errno_set)
2113 expand_errno_check (exp, target);
2115 /* Output the entire sequence. */
2116 insns = get_insns ();
2117 end_sequence ();
2118 emit_insn (insns);
2119 return target;
2122 /* If we were unable to expand via the builtin, stop the sequence
2123 (without outputting the insns) and call to the library function
2124 with the stabilized argument list. */
2125 end_sequence ();
2128 return expand_call (exp, target, target == const0_rtx);
2131 /* Expand a call to the builtin binary math functions (pow and atan2).
2132 Return NULL_RTX if a normal call should be emitted rather than expanding the
2133 function in-line. EXP is the expression that is a call to the builtin
2134 function; if convenient, the result should be placed in TARGET.
2135 SUBTARGET may be used as the target for computing one of EXP's
2136 operands. */
2138 static rtx
2139 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2141 optab builtin_optab;
2142 rtx op0, op1, insns;
2143 int op1_type = REAL_TYPE;
2144 tree fndecl = get_callee_fndecl (exp);
2145 tree arg0, arg1;
2146 enum machine_mode mode;
2147 bool errno_set = true;
2149 switch (DECL_FUNCTION_CODE (fndecl))
2151 CASE_FLT_FN (BUILT_IN_SCALBN):
2152 CASE_FLT_FN (BUILT_IN_SCALBLN):
2153 CASE_FLT_FN (BUILT_IN_LDEXP):
2154 op1_type = INTEGER_TYPE;
2155 default:
2156 break;
2159 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2160 return NULL_RTX;
2162 arg0 = CALL_EXPR_ARG (exp, 0);
2163 arg1 = CALL_EXPR_ARG (exp, 1);
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 CASE_FLT_FN (BUILT_IN_POW):
2168 builtin_optab = pow_optab; break;
2169 CASE_FLT_FN (BUILT_IN_ATAN2):
2170 builtin_optab = atan2_optab; break;
2171 CASE_FLT_FN (BUILT_IN_SCALB):
2172 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2173 return 0;
2174 builtin_optab = scalb_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALBN):
2176 CASE_FLT_FN (BUILT_IN_SCALBLN):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 /* Fall through... */
2180 CASE_FLT_FN (BUILT_IN_LDEXP):
2181 builtin_optab = ldexp_optab; break;
2182 CASE_FLT_FN (BUILT_IN_FMOD):
2183 builtin_optab = fmod_optab; break;
2184 CASE_FLT_FN (BUILT_IN_REMAINDER):
2185 CASE_FLT_FN (BUILT_IN_DREM):
2186 builtin_optab = remainder_optab; break;
2187 default:
2188 gcc_unreachable ();
2191 /* Make a suitable register to place result in. */
2192 mode = TYPE_MODE (TREE_TYPE (exp));
2194 /* Before working hard, check whether the instruction is available. */
2195 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2196 return NULL_RTX;
2198 target = gen_reg_rtx (mode);
2200 if (! flag_errno_math || ! HONOR_NANS (mode))
2201 errno_set = false;
2203 if (errno_set && optimize_insn_for_size_p ())
2204 return 0;
2206 /* Always stabilize the argument list. */
2207 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2208 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2210 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2211 op1 = expand_normal (arg1);
2213 start_sequence ();
2215 /* Compute into TARGET.
2216 Set TARGET to wherever the result comes back. */
2217 target = expand_binop (mode, builtin_optab, op0, op1,
2218 target, 0, OPTAB_DIRECT);
2220 /* If we were unable to expand via the builtin, stop the sequence
2221 (without outputting the insns) and call to the library function
2222 with the stabilized argument list. */
2223 if (target == 0)
2225 end_sequence ();
2226 return expand_call (exp, target, target == const0_rtx);
2229 if (errno_set)
2230 expand_errno_check (exp, target);
2232 /* Output the entire sequence. */
2233 insns = get_insns ();
2234 end_sequence ();
2235 emit_insn (insns);
2237 return target;
2240 /* Expand a call to the builtin trinary math functions (fma).
2241 Return NULL_RTX if a normal call should be emitted rather than expanding the
2242 function in-line. EXP is the expression that is a call to the builtin
2243 function; if convenient, the result should be placed in TARGET.
2244 SUBTARGET may be used as the target for computing one of EXP's
2245 operands. */
2247 static rtx
2248 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2250 optab builtin_optab;
2251 rtx op0, op1, op2, insns;
2252 tree fndecl = get_callee_fndecl (exp);
2253 tree arg0, arg1, arg2;
2254 enum machine_mode mode;
2256 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2257 return NULL_RTX;
2259 arg0 = CALL_EXPR_ARG (exp, 0);
2260 arg1 = CALL_EXPR_ARG (exp, 1);
2261 arg2 = CALL_EXPR_ARG (exp, 2);
2263 switch (DECL_FUNCTION_CODE (fndecl))
2265 CASE_FLT_FN (BUILT_IN_FMA):
2266 builtin_optab = fma_optab; break;
2267 default:
2268 gcc_unreachable ();
2271 /* Make a suitable register to place result in. */
2272 mode = TYPE_MODE (TREE_TYPE (exp));
2274 /* Before working hard, check whether the instruction is available. */
2275 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2276 return NULL_RTX;
2278 target = gen_reg_rtx (mode);
2280 /* Always stabilize the argument list. */
2281 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2282 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2283 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2285 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2286 op1 = expand_normal (arg1);
2287 op2 = expand_normal (arg2);
2289 start_sequence ();
2291 /* Compute into TARGET.
2292 Set TARGET to wherever the result comes back. */
2293 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2294 target, 0);
2296 /* If we were unable to expand via the builtin, stop the sequence
2297 (without outputting the insns) and call to the library function
2298 with the stabilized argument list. */
2299 if (target == 0)
2301 end_sequence ();
2302 return expand_call (exp, target, target == const0_rtx);
2305 /* Output the entire sequence. */
2306 insns = get_insns ();
2307 end_sequence ();
2308 emit_insn (insns);
2310 return target;
2313 /* Expand a call to the builtin sin and cos math functions.
2314 Return NULL_RTX if a normal call should be emitted rather than expanding the
2315 function in-line. EXP is the expression that is a call to the builtin
2316 function; if convenient, the result should be placed in TARGET.
2317 SUBTARGET may be used as the target for computing one of EXP's
2318 operands. */
2320 static rtx
2321 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2323 optab builtin_optab;
2324 rtx op0, insns;
2325 tree fndecl = get_callee_fndecl (exp);
2326 enum machine_mode mode;
2327 tree arg;
2329 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2330 return NULL_RTX;
2332 arg = CALL_EXPR_ARG (exp, 0);
2334 switch (DECL_FUNCTION_CODE (fndecl))
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 CASE_FLT_FN (BUILT_IN_COS):
2338 builtin_optab = sincos_optab; break;
2339 default:
2340 gcc_unreachable ();
2343 /* Make a suitable register to place result in. */
2344 mode = TYPE_MODE (TREE_TYPE (exp));
2346 /* Check if sincos insn is available, otherwise fallback
2347 to sin or cos insn. */
2348 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2349 switch (DECL_FUNCTION_CODE (fndecl))
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 builtin_optab = sin_optab; break;
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = cos_optab; break;
2355 default:
2356 gcc_unreachable ();
2359 /* Before working hard, check whether the instruction is available. */
2360 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2362 target = gen_reg_rtx (mode);
2364 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2365 need to expand the argument again. This way, we will not perform
2366 side-effects more the once. */
2367 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2369 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2371 start_sequence ();
2373 /* Compute into TARGET.
2374 Set TARGET to wherever the result comes back. */
2375 if (builtin_optab == sincos_optab)
2377 int result;
2379 switch (DECL_FUNCTION_CODE (fndecl))
2381 CASE_FLT_FN (BUILT_IN_SIN):
2382 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2383 break;
2384 CASE_FLT_FN (BUILT_IN_COS):
2385 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2386 break;
2387 default:
2388 gcc_unreachable ();
2390 gcc_assert (result);
2392 else
2394 target = expand_unop (mode, builtin_optab, op0, target, 0);
2397 if (target != 0)
2399 /* Output the entire sequence. */
2400 insns = get_insns ();
2401 end_sequence ();
2402 emit_insn (insns);
2403 return target;
2406 /* If we were unable to expand via the builtin, stop the sequence
2407 (without outputting the insns) and call to the library function
2408 with the stabilized argument list. */
2409 end_sequence ();
2412 target = expand_call (exp, target, target == const0_rtx);
2414 return target;
2417 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2418 return an RTL instruction code that implements the functionality.
2419 If that isn't possible or available return CODE_FOR_nothing. */
2421 static enum insn_code
2422 interclass_mathfn_icode (tree arg, tree fndecl)
2424 bool errno_set = false;
2425 optab builtin_optab = 0;
2426 enum machine_mode mode;
2428 switch (DECL_FUNCTION_CODE (fndecl))
2430 CASE_FLT_FN (BUILT_IN_ILOGB):
2431 errno_set = true; builtin_optab = ilogb_optab; break;
2432 CASE_FLT_FN (BUILT_IN_ISINF):
2433 builtin_optab = isinf_optab; break;
2434 case BUILT_IN_ISNORMAL:
2435 case BUILT_IN_ISFINITE:
2436 CASE_FLT_FN (BUILT_IN_FINITE):
2437 case BUILT_IN_FINITED32:
2438 case BUILT_IN_FINITED64:
2439 case BUILT_IN_FINITED128:
2440 case BUILT_IN_ISINFD32:
2441 case BUILT_IN_ISINFD64:
2442 case BUILT_IN_ISINFD128:
2443 /* These builtins have no optabs (yet). */
2444 break;
2445 default:
2446 gcc_unreachable ();
2449 /* There's no easy way to detect the case we need to set EDOM. */
2450 if (flag_errno_math && errno_set)
2451 return CODE_FOR_nothing;
2453 /* Optab mode depends on the mode of the input argument. */
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2456 if (builtin_optab)
2457 return optab_handler (builtin_optab, mode);
2458 return CODE_FOR_nothing;
2461 /* Expand a call to one of the builtin math functions that operate on
2462 floating point argument and output an integer result (ilogb, isinf,
2463 isnan, etc).
2464 Return 0 if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2466 function; if convenient, the result should be placed in TARGET. */
2468 static rtx
2469 expand_builtin_interclass_mathfn (tree exp, rtx target)
2471 enum insn_code icode = CODE_FOR_nothing;
2472 rtx op0;
2473 tree fndecl = get_callee_fndecl (exp);
2474 enum machine_mode mode;
2475 tree arg;
2477 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2478 return NULL_RTX;
2480 arg = CALL_EXPR_ARG (exp, 0);
2481 icode = interclass_mathfn_icode (arg, fndecl);
2482 mode = TYPE_MODE (TREE_TYPE (arg));
2484 if (icode != CODE_FOR_nothing)
2486 struct expand_operand ops[1];
2487 rtx last = get_last_insn ();
2488 tree orig_arg = arg;
2490 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2491 need to expand the argument again. This way, we will not perform
2492 side-effects more the once. */
2493 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2495 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2497 if (mode != GET_MODE (op0))
2498 op0 = convert_to_mode (mode, op0, 0);
2500 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2501 if (maybe_legitimize_operands (icode, 0, 1, ops)
2502 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2503 return ops[0].value;
2505 delete_insns_since (last);
2506 CALL_EXPR_ARG (exp, 0) = orig_arg;
2509 return NULL_RTX;
2512 /* Expand a call to the builtin sincos math function.
2513 Return NULL_RTX if a normal call should be emitted rather than expanding the
2514 function in-line. EXP is the expression that is a call to the builtin
2515 function. */
2517 static rtx
2518 expand_builtin_sincos (tree exp)
2520 rtx op0, op1, op2, target1, target2;
2521 enum machine_mode mode;
2522 tree arg, sinp, cosp;
2523 int result;
2524 location_t loc = EXPR_LOCATION (exp);
2525 tree alias_type, alias_off;
2527 if (!validate_arglist (exp, REAL_TYPE,
2528 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2529 return NULL_RTX;
2531 arg = CALL_EXPR_ARG (exp, 0);
2532 sinp = CALL_EXPR_ARG (exp, 1);
2533 cosp = CALL_EXPR_ARG (exp, 2);
2535 /* Make a suitable register to place result in. */
2536 mode = TYPE_MODE (TREE_TYPE (arg));
2538 /* Check if sincos insn is available, otherwise emit the call. */
2539 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2540 return NULL_RTX;
2542 target1 = gen_reg_rtx (mode);
2543 target2 = gen_reg_rtx (mode);
2545 op0 = expand_normal (arg);
2546 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2547 alias_off = build_int_cst (alias_type, 0);
2548 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2549 sinp, alias_off));
2550 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 cosp, alias_off));
2553 /* Compute into target1 and target2.
2554 Set TARGET to wherever the result comes back. */
2555 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2556 gcc_assert (result);
2558 /* Move target1 and target2 to the memory locations indicated
2559 by op1 and op2. */
2560 emit_move_insn (op1, target1);
2561 emit_move_insn (op2, target2);
2563 return const0_rtx;
2566 /* Expand a call to the internal cexpi builtin to the sincos math function.
2567 EXP is the expression that is a call to the builtin function; if convenient,
2568 the result should be placed in TARGET. */
2570 static rtx
2571 expand_builtin_cexpi (tree exp, rtx target)
2573 tree fndecl = get_callee_fndecl (exp);
2574 tree arg, type;
2575 enum machine_mode mode;
2576 rtx op0, op1, op2;
2577 location_t loc = EXPR_LOCATION (exp);
2579 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2580 return NULL_RTX;
2582 arg = CALL_EXPR_ARG (exp, 0);
2583 type = TREE_TYPE (arg);
2584 mode = TYPE_MODE (TREE_TYPE (arg));
2586 /* Try expanding via a sincos optab, fall back to emitting a libcall
2587 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2588 is only generated from sincos, cexp or if we have either of them. */
2589 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2591 op1 = gen_reg_rtx (mode);
2592 op2 = gen_reg_rtx (mode);
2594 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2596 /* Compute into op1 and op2. */
2597 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2599 else if (TARGET_HAS_SINCOS)
2601 tree call, fn = NULL_TREE;
2602 tree top1, top2;
2603 rtx op1a, op2a;
2605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2606 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2611 else
2612 gcc_unreachable ();
2614 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2615 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2616 op1a = copy_addr_to_reg (XEXP (op1, 0));
2617 op2a = copy_addr_to_reg (XEXP (op2, 0));
2618 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2619 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2621 /* Make sure not to fold the sincos call again. */
2622 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2623 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2624 call, 3, arg, top1, top2));
2626 else
2628 tree call, fn = NULL_TREE, narg;
2629 tree ctype = build_complex_type (type);
2631 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2632 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2637 else
2638 gcc_unreachable ();
2640 /* If we don't have a decl for cexp create one. This is the
2641 friendliest fallback if the user calls __builtin_cexpi
2642 without full target C99 function support. */
2643 if (fn == NULL_TREE)
2645 tree fntype;
2646 const char *name = NULL;
2648 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2649 name = "cexpf";
2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2651 name = "cexp";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2653 name = "cexpl";
2655 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2656 fn = build_fn_decl (name, fntype);
2659 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2660 build_real (type, dconst0), arg);
2662 /* Make sure not to fold the cexp call again. */
2663 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2664 return expand_expr (build_call_nary (ctype, call, 1, narg),
2665 target, VOIDmode, EXPAND_NORMAL);
2668 /* Now build the proper return type. */
2669 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2670 make_tree (TREE_TYPE (arg), op2),
2671 make_tree (TREE_TYPE (arg), op1)),
2672 target, VOIDmode, EXPAND_NORMAL);
2675 /* Conveniently construct a function call expression. FNDECL names the
2676 function to be called, N is the number of arguments, and the "..."
2677 parameters are the argument expressions. Unlike build_call_exr
2678 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2680 static tree
2681 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2683 va_list ap;
2684 tree fntype = TREE_TYPE (fndecl);
2685 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2687 va_start (ap, n);
2688 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2689 va_end (ap);
2690 SET_EXPR_LOCATION (fn, loc);
2691 return fn;
2694 /* Expand a call to one of the builtin rounding functions gcc defines
2695 as an extension (lfloor and lceil). As these are gcc extensions we
2696 do not need to worry about setting errno to EDOM.
2697 If expanding via optab fails, lower expression to (int)(floor(x)).
2698 EXP is the expression that is a call to the builtin function;
2699 if convenient, the result should be placed in TARGET. */
2701 static rtx
2702 expand_builtin_int_roundingfn (tree exp, rtx target)
2704 convert_optab builtin_optab;
2705 rtx op0, insns, tmp;
2706 tree fndecl = get_callee_fndecl (exp);
2707 enum built_in_function fallback_fn;
2708 tree fallback_fndecl;
2709 enum machine_mode mode;
2710 tree arg;
2712 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2713 gcc_unreachable ();
2715 arg = CALL_EXPR_ARG (exp, 0);
2717 switch (DECL_FUNCTION_CODE (fndecl))
2719 CASE_FLT_FN (BUILT_IN_ICEIL):
2720 CASE_FLT_FN (BUILT_IN_LCEIL):
2721 CASE_FLT_FN (BUILT_IN_LLCEIL):
2722 builtin_optab = lceil_optab;
2723 fallback_fn = BUILT_IN_CEIL;
2724 break;
2726 CASE_FLT_FN (BUILT_IN_IFLOOR):
2727 CASE_FLT_FN (BUILT_IN_LFLOOR):
2728 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2729 builtin_optab = lfloor_optab;
2730 fallback_fn = BUILT_IN_FLOOR;
2731 break;
2733 default:
2734 gcc_unreachable ();
2737 /* Make a suitable register to place result in. */
2738 mode = TYPE_MODE (TREE_TYPE (exp));
2740 target = gen_reg_rtx (mode);
2742 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2743 need to expand the argument again. This way, we will not perform
2744 side-effects more the once. */
2745 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2747 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2749 start_sequence ();
2751 /* Compute into TARGET. */
2752 if (expand_sfix_optab (target, op0, builtin_optab))
2754 /* Output the entire sequence. */
2755 insns = get_insns ();
2756 end_sequence ();
2757 emit_insn (insns);
2758 return target;
2761 /* If we were unable to expand via the builtin, stop the sequence
2762 (without outputting the insns). */
2763 end_sequence ();
2765 /* Fall back to floating point rounding optab. */
2766 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2768 /* For non-C99 targets we may end up without a fallback fndecl here
2769 if the user called __builtin_lfloor directly. In this case emit
2770 a call to the floor/ceil variants nevertheless. This should result
2771 in the best user experience for not full C99 targets. */
2772 if (fallback_fndecl == NULL_TREE)
2774 tree fntype;
2775 const char *name = NULL;
2777 switch (DECL_FUNCTION_CODE (fndecl))
2779 case BUILT_IN_ICEIL:
2780 case BUILT_IN_LCEIL:
2781 case BUILT_IN_LLCEIL:
2782 name = "ceil";
2783 break;
2784 case BUILT_IN_ICEILF:
2785 case BUILT_IN_LCEILF:
2786 case BUILT_IN_LLCEILF:
2787 name = "ceilf";
2788 break;
2789 case BUILT_IN_ICEILL:
2790 case BUILT_IN_LCEILL:
2791 case BUILT_IN_LLCEILL:
2792 name = "ceill";
2793 break;
2794 case BUILT_IN_IFLOOR:
2795 case BUILT_IN_LFLOOR:
2796 case BUILT_IN_LLFLOOR:
2797 name = "floor";
2798 break;
2799 case BUILT_IN_IFLOORF:
2800 case BUILT_IN_LFLOORF:
2801 case BUILT_IN_LLFLOORF:
2802 name = "floorf";
2803 break;
2804 case BUILT_IN_IFLOORL:
2805 case BUILT_IN_LFLOORL:
2806 case BUILT_IN_LLFLOORL:
2807 name = "floorl";
2808 break;
2809 default:
2810 gcc_unreachable ();
2813 fntype = build_function_type_list (TREE_TYPE (arg),
2814 TREE_TYPE (arg), NULL_TREE);
2815 fallback_fndecl = build_fn_decl (name, fntype);
2818 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2820 tmp = expand_normal (exp);
2822 /* Truncate the result of floating point optab to integer
2823 via expand_fix (). */
2824 target = gen_reg_rtx (mode);
2825 expand_fix (target, tmp, 0);
2827 return target;
2830 /* Expand a call to one of the builtin math functions doing integer
2831 conversion (lrint).
2832 Return 0 if a normal call should be emitted rather than expanding the
2833 function in-line. EXP is the expression that is a call to the builtin
2834 function; if convenient, the result should be placed in TARGET. */
2836 static rtx
2837 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2839 convert_optab builtin_optab;
2840 rtx op0, insns;
2841 tree fndecl = get_callee_fndecl (exp);
2842 tree arg;
2843 enum machine_mode mode;
2844 enum built_in_function fallback_fn = BUILT_IN_NONE;
2846 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2847 gcc_unreachable ();
2849 arg = CALL_EXPR_ARG (exp, 0);
2851 switch (DECL_FUNCTION_CODE (fndecl))
2853 CASE_FLT_FN (BUILT_IN_IRINT):
2854 fallback_fn = BUILT_IN_LRINT;
2855 /* FALLTHRU */
2856 CASE_FLT_FN (BUILT_IN_LRINT):
2857 CASE_FLT_FN (BUILT_IN_LLRINT):
2858 builtin_optab = lrint_optab;
2859 break;
2861 CASE_FLT_FN (BUILT_IN_IROUND):
2862 fallback_fn = BUILT_IN_LROUND;
2863 /* FALLTHRU */
2864 CASE_FLT_FN (BUILT_IN_LROUND):
2865 CASE_FLT_FN (BUILT_IN_LLROUND):
2866 builtin_optab = lround_optab;
2867 break;
2869 default:
2870 gcc_unreachable ();
2873 /* There's no easy way to detect the case we need to set EDOM. */
2874 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2875 return NULL_RTX;
2877 /* Make a suitable register to place result in. */
2878 mode = TYPE_MODE (TREE_TYPE (exp));
2880 /* There's no easy way to detect the case we need to set EDOM. */
2881 if (!flag_errno_math)
2883 target = gen_reg_rtx (mode);
2885 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2886 need to expand the argument again. This way, we will not perform
2887 side-effects more the once. */
2888 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2890 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2892 start_sequence ();
2894 if (expand_sfix_optab (target, op0, builtin_optab))
2896 /* Output the entire sequence. */
2897 insns = get_insns ();
2898 end_sequence ();
2899 emit_insn (insns);
2900 return target;
2903 /* If we were unable to expand via the builtin, stop the sequence
2904 (without outputting the insns) and call to the library function
2905 with the stabilized argument list. */
2906 end_sequence ();
2909 if (fallback_fn != BUILT_IN_NONE)
2911 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2912 targets, (int) round (x) should never be transformed into
2913 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2914 a call to lround in the hope that the target provides at least some
2915 C99 functions. This should result in the best user experience for
2916 not full C99 targets. */
2917 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2918 fallback_fn, 0);
2920 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2921 fallback_fndecl, 1, arg);
2923 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2924 return convert_to_mode (mode, target, 0);
2927 target = expand_call (exp, target, target == const0_rtx);
2929 return target;
2932 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2933 a normal call should be emitted rather than expanding the function
2934 in-line. EXP is the expression that is a call to the builtin
2935 function; if convenient, the result should be placed in TARGET. */
2937 static rtx
2938 expand_builtin_powi (tree exp, rtx target)
2940 tree arg0, arg1;
2941 rtx op0, op1;
2942 enum machine_mode mode;
2943 enum machine_mode mode2;
2945 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2946 return NULL_RTX;
2948 arg0 = CALL_EXPR_ARG (exp, 0);
2949 arg1 = CALL_EXPR_ARG (exp, 1);
2950 mode = TYPE_MODE (TREE_TYPE (exp));
2952 /* Emit a libcall to libgcc. */
2954 /* Mode of the 2nd argument must match that of an int. */
2955 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2957 if (target == NULL_RTX)
2958 target = gen_reg_rtx (mode);
2960 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2961 if (GET_MODE (op0) != mode)
2962 op0 = convert_to_mode (mode, op0, 0);
2963 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2964 if (GET_MODE (op1) != mode2)
2965 op1 = convert_to_mode (mode2, op1, 0);
2967 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2968 target, LCT_CONST, mode, 2,
2969 op0, mode, op1, mode2);
2971 return target;
2974 /* Expand expression EXP, which is a call to the strlen builtin. Return
2975 NULL_RTX if we failed the caller should emit a normal call, otherwise
2976 try to get the result in TARGET, if convenient. */
2978 static rtx
2979 expand_builtin_strlen (tree exp, rtx target,
2980 enum machine_mode target_mode)
2982 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2983 return NULL_RTX;
2984 else
2986 struct expand_operand ops[4];
2987 rtx pat;
2988 tree len;
2989 tree src = CALL_EXPR_ARG (exp, 0);
2990 rtx src_reg, before_strlen;
2991 enum machine_mode insn_mode = target_mode;
2992 enum insn_code icode = CODE_FOR_nothing;
2993 unsigned int align;
2995 /* If the length can be computed at compile-time, return it. */
2996 len = c_strlen (src, 0);
2997 if (len)
2998 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3000 /* If the length can be computed at compile-time and is constant
3001 integer, but there are side-effects in src, evaluate
3002 src for side-effects, then return len.
3003 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3004 can be optimized into: i++; x = 3; */
3005 len = c_strlen (src, 1);
3006 if (len && TREE_CODE (len) == INTEGER_CST)
3008 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3009 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3012 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3014 /* If SRC is not a pointer type, don't do this operation inline. */
3015 if (align == 0)
3016 return NULL_RTX;
3018 /* Bail out if we can't compute strlen in the right mode. */
/* Search TARGET_MODE and successively wider integer modes for a
   target strlen insn pattern.  */
3019 while (insn_mode != VOIDmode)
3021 icode = optab_handler (strlen_optab, insn_mode);
3022 if (icode != CODE_FOR_nothing)
3023 break;
3025 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3027 if (insn_mode == VOIDmode)
3028 return NULL_RTX;
3030 /* Make a place to hold the source address. We will not expand
3031 the actual source until we are sure that the expansion will
3032 not fail -- there are trees that cannot be expanded twice. */
3033 src_reg = gen_reg_rtx (Pmode);
3035 /* Mark the beginning of the strlen sequence so we can emit the
3036 source operand later. */
3037 before_strlen = get_last_insn ();
/* Per the strlen standard pattern: operand 0 is the length result,
   operand 1 the string as a BLKmode MEM, operand 2 the character to
   search for (always NUL here), operand 3 the known alignment.  */
3039 create_output_operand (&ops[0], target, insn_mode);
3040 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3041 create_integer_operand (&ops[2], 0);
3042 create_integer_operand (&ops[3], align);
3043 if (!maybe_expand_insn (icode, 4, ops))
3044 return NULL_RTX;
3046 /* Now that we are assured of success, expand the source. */
3047 start_sequence ();
3048 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3049 if (pat != src_reg)
3051 #ifdef POINTERS_EXTEND_UNSIGNED
3052 if (GET_MODE (pat) != Pmode)
3053 pat = convert_to_mode (Pmode, pat,
3054 POINTERS_EXTEND_UNSIGNED);
3055 #endif
3056 emit_move_insn (src_reg, pat);
3058 pat = get_insns ();
3059 end_sequence ();
/* Splice the deferred source computation in front of the strlen
   insns emitted above.  */
3061 if (before_strlen)
3062 emit_insn_after (pat, before_strlen);
3063 else
3064 emit_insn_before (pat, get_insns ());
3066 /* Return the value in the proper mode for this function. */
3067 if (GET_MODE (ops[0].value) == target_mode)
3068 target = ops[0].value;
3069 else if (target != 0)
3070 convert_move (target, ops[0].value, 0);
3071 else
3072 target = convert_to_mode (target_mode, ops[0].value, 0);
3074 return target;
3078 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3079 bytes from constant string DATA + OFFSET and return it as target
3080 constant. */
3082 static rtx
3083 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3084 enum machine_mode mode)
3086 const char *str = (const char *) data;
3088 gcc_assert (offset >= 0
3089 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3090 <= strlen (str) + 1));
3092 return c_readstr (str + offset, mode);
3095 /* Expand a call EXP to the memcpy builtin.
3096 Return NULL_RTX if we failed, the caller should emit a normal call,
3097 otherwise try to get the result in TARGET, if convenient (and in
3098 mode MODE if that's convenient). */
3100 static rtx
3101 expand_builtin_memcpy (tree exp, rtx target)
3103 if (!validate_arglist (exp,
3104 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3105 return NULL_RTX;
3106 else
3108 tree dest = CALL_EXPR_ARG (exp, 0);
3109 tree src = CALL_EXPR_ARG (exp, 1);
3110 tree len = CALL_EXPR_ARG (exp, 2);
3111 const char *src_str;
3112 unsigned int src_align = get_pointer_alignment (src);
3113 unsigned int dest_align = get_pointer_alignment (dest);
3114 rtx dest_mem, src_mem, dest_addr, len_rtx;
3115 HOST_WIDE_INT expected_size = -1;
3116 unsigned int expected_align = 0;
3118 /* If DEST is not a pointer type, call the normal function. */
3119 if (dest_align == 0)
3120 return NULL_RTX;
3122 /* If either SRC is not a pointer type, don't do this
3123 operation in-line. */
3124 if (src_align == 0)
3125 return NULL_RTX;
/* Profile feedback, when available, can supply a larger expected
   alignment and an expected copy size for this string operation.  */
3127 if (currently_expanding_gimple_stmt)
3128 stringop_block_profile (currently_expanding_gimple_stmt,
3129 &expected_align, &expected_size);
3131 if (expected_align < dest_align)
3132 expected_align = dest_align;
3133 dest_mem = get_memory_rtx (dest, len);
3134 set_mem_align (dest_mem, dest_align);
3135 len_rtx = expand_normal (len);
3136 src_str = c_getstr (src);
3138 /* If SRC is a string constant and block move would be done
3139 by pieces, we can avoid loading the string from memory
3140 and only stored the computed constants. */
3141 if (src_str
3142 && CONST_INT_P (len_rtx)
3143 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3144 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3145 CONST_CAST (char *, src_str),
3146 dest_align, false))
3148 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3149 builtin_memcpy_read_str,
3150 CONST_CAST (char *, src_str),
3151 dest_align, false, 0)
/* endp == 0: store_by_pieces hands back the destination MEM, whose
   address (in ptr_mode) is memcpy's return value.  */;
3152 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3153 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3154 return dest_mem;
/* General case: emit a block move, passing along tail-call status
   and any profile-derived hints.  */
3157 src_mem = get_memory_rtx (src, len);
3158 set_mem_align (src_mem, src_align);
3160 /* Copy word part most expediently. */
3161 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3162 CALL_EXPR_TAILCALL (exp)
3163 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3164 expected_align, expected_size);
3166 if (dest_addr == 0)
3168 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3169 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3171 return dest_addr;
3175 /* Expand a call EXP to the mempcpy builtin.
3176 Return NULL_RTX if we failed; the caller should emit a normal call,
3177 otherwise try to get the result in TARGET, if convenient (and in
3178 mode MODE if that's convenient). If ENDP is 0 return the
3179 destination pointer, if ENDP is 1 return the end pointer ala
3180 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3181 stpcpy. */
3183 static rtx
3184 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3186 if (!validate_arglist (exp,
3187 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3188 return NULL_RTX;
3189 else
3191 tree dest = CALL_EXPR_ARG (exp, 0);
3192 tree src = CALL_EXPR_ARG (exp, 1);
3193 tree len = CALL_EXPR_ARG (exp, 2);
3194 return expand_builtin_mempcpy_args (dest, src, len,
3195 target, mode, /*endp=*/ 1);
3199 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3200 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3201 so that this can also be called without constructing an actual CALL_EXPR.
3202 The other arguments and return value are the same as for
3203 expand_builtin_mempcpy. */
3205 static rtx
3206 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3207 rtx target, enum machine_mode mode, int endp)
3209 /* If return value is ignored, transform mempcpy into memcpy. */
3210 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3212 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3213 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3214 dest, src, len);
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3217 else
3219 const char *src_str;
3220 unsigned int src_align = get_pointer_alignment (src);
3221 unsigned int dest_align = get_pointer_alignment (dest);
3222 rtx dest_mem, src_mem, len_rtx;
3224 /* If either SRC or DEST is not a pointer type, don't do this
3225 operation in-line. */
3226 if (dest_align == 0 || src_align == 0)
3227 return NULL_RTX;
3229 /* If LEN is not constant, call the normal function. */
3230 if (! host_integerp (len, 1))
3231 return NULL_RTX;
3233 len_rtx = expand_normal (len);
3234 src_str = c_getstr (src);
3236 /* If SRC is a string constant and block move would be done
3237 by pieces, we can avoid loading the string from memory
3238 and only stored the computed constants. */
3239 if (src_str
3240 && CONST_INT_P (len_rtx)
3241 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3242 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3243 CONST_CAST (char *, src_str),
3244 dest_align, false))
/* ENDP is forwarded so store_by_pieces returns the pointer the
   caller asked for: dest (0), end (1), or end - 1 (2).  */
3246 dest_mem = get_memory_rtx (dest, len);
3247 set_mem_align (dest_mem, dest_align);
3248 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3249 builtin_memcpy_read_str,
3250 CONST_CAST (char *, src_str),
3251 dest_align, false, endp);
3252 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3253 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3254 return dest_mem;
/* Otherwise inline only when the target can move this constant
   length by pieces at the alignment we have.  */
3257 if (CONST_INT_P (len_rtx)
3258 && can_move_by_pieces (INTVAL (len_rtx),
3259 MIN (dest_align, src_align)))
3261 dest_mem = get_memory_rtx (dest, len);
3262 set_mem_align (dest_mem, dest_align);
3263 src_mem = get_memory_rtx (src, len);
3264 set_mem_align (src_mem, src_align);
3265 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3266 MIN (dest_align, src_align), endp);
3267 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3268 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3269 return dest_mem;
3272 return NULL_RTX;
3276 #ifndef HAVE_movstr
3277 # define HAVE_movstr 0
3278 # define CODE_FOR_movstr CODE_FOR_nothing
3279 #endif
3281 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3282 we failed, the caller should emit a normal call, otherwise try to
3283 get the result in TARGET, if convenient. If ENDP is 0 return the
3284 destination pointer, if ENDP is 1 return the end pointer ala
3285 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3286 stpcpy. */
3288 static rtx
3289 expand_movstr (tree dest, tree src, rtx target, int endp)
3291 struct expand_operand ops[3];
3292 rtx dest_mem;
3293 rtx src_mem;
3295 if (!HAVE_movstr)
3296 return NULL_RTX;
3298 dest_mem = get_memory_rtx (dest, NULL);
3299 src_mem = get_memory_rtx (src, NULL);
/* For a strcpy-style result (ENDP == 0) the value returned is simply
   the destination address, so capture it in a register up front and
   make the MEM use that register.  */
3300 if (!endp)
3302 target = force_reg (Pmode, XEXP (dest_mem, 0));
3303 dest_mem = replace_equiv_address (dest_mem, target);
/* Only ask the pattern for its output value when the caller wants
   an end pointer; otherwise TARGET already holds the result.  */
3306 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3307 create_fixed_operand (&ops[1], dest_mem);
3308 create_fixed_operand (&ops[2], src_mem);
3309 expand_insn (CODE_FOR_movstr, 3, ops);
3311 if (endp && target != const0_rtx)
3313 target = ops[0].value;
3314 /* movstr is supposed to set end to the address of the NUL
3315 terminator. If the caller requested a mempcpy-like return value,
3316 adjust it. */
3317 if (endp == 1)
3319 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3320 emit_move_insn (target, force_operand (tem, NULL_RTX));
3323 return target;
3326 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3327 NULL_RTX if we failed the caller should emit a normal call, otherwise
3328 try to get the result in TARGET, if convenient (and in mode MODE if that's
3329 convenient). */
3331 static rtx
3332 expand_builtin_strcpy (tree exp, rtx target)
3334 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3336 tree dest = CALL_EXPR_ARG (exp, 0);
3337 tree src = CALL_EXPR_ARG (exp, 1);
3338 return expand_builtin_strcpy_args (dest, src, target);
3340 return NULL_RTX;
3343 /* Helper function to do the actual work for expand_builtin_strcpy. The
3344 arguments to the builtin_strcpy call DEST and SRC are broken out
3345 so that this can also be called without constructing an actual CALL_EXPR.
3346 The other arguments and return value are the same as for
3347 expand_builtin_strcpy. */
3349 static rtx
3350 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3352 return expand_movstr (dest, src, target, /*endp=*/0);
3355 /* Expand a call EXP to the stpcpy builtin.
3356 Return NULL_RTX if we failed the caller should emit a normal call,
3357 otherwise try to get the result in TARGET, if convenient (and in
3358 mode MODE if that's convenient). */
3360 static rtx
3361 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3363 tree dst, src;
3364 location_t loc = EXPR_LOCATION (exp);
3366 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3367 return NULL_RTX;
3369 dst = CALL_EXPR_ARG (exp, 0);
3370 src = CALL_EXPR_ARG (exp, 1);
3372 /* If return value is ignored, transform stpcpy into strcpy. */
3373 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3375 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3376 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3377 return expand_expr (result, target, mode, EXPAND_NORMAL);
3379 else
3381 tree len, lenp1;
3382 rtx ret;
3384 /* Ensure we get an actual string whose length can be evaluated at
3385 compile-time, not an expression containing a string. This is
3386 because the latter will potentially produce pessimized code
3387 when used to produce the return value. */
3388 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3389 return expand_movstr (dst, src, target, /*endp=*/2);
/* stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1, so expand as
   mempcpy of LEN + 1 bytes with endp == 2 (end pointer minus one).  */
3391 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3392 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3393 target, mode, /*endp=*/2);
3395 if (ret)
3396 return ret;
/* mempcpy expansion failed.  If the length is a known constant,
   expand as strcpy and compute the result as DST + LEN ourselves.  */
3398 if (TREE_CODE (len) == INTEGER_CST)
3400 rtx len_rtx = expand_normal (len);
3402 if (CONST_INT_P (len_rtx))
3404 ret = expand_builtin_strcpy_args (dst, src, target);
3406 if (ret)
3408 if (! target)
3410 if (mode != VOIDmode)
3411 target = gen_reg_rtx (mode);
3412 else
3413 target = gen_reg_rtx (GET_MODE (ret));
3415 if (GET_MODE (target) != GET_MODE (ret))
3416 ret = gen_lowpart (GET_MODE (target), ret);
3418 ret = plus_constant (ret, INTVAL (len_rtx));
3419 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3420 gcc_assert (ret);
3422 return target;
/* Last resort: try the movstr pattern with the stpcpy-style
   (end - 1) return value.  */
3427 return expand_movstr (dst, src, target, /*endp=*/2);
3431 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3432 bytes from constant string DATA + OFFSET and return it as target
3433 constant. */
3436 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3437 enum machine_mode mode)
3439 const char *str = (const char *) data;
3441 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3442 return const0_rtx;
3444 return c_readstr (str + offset, mode);
3447 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3448 NULL_RTX if we failed the caller should emit a normal call. */
3450 static rtx
3451 expand_builtin_strncpy (tree exp, rtx target)
3453 location_t loc = EXPR_LOCATION (exp);
3455 if (validate_arglist (exp,
3456 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 tree len = CALL_EXPR_ARG (exp, 2);
3461 tree slen = c_strlen (src, 1);
3463 /* We must be passed a constant len and src parameter. */
3464 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3465 return NULL_RTX;
3467 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3469 /* We're required to pad with trailing zeros if the requested
3470 len is greater than strlen(s2)+1. In that case try to
3471 use store_by_pieces, if it fails, punt. */
3472 if (tree_int_cst_lt (slen, len))
3474 unsigned int dest_align = get_pointer_alignment (dest);
3475 const char *p = c_getstr (src);
3476 rtx dest_mem;
3478 if (!p || dest_align == 0 || !host_integerp (len, 1)
3479 || !can_store_by_pieces (tree_low_cst (len, 1),
3480 builtin_strncpy_read_str,
3481 CONST_CAST (char *, p),
3482 dest_align, false))
3483 return NULL_RTX;
/* builtin_strncpy_read_str supplies zero bytes for offsets past the
   source NUL, giving the required zero padding up to LEN.  */
3485 dest_mem = get_memory_rtx (dest, len);
3486 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3487 builtin_strncpy_read_str,
3488 CONST_CAST (char *, p), dest_align, false, 0);
3489 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3490 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3491 return dest_mem;
3494 return NULL_RTX;
3497 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3498 bytes from constant string DATA + OFFSET and return it as target
3499 constant. */
3502 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3503 enum machine_mode mode)
3505 const char *c = (const char *) data;
3506 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3508 memset (p, *c, GET_MODE_SIZE (mode));
3510 return c_readstr (p, mode);
3513 /* Callback routine for store_by_pieces. Return the RTL of a register
3514 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3515 char value given in the RTL register data. For example, if mode is
3516 4 bytes wide, return the RTL for 0x01010101*data. */
3518 static rtx
3519 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3520 enum machine_mode mode)
3522 rtx target, coeff;
3523 size_t size;
3524 char *p;
3526 size = GET_MODE_SIZE (mode);
3527 if (size == 1)
3528 return (rtx) data;
3530 p = XALLOCAVEC (char, size);
3531 memset (p, 1, size);
3532 coeff = c_readstr (p, mode);
3534 target = convert_to_mode (mode, (rtx) data, 1);
3535 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3536 return force_reg (mode, target);
3539 /* Expand expression EXP, which is a call to the memset builtin. Return
3540 NULL_RTX if we failed the caller should emit a normal call, otherwise
3541 try to get the result in TARGET, if convenient (and in mode MODE if that's
3542 convenient). */
3544 static rtx
3545 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3547 if (!validate_arglist (exp,
3548 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3549 return NULL_RTX;
3550 else
3552 tree dest = CALL_EXPR_ARG (exp, 0);
3553 tree val = CALL_EXPR_ARG (exp, 1);
3554 tree len = CALL_EXPR_ARG (exp, 2);
3555 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3559 /* Helper function to do the actual work for expand_builtin_memset. The
3560 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3561 so that this can also be called without constructing an actual CALL_EXPR.
3562 The other arguments and return value are the same as for
3563 expand_builtin_memset. */
3565 static rtx
3566 expand_builtin_memset_args (tree dest, tree val, tree len,
3567 rtx target, enum machine_mode mode, tree orig_exp)
3569 tree fndecl, fn;
3570 enum built_in_function fcode;
3571 enum machine_mode val_mode;
3572 char c;
3573 unsigned int dest_align;
3574 rtx dest_mem, dest_addr, len_rtx;
3575 HOST_WIDE_INT expected_size = -1;
3576 unsigned int expected_align = 0;
3578 dest_align = get_pointer_alignment (dest);
3580 /* If DEST is not a pointer type, don't do this operation in-line. */
3581 if (dest_align == 0)
3582 return NULL_RTX;
/* Profile feedback, when available, can supply a larger expected
   alignment and an expected size for this string operation.  */
3584 if (currently_expanding_gimple_stmt)
3585 stringop_block_profile (currently_expanding_gimple_stmt,
3586 &expected_align, &expected_size);
3588 if (expected_align < dest_align)
3589 expected_align = dest_align;
3591 /* If the LEN parameter is zero, return DEST. */
3592 if (integer_zerop (len))
3594 /* Evaluate and ignore VAL in case it has side-effects. */
3595 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3596 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3599 /* Stabilize the arguments in case we fail. */
3600 dest = builtin_save_expr (dest);
3601 val = builtin_save_expr (val);
3602 len = builtin_save_expr (len);
3604 len_rtx = expand_normal (len);
3605 dest_mem = get_memory_rtx (dest, len);
3606 val_mode = TYPE_MODE (unsigned_char_type_node);
/* Case 1: the fill value is not a compile-time constant.  */
3608 if (TREE_CODE (val) != INTEGER_CST)
3610 rtx val_rtx;
3612 val_rtx = expand_normal (val);
3613 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3615 /* Assume that we can memset by pieces if we can store
3616 * the coefficients by pieces (in the required modes).
3617 * We can't pass builtin_memset_gen_str as that emits RTL. */
3618 c = 1;
3619 if (host_integerp (len, 1)
3620 && can_store_by_pieces (tree_low_cst (len, 1),
3621 builtin_memset_read_str, &c, dest_align,
3622 true))
3624 val_rtx = force_reg (val_mode, val_rtx);
3625 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3626 builtin_memset_gen_str, val_rtx, dest_align,
3627 true, 0);
3629 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3630 dest_align, expected_align,
3631 expected_size))
3632 goto do_libcall;
3634 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3635 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3636 return dest_mem;
/* Case 2: the fill value is a nonzero constant byte.  */
3639 if (target_char_cast (val, &c))
3640 goto do_libcall;
3642 if (c)
3644 if (host_integerp (len, 1)
3645 && can_store_by_pieces (tree_low_cst (len, 1),
3646 builtin_memset_read_str, &c, dest_align,
3647 true))
3648 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3649 builtin_memset_read_str, &c, dest_align, true, 0);
3650 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3651 gen_int_mode (c, val_mode),
3652 dest_align, expected_align,
3653 expected_size))
3654 goto do_libcall;
3656 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3657 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3658 return dest_mem;
/* Case 3: the fill value is the constant zero; clear the storage.  */
3661 set_mem_align (dest_mem, dest_align);
3662 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3663 CALL_EXPR_TAILCALL (orig_exp)
3664 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3665 expected_align, expected_size);
3667 if (dest_addr == 0)
3669 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3670 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3673 return dest_addr;
/* Inline expansion failed: fall back to a call to whichever built-in
   (memset or bzero) the user originally wrote, preserving tail-call
   status.  */
3675 do_libcall:
3676 fndecl = get_callee_fndecl (orig_exp);
3677 fcode = DECL_FUNCTION_CODE (fndecl);
3678 if (fcode == BUILT_IN_MEMSET)
3679 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3680 dest, val, len);
3681 else if (fcode == BUILT_IN_BZERO)
3682 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3683 dest, len);
3684 else
3685 gcc_unreachable ();
3686 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3687 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3688 return expand_call (fn, target, target == const0_rtx);
3691 /* Expand expression EXP, which is a call to the bzero builtin. Return
3692 NULL_RTX if we failed the caller should emit a normal call. */
3694 static rtx
3695 expand_builtin_bzero (tree exp)
3697 tree dest, size;
3698 location_t loc = EXPR_LOCATION (exp);
3700 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3701 return NULL_RTX;
3703 dest = CALL_EXPR_ARG (exp, 0);
3704 size = CALL_EXPR_ARG (exp, 1);
3706 /* New argument list transforming bzero(ptr x, int y) to
3707 memset(ptr x, int 0, size_t y). This is done this way
3708 so that if it isn't expanded inline, we fallback to
3709 calling bzero instead of memset. */
3711 return expand_builtin_memset_args (dest, integer_zero_node,
3712 fold_convert_loc (loc,
3713 size_type_node, size),
3714 const0_rtx, VOIDmode, exp);
3717 /* Expand expression EXP, which is a call to the memcmp built-in function.
3718 Return NULL_RTX if we failed and the caller should emit a normal call,
3719 otherwise try to get the result in TARGET, if convenient (and in mode
3720 MODE, if that's convenient). */
3722 static rtx
3723 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3724 ATTRIBUTE_UNUSED enum machine_mode mode)
3726 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3728 if (!validate_arglist (exp,
3729 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3730 return NULL_RTX;
3732 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3733 implementing memcmp because it will stop if it encounters two
3734 zero bytes. */
/* Inline expansion is only possible when the target provides a
   cmpmemsi insn pattern.  */
3735 #if defined HAVE_cmpmemsi
3737 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3738 rtx result;
3739 rtx insn;
3740 tree arg1 = CALL_EXPR_ARG (exp, 0);
3741 tree arg2 = CALL_EXPR_ARG (exp, 1);
3742 tree len = CALL_EXPR_ARG (exp, 2);
3744 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3745 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3746 enum machine_mode insn_mode;
3748 if (HAVE_cmpmemsi)
3749 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3750 else
3751 return NULL_RTX;
3753 /* If we don't have POINTER_TYPE, call the function. */
3754 if (arg1_align == 0 || arg2_align == 0)
3755 return NULL_RTX;
3757 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo register of the insn's mode.  */
3758 result = target;
3759 if (! (result != 0
3760 && REG_P (result) && GET_MODE (result) == insn_mode
3761 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3762 result = gen_reg_rtx (insn_mode);
3764 arg1_rtx = get_memory_rtx (arg1, len);
3765 arg2_rtx = get_memory_rtx (arg2, len);
3766 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3768 /* Set MEM_SIZE as appropriate. */
3769 if (CONST_INT_P (arg3_rtx))
3771 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3772 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3775 if (HAVE_cmpmemsi)
3776 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3777 GEN_INT (MIN (arg1_align, arg2_align)));
3778 else
3779 gcc_unreachable ();
/* If pattern generation declined, fall back to a libcall to memcmp
   with the already-expanded operands.  */
3781 if (insn)
3782 emit_insn (insn);
3783 else
3784 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3785 TYPE_MODE (integer_type_node), 3,
3786 XEXP (arg1_rtx, 0), Pmode,
3787 XEXP (arg2_rtx, 0), Pmode,
3788 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3789 TYPE_UNSIGNED (sizetype)),
3790 TYPE_MODE (sizetype));
3792 /* Return the value in the proper mode for this function. */
3793 mode = TYPE_MODE (TREE_TYPE (exp));
3794 if (GET_MODE (result) == mode)
3795 return result;
3796 else if (target != 0)
3798 convert_move (target, result, 0);
3799 return target;
3801 else
3802 return convert_to_mode (mode, result, 0);
3804 #endif /* HAVE_cmpmemsi. */
3806 return NULL_RTX;
3809 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3810 if we failed the caller should emit a normal call, otherwise try to get
3811 the result in TARGET, if convenient. */
3813 static rtx
3814 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3816 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3817 return NULL_RTX;
/* Inline expansion needs either a cmpstrsi or a cmpstrnsi insn
   pattern from the target.  */
3819 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3820 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3821 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3823 rtx arg1_rtx, arg2_rtx;
3824 rtx result, insn = NULL_RTX;
3825 tree fndecl, fn;
3826 tree arg1 = CALL_EXPR_ARG (exp, 0);
3827 tree arg2 = CALL_EXPR_ARG (exp, 1);
3829 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3830 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3832 /* If we don't have POINTER_TYPE, call the function. */
3833 if (arg1_align == 0 || arg2_align == 0)
3834 return NULL_RTX;
3836 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3837 arg1 = builtin_save_expr (arg1);
3838 arg2 = builtin_save_expr (arg2);
3840 arg1_rtx = get_memory_rtx (arg1, NULL);
3841 arg2_rtx = get_memory_rtx (arg2, NULL);
3843 #ifdef HAVE_cmpstrsi
3844 /* Try to call cmpstrsi. */
3845 if (HAVE_cmpstrsi)
3847 enum machine_mode insn_mode
3848 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3850 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo register of the insn's mode.  */
3851 result = target;
3852 if (! (result != 0
3853 && REG_P (result) && GET_MODE (result) == insn_mode
3854 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3855 result = gen_reg_rtx (insn_mode);
3857 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3858 GEN_INT (MIN (arg1_align, arg2_align)));
3860 #endif
3861 #ifdef HAVE_cmpstrnsi
3862 /* Try to determine at least one length and call cmpstrnsi. */
3863 if (!insn && HAVE_cmpstrnsi)
3865 tree len;
3866 rtx arg3_rtx;
3868 enum machine_mode insn_mode
3869 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Bound the comparison by strlen + 1 (to include the NUL) of
   whichever string length c_strlen can determine.  */
3870 tree len1 = c_strlen (arg1, 1);
3871 tree len2 = c_strlen (arg2, 1);
3873 if (len1)
3874 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3875 if (len2)
3876 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3878 /* If we don't have a constant length for the first, use the length
3879 of the second, if we know it. We don't require a constant for
3880 this case; some cost analysis could be done if both are available
3881 but neither is constant. For now, assume they're equally cheap,
3882 unless one has side effects. If both strings have constant lengths,
3883 use the smaller. */
3885 if (!len1)
3886 len = len2;
3887 else if (!len2)
3888 len = len1;
3889 else if (TREE_SIDE_EFFECTS (len1))
3890 len = len2;
3891 else if (TREE_SIDE_EFFECTS (len2))
3892 len = len1;
3893 else if (TREE_CODE (len1) != INTEGER_CST)
3894 len = len2;
3895 else if (TREE_CODE (len2) != INTEGER_CST)
3896 len = len1;
3897 else if (tree_int_cst_lt (len1, len2))
3898 len = len1;
3899 else
3900 len = len2;
3902 /* If both arguments have side effects, we cannot optimize. */
3903 if (!len || TREE_SIDE_EFFECTS (len))
3904 goto do_libcall;
3906 arg3_rtx = expand_normal (len);
3908 /* Make a place to write the result of the instruction. */
3909 result = target;
3910 if (! (result != 0
3911 && REG_P (result) && GET_MODE (result) == insn_mode
3912 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3913 result = gen_reg_rtx (insn_mode);
3915 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3916 GEN_INT (MIN (arg1_align, arg2_align)));
3918 #endif
3920 if (insn)
3922 enum machine_mode mode;
3923 emit_insn (insn);
3925 /* Return the value in the proper mode for this function. */
3926 mode = TYPE_MODE (TREE_TYPE (exp));
3927 if (GET_MODE (result) == mode)
3928 return result;
3929 if (target == 0)
3930 return convert_to_mode (mode, result, 0);
3931 convert_move (target, result, 0);
3932 return target;
3935 /* Expand the library call ourselves using a stabilized argument
3936 list to avoid re-evaluating the function's arguments twice. */
3937 #ifdef HAVE_cmpstrnsi
3938 do_libcall:
3939 #endif
3940 fndecl = get_callee_fndecl (exp);
3941 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3942 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3943 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3944 return expand_call (fn, target, target == const0_rtx);
3946 #endif
3947 return NULL_RTX;
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    /* Account for the terminating NUL in each known length.  */
    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant lengths,
       use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			   fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails, so that the
       library-call fallback below does not re-evaluate them.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			  GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
	emit_insn (insn);

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	if (target == 0)
	  return convert_to_mode (mode, result, 0);
	convert_move (target, result, 0);
	return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
4071 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4072 if that's convenient. */
4075 expand_builtin_saveregs (void)
4077 rtx val, seq;
4079 /* Don't do __builtin_saveregs more than once in a function.
4080 Save the result of the first call and reuse it. */
4081 if (saveregs_value != 0)
4082 return saveregs_value;
4084 /* When this function is called, it means that registers must be
4085 saved on entry to this function. So we migrate the call to the
4086 first insn of this function. */
4088 start_sequence ();
4090 /* Do whatever the machine needs done in this case. */
4091 val = targetm.calls.expand_builtin_saveregs ();
4093 seq = get_insns ();
4094 end_sequence ();
4096 saveregs_value = val;
4098 /* Put the insns after the NOTE that starts the function. If this
4099 is inside a start_sequence, make the outer-level insn chain current, so
4100 the code is placed at the start of the function. */
4101 push_topmost_sequence ();
4102 emit_insn_after (seq, entry_of_function ());
4103 pop_topmost_sequence ();
4105 return val;
4108 /* Expand a call to __builtin_next_arg. */
4110 static rtx
4111 expand_builtin_next_arg (void)
4113 /* Checking arguments is already done in fold_builtin_next_arg
4114 that must be called before this function. */
4115 return expand_binop (ptr_mode, add_optab,
4116 crtl->args.internal_arg_pointer,
4117 crtl->args.arg_offset_rtx,
4118 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here; NEEDS_LVALUE is nonzero when the caller will write
   through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* Read-only use of a side-effect-free valist can be returned
	     untouched.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Rebuild the lvalue as a dereference of the stabilized address.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4171 /* The "standard" definition of va_list is void*. */
4173 tree
4174 std_build_builtin_va_list (void)
4176 return ptr_type_node;
4179 /* The "standard" abi va_list is va_list_type_node. */
4181 tree
4182 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4184 return va_list_type_node;
4187 /* The "standard" type of va_list is va_list_type_node. */
4189 tree
4190 std_canonical_va_list_type (tree type)
4192 tree wtype, htype;
4194 if (INDIRECT_REF_P (type))
4195 type = TREE_TYPE (type);
4196 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4197 type = TREE_TYPE (type);
4198 wtype = va_list_type_node;
4199 htype = type;
4200 /* Treat structure va_list types. */
4201 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4202 htype = TREE_TYPE (htype);
4203 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4205 /* If va_list is an array type, the argument may have decayed
4206 to a pointer type, e.g. by being passed to another function.
4207 In that case, unwrap both types so that we can compare the
4208 underlying records. */
4209 if (TREE_CODE (htype) == ARRAY_TYPE
4210 || POINTER_TYPE_P (htype))
4212 wtype = TREE_TYPE (wtype);
4213 htype = TREE_TYPE (htype);
4216 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4217 return va_list_type_node;
4219 return NULL_TREE;
4222 /* The "standard" implementation of va_start: just assign `nextarg' to
4223 the variable. */
4225 void
4226 std_expand_builtin_va_start (tree valist, rtx nextarg)
4228 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4229 convert_move (va_r, nextarg, 0);
4232 /* Expand EXP, a call to __builtin_va_start. */
4234 static rtx
4235 expand_builtin_va_start (tree exp)
4237 rtx nextarg;
4238 tree valist;
4239 location_t loc = EXPR_LOCATION (exp);
4241 if (call_expr_nargs (exp) < 2)
4243 error_at (loc, "too few arguments to function %<va_start%>");
4244 return const0_rtx;
4247 if (fold_builtin_next_arg (exp, true))
4248 return const0_rtx;
4250 nextarg = expand_builtin_next_arg ();
4251 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4253 if (targetm.expand_builtin_va_start)
4254 targetm.expand_builtin_va_start (valist, nextarg);
4255 else
4256 std_expand_builtin_va_start (valist, nextarg);
4258 return const0_rtx;
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list lvalue, TYPE the requested argument type; setup code
   is appended to PRE_P and cleanup to POST_P.  Returns the tree for the
   fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1) ...  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      /* ... then round down: valist_tmp &= -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference arguments, add a second dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4358 /* Build an indirect-ref expression over the given TREE, which represents a
4359 piece of a va_arg() expansion. */
4360 tree
4361 build_va_arg_indirect_ref (tree addr)
4363 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4365 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4366 mf_mark (addr);
4368 return addr;
4371 /* Return a dummy expression of type TYPE in order to keep going after an
4372 error. */
4374 static tree
4375 dummy_object (tree type)
4377 tree t = build_int_cst (build_pointer_type (type), 0);
4378 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is
   the VA_ARG_EXPR; generated statements go to PRE_P/POST_P.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* The "pass the promoted type" hint is emitted only once per
	 compilation, hence the static flag.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4474 /* Expand EXP, a call to __builtin_va_end. */
4476 static rtx
4477 expand_builtin_va_end (tree exp)
4479 tree valist = CALL_EXPR_ARG (exp, 0);
4481 /* Evaluate for side effects, if needed. I hate macros that don't
4482 do that. */
4483 if (TREE_SIDE_EFFECTS (valist))
4484 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4486 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST is written through (lvalue); SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar/record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole array with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4541 /* Expand a call to one of the builtin functions __builtin_frame_address or
4542 __builtin_return_address. */
4544 static rtx
4545 expand_builtin_frame_address (tree fndecl, tree exp)
4547 /* The argument must be a nonnegative integer constant.
4548 It counts the number of frames to scan up the stack.
4549 The value is the return address saved in that frame. */
4550 if (call_expr_nargs (exp) == 0)
4551 /* Warning about missing arg was already issued. */
4552 return const0_rtx;
4553 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4555 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4556 error ("invalid argument to %<__builtin_frame_address%>");
4557 else
4558 error ("invalid argument to %<__builtin_return_address%>");
4559 return const0_rtx;
4561 else
4563 rtx tem
4564 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4565 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4567 /* Some ports cannot access arbitrary stack frames. */
4568 if (tem == NULL)
4570 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4571 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4572 else
4573 warning (0, "unsupported argument to %<__builtin_return_address%>");
4574 return const0_rtx;
4577 /* For __builtin_frame_address, return what we've got. */
4578 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4579 return tem;
4581 if (!REG_P (tem)
4582 && ! CONSTANT_P (tem))
4583 tem = copy_addr_to_reg (tem);
4584 return tem;
4588 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4589 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4590 is the same as for allocate_dynamic_stack_space. */
4592 static rtx
4593 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4595 rtx op0;
4596 rtx result;
4597 bool valid_arglist;
4598 unsigned int align;
4599 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4600 == BUILT_IN_ALLOCA_WITH_ALIGN);
4602 /* Emit normal call if we use mudflap. */
4603 if (flag_mudflap)
4604 return NULL_RTX;
4606 valid_arglist
4607 = (alloca_with_align
4608 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4609 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4611 if (!valid_arglist)
4612 return NULL_RTX;
4614 /* Compute the argument. */
4615 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4617 /* Compute the alignment. */
4618 align = (alloca_with_align
4619 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4620 : BIGGEST_ALIGNMENT);
4622 /* Allocate the desired space. */
4623 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4624 result = convert_memory_address (ptr_mode, result);
4626 return result;
4629 /* Expand a call to bswap builtin in EXP.
4630 Return NULL_RTX if a normal call should be emitted rather than expanding the
4631 function in-line. If convenient, the result should be placed in TARGET.
4632 SUBTARGET may be used as the target for computing one of EXP's operands. */
4634 static rtx
4635 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4636 rtx subtarget)
4638 tree arg;
4639 rtx op0;
4641 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4642 return NULL_RTX;
4644 arg = CALL_EXPR_ARG (exp, 0);
4645 op0 = expand_expr (arg,
4646 subtarget && GET_MODE (subtarget) == target_mode
4647 ? subtarget : NULL_RTX,
4648 target_mode, EXPAND_NORMAL);
4649 if (GET_MODE (op0) != target_mode)
4650 op0 = convert_to_mode (target_mode, op0, 1);
4652 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4654 gcc_assert (target);
4656 return convert_to_mode (target_mode, target, 1);
4659 /* Expand a call to a unary builtin in EXP.
4660 Return NULL_RTX if a normal call should be emitted rather than expanding the
4661 function in-line. If convenient, the result should be placed in TARGET.
4662 SUBTARGET may be used as the target for computing one of EXP's operands. */
4664 static rtx
4665 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4666 rtx subtarget, optab op_optab)
4668 rtx op0;
4670 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4671 return NULL_RTX;
4673 /* Compute the argument. */
4674 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4675 (subtarget
4676 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4677 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4678 VOIDmode, EXPAND_NORMAL);
4679 /* Compute op, into TARGET if possible.
4680 Set TARGET to wherever the result comes back. */
4681 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4682 op_optab, op0, target, op_optab != clrsb_optab);
4683 gcc_assert (target);
4685 return convert_to_mode (target_mode, target, 0);
4688 /* Expand a call to __builtin_expect. We just return our argument
4689 as the builtin_expect semantic should've been already executed by
4690 tree branch prediction pass. */
4692 static rtx
4693 expand_builtin_expect (tree exp, rtx target)
4695 tree arg;
4697 if (call_expr_nargs (exp) < 2)
4698 return const0_rtx;
4699 arg = CALL_EXPR_ARG (exp, 0);
4701 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4702 /* When guessing was done, the hints should be already stripped away. */
4703 gcc_assert (!flag_guess_branch_prob
4704 || optimize == 0 || seen_error ());
4705 return target;
4708 /* Expand a call to __builtin_assume_aligned. We just return our first
4709 argument as the builtin_assume_aligned semantic should've been already
4710 executed by CCP. */
4712 static rtx
4713 expand_builtin_assume_aligned (tree exp, rtx target)
4715 if (call_expr_nargs (exp) < 2)
4716 return const0_rtx;
4717 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4718 EXPAND_NORMAL);
4719 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4720 && (call_expr_nargs (exp) < 3
4721 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4722 return target;
/* Emit code for a trap: the trap insn if the target has one, otherwise
   a call to abort.  An unconditional barrier follows either way.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    /* Fallback when no trap insn is available (the `else' above pairs
       with this call when HAVE_trap is defined but false).  */
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4748 /* Expand EXP, a call to fabs, fabsf or fabsl.
4749 Return NULL_RTX if a normal call should be emitted rather than expanding
4750 the function inline. If convenient, the result should be placed
4751 in TARGET. SUBTARGET may be used as the target for computing
4752 the operand. */
4754 static rtx
4755 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4757 enum machine_mode mode;
4758 tree arg;
4759 rtx op0;
4761 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4762 return NULL_RTX;
4764 arg = CALL_EXPR_ARG (exp, 0);
4765 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4766 mode = TYPE_MODE (TREE_TYPE (arg));
4767 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4768 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4771 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4772 Return NULL is a normal call should be emitted rather than expanding the
4773 function inline. If convenient, the result should be placed in TARGET.
4774 SUBTARGET may be used as the target for computing the operand. */
4776 static rtx
4777 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4779 rtx op0, op1;
4780 tree arg;
4782 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4783 return NULL_RTX;
4785 arg = CALL_EXPR_ARG (exp, 0);
4786 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4788 arg = CALL_EXPR_ARG (exp, 1);
4789 op1 = expand_normal (arg);
4791 return expand_copysign (op0, op1, target);
4794 /* Create a new constant string literal and return a char* pointer to it.
4795 The STRING_CST value is the LEN characters at STR. */
4796 tree
4797 build_string_literal (int len, const char *str)
4799 tree t, elem, index, type;
4801 t = build_string (len, str);
4802 elem = build_type_variant (char_type_node, 1, 0);
4803 index = build_index_type (size_int (len - 1));
4804 type = build_array_type (elem, index);
4805 TREE_TYPE (t) = type;
4806 TREE_CONSTANT (t) = 1;
4807 TREE_READONLY (t) = 1;
4808 TREE_STATIC (t) = 1;
4810 type = build_pointer_type (elem);
4811 t = build1 (ADDR_EXPR, type,
4812 build4 (ARRAY_REF, elem,
4813 t, integer_zero_node, NULL_TREE, NULL_TREE));
4814 return t;
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   default library-call expansion should be used, const0_rtx when the
   call has been fully handled (or is a no-op).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4866 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4868 static rtx
4869 round_trampoline_addr (rtx tramp)
4871 rtx temp, addend, mask;
4873 /* If we don't need too much alignment, we'll have been guaranteed
4874 proper alignment by get_trampoline_type. */
4875 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4876 return tramp;
4878 /* Round address up to desired boundary. */
4879 temp = gen_reg_rtx (Pmode);
4880 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4881 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4883 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4884 temp, 0, OPTAB_LIB_WIDEN);
4885 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4886 temp, 0, OPTAB_LIB_WIDEN);
4888 return tramp;
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  Arguments are the
   trampoline address, the nested function's address, and the static
   chain value.  Returns const0_rtx, or NULL_RTX on argument mismatch
   so the caller emits a normal call.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record the creation so the stack can be made executable, and
	 warn under -Wtrampolines.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4949 static rtx
4950 expand_builtin_adjust_trampoline (tree exp)
4952 rtx tramp;
4954 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4955 return NULL_RTX;
4957 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4958 tramp = round_trampoline_addr (tramp);
4959 if (targetm.calls.trampoline_adjust_address)
4960 tramp = targetm.calls.trampoline_adjust_address (tramp);
4962 return tramp;
4965 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4966 function. The function first checks whether the back end provides
4967 an insn to implement signbit for the respective mode. If not, it
4968 checks whether the floating point format of the value is such that
4969 the sign bit can be extracted. If that is not the case, the
4970 function returns NULL_RTX to indicate that a normal call should be
4971 emitted rather than expanding the function in-line. EXP is the
4972 expression that is a call to the builtin function; if convenient,
4973 the result should be placed in TARGET. */
4974 static rtx
4975 expand_builtin_signbit (tree exp, rtx target)
4977 const struct real_format *fmt;
4978 enum machine_mode fmode, imode, rmode;
4979 tree arg;
4980 int word, bitpos;
4981 enum insn_code icode;
4982 rtx temp;
4983 location_t loc = EXPR_LOCATION (exp);
4985 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4986 return NULL_RTX;
4988 arg = CALL_EXPR_ARG (exp, 0);
4989 fmode = TYPE_MODE (TREE_TYPE (arg));
4990 rmode = TYPE_MODE (TREE_TYPE (exp));
4991 fmt = REAL_MODE_FORMAT (fmode);
4993 arg = builtin_save_expr (arg);
4995 /* Expand the argument yielding a RTX expression. */
4996 temp = expand_normal (arg);
4998 /* Check if the back end provides an insn that handles signbit for the
4999 argument's mode. */
5000 icode = optab_handler (signbit_optab, fmode);
5001 if (icode != CODE_FOR_nothing)
5003 rtx last = get_last_insn ();
5004 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5005 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5006 return target;
5007 delete_insns_since (last);
5010 /* For floating point formats without a sign bit, implement signbit
5011 as "ARG < 0.0". */
5012 bitpos = fmt->signbit_ro;
5013 if (bitpos < 0)
5015 /* But we can't do this if the format supports signed zero. */
5016 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5017 return NULL_RTX;
5019 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5020 build_real (TREE_TYPE (arg), dconst0));
5021 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5024 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5026 imode = int_mode_for_mode (fmode);
5027 if (imode == BLKmode)
5028 return NULL_RTX;
5029 temp = gen_lowpart (imode, temp);
5031 else
5033 imode = word_mode;
5034 /* Handle targets with different FP word orders. */
5035 if (FLOAT_WORDS_BIG_ENDIAN)
5036 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5037 else
5038 word = bitpos / BITS_PER_WORD;
5039 temp = operand_subword_force (temp, word, fmode);
5040 bitpos = bitpos % BITS_PER_WORD;
5043 /* Force the intermediate word_mode (or narrower) result into a
5044 register. This avoids attempting to create paradoxical SUBREGs
5045 of floating point modes below. */
5046 temp = force_reg (imode, temp);
5048 /* If the bitpos is within the "result mode" lowpart, the operation
5049 can be implement with a single bitwise AND. Otherwise, we need
5050 a right shift and an AND. */
5052 if (bitpos < GET_MODE_BITSIZE (rmode))
5054 double_int mask = double_int_setbit (double_int_zero, bitpos);
5056 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5057 temp = gen_lowpart (rmode, temp);
5058 temp = expand_binop (rmode, and_optab, temp,
5059 immed_double_int_const (mask, rmode),
5060 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5062 else
5064 /* Perform a logical right shift to place the signbit in the least
5065 significant bit, then truncate the result to the desired mode
5066 and mask just this bit. */
5067 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5068 temp = gen_lowpart (rmode, temp);
5069 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5070 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5073 return temp;
5076 /* Expand fork or exec calls. TARGET is the desired target of the
5077 call. EXP is the call. FN is the
5078 identificator of the actual function. IGNORE is nonzero if the
5079 value is to be ignored. */
5081 static rtx
5082 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5084 tree id, decl;
5085 tree call;
5087 /* If we are not profiling, just call the function. */
5088 if (!profile_arc_flag)
5089 return NULL_RTX;
5091 /* Otherwise call the wrapper. This should be equivalent for the rest of
5092 compiler, so the code does not diverge, and the wrapper may run the
5093 code necessary for keeping the profiling sane. */
5095 switch (DECL_FUNCTION_CODE (fn))
5097 case BUILT_IN_FORK:
5098 id = get_identifier ("__gcov_fork");
5099 break;
5101 case BUILT_IN_EXECL:
5102 id = get_identifier ("__gcov_execl");
5103 break;
5105 case BUILT_IN_EXECV:
5106 id = get_identifier ("__gcov_execv");
5107 break;
5109 case BUILT_IN_EXECLP:
5110 id = get_identifier ("__gcov_execlp");
5111 break;
5113 case BUILT_IN_EXECLE:
5114 id = get_identifier ("__gcov_execle");
5115 break;
5117 case BUILT_IN_EXECVP:
5118 id = get_identifier ("__gcov_execvp");
5119 break;
5121 case BUILT_IN_EXECVE:
5122 id = get_identifier ("__gcov_execve");
5123 break;
5125 default:
5126 gcc_unreachable ();
5129 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5130 FUNCTION_DECL, id, TREE_TYPE (fn));
5131 DECL_EXTERNAL (decl) = 1;
5132 TREE_PUBLIC (decl) = 1;
5133 DECL_ARTIFICIAL (decl) = 1;
5134 TREE_NOTHROW (decl) = 1;
5135 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5136 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5137 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5138 return expand_call (call, target, ignore);
5143 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5144 the pointer in these functions is void*, the tree optimizers may remove
5145 casts. The mode computed in expand_builtin isn't reliable either, due
5146 to __sync_bool_compare_and_swap.
5148 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5149 group of builtins. This gives us log2 of the mode size. */
5151 static inline enum machine_mode
5152 get_builtin_sync_mode (int fcode_diff)
5154 /* The size is not negotiable, so ask not to get BLKmode in return
5155 if the target indicates that a smaller size would be better. */
5156 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5159 /* Expand the memory expression LOC and return the appropriate memory operand
5160 for the builtin_sync operations. */
5162 static rtx
5163 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5165 rtx addr, mem;
5167 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5168 addr = convert_memory_address (Pmode, addr);
5170 /* Note that we explicitly do not want any alias information for this
5171 memory, so that we kill all other live memories. Otherwise we don't
5172 satisfy the full barrier semantics of the intrinsic. */
5173 mem = validize_mem (gen_rtx_MEM (mode, addr));
5175 /* The alignment needs to be at least according to that of the mode. */
5176 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5177 get_pointer_alignment (loc)));
5178 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5179 MEM_VOLATILE_P (mem) = 1;
5181 return mem;
5184 /* Make sure an argument is in the right mode.
5185 EXP is the tree argument.
5186 MODE is the mode it should be in. */
5188 static rtx
5189 expand_expr_force_mode (tree exp, enum machine_mode mode)
5191 rtx val;
5192 enum machine_mode old_mode;
5194 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5195 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5196 of CONST_INTs, where we know the old_mode only from the call argument. */
5198 old_mode = GET_MODE (val);
5199 if (old_mode == VOIDmode)
5200 old_mode = TYPE_MODE (TREE_TYPE (exp));
5201 val = convert_modes (mode, old_mode, val, 1);
5202 return val;
5206 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5207 EXP is the CALL_EXPR. CODE is the rtx code
5208 that corresponds to the arithmetic or logical operation from the name;
5209 an exception here is that NOT actually means NAND. TARGET is an optional
5210 place for us to store the results; AFTER is true if this is the
5211 fetch_and_xxx form. */
5213 static rtx
5214 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5215 enum rtx_code code, bool after,
5216 rtx target)
5218 rtx val, mem;
5219 location_t loc = EXPR_LOCATION (exp);
5221 if (code == NOT && warn_sync_nand)
5223 tree fndecl = get_callee_fndecl (exp);
5224 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5226 static bool warned_f_a_n, warned_n_a_f;
5228 switch (fcode)
5230 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5231 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5232 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5233 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5234 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5235 if (warned_f_a_n)
5236 break;
5238 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5239 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5240 warned_f_a_n = true;
5241 break;
5243 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5244 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5245 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5246 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5247 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5248 if (warned_n_a_f)
5249 break;
5251 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5252 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5253 warned_n_a_f = true;
5254 break;
5256 default:
5257 gcc_unreachable ();
5261 /* Expand the operands. */
5262 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5263 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5265 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5266 after);
5269 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5270 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5271 true if this is the boolean form. TARGET is a place for us to store the
5272 results; this is NOT optional if IS_BOOL is true. */
5274 static rtx
5275 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5276 bool is_bool, rtx target)
5278 rtx old_val, new_val, mem;
5279 rtx *pbool, *poval;
5281 /* Expand the operands. */
5282 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5283 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5284 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5286 pbool = poval = NULL;
5287 if (target != const0_rtx)
5289 if (is_bool)
5290 pbool = &target;
5291 else
5292 poval = &target;
5294 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5295 false, MEMMODEL_SEQ_CST,
5296 MEMMODEL_SEQ_CST))
5297 return NULL_RTX;
5299 return target;
5302 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5303 general form is actually an atomic exchange, and some targets only
5304 support a reduced form with the second argument being a constant 1.
5305 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5306 the results. */
5308 static rtx
5309 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5310 rtx target)
5312 rtx val, mem;
5314 /* Expand the operands. */
5315 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5316 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5318 return expand_sync_lock_test_and_set (target, mem, val);
5321 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5323 static void
5324 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5326 rtx mem;
5328 /* Expand the operands. */
5329 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5331 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5334 /* Given an integer representing an ``enum memmodel'', verify its
5335 correctness and return the memory model enum. */
5337 static enum memmodel
5338 get_memmodel (tree exp)
5340 rtx op;
5341 unsigned HOST_WIDE_INT val;
5343 /* If the parameter is not a constant, it's a run time value so we'll just
5344 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5345 if (TREE_CODE (exp) != INTEGER_CST)
5346 return MEMMODEL_SEQ_CST;
5348 op = expand_normal (exp);
5350 val = INTVAL (op);
5351 if (targetm.memmodel_check)
5352 val = targetm.memmodel_check (val);
5353 else if (val & ~MEMMODEL_MASK)
5355 warning (OPT_Winvalid_memory_model,
5356 "Unknown architecture specifier in memory model to builtin.");
5357 return MEMMODEL_SEQ_CST;
5360 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5362 warning (OPT_Winvalid_memory_model,
5363 "invalid memory model argument to builtin");
5364 return MEMMODEL_SEQ_CST;
5367 return (enum memmodel) val;
5370 /* Expand the __atomic_exchange intrinsic:
5371 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5372 EXP is the CALL_EXPR.
5373 TARGET is an optional place for us to store the results. */
5375 static rtx
5376 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5378 rtx val, mem;
5379 enum memmodel model;
5381 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5382 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5384 error ("invalid memory model for %<__atomic_exchange%>");
5385 return NULL_RTX;
5388 if (!flag_inline_atomics)
5389 return NULL_RTX;
5391 /* Expand the operands. */
5392 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5393 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5395 return expand_atomic_exchange (target, mem, val, model);
5398 /* Expand the __atomic_compare_exchange intrinsic:
5399 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5400 TYPE desired, BOOL weak,
5401 enum memmodel success,
5402 enum memmodel failure)
5403 EXP is the CALL_EXPR.
5404 TARGET is an optional place for us to store the results. */
5406 static rtx
5407 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5408 rtx target)
5410 rtx expect, desired, mem, oldval;
5411 enum memmodel success, failure;
5412 tree weak;
5413 bool is_weak;
5415 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5416 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5418 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5419 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5421 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5422 return NULL_RTX;
5425 if (failure > success)
5427 error ("failure memory model cannot be stronger than success "
5428 "memory model for %<__atomic_compare_exchange%>");
5429 return NULL_RTX;
5432 if (!flag_inline_atomics)
5433 return NULL_RTX;
5435 /* Expand the operands. */
5436 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5438 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5439 expect = convert_memory_address (Pmode, expect);
5440 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5442 weak = CALL_EXPR_ARG (exp, 3);
5443 is_weak = false;
5444 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5445 is_weak = true;
5447 oldval = copy_to_reg (gen_rtx_MEM (mode, expect));
5449 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5450 &oldval, mem, oldval, desired,
5451 is_weak, success, failure))
5452 return NULL_RTX;
5454 emit_move_insn (gen_rtx_MEM (mode, expect), oldval);
5455 return target;
5458 /* Expand the __atomic_load intrinsic:
5459 TYPE __atomic_load (TYPE *object, enum memmodel)
5460 EXP is the CALL_EXPR.
5461 TARGET is an optional place for us to store the results. */
5463 static rtx
5464 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5466 rtx mem;
5467 enum memmodel model;
5469 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5470 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5471 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5473 error ("invalid memory model for %<__atomic_load%>");
5474 return NULL_RTX;
5477 if (!flag_inline_atomics)
5478 return NULL_RTX;
5480 /* Expand the operand. */
5481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5483 return expand_atomic_load (target, mem, model);
5487 /* Expand the __atomic_store intrinsic:
5488 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5489 EXP is the CALL_EXPR.
5490 TARGET is an optional place for us to store the results. */
5492 static rtx
5493 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5495 rtx mem, val;
5496 enum memmodel model;
5498 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5499 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5500 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5501 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5503 error ("invalid memory model for %<__atomic_store%>");
5504 return NULL_RTX;
5507 if (!flag_inline_atomics)
5508 return NULL_RTX;
5510 /* Expand the operands. */
5511 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5512 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5514 return expand_atomic_store (mem, val, model, false);
5517 /* Expand the __atomic_fetch_XXX intrinsic:
5518 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5519 EXP is the CALL_EXPR.
5520 TARGET is an optional place for us to store the results.
5521 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5522 FETCH_AFTER is true if returning the result of the operation.
5523 FETCH_AFTER is false if returning the value before the operation.
5524 IGNORE is true if the result is not used.
5525 EXT_CALL is the correct builtin for an external call if this cannot be
5526 resolved to an instruction sequence. */
5528 static rtx
5529 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5530 enum rtx_code code, bool fetch_after,
5531 bool ignore, enum built_in_function ext_call)
5533 rtx val, mem, ret;
5534 enum memmodel model;
5535 tree fndecl;
5536 tree addr;
5538 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5540 /* Expand the operands. */
5541 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5542 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5544 /* Only try generating instructions if inlining is turned on. */
5545 if (flag_inline_atomics)
5547 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5548 if (ret)
5549 return ret;
5552 /* Return if a different routine isn't needed for the library call. */
5553 if (ext_call == BUILT_IN_NONE)
5554 return NULL_RTX;
5556 /* Change the call to the specified function. */
5557 fndecl = get_callee_fndecl (exp);
5558 addr = CALL_EXPR_FN (exp);
5559 STRIP_NOPS (addr);
5561 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5562 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5564 /* Expand the call here so we can emit trailing code. */
5565 ret = expand_call (exp, target, ignore);
5567 /* Replace the original function just in case it matters. */
5568 TREE_OPERAND (addr, 0) = fndecl;
5570 /* Then issue the arithmetic correction to return the right result. */
5571 if (!ignore)
5573 if (code == NOT)
5575 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5576 OPTAB_LIB_WIDEN);
5577 ret = expand_simple_unop (mode, NOT, ret, target, true);
5579 else
5580 ret = expand_simple_binop (mode, code, ret, val, target, true,
5581 OPTAB_LIB_WIDEN);
5583 return ret;
5587 #ifndef HAVE_atomic_clear
5588 # define HAVE_atomic_clear 0
5589 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5590 #endif
5592 /* Expand an atomic clear operation.
5593 void _atomic_clear (BOOL *obj, enum memmodel)
5594 EXP is the call expression. */
5596 static rtx
5597 expand_builtin_atomic_clear (tree exp)
5599 enum machine_mode mode;
5600 rtx mem, ret;
5601 enum memmodel model;
5603 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5605 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5607 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5608 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5610 error ("invalid memory model for %<__atomic_store%>");
5611 return const0_rtx;
5614 if (HAVE_atomic_clear)
5616 emit_insn (gen_atomic_clear (mem, model));
5617 return const0_rtx;
5620 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5621 Failing that, a store is issued by __atomic_store. The only way this can
5622 fail is if the bool type is larger than a word size. Unlikely, but
5623 handle it anyway for completeness. Assume a single threaded model since
5624 there is no atomic support in this case, and no barriers are required. */
5625 ret = expand_atomic_store (mem, const0_rtx, model, true);
5626 if (!ret)
5627 emit_move_insn (mem, const0_rtx);
5628 return const0_rtx;
5631 /* Expand an atomic test_and_set operation.
5632 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5633 EXP is the call expression. */
5635 static rtx
5636 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5638 rtx mem;
5639 enum memmodel model;
5640 enum machine_mode mode;
5642 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5643 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5644 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5646 return expand_atomic_test_and_set (target, mem, model);
5650 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5651 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5653 static tree
5654 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5656 int size;
5657 enum machine_mode mode;
5658 unsigned int mode_align, type_align;
5660 if (TREE_CODE (arg0) != INTEGER_CST)
5661 return NULL_TREE;
5663 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5664 mode = mode_for_size (size, MODE_INT, 0);
5665 mode_align = GET_MODE_ALIGNMENT (mode);
5667 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5668 type_align = mode_align;
5669 else
5671 tree ttype = TREE_TYPE (arg1);
5673 /* This function is usually invoked and folded immediately by the front
5674 end before anything else has a chance to look at it. The pointer
5675 parameter at this point is usually cast to a void *, so check for that
5676 and look past the cast. */
5677 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5678 && VOID_TYPE_P (TREE_TYPE (ttype)))
5679 arg1 = TREE_OPERAND (arg1, 0);
5681 ttype = TREE_TYPE (arg1);
5682 gcc_assert (POINTER_TYPE_P (ttype));
5684 /* Get the underlying type of the object. */
5685 ttype = TREE_TYPE (ttype);
5686 type_align = TYPE_ALIGN (ttype);
5689 /* If the object has smaller alignment, the the lock free routines cannot
5690 be used. */
5691 if (type_align < mode_align)
5692 return boolean_false_node;
5694 /* Check if a compare_and_swap pattern exists for the mode which represents
5695 the required size. The pattern is not allowed to fail, so the existence
5696 of the pattern indicates support is present. */
5697 if (can_compare_and_swap_p (mode, true))
5698 return boolean_true_node;
5699 else
5700 return boolean_false_node;
5703 /* Return true if the parameters to call EXP represent an object which will
5704 always generate lock free instructions. The first argument represents the
5705 size of the object, and the second parameter is a pointer to the object
5706 itself. If NULL is passed for the object, then the result is based on
5707 typical alignment for an object of the specified size. Otherwise return
5708 false. */
5710 static rtx
5711 expand_builtin_atomic_always_lock_free (tree exp)
5713 tree size;
5714 tree arg0 = CALL_EXPR_ARG (exp, 0);
5715 tree arg1 = CALL_EXPR_ARG (exp, 1);
5717 if (TREE_CODE (arg0) != INTEGER_CST)
5719 error ("non-constant argument 1 to __atomic_always_lock_free");
5720 return const0_rtx;
5723 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5724 if (size == boolean_true_node)
5725 return const1_rtx;
5726 return const0_rtx;
5729 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5730 is lock free on this architecture. */
5732 static tree
5733 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5735 if (!flag_inline_atomics)
5736 return NULL_TREE;
5738 /* If it isn't always lock free, don't generate a result. */
5739 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5740 return boolean_true_node;
5742 return NULL_TREE;
5745 /* Return true if the parameters to call EXP represent an object which will
5746 always generate lock free instructions. The first argument represents the
5747 size of the object, and the second parameter is a pointer to the object
5748 itself. If NULL is passed for the object, then the result is based on
5749 typical alignment for an object of the specified size. Otherwise return
5750 NULL*/
5752 static rtx
5753 expand_builtin_atomic_is_lock_free (tree exp)
5755 tree size;
5756 tree arg0 = CALL_EXPR_ARG (exp, 0);
5757 tree arg1 = CALL_EXPR_ARG (exp, 1);
5759 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5761 error ("non-integer argument 1 to __atomic_is_lock_free");
5762 return NULL_RTX;
5765 if (!flag_inline_atomics)
5766 return NULL_RTX;
5768 /* If the value is known at compile time, return the RTX for it. */
5769 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5770 if (size == boolean_true_node)
5771 return const1_rtx;
5773 return NULL_RTX;
5776 /* Expand the __atomic_thread_fence intrinsic:
5777 void __atomic_thread_fence (enum memmodel)
5778 EXP is the CALL_EXPR. */
5780 static void
5781 expand_builtin_atomic_thread_fence (tree exp)
5783 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5784 expand_mem_thread_fence (model);
5787 /* Expand the __atomic_signal_fence intrinsic:
5788 void __atomic_signal_fence (enum memmodel)
5789 EXP is the CALL_EXPR. */
5791 static void
5792 expand_builtin_atomic_signal_fence (tree exp)
5794 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5795 expand_mem_signal_fence (model);
5798 /* Expand the __sync_synchronize intrinsic. */
5800 static void
5801 expand_builtin_sync_synchronize (void)
5803 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5807 /* Expand an expression EXP that calls a built-in function,
5808 with result going to TARGET if that's convenient
5809 (and in mode MODE if that's convenient).
5810 SUBTARGET may be used as the target for computing one of EXP's operands.
5811 IGNORE is nonzero if the value is to be ignored. */
5814 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5815 int ignore)
5817 tree fndecl = get_callee_fndecl (exp);
5818 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5819 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5820 int flags;
5822 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5823 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5825 /* When not optimizing, generate calls to library functions for a certain
5826 set of builtins. */
5827 if (!optimize
5828 && !called_as_built_in (fndecl)
5829 && fcode != BUILT_IN_ALLOCA
5830 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5831 && fcode != BUILT_IN_FREE)
5832 return expand_call (exp, target, ignore);
5834 /* The built-in function expanders test for target == const0_rtx
5835 to determine whether the function's result will be ignored. */
5836 if (ignore)
5837 target = const0_rtx;
5839 /* If the result of a pure or const built-in function is ignored, and
5840 none of its arguments are volatile, we can avoid expanding the
5841 built-in call and just evaluate the arguments for side-effects. */
5842 if (target == const0_rtx
5843 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5844 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5846 bool volatilep = false;
5847 tree arg;
5848 call_expr_arg_iterator iter;
5850 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5851 if (TREE_THIS_VOLATILE (arg))
5853 volatilep = true;
5854 break;
5857 if (! volatilep)
5859 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5860 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5861 return const0_rtx;
5865 switch (fcode)
5867 CASE_FLT_FN (BUILT_IN_FABS):
5868 target = expand_builtin_fabs (exp, target, subtarget);
5869 if (target)
5870 return target;
5871 break;
5873 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5874 target = expand_builtin_copysign (exp, target, subtarget);
5875 if (target)
5876 return target;
5877 break;
5879 /* Just do a normal library call if we were unable to fold
5880 the values. */
5881 CASE_FLT_FN (BUILT_IN_CABS):
5882 break;
5884 CASE_FLT_FN (BUILT_IN_EXP):
5885 CASE_FLT_FN (BUILT_IN_EXP10):
5886 CASE_FLT_FN (BUILT_IN_POW10):
5887 CASE_FLT_FN (BUILT_IN_EXP2):
5888 CASE_FLT_FN (BUILT_IN_EXPM1):
5889 CASE_FLT_FN (BUILT_IN_LOGB):
5890 CASE_FLT_FN (BUILT_IN_LOG):
5891 CASE_FLT_FN (BUILT_IN_LOG10):
5892 CASE_FLT_FN (BUILT_IN_LOG2):
5893 CASE_FLT_FN (BUILT_IN_LOG1P):
5894 CASE_FLT_FN (BUILT_IN_TAN):
5895 CASE_FLT_FN (BUILT_IN_ASIN):
5896 CASE_FLT_FN (BUILT_IN_ACOS):
5897 CASE_FLT_FN (BUILT_IN_ATAN):
5898 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5899 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5900 because of possible accuracy problems. */
5901 if (! flag_unsafe_math_optimizations)
5902 break;
5903 CASE_FLT_FN (BUILT_IN_SQRT):
5904 CASE_FLT_FN (BUILT_IN_FLOOR):
5905 CASE_FLT_FN (BUILT_IN_CEIL):
5906 CASE_FLT_FN (BUILT_IN_TRUNC):
5907 CASE_FLT_FN (BUILT_IN_ROUND):
5908 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5909 CASE_FLT_FN (BUILT_IN_RINT):
5910 target = expand_builtin_mathfn (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_FMA):
5916 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5917 if (target)
5918 return target;
5919 break;
5921 CASE_FLT_FN (BUILT_IN_ILOGB):
5922 if (! flag_unsafe_math_optimizations)
5923 break;
5924 CASE_FLT_FN (BUILT_IN_ISINF):
5925 CASE_FLT_FN (BUILT_IN_FINITE):
5926 case BUILT_IN_ISFINITE:
5927 case BUILT_IN_ISNORMAL:
5928 target = expand_builtin_interclass_mathfn (exp, target);
5929 if (target)
5930 return target;
5931 break;
5933 CASE_FLT_FN (BUILT_IN_ICEIL):
5934 CASE_FLT_FN (BUILT_IN_LCEIL):
5935 CASE_FLT_FN (BUILT_IN_LLCEIL):
5936 CASE_FLT_FN (BUILT_IN_LFLOOR):
5937 CASE_FLT_FN (BUILT_IN_IFLOOR):
5938 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5939 target = expand_builtin_int_roundingfn (exp, target);
5940 if (target)
5941 return target;
5942 break;
5944 CASE_FLT_FN (BUILT_IN_IRINT):
5945 CASE_FLT_FN (BUILT_IN_LRINT):
5946 CASE_FLT_FN (BUILT_IN_LLRINT):
5947 CASE_FLT_FN (BUILT_IN_IROUND):
5948 CASE_FLT_FN (BUILT_IN_LROUND):
5949 CASE_FLT_FN (BUILT_IN_LLROUND):
5950 target = expand_builtin_int_roundingfn_2 (exp, target);
5951 if (target)
5952 return target;
5953 break;
5955 CASE_FLT_FN (BUILT_IN_POWI):
5956 target = expand_builtin_powi (exp, target);
5957 if (target)
5958 return target;
5959 break;
5961 CASE_FLT_FN (BUILT_IN_ATAN2):
5962 CASE_FLT_FN (BUILT_IN_LDEXP):
5963 CASE_FLT_FN (BUILT_IN_SCALB):
5964 CASE_FLT_FN (BUILT_IN_SCALBN):
5965 CASE_FLT_FN (BUILT_IN_SCALBLN):
5966 if (! flag_unsafe_math_optimizations)
5967 break;
5969 CASE_FLT_FN (BUILT_IN_FMOD):
5970 CASE_FLT_FN (BUILT_IN_REMAINDER):
5971 CASE_FLT_FN (BUILT_IN_DREM):
5972 CASE_FLT_FN (BUILT_IN_POW):
5973 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5974 if (target)
5975 return target;
5976 break;
5978 CASE_FLT_FN (BUILT_IN_CEXPI):
5979 target = expand_builtin_cexpi (exp, target);
5980 gcc_assert (target);
5981 return target;
5983 CASE_FLT_FN (BUILT_IN_SIN):
5984 CASE_FLT_FN (BUILT_IN_COS):
5985 if (! flag_unsafe_math_optimizations)
5986 break;
5987 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5988 if (target)
5989 return target;
5990 break;
5992 CASE_FLT_FN (BUILT_IN_SINCOS):
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 target = expand_builtin_sincos (exp);
5996 if (target)
5997 return target;
5998 break;
6000 case BUILT_IN_APPLY_ARGS:
6001 return expand_builtin_apply_args ();
6003 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6004 FUNCTION with a copy of the parameters described by
6005 ARGUMENTS, and ARGSIZE. It returns a block of memory
6006 allocated on the stack into which is stored all the registers
6007 that might possibly be used for returning the result of a
6008 function. ARGUMENTS is the value returned by
6009 __builtin_apply_args. ARGSIZE is the number of bytes of
6010 arguments that must be copied. ??? How should this value be
6011 computed? We'll also need a safe worst case value for varargs
6012 functions. */
6013 case BUILT_IN_APPLY:
6014 if (!validate_arglist (exp, POINTER_TYPE,
6015 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6016 && !validate_arglist (exp, REFERENCE_TYPE,
6017 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6018 return const0_rtx;
6019 else
6021 rtx ops[3];
6023 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6024 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6025 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6027 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6030 /* __builtin_return (RESULT) causes the function to return the
6031 value described by RESULT. RESULT is address of the block of
6032 memory returned by __builtin_apply. */
6033 case BUILT_IN_RETURN:
6034 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6035 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6036 return const0_rtx;
6038 case BUILT_IN_SAVEREGS:
6039 return expand_builtin_saveregs ();
6041 case BUILT_IN_VA_ARG_PACK:
6042 /* All valid uses of __builtin_va_arg_pack () are removed during
6043 inlining. */
6044 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6045 return const0_rtx;
6047 case BUILT_IN_VA_ARG_PACK_LEN:
6048 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6049 inlining. */
6050 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6051 return const0_rtx;
6053 /* Return the address of the first anonymous stack arg. */
6054 case BUILT_IN_NEXT_ARG:
6055 if (fold_builtin_next_arg (exp, false))
6056 return const0_rtx;
6057 return expand_builtin_next_arg ();
6059 case BUILT_IN_CLEAR_CACHE:
6060 target = expand_builtin___clear_cache (exp);
6061 if (target)
6062 return target;
6063 break;
6065 case BUILT_IN_CLASSIFY_TYPE:
6066 return expand_builtin_classify_type (exp);
6068 case BUILT_IN_CONSTANT_P:
6069 return const0_rtx;
6071 case BUILT_IN_FRAME_ADDRESS:
6072 case BUILT_IN_RETURN_ADDRESS:
6073 return expand_builtin_frame_address (fndecl, exp);
6075 /* Returns the address of the area where the structure is returned.
6076 0 otherwise. */
6077 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6078 if (call_expr_nargs (exp) != 0
6079 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6080 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6081 return const0_rtx;
6082 else
6083 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6085 case BUILT_IN_ALLOCA:
6086 case BUILT_IN_ALLOCA_WITH_ALIGN:
6087 /* If the allocation stems from the declaration of a variable-sized
6088 object, it cannot accumulate. */
6089 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6090 if (target)
6091 return target;
6092 break;
6094 case BUILT_IN_STACK_SAVE:
6095 return expand_stack_save ();
6097 case BUILT_IN_STACK_RESTORE:
6098 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6099 return const0_rtx;
6101 case BUILT_IN_BSWAP16:
6102 case BUILT_IN_BSWAP32:
6103 case BUILT_IN_BSWAP64:
6104 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6105 if (target)
6106 return target;
6107 break;
6109 CASE_INT_FN (BUILT_IN_FFS):
6110 case BUILT_IN_FFSIMAX:
6111 target = expand_builtin_unop (target_mode, exp, target,
6112 subtarget, ffs_optab);
6113 if (target)
6114 return target;
6115 break;
6117 CASE_INT_FN (BUILT_IN_CLZ):
6118 case BUILT_IN_CLZIMAX:
6119 target = expand_builtin_unop (target_mode, exp, target,
6120 subtarget, clz_optab);
6121 if (target)
6122 return target;
6123 break;
6125 CASE_INT_FN (BUILT_IN_CTZ):
6126 case BUILT_IN_CTZIMAX:
6127 target = expand_builtin_unop (target_mode, exp, target,
6128 subtarget, ctz_optab);
6129 if (target)
6130 return target;
6131 break;
6133 CASE_INT_FN (BUILT_IN_CLRSB):
6134 case BUILT_IN_CLRSBIMAX:
6135 target = expand_builtin_unop (target_mode, exp, target,
6136 subtarget, clrsb_optab);
6137 if (target)
6138 return target;
6139 break;
6141 CASE_INT_FN (BUILT_IN_POPCOUNT):
6142 case BUILT_IN_POPCOUNTIMAX:
6143 target = expand_builtin_unop (target_mode, exp, target,
6144 subtarget, popcount_optab);
6145 if (target)
6146 return target;
6147 break;
6149 CASE_INT_FN (BUILT_IN_PARITY):
6150 case BUILT_IN_PARITYIMAX:
6151 target = expand_builtin_unop (target_mode, exp, target,
6152 subtarget, parity_optab);
6153 if (target)
6154 return target;
6155 break;
6157 case BUILT_IN_STRLEN:
6158 target = expand_builtin_strlen (exp, target, target_mode);
6159 if (target)
6160 return target;
6161 break;
6163 case BUILT_IN_STRCPY:
6164 target = expand_builtin_strcpy (exp, target);
6165 if (target)
6166 return target;
6167 break;
6169 case BUILT_IN_STRNCPY:
6170 target = expand_builtin_strncpy (exp, target);
6171 if (target)
6172 return target;
6173 break;
6175 case BUILT_IN_STPCPY:
6176 target = expand_builtin_stpcpy (exp, target, mode);
6177 if (target)
6178 return target;
6179 break;
6181 case BUILT_IN_MEMCPY:
6182 target = expand_builtin_memcpy (exp, target);
6183 if (target)
6184 return target;
6185 break;
6187 case BUILT_IN_MEMPCPY:
6188 target = expand_builtin_mempcpy (exp, target, mode);
6189 if (target)
6190 return target;
6191 break;
6193 case BUILT_IN_MEMSET:
6194 target = expand_builtin_memset (exp, target, mode);
6195 if (target)
6196 return target;
6197 break;
6199 case BUILT_IN_BZERO:
6200 target = expand_builtin_bzero (exp);
6201 if (target)
6202 return target;
6203 break;
6205 case BUILT_IN_STRCMP:
6206 target = expand_builtin_strcmp (exp, target);
6207 if (target)
6208 return target;
6209 break;
6211 case BUILT_IN_STRNCMP:
6212 target = expand_builtin_strncmp (exp, target, mode);
6213 if (target)
6214 return target;
6215 break;
6217 case BUILT_IN_BCMP:
6218 case BUILT_IN_MEMCMP:
6219 target = expand_builtin_memcmp (exp, target, mode);
6220 if (target)
6221 return target;
6222 break;
6224 case BUILT_IN_SETJMP:
6225 /* This should have been lowered to the builtins below. */
6226 gcc_unreachable ();
6228 case BUILT_IN_SETJMP_SETUP:
6229 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6230 and the receiver label. */
6231 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6233 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6234 VOIDmode, EXPAND_NORMAL);
6235 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6236 rtx label_r = label_rtx (label);
6238 /* This is copied from the handling of non-local gotos. */
6239 expand_builtin_setjmp_setup (buf_addr, label_r);
6240 nonlocal_goto_handler_labels
6241 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6242 nonlocal_goto_handler_labels);
6243 /* ??? Do not let expand_label treat us as such since we would
6244 not want to be both on the list of non-local labels and on
6245 the list of forced labels. */
6246 FORCED_LABEL (label) = 0;
6247 return const0_rtx;
6249 break;
6251 case BUILT_IN_SETJMP_DISPATCHER:
6252 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6253 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6255 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6256 rtx label_r = label_rtx (label);
6258 /* Remove the dispatcher label from the list of non-local labels
6259 since the receiver labels have been added to it above. */
6260 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6261 return const0_rtx;
6263 break;
6265 case BUILT_IN_SETJMP_RECEIVER:
6266 /* __builtin_setjmp_receiver is passed the receiver label. */
6267 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6269 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6270 rtx label_r = label_rtx (label);
6272 expand_builtin_setjmp_receiver (label_r);
6273 return const0_rtx;
6275 break;
6277 /* __builtin_longjmp is passed a pointer to an array of five words.
6278 It's similar to the C library longjmp function but works with
6279 __builtin_setjmp above. */
6280 case BUILT_IN_LONGJMP:
6281 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6283 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6284 VOIDmode, EXPAND_NORMAL);
6285 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6287 if (value != const1_rtx)
6289 error ("%<__builtin_longjmp%> second argument must be 1");
6290 return const0_rtx;
6293 expand_builtin_longjmp (buf_addr, value);
6294 return const0_rtx;
6296 break;
6298 case BUILT_IN_NONLOCAL_GOTO:
6299 target = expand_builtin_nonlocal_goto (exp);
6300 if (target)
6301 return target;
6302 break;
6304 /* This updates the setjmp buffer that is its argument with the value
6305 of the current stack pointer. */
6306 case BUILT_IN_UPDATE_SETJMP_BUF:
6307 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6309 rtx buf_addr
6310 = expand_normal (CALL_EXPR_ARG (exp, 0));
6312 expand_builtin_update_setjmp_buf (buf_addr);
6313 return const0_rtx;
6315 break;
6317 case BUILT_IN_TRAP:
6318 expand_builtin_trap ();
6319 return const0_rtx;
6321 case BUILT_IN_UNREACHABLE:
6322 expand_builtin_unreachable ();
6323 return const0_rtx;
6325 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6326 case BUILT_IN_SIGNBITD32:
6327 case BUILT_IN_SIGNBITD64:
6328 case BUILT_IN_SIGNBITD128:
6329 target = expand_builtin_signbit (exp, target);
6330 if (target)
6331 return target;
6332 break;
6334 /* Various hooks for the DWARF 2 __throw routine. */
6335 case BUILT_IN_UNWIND_INIT:
6336 expand_builtin_unwind_init ();
6337 return const0_rtx;
6338 case BUILT_IN_DWARF_CFA:
6339 return virtual_cfa_rtx;
6340 #ifdef DWARF2_UNWIND_INFO
6341 case BUILT_IN_DWARF_SP_COLUMN:
6342 return expand_builtin_dwarf_sp_column ();
6343 case BUILT_IN_INIT_DWARF_REG_SIZES:
6344 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6345 return const0_rtx;
6346 #endif
6347 case BUILT_IN_FROB_RETURN_ADDR:
6348 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6349 case BUILT_IN_EXTRACT_RETURN_ADDR:
6350 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6351 case BUILT_IN_EH_RETURN:
6352 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6353 CALL_EXPR_ARG (exp, 1));
6354 return const0_rtx;
6355 #ifdef EH_RETURN_DATA_REGNO
6356 case BUILT_IN_EH_RETURN_DATA_REGNO:
6357 return expand_builtin_eh_return_data_regno (exp);
6358 #endif
6359 case BUILT_IN_EXTEND_POINTER:
6360 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6361 case BUILT_IN_EH_POINTER:
6362 return expand_builtin_eh_pointer (exp);
6363 case BUILT_IN_EH_FILTER:
6364 return expand_builtin_eh_filter (exp);
6365 case BUILT_IN_EH_COPY_VALUES:
6366 return expand_builtin_eh_copy_values (exp);
6368 case BUILT_IN_VA_START:
6369 return expand_builtin_va_start (exp);
6370 case BUILT_IN_VA_END:
6371 return expand_builtin_va_end (exp);
6372 case BUILT_IN_VA_COPY:
6373 return expand_builtin_va_copy (exp);
6374 case BUILT_IN_EXPECT:
6375 return expand_builtin_expect (exp, target);
6376 case BUILT_IN_ASSUME_ALIGNED:
6377 return expand_builtin_assume_aligned (exp, target);
6378 case BUILT_IN_PREFETCH:
6379 expand_builtin_prefetch (exp);
6380 return const0_rtx;
6382 case BUILT_IN_INIT_TRAMPOLINE:
6383 return expand_builtin_init_trampoline (exp, true);
6384 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6385 return expand_builtin_init_trampoline (exp, false);
6386 case BUILT_IN_ADJUST_TRAMPOLINE:
6387 return expand_builtin_adjust_trampoline (exp);
6389 case BUILT_IN_FORK:
6390 case BUILT_IN_EXECL:
6391 case BUILT_IN_EXECV:
6392 case BUILT_IN_EXECLP:
6393 case BUILT_IN_EXECLE:
6394 case BUILT_IN_EXECVP:
6395 case BUILT_IN_EXECVE:
6396 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6402 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6403 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6404 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6405 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6407 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6413 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6414 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6415 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6416 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6418 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6419 if (target)
6420 return target;
6421 break;
6423 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6424 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6425 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6426 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6427 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6429 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6430 if (target)
6431 return target;
6432 break;
6434 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6435 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6436 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6437 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6438 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6440 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6441 if (target)
6442 return target;
6443 break;
6445 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6446 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6447 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6448 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6449 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6451 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6452 if (target)
6453 return target;
6454 break;
6456 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6457 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6458 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6459 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6460 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6462 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6463 if (target)
6464 return target;
6465 break;
6467 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6468 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6469 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6470 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6471 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6473 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6474 if (target)
6475 return target;
6476 break;
6478 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6479 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6480 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6481 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6482 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6483 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6484 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6485 if (target)
6486 return target;
6487 break;
6489 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6490 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6491 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6492 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6493 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6494 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6495 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6496 if (target)
6497 return target;
6498 break;
6500 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6501 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6502 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6503 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6504 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6505 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6506 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6512 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6513 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6514 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6515 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6517 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6523 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6524 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6525 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6526 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6528 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6529 if (target)
6530 return target;
6531 break;
6533 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6534 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6535 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6536 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6537 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6538 if (mode == VOIDmode)
6539 mode = TYPE_MODE (boolean_type_node);
6540 if (!target || !register_operand (target, mode))
6541 target = gen_reg_rtx (mode);
6543 mode = get_builtin_sync_mode
6544 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6545 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6551 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6552 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6553 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6554 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6555 mode = get_builtin_sync_mode
6556 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6557 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6563 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6564 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6565 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6566 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6568 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6569 if (target)
6570 return target;
6571 break;
6573 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6574 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6575 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6576 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6577 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6579 expand_builtin_sync_lock_release (mode, exp);
6580 return const0_rtx;
6582 case BUILT_IN_SYNC_SYNCHRONIZE:
6583 expand_builtin_sync_synchronize ();
6584 return const0_rtx;
6586 case BUILT_IN_ATOMIC_EXCHANGE_1:
6587 case BUILT_IN_ATOMIC_EXCHANGE_2:
6588 case BUILT_IN_ATOMIC_EXCHANGE_4:
6589 case BUILT_IN_ATOMIC_EXCHANGE_8:
6590 case BUILT_IN_ATOMIC_EXCHANGE_16:
6591 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6592 target = expand_builtin_atomic_exchange (mode, exp, target);
6593 if (target)
6594 return target;
6595 break;
6597 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6598 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6599 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6600 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6601 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6603 unsigned int nargs, z;
6604 VEC(tree,gc) *vec;
6606 mode =
6607 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6608 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6609 if (target)
6610 return target;
6612 /* If this is turned into an external library call, the weak parameter
6613 must be dropped to match the expected parameter list. */
6614 nargs = call_expr_nargs (exp);
6615 vec = VEC_alloc (tree, gc, nargs - 1);
6616 for (z = 0; z < 3; z++)
6617 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6618 /* Skip the boolean weak parameter. */
6619 for (z = 4; z < 6; z++)
6620 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6621 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6622 break;
6625 case BUILT_IN_ATOMIC_LOAD_1:
6626 case BUILT_IN_ATOMIC_LOAD_2:
6627 case BUILT_IN_ATOMIC_LOAD_4:
6628 case BUILT_IN_ATOMIC_LOAD_8:
6629 case BUILT_IN_ATOMIC_LOAD_16:
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6631 target = expand_builtin_atomic_load (mode, exp, target);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_ATOMIC_STORE_1:
6637 case BUILT_IN_ATOMIC_STORE_2:
6638 case BUILT_IN_ATOMIC_STORE_4:
6639 case BUILT_IN_ATOMIC_STORE_8:
6640 case BUILT_IN_ATOMIC_STORE_16:
6641 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6642 target = expand_builtin_atomic_store (mode, exp);
6643 if (target)
6644 return const0_rtx;
6645 break;
6647 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6648 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6649 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6650 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6651 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6653 enum built_in_function lib;
6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6655 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6656 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6657 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6658 ignore, lib);
6659 if (target)
6660 return target;
6661 break;
6663 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6664 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6665 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6666 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6667 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6669 enum built_in_function lib;
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6671 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6672 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6673 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6674 ignore, lib);
6675 if (target)
6676 return target;
6677 break;
6679 case BUILT_IN_ATOMIC_AND_FETCH_1:
6680 case BUILT_IN_ATOMIC_AND_FETCH_2:
6681 case BUILT_IN_ATOMIC_AND_FETCH_4:
6682 case BUILT_IN_ATOMIC_AND_FETCH_8:
6683 case BUILT_IN_ATOMIC_AND_FETCH_16:
6685 enum built_in_function lib;
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6687 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6688 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6689 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6690 ignore, lib);
6691 if (target)
6692 return target;
6693 break;
6695 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6696 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6697 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6698 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6699 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6701 enum built_in_function lib;
6702 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6703 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6704 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6705 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6706 ignore, lib);
6707 if (target)
6708 return target;
6709 break;
6711 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6712 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6713 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6714 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6715 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6717 enum built_in_function lib;
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6719 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6720 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6721 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6722 ignore, lib);
6723 if (target)
6724 return target;
6725 break;
6727 case BUILT_IN_ATOMIC_OR_FETCH_1:
6728 case BUILT_IN_ATOMIC_OR_FETCH_2:
6729 case BUILT_IN_ATOMIC_OR_FETCH_4:
6730 case BUILT_IN_ATOMIC_OR_FETCH_8:
6731 case BUILT_IN_ATOMIC_OR_FETCH_16:
6733 enum built_in_function lib;
6734 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6735 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6736 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6738 ignore, lib);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6744 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6745 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6746 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6747 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6749 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6750 ignore, BUILT_IN_NONE);
6751 if (target)
6752 return target;
6753 break;
6755 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6756 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6757 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6758 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6759 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6762 ignore, BUILT_IN_NONE);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_FETCH_AND_1:
6768 case BUILT_IN_ATOMIC_FETCH_AND_2:
6769 case BUILT_IN_ATOMIC_FETCH_AND_4:
6770 case BUILT_IN_ATOMIC_FETCH_AND_8:
6771 case BUILT_IN_ATOMIC_FETCH_AND_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6774 ignore, BUILT_IN_NONE);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6780 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6781 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6782 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6783 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6785 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6786 ignore, BUILT_IN_NONE);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6792 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6793 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6794 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6795 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6798 ignore, BUILT_IN_NONE);
6799 if (target)
6800 return target;
6801 break;
6803 case BUILT_IN_ATOMIC_FETCH_OR_1:
6804 case BUILT_IN_ATOMIC_FETCH_OR_2:
6805 case BUILT_IN_ATOMIC_FETCH_OR_4:
6806 case BUILT_IN_ATOMIC_FETCH_OR_8:
6807 case BUILT_IN_ATOMIC_FETCH_OR_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6809 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6810 ignore, BUILT_IN_NONE);
6811 if (target)
6812 return target;
6813 break;
6815 case BUILT_IN_ATOMIC_TEST_AND_SET:
6816 return expand_builtin_atomic_test_and_set (exp, target);
6818 case BUILT_IN_ATOMIC_CLEAR:
6819 return expand_builtin_atomic_clear (exp);
6821 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6822 return expand_builtin_atomic_always_lock_free (exp);
6824 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6825 target = expand_builtin_atomic_is_lock_free (exp);
6826 if (target)
6827 return target;
6828 break;
6830 case BUILT_IN_ATOMIC_THREAD_FENCE:
6831 expand_builtin_atomic_thread_fence (exp);
6832 return const0_rtx;
6834 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6835 expand_builtin_atomic_signal_fence (exp);
6836 return const0_rtx;
6838 case BUILT_IN_OBJECT_SIZE:
6839 return expand_builtin_object_size (exp);
6841 case BUILT_IN_MEMCPY_CHK:
6842 case BUILT_IN_MEMPCPY_CHK:
6843 case BUILT_IN_MEMMOVE_CHK:
6844 case BUILT_IN_MEMSET_CHK:
6845 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6846 if (target)
6847 return target;
6848 break;
6850 case BUILT_IN_STRCPY_CHK:
6851 case BUILT_IN_STPCPY_CHK:
6852 case BUILT_IN_STRNCPY_CHK:
6853 case BUILT_IN_STPNCPY_CHK:
6854 case BUILT_IN_STRCAT_CHK:
6855 case BUILT_IN_STRNCAT_CHK:
6856 case BUILT_IN_SNPRINTF_CHK:
6857 case BUILT_IN_VSNPRINTF_CHK:
6858 maybe_emit_chk_warning (exp, fcode);
6859 break;
6861 case BUILT_IN_SPRINTF_CHK:
6862 case BUILT_IN_VSPRINTF_CHK:
6863 maybe_emit_sprintf_chk_warning (exp, fcode);
6864 break;
6866 case BUILT_IN_FREE:
6867 if (warn_free_nonheap_object)
6868 maybe_emit_free_warning (exp);
6869 break;
6871 default: /* just do library call, if unknown builtin */
6872 break;
6875 /* The switch statement above can drop through to cause the function
6876 to be called normally. */
6877 return expand_call (exp, target, ignore);
6880 /* Determine whether a tree node represents a call to a built-in
6881 function. If the tree T is a call to a built-in function with
6882 the right number of arguments of the appropriate types, return
6883 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6884 Otherwise the return value is END_BUILTINS. */
6886 enum built_in_function
6887 builtin_mathfn_code (const_tree t)
6889 const_tree fndecl, arg, parmlist;
6890 const_tree argtype, parmtype;
6891 const_call_expr_arg_iterator iter;
6893 if (TREE_CODE (t) != CALL_EXPR
6894 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6895 return END_BUILTINS;
6897 fndecl = get_callee_fndecl (t);
6898 if (fndecl == NULL_TREE
6899 || TREE_CODE (fndecl) != FUNCTION_DECL
6900 || ! DECL_BUILT_IN (fndecl)
6901 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6902 return END_BUILTINS;
6904 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6905 init_const_call_expr_arg_iterator (t, &iter);
6906 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6908 /* If a function doesn't take a variable number of arguments,
6909 the last element in the list will have type `void'. */
6910 parmtype = TREE_VALUE (parmlist);
6911 if (VOID_TYPE_P (parmtype))
6913 if (more_const_call_expr_args_p (&iter))
6914 return END_BUILTINS;
6915 return DECL_FUNCTION_CODE (fndecl);
6918 if (! more_const_call_expr_args_p (&iter))
6919 return END_BUILTINS;
6921 arg = next_const_call_expr_arg (&iter);
6922 argtype = TREE_TYPE (arg);
6924 if (SCALAR_FLOAT_TYPE_P (parmtype))
6926 if (! SCALAR_FLOAT_TYPE_P (argtype))
6927 return END_BUILTINS;
6929 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6931 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6932 return END_BUILTINS;
6934 else if (POINTER_TYPE_P (parmtype))
6936 if (! POINTER_TYPE_P (argtype))
6937 return END_BUILTINS;
6939 else if (INTEGRAL_TYPE_P (parmtype))
6941 if (! INTEGRAL_TYPE_P (argtype))
6942 return END_BUILTINS;
6944 else
6945 return END_BUILTINS;
6948 /* Variable-length argument list. */
6949 return DECL_FUNCTION_CODE (fndecl);
6952 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6953 evaluate to a constant. */
6955 static tree
6956 fold_builtin_constant_p (tree arg)
6958 /* We return 1 for a numeric type that's known to be a constant
6959 value at compile-time or for an aggregate type that's a
6960 literal constant. */
6961 STRIP_NOPS (arg);
6963 /* If we know this is a constant, emit the constant of one. */
6964 if (CONSTANT_CLASS_P (arg)
6965 || (TREE_CODE (arg) == CONSTRUCTOR
6966 && TREE_CONSTANT (arg)))
6967 return integer_one_node;
6968 if (TREE_CODE (arg) == ADDR_EXPR)
6970 tree op = TREE_OPERAND (arg, 0);
6971 if (TREE_CODE (op) == STRING_CST
6972 || (TREE_CODE (op) == ARRAY_REF
6973 && integer_zerop (TREE_OPERAND (op, 1))
6974 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6975 return integer_one_node;
6978 /* If this expression has side effects, show we don't know it to be a
6979 constant. Likewise if it's a pointer or aggregate type since in
6980 those case we only want literals, since those are only optimized
6981 when generating RTL, not later.
6982 And finally, if we are compiling an initializer, not code, we
6983 need to return a definite result now; there's not going to be any
6984 more optimization done. */
6985 if (TREE_SIDE_EFFECTS (arg)
6986 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6987 || POINTER_TYPE_P (TREE_TYPE (arg))
6988 || cfun == 0
6989 || folding_initializer)
6990 return integer_zero_node;
6992 return NULL_TREE;
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  Used to distribute an expectation over
   the operands of a short-circuit operator.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* Pull the parameter and return types from the builtin's own
     declaration so the conversions below are exact.  */
  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);

  /* Compare the call result against zero to turn it back into a
     truthvalue.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Push the expectation down onto each operand of the
         short-circuit operator.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the base object of the address.  */
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a compile-time constant:
         it may resolve to NULL at link/run time.  */
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
          && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7090 /* Fold a call to __builtin_classify_type with argument ARG. */
7092 static tree
7093 fold_builtin_classify_type (tree arg)
7095 if (arg == 0)
7096 return build_int_cst (integer_type_node, no_type_class);
7098 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7101 /* Fold a call to __builtin_strlen with argument ARG. */
7103 static tree
7104 fold_builtin_strlen (location_t loc, tree type, tree arg)
7106 if (!validate_arg (arg, POINTER_TYPE))
7107 return NULL_TREE;
7108 else
7110 tree len = c_strlen (arg, 0);
7112 if (len)
7113 return fold_convert_loc (loc, type, len);
7115 return NULL_TREE;
7119 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7121 static tree
7122 fold_builtin_inf (location_t loc, tree type, int warn)
7124 REAL_VALUE_TYPE real;
7126 /* __builtin_inff is intended to be usable to define INFINITY on all
7127 targets. If an infinity is not available, INFINITY expands "to a
7128 positive constant of type float that overflows at translation
7129 time", footnote "In this case, using INFINITY will violate the
7130 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7131 Thus we pedwarn to ensure this constraint violation is
7132 diagnosed. */
7133 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7134 pedwarn (loc, 0, "target format does not support infinity");
7136 real_inf (&real);
7137 return build_real (type, real);
7140 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7142 static tree
7143 fold_builtin_nan (tree arg, tree type, int quiet)
7145 REAL_VALUE_TYPE real;
7146 const char *str;
7148 if (!validate_arg (arg, POINTER_TYPE))
7149 return NULL_TREE;
7150 str = c_getstr (arg);
7151 if (!str)
7152 return NULL_TREE;
7154 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7155 return NULL_TREE;
7157 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from integer to float is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these expressions is their second operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These operations are closed over the integers: integral
         operands give an integral result.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
             && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integer valued; the condition is irrelevant.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
             && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return integer_valued_real_p (TREE_OPERAND (t, 0));
        break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_TRUNC):
          /* Rounding builtins always produce integer values.  */
          return true;

        CASE_FLT_FN (BUILT_IN_FMIN):
        CASE_FLT_FN (BUILT_IN_FMAX):
          /* fmin/fmax return one of their operands.  */
          return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
                 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

        default:
          break;
        }
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* When the argument is really a narrower type widened for the
         call, do the rounding in the narrow type and widen the result,
         provided a builtin exists for that type.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return fold_convert_loc (loc, ftype,
                                 build_call_expr_loc (loc, decl, 1,
                                                      fold_convert_loc (loc,
                                                                        newtype,
                                                                        arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                            TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* Narrow the argument type when the value really is narrower and
         a builtin exists for the narrow type.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return build_call_expr_loc (loc, decl, 1,
                                    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_ICEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_IFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_IROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_IRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          /* Convert back to the int return type of the original call.  */
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LLCEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_LLFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_LLROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_LLRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          /* Convert back to the long long return type of the original
             call.  */
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
                              type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
        return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
        return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
          && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
          const REAL_VALUE_TYPE sqrt2_trunc
            = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
          STRIP_NOPS (real);
          return fold_build2_loc (loc, MULT_EXPR, type,
                                  fold_build1_loc (loc, ABS_EXPR, type, real),
                                  build_real (type, sqrt2_trunc));
        }
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) inline as sqrt(creal(z)**2 + cimag(z)**2).
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result;

          /* Save ARG (and its parts) so any side effects are evaluated
             only once even though each value is used twice.  */
          arg = builtin_save_expr (arg);

          rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

          rpart = builtin_save_expr (rpart);
          ipart = builtin_save_expr (ipart);

          result = fold_build2_loc (loc, PLUS_EXPR, type,
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     rpart, rpart),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     ipart, ipart));

          return build_call_expr_loc (loc, sqrtfn, 1, result);
        }
    }

  return NULL_TREE;
}
7455 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7456 complex tree type of the result. If NEG is true, the imaginary
7457 zero is negative. */
7459 static tree
7460 build_complex_cproj (tree type, bool neg)
7462 REAL_VALUE_TYPE rinf, rzero = dconst0;
7464 real_inf (&rinf);
7465 rzero.sign = neg;
7466 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7467 build_real (TREE_TYPE (type), rzero));
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Any infinity projects to (inf + copysign(0,imag)*i); a finite
         value is its own projection.  */
      if (real_isinf (real) || real_isinf (imag))
        return build_complex_cproj (type, imag->sign);
      else
        return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
         nonnegative, return (inf + 0i).  Remember side-effects are
         possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (real))
          && tree_expr_nonnegative_p (imag))
        return omit_one_operand_loc (loc, type,
                                     build_complex_cproj (type, false),
                                     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
         Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (imag)))
        return
          omit_one_operand_loc (loc, type,
                                build_complex_cproj (type, TREE_REAL_CST_PTR
                                                     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
                             CALL_EXPR_ARG (arg, 0),
                             build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          tree tree_root;
          /* The inner root was either sqrt or cbrt.  */
          /* This was a conditional expression but it triggered a bug
             in Sun C 5.5.  */
          REAL_VALUE_TYPE dconstroot;
          if (BUILTIN_SQRT_P (fcode))
            dconstroot = dconsthalf;
          else
            dconstroot = dconst_third ();

          /* Adjust for the outer root: decrementing the binary
             exponent halves the inner exponent (1/2 -> 1/4,
             1/3 -> 1/6).  */
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
          tree_root = build_real (type, dconstroot);
          return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
        }
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is already known nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
        arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                               build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconst_third ());
          arg = fold_build2_loc (loc, MULT_EXPR, type,
                                 CALL_EXPR_ARG (arg, 0),
                                 build_real (type, third_trunc));
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = CALL_EXPR_ARG (arg, 0);
              tree tree_root;
              /* 1/6 is 1/3 with its binary exponent reduced by one.  */
              REAL_VALUE_TYPE dconstroot = dconst_third ();

              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  /* 1/9 = (1/3) * (1/3).  */
                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   dconst_third_ptr (), dconst_third_ptr ());
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
                  tree_root = build_real (type, dconstroot);
                  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg, 0);
          tree arg01 = CALL_EXPR_ARG (arg, 1);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
                                             build_real (type, dconstroot));
              return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
            }
        }
    }
  return NULL_TREE;
}
7691 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7692 TYPE is the type of the return value. Return NULL_TREE if no
7693 simplification can be made. */
7695 static tree
7696 fold_builtin_cos (location_t loc,
7697 tree arg, tree type, tree fndecl)
7699 tree res, narg;
7701 if (!validate_arg (arg, REAL_TYPE))
7702 return NULL_TREE;
7704 /* Calculate the result when the argument is a constant. */
7705 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7706 return res;
7708 /* Optimize cos(-x) into cos (x). */
7709 if ((narg = fold_strip_sign_ops (arg)))
7710 return build_call_expr_loc (loc, fndecl, 1, narg);
7712 return NULL_TREE;
7715 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7716 Return NULL_TREE if no simplification can be made. */
7718 static tree
7719 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7721 if (validate_arg (arg, REAL_TYPE))
7723 tree res, narg;
7725 /* Calculate the result when the argument is a constant. */
7726 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7727 return res;
7729 /* Optimize cosh(-x) into cosh (x). */
7730 if ((narg = fold_strip_sign_ops (arg)))
7731 return build_call_expr_loc (loc, fndecl, 1, narg);
7734 return NULL_TREE;
7737 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7738 argument ARG. TYPE is the type of the return value. Return
7739 NULL_TREE if no simplification can be made. */
7741 static tree
7742 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7743 bool hyper)
7745 if (validate_arg (arg, COMPLEX_TYPE)
7746 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7748 tree tmp;
7750 /* Calculate the result when the argument is a constant. */
7751 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7752 return tmp;
7754 /* Optimize fn(-x) into fn(x). */
7755 if ((tmp = fold_strip_sign_ops (arg)))
7756 return build_call_expr_loc (loc, fndecl, 1, tmp);
7759 return NULL_TREE;
7762 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7763 Return NULL_TREE if no simplification can be made. */
7765 static tree
7766 fold_builtin_tan (tree arg, tree type)
7768 enum built_in_function fcode;
7769 tree res;
7771 if (!validate_arg (arg, REAL_TYPE))
7772 return NULL_TREE;
7774 /* Calculate the result when the argument is a constant. */
7775 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7776 return res;
7778 /* Optimize tan(atan(x)) = x. */
7779 fcode = builtin_mathfn_code (arg);
7780 if (flag_unsafe_math_optimizations
7781 && (fcode == BUILT_IN_ATAN
7782 || fcode == BUILT_IN_ATANF
7783 || fcode == BUILT_IN_ATANL))
7784 return CALL_EXPR_ARG (arg, 0);
7786 return NULL_TREE;
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the angle,
   ARG1 and ARG2 point to where sin and cos are stored.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the cexpi call so it is evaluated only once.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Store the imaginary part (sin) through ARG1 and the real part
     (cos) through ARG2.  */
  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         build1 (REALPART_EXPR, type, call)));
}
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + iy) == cexpi(y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
        return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
        return NULL_TREE;

      /* Save both calls so each is evaluated only once; their results
         are each used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, REALPART_EXPR,
                                                                rtype, icall)),
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc, IMAGPART_EXPR,
                                                                rtype, icall)));
    }

  return NULL_TREE;
}
7897 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7898 Return NULL_TREE if no simplification can be made. */
7900 static tree
7901 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7903 if (!validate_arg (arg, REAL_TYPE))
7904 return NULL_TREE;
7906 /* Optimize trunc of constant value. */
7907 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7909 REAL_VALUE_TYPE r, x;
7910 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7912 x = TREE_REAL_CST (arg);
7913 real_trunc (&r, TYPE_MODE (type), &x);
7914 return build_real (type, r);
7917 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7920 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7921 Return NULL_TREE if no simplification can be made. */
7923 static tree
7924 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7926 if (!validate_arg (arg, REAL_TYPE))
7927 return NULL_TREE;
7929 /* Optimize floor of constant value. */
7930 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7932 REAL_VALUE_TYPE x;
7934 x = TREE_REAL_CST (arg);
7935 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7937 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7938 REAL_VALUE_TYPE r;
7940 real_floor (&r, TYPE_MODE (type), &x);
7941 return build_real (type, r);
7945 /* Fold floor (x) where x is nonnegative to trunc (x). */
7946 if (tree_expr_nonnegative_p (arg))
7948 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7949 if (truncfn)
7950 return build_call_expr_loc (loc, truncfn, 1, arg);
7953 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7957 Return NULL_TREE if no simplification can be made. */
7959 static tree
7960 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7962 if (!validate_arg (arg, REAL_TYPE))
7963 return NULL_TREE;
7965 /* Optimize ceil of constant value. */
7966 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7968 REAL_VALUE_TYPE x;
7970 x = TREE_REAL_CST (arg);
7971 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974 REAL_VALUE_TYPE r;
7976 real_ceil (&r, TYPE_MODE (type), &x);
7977 return build_real (type, r);
7981 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7985 Return NULL_TREE if no simplification can be made. */
7987 static tree
7988 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7990 if (!validate_arg (arg, REAL_TYPE))
7991 return NULL_TREE;
7993 /* Optimize round of constant value. */
7994 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7996 REAL_VALUE_TYPE x;
7998 x = TREE_REAL_CST (arg);
7999 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8001 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8002 REAL_VALUE_TYPE r;
8004 real_round (&r, TYPE_MODE (type), &x);
8005 return build_real (type, r);
8009 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg);
          double_int val;
          REAL_VALUE_TYPE r;

          /* Apply the rounding mode implied by the builtin.  */
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_IFLOOR):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_ICEIL):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_IROUND):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_LLROUND):
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          /* Only fold when the rounded value fits the integer return
             type; otherwise leave the call for run time.  */
          real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
          if (double_int_fits_to_tree_p (itype, val))
            return double_int_to_tree (itype, val);
        }
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                                TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
8080 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8081 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8082 the argument to the call. Return NULL_TREE if no simplification can
8083 be made. */
8085 static tree
8086 fold_builtin_bitop (tree fndecl, tree arg)
8088 if (!validate_arg (arg, INTEGER_TYPE))
8089 return NULL_TREE;
8091 /* Optimize for constant argument. */
8092 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8094 HOST_WIDE_INT hi, width, result;
8095 unsigned HOST_WIDE_INT lo;
8096 tree type;
8098 type = TREE_TYPE (arg);
8099 width = TYPE_PRECISION (type);
8100 lo = TREE_INT_CST_LOW (arg);
8102 /* Clear all the bits that are beyond the type's precision. */
8103 if (width > HOST_BITS_PER_WIDE_INT)
8105 hi = TREE_INT_CST_HIGH (arg);
8106 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8107 hi &= ~((unsigned HOST_WIDE_INT) (-1)
8108 << (width - HOST_BITS_PER_WIDE_INT));
8110 else
8112 hi = 0;
8113 if (width < HOST_BITS_PER_WIDE_INT)
8114 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8117 switch (DECL_FUNCTION_CODE (fndecl))
8119 CASE_INT_FN (BUILT_IN_FFS):
8120 if (lo != 0)
8121 result = ffs_hwi (lo);
8122 else if (hi != 0)
8123 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8124 else
8125 result = 0;
8126 break;
8128 CASE_INT_FN (BUILT_IN_CLZ):
8129 if (hi != 0)
8130 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8131 else if (lo != 0)
8132 result = width - floor_log2 (lo) - 1;
8133 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8134 result = width;
8135 break;
8137 CASE_INT_FN (BUILT_IN_CTZ):
8138 if (lo != 0)
8139 result = ctz_hwi (lo);
8140 else if (hi != 0)
8141 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8142 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8143 result = width;
8144 break;
8146 CASE_INT_FN (BUILT_IN_CLRSB):
8147 if (width > HOST_BITS_PER_WIDE_INT
8148 && (hi & ((unsigned HOST_WIDE_INT) 1
8149 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8151 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
8152 << (width - HOST_BITS_PER_WIDE_INT - 1));
8153 lo = ~lo;
8155 else if (width <= HOST_BITS_PER_WIDE_INT
8156 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8157 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
8158 if (hi != 0)
8159 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8160 else if (lo != 0)
8161 result = width - floor_log2 (lo) - 2;
8162 else
8163 result = width - 1;
8164 break;
8166 CASE_INT_FN (BUILT_IN_POPCOUNT):
8167 result = 0;
8168 while (lo)
8169 result++, lo &= lo - 1;
8170 while (hi)
8171 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8172 break;
8174 CASE_INT_FN (BUILT_IN_PARITY):
8175 result = 0;
8176 while (lo)
8177 result++, lo &= lo - 1;
8178 while (hi)
8179 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8180 result &= 1;
8181 break;
8183 default:
8184 gcc_unreachable ();
8187 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8190 return NULL_TREE;
8193 /* Fold function call to builtin_bswap and the short, long and long long
8194 variants. Return NULL_TREE if no simplification can be made. */
8195 static tree
8196 fold_builtin_bswap (tree fndecl, tree arg)
8198 if (! validate_arg (arg, INTEGER_TYPE))
8199 return NULL_TREE;
8201 /* Optimize constant value. */
8202 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8204 HOST_WIDE_INT hi, width, r_hi = 0;
8205 unsigned HOST_WIDE_INT lo, r_lo = 0;
8206 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8208 width = TYPE_PRECISION (type);
8209 lo = TREE_INT_CST_LOW (arg);
8210 hi = TREE_INT_CST_HIGH (arg);
8212 switch (DECL_FUNCTION_CODE (fndecl))
8214 case BUILT_IN_BSWAP16:
8215 case BUILT_IN_BSWAP32:
8216 case BUILT_IN_BSWAP64:
8218 int s;
8220 for (s = 0; s < width; s += 8)
8222 int d = width - s - 8;
8223 unsigned HOST_WIDE_INT byte;
8225 if (s < HOST_BITS_PER_WIDE_INT)
8226 byte = (lo >> s) & 0xff;
8227 else
8228 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8230 if (d < HOST_BITS_PER_WIDE_INT)
8231 r_lo |= byte << d;
8232 else
8233 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8237 break;
8239 default:
8240 gcc_unreachable ();
8243 if (width < HOST_BITS_PER_WIDE_INT)
8244 return build_int_cst (type, r_lo);
8245 else
8246 return build_int_cst_wide (type, r_lo, r_hi);
8249 return NULL_TREE;
8252 /* A subroutine of fold_builtin to fold the various logarithmic
8253 functions. Return NULL_TREE if no simplification can me made.
8254 FUNC is the corresponding MPFR logarithm function. */
8256 static tree
8257 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8258 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8260 if (validate_arg (arg, REAL_TYPE))
8262 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8263 tree res;
8264 const enum built_in_function fcode = builtin_mathfn_code (arg);
8266 /* Calculate the result when the argument is a constant. */
8267 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8268 return res;
8270 /* Special case, optimize logN(expN(x)) = x. */
8271 if (flag_unsafe_math_optimizations
8272 && ((func == mpfr_log
8273 && (fcode == BUILT_IN_EXP
8274 || fcode == BUILT_IN_EXPF
8275 || fcode == BUILT_IN_EXPL))
8276 || (func == mpfr_log2
8277 && (fcode == BUILT_IN_EXP2
8278 || fcode == BUILT_IN_EXP2F
8279 || fcode == BUILT_IN_EXP2L))
8280 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8281 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8283 /* Optimize logN(func()) for various exponential functions. We
8284 want to determine the value "x" and the power "exponent" in
8285 order to transform logN(x**exponent) into exponent*logN(x). */
8286 if (flag_unsafe_math_optimizations)
8288 tree exponent = 0, x = 0;
8290 switch (fcode)
8292 CASE_FLT_FN (BUILT_IN_EXP):
8293 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8294 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8295 dconst_e ()));
8296 exponent = CALL_EXPR_ARG (arg, 0);
8297 break;
8298 CASE_FLT_FN (BUILT_IN_EXP2):
8299 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8300 x = build_real (type, dconst2);
8301 exponent = CALL_EXPR_ARG (arg, 0);
8302 break;
8303 CASE_FLT_FN (BUILT_IN_EXP10):
8304 CASE_FLT_FN (BUILT_IN_POW10):
8305 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8307 REAL_VALUE_TYPE dconst10;
8308 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8309 x = build_real (type, dconst10);
8311 exponent = CALL_EXPR_ARG (arg, 0);
8312 break;
8313 CASE_FLT_FN (BUILT_IN_SQRT):
8314 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8315 x = CALL_EXPR_ARG (arg, 0);
8316 exponent = build_real (type, dconsthalf);
8317 break;
8318 CASE_FLT_FN (BUILT_IN_CBRT):
8319 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8320 x = CALL_EXPR_ARG (arg, 0);
8321 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8322 dconst_third ()));
8323 break;
8324 CASE_FLT_FN (BUILT_IN_POW):
8325 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8326 x = CALL_EXPR_ARG (arg, 0);
8327 exponent = CALL_EXPR_ARG (arg, 1);
8328 break;
8329 default:
8330 break;
8333 /* Now perform the optimization. */
8334 if (x && exponent)
8336 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8337 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8342 return NULL_TREE;
8345 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8346 NULL_TREE if no simplification can be made. */
8348 static tree
8349 fold_builtin_hypot (location_t loc, tree fndecl,
8350 tree arg0, tree arg1, tree type)
8352 tree res, narg0, narg1;
8354 if (!validate_arg (arg0, REAL_TYPE)
8355 || !validate_arg (arg1, REAL_TYPE))
8356 return NULL_TREE;
8358 /* Calculate the result when the argument is a constant. */
8359 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8360 return res;
8362 /* If either argument to hypot has a negate or abs, strip that off.
8363 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8364 narg0 = fold_strip_sign_ops (arg0);
8365 narg1 = fold_strip_sign_ops (arg1);
8366 if (narg0 || narg1)
8368 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8369 narg1 ? narg1 : arg1);
8372 /* If either argument is zero, hypot is fabs of the other. */
8373 if (real_zerop (arg0))
8374 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8375 else if (real_zerop (arg1))
8376 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8378 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8379 if (flag_unsafe_math_optimizations
8380 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8382 const REAL_VALUE_TYPE sqrt2_trunc
8383 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8384 return fold_build2_loc (loc, MULT_EXPR, type,
8385 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8386 build_real (type, sqrt2_trunc));
8389 return NULL_TREE;
8393 /* Fold a builtin function call to pow, powf, or powl. Return
8394 NULL_TREE if no simplification can be made. */
8395 static tree
8396 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8398 tree res;
8400 if (!validate_arg (arg0, REAL_TYPE)
8401 || !validate_arg (arg1, REAL_TYPE))
8402 return NULL_TREE;
8404 /* Calculate the result when the argument is a constant. */
8405 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8406 return res;
8408 /* Optimize pow(1.0,y) = 1.0. */
8409 if (real_onep (arg0))
8410 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8412 if (TREE_CODE (arg1) == REAL_CST
8413 && !TREE_OVERFLOW (arg1))
8415 REAL_VALUE_TYPE cint;
8416 REAL_VALUE_TYPE c;
8417 HOST_WIDE_INT n;
8419 c = TREE_REAL_CST (arg1);
8421 /* Optimize pow(x,0.0) = 1.0. */
8422 if (REAL_VALUES_EQUAL (c, dconst0))
8423 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8424 arg0);
8426 /* Optimize pow(x,1.0) = x. */
8427 if (REAL_VALUES_EQUAL (c, dconst1))
8428 return arg0;
8430 /* Optimize pow(x,-1.0) = 1.0/x. */
8431 if (REAL_VALUES_EQUAL (c, dconstm1))
8432 return fold_build2_loc (loc, RDIV_EXPR, type,
8433 build_real (type, dconst1), arg0);
8435 /* Optimize pow(x,0.5) = sqrt(x). */
8436 if (flag_unsafe_math_optimizations
8437 && REAL_VALUES_EQUAL (c, dconsthalf))
8439 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8441 if (sqrtfn != NULL_TREE)
8442 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8445 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8446 if (flag_unsafe_math_optimizations)
8448 const REAL_VALUE_TYPE dconstroot
8449 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8451 if (REAL_VALUES_EQUAL (c, dconstroot))
8453 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8454 if (cbrtfn != NULL_TREE)
8455 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8459 /* Check for an integer exponent. */
8460 n = real_to_integer (&c);
8461 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8462 if (real_identical (&c, &cint))
8464 /* Attempt to evaluate pow at compile-time, unless this should
8465 raise an exception. */
8466 if (TREE_CODE (arg0) == REAL_CST
8467 && !TREE_OVERFLOW (arg0)
8468 && (n > 0
8469 || (!flag_trapping_math && !flag_errno_math)
8470 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8472 REAL_VALUE_TYPE x;
8473 bool inexact;
8475 x = TREE_REAL_CST (arg0);
8476 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8477 if (flag_unsafe_math_optimizations || !inexact)
8478 return build_real (type, x);
8481 /* Strip sign ops from even integer powers. */
8482 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8484 tree narg0 = fold_strip_sign_ops (arg0);
8485 if (narg0)
8486 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8491 if (flag_unsafe_math_optimizations)
8493 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8495 /* Optimize pow(expN(x),y) = expN(x*y). */
8496 if (BUILTIN_EXPONENT_P (fcode))
8498 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8499 tree arg = CALL_EXPR_ARG (arg0, 0);
8500 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8501 return build_call_expr_loc (loc, expfn, 1, arg);
8504 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8505 if (BUILTIN_SQRT_P (fcode))
8507 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8508 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8509 build_real (type, dconsthalf));
8510 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8513 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8514 if (BUILTIN_CBRT_P (fcode))
8516 tree arg = CALL_EXPR_ARG (arg0, 0);
8517 if (tree_expr_nonnegative_p (arg))
8519 const REAL_VALUE_TYPE dconstroot
8520 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8521 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8522 build_real (type, dconstroot));
8523 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8527 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8528 if (fcode == BUILT_IN_POW
8529 || fcode == BUILT_IN_POWF
8530 || fcode == BUILT_IN_POWL)
8532 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8533 if (tree_expr_nonnegative_p (arg00))
8535 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8536 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8537 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8542 return NULL_TREE;
8545 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8546 Return NULL_TREE if no simplification can be made. */
8547 static tree
8548 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8549 tree arg0, tree arg1, tree type)
8551 if (!validate_arg (arg0, REAL_TYPE)
8552 || !validate_arg (arg1, INTEGER_TYPE))
8553 return NULL_TREE;
8555 /* Optimize pow(1.0,y) = 1.0. */
8556 if (real_onep (arg0))
8557 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8559 if (host_integerp (arg1, 0))
8561 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8563 /* Evaluate powi at compile-time. */
8564 if (TREE_CODE (arg0) == REAL_CST
8565 && !TREE_OVERFLOW (arg0))
8567 REAL_VALUE_TYPE x;
8568 x = TREE_REAL_CST (arg0);
8569 real_powi (&x, TYPE_MODE (type), &x, c);
8570 return build_real (type, x);
8573 /* Optimize pow(x,0) = 1.0. */
8574 if (c == 0)
8575 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8576 arg0);
8578 /* Optimize pow(x,1) = x. */
8579 if (c == 1)
8580 return arg0;
8582 /* Optimize pow(x,-1) = 1.0/x. */
8583 if (c == -1)
8584 return fold_build2_loc (loc, RDIV_EXPR, type,
8585 build_real (type, dconst1), arg0);
8588 return NULL_TREE;
8591 /* A subroutine of fold_builtin to fold the various exponent
8592 functions. Return NULL_TREE if no simplification can be made.
8593 FUNC is the corresponding MPFR exponent function. */
8595 static tree
8596 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8597 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8599 if (validate_arg (arg, REAL_TYPE))
8601 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8602 tree res;
8604 /* Calculate the result when the argument is a constant. */
8605 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8606 return res;
8608 /* Optimize expN(logN(x)) = x. */
8609 if (flag_unsafe_math_optimizations)
8611 const enum built_in_function fcode = builtin_mathfn_code (arg);
8613 if ((func == mpfr_exp
8614 && (fcode == BUILT_IN_LOG
8615 || fcode == BUILT_IN_LOGF
8616 || fcode == BUILT_IN_LOGL))
8617 || (func == mpfr_exp2
8618 && (fcode == BUILT_IN_LOG2
8619 || fcode == BUILT_IN_LOG2F
8620 || fcode == BUILT_IN_LOG2L))
8621 || (func == mpfr_exp10
8622 && (fcode == BUILT_IN_LOG10
8623 || fcode == BUILT_IN_LOG10F
8624 || fcode == BUILT_IN_LOG10L)))
8625 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8629 return NULL_TREE;
8632 /* Return true if VAR is a VAR_DECL or a component thereof. */
8634 static bool
8635 var_decl_component_p (tree var)
8637 tree inner = var;
8638 while (handled_component_p (inner))
8639 inner = TREE_OPERAND (inner, 0);
8640 return SSA_VAR_P (inner);
8643 /* Fold function call to builtin memset. Return
8644 NULL_TREE if no simplification can be made. */
8646 static tree
8647 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8648 tree type, bool ignore)
8650 tree var, ret, etype;
8651 unsigned HOST_WIDE_INT length, cval;
8653 if (! validate_arg (dest, POINTER_TYPE)
8654 || ! validate_arg (c, INTEGER_TYPE)
8655 || ! validate_arg (len, INTEGER_TYPE))
8656 return NULL_TREE;
8658 if (! host_integerp (len, 1))
8659 return NULL_TREE;
8661 /* If the LEN parameter is zero, return DEST. */
8662 if (integer_zerop (len))
8663 return omit_one_operand_loc (loc, type, dest, c);
8665 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8666 return NULL_TREE;
8668 var = dest;
8669 STRIP_NOPS (var);
8670 if (TREE_CODE (var) != ADDR_EXPR)
8671 return NULL_TREE;
8673 var = TREE_OPERAND (var, 0);
8674 if (TREE_THIS_VOLATILE (var))
8675 return NULL_TREE;
8677 etype = TREE_TYPE (var);
8678 if (TREE_CODE (etype) == ARRAY_TYPE)
8679 etype = TREE_TYPE (etype);
8681 if (!INTEGRAL_TYPE_P (etype)
8682 && !POINTER_TYPE_P (etype))
8683 return NULL_TREE;
8685 if (! var_decl_component_p (var))
8686 return NULL_TREE;
8688 length = tree_low_cst (len, 1);
8689 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8690 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8691 return NULL_TREE;
8693 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8694 return NULL_TREE;
8696 if (integer_zerop (c))
8697 cval = 0;
8698 else
8700 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8701 return NULL_TREE;
8703 cval = TREE_INT_CST_LOW (c);
8704 cval &= 0xff;
8705 cval |= cval << 8;
8706 cval |= cval << 16;
8707 cval |= (cval << 31) << 1;
8710 ret = build_int_cst_type (etype, cval);
8711 var = build_fold_indirect_ref_loc (loc,
8712 fold_convert_loc (loc,
8713 build_pointer_type (etype),
8714 dest));
8715 ret = build2 (MODIFY_EXPR, etype, var, ret);
8716 if (ignore)
8717 return ret;
8719 return omit_one_operand_loc (loc, type, dest, ret);
8722 /* Fold function call to builtin memset. Return
8723 NULL_TREE if no simplification can be made. */
8725 static tree
8726 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8728 if (! validate_arg (dest, POINTER_TYPE)
8729 || ! validate_arg (size, INTEGER_TYPE))
8730 return NULL_TREE;
8732 if (!ignore)
8733 return NULL_TREE;
8735 /* New argument list transforming bzero(ptr x, int y) to
8736 memset(ptr x, int 0, size_t y). This is done this way
8737 so that if it isn't expanded inline, we fallback to
8738 calling bzero instead of memset. */
8740 return fold_builtin_memset (loc, dest, integer_zero_node,
8741 fold_convert_loc (loc, size_type_node, size),
8742 void_type_node, ignore);
8745 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8746 NULL_TREE if no simplification can be made.
8747 If ENDP is 0, return DEST (like memcpy).
8748 If ENDP is 1, return DEST+LEN (like mempcpy).
8749 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8750 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8751 (memmove). */
8753 static tree
8754 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8755 tree len, tree type, bool ignore, int endp)
8757 tree destvar, srcvar, expr;
8759 if (! validate_arg (dest, POINTER_TYPE)
8760 || ! validate_arg (src, POINTER_TYPE)
8761 || ! validate_arg (len, INTEGER_TYPE))
8762 return NULL_TREE;
8764 /* If the LEN parameter is zero, return DEST. */
8765 if (integer_zerop (len))
8766 return omit_one_operand_loc (loc, type, dest, src);
8768 /* If SRC and DEST are the same (and not volatile), return
8769 DEST{,+LEN,+LEN-1}. */
8770 if (operand_equal_p (src, dest, 0))
8771 expr = len;
8772 else
8774 tree srctype, desttype;
8775 unsigned int src_align, dest_align;
8776 tree off0;
8778 if (endp == 3)
8780 src_align = get_pointer_alignment (src);
8781 dest_align = get_pointer_alignment (dest);
8783 /* Both DEST and SRC must be pointer types.
8784 ??? This is what old code did. Is the testing for pointer types
8785 really mandatory?
8787 If either SRC is readonly or length is 1, we can use memcpy. */
8788 if (!dest_align || !src_align)
8789 return NULL_TREE;
8790 if (readonly_data_expr (src)
8791 || (host_integerp (len, 1)
8792 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8793 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8795 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8796 if (!fn)
8797 return NULL_TREE;
8798 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8801 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8802 if (TREE_CODE (src) == ADDR_EXPR
8803 && TREE_CODE (dest) == ADDR_EXPR)
8805 tree src_base, dest_base, fn;
8806 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8807 HOST_WIDE_INT size = -1;
8808 HOST_WIDE_INT maxsize = -1;
8810 srcvar = TREE_OPERAND (src, 0);
8811 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8812 &size, &maxsize);
8813 destvar = TREE_OPERAND (dest, 0);
8814 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8815 &size, &maxsize);
8816 if (host_integerp (len, 1))
8817 maxsize = tree_low_cst (len, 1);
8818 else
8819 maxsize = -1;
8820 src_offset /= BITS_PER_UNIT;
8821 dest_offset /= BITS_PER_UNIT;
8822 if (SSA_VAR_P (src_base)
8823 && SSA_VAR_P (dest_base))
8825 if (operand_equal_p (src_base, dest_base, 0)
8826 && ranges_overlap_p (src_offset, maxsize,
8827 dest_offset, maxsize))
8828 return NULL_TREE;
8830 else if (TREE_CODE (src_base) == MEM_REF
8831 && TREE_CODE (dest_base) == MEM_REF)
8833 double_int off;
8834 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8835 TREE_OPERAND (dest_base, 0), 0))
8836 return NULL_TREE;
8837 off = double_int_add (mem_ref_offset (src_base),
8838 shwi_to_double_int (src_offset));
8839 if (!double_int_fits_in_shwi_p (off))
8840 return NULL_TREE;
8841 src_offset = off.low;
8842 off = double_int_add (mem_ref_offset (dest_base),
8843 shwi_to_double_int (dest_offset));
8844 if (!double_int_fits_in_shwi_p (off))
8845 return NULL_TREE;
8846 dest_offset = off.low;
8847 if (ranges_overlap_p (src_offset, maxsize,
8848 dest_offset, maxsize))
8849 return NULL_TREE;
8851 else
8852 return NULL_TREE;
8854 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8855 if (!fn)
8856 return NULL_TREE;
8857 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8860 /* If the destination and source do not alias optimize into
8861 memcpy as well. */
8862 if ((is_gimple_min_invariant (dest)
8863 || TREE_CODE (dest) == SSA_NAME)
8864 && (is_gimple_min_invariant (src)
8865 || TREE_CODE (src) == SSA_NAME))
8867 ao_ref destr, srcr;
8868 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8869 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8870 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8872 tree fn;
8873 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8874 if (!fn)
8875 return NULL_TREE;
8876 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8880 return NULL_TREE;
8883 if (!host_integerp (len, 0))
8884 return NULL_TREE;
8885 /* FIXME:
8886 This logic lose for arguments like (type *)malloc (sizeof (type)),
8887 since we strip the casts of up to VOID return value from malloc.
8888 Perhaps we ought to inherit type from non-VOID argument here? */
8889 STRIP_NOPS (src);
8890 STRIP_NOPS (dest);
8891 if (!POINTER_TYPE_P (TREE_TYPE (src))
8892 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8893 return NULL_TREE;
8894 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8895 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8897 tree tem = TREE_OPERAND (src, 0);
8898 STRIP_NOPS (tem);
8899 if (tem != TREE_OPERAND (src, 0))
8900 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8902 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8904 tree tem = TREE_OPERAND (dest, 0);
8905 STRIP_NOPS (tem);
8906 if (tem != TREE_OPERAND (dest, 0))
8907 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8909 srctype = TREE_TYPE (TREE_TYPE (src));
8910 if (TREE_CODE (srctype) == ARRAY_TYPE
8911 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8913 srctype = TREE_TYPE (srctype);
8914 STRIP_NOPS (src);
8915 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8917 desttype = TREE_TYPE (TREE_TYPE (dest));
8918 if (TREE_CODE (desttype) == ARRAY_TYPE
8919 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8921 desttype = TREE_TYPE (desttype);
8922 STRIP_NOPS (dest);
8923 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8925 if (TREE_ADDRESSABLE (srctype)
8926 || TREE_ADDRESSABLE (desttype))
8927 return NULL_TREE;
8929 src_align = get_pointer_alignment (src);
8930 dest_align = get_pointer_alignment (dest);
8931 if (dest_align < TYPE_ALIGN (desttype)
8932 || src_align < TYPE_ALIGN (srctype))
8933 return NULL_TREE;
8935 if (!ignore)
8936 dest = builtin_save_expr (dest);
8938 /* Build accesses at offset zero with a ref-all character type. */
8939 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8940 ptr_mode, true), 0);
8942 destvar = dest;
8943 STRIP_NOPS (destvar);
8944 if (TREE_CODE (destvar) == ADDR_EXPR
8945 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8946 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8947 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8948 else
8949 destvar = NULL_TREE;
8951 srcvar = src;
8952 STRIP_NOPS (srcvar);
8953 if (TREE_CODE (srcvar) == ADDR_EXPR
8954 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8955 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8957 if (!destvar
8958 || src_align >= TYPE_ALIGN (desttype))
8959 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8960 srcvar, off0);
8961 else if (!STRICT_ALIGNMENT)
8963 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8964 src_align);
8965 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8967 else
8968 srcvar = NULL_TREE;
8970 else
8971 srcvar = NULL_TREE;
8973 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8974 return NULL_TREE;
8976 if (srcvar == NULL_TREE)
8978 STRIP_NOPS (src);
8979 if (src_align >= TYPE_ALIGN (desttype))
8980 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8981 else
8983 if (STRICT_ALIGNMENT)
8984 return NULL_TREE;
8985 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8986 src_align);
8987 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8990 else if (destvar == NULL_TREE)
8992 STRIP_NOPS (dest);
8993 if (dest_align >= TYPE_ALIGN (srctype))
8994 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8995 else
8997 if (STRICT_ALIGNMENT)
8998 return NULL_TREE;
8999 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9000 dest_align);
9001 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9005 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9008 if (ignore)
9009 return expr;
9011 if (endp == 0 || endp == 3)
9012 return omit_one_operand_loc (loc, type, dest, expr);
9014 if (expr == len)
9015 expr = NULL_TREE;
9017 if (endp == 2)
9018 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9019 ssize_int (1));
9021 dest = fold_build_pointer_plus_loc (loc, dest, len);
9022 dest = fold_convert_loc (loc, type, dest);
9023 if (expr)
9024 dest = omit_one_operand_loc (loc, type, dest, expr);
9025 return dest;
9028 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9029 If LEN is not NULL, it represents the length of the string to be
9030 copied. Return NULL_TREE if no simplification can be made. */
9032 tree
9033 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9035 tree fn;
9037 if (!validate_arg (dest, POINTER_TYPE)
9038 || !validate_arg (src, POINTER_TYPE))
9039 return NULL_TREE;
9041 /* If SRC and DEST are the same (and not volatile), return DEST. */
9042 if (operand_equal_p (src, dest, 0))
9043 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9045 if (optimize_function_for_size_p (cfun))
9046 return NULL_TREE;
9048 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9049 if (!fn)
9050 return NULL_TREE;
9052 if (!len)
9054 len = c_strlen (src, 1);
9055 if (! len || TREE_SIDE_EFFECTS (len))
9056 return NULL_TREE;
9059 len = fold_convert_loc (loc, size_type_node, len);
9060 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9061 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9062 build_call_expr_loc (loc, fn, 3, dest, src, len));
9065 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9066 Return NULL_TREE if no simplification can be made. */
9068 static tree
9069 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9071 tree fn, len, lenp1, call, type;
9073 if (!validate_arg (dest, POINTER_TYPE)
9074 || !validate_arg (src, POINTER_TYPE))
9075 return NULL_TREE;
9077 len = c_strlen (src, 1);
9078 if (!len
9079 || TREE_CODE (len) != INTEGER_CST)
9080 return NULL_TREE;
9082 if (optimize_function_for_size_p (cfun)
9083 /* If length is zero it's small enough. */
9084 && !integer_zerop (len))
9085 return NULL_TREE;
9087 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9088 if (!fn)
9089 return NULL_TREE;
9091 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9092 fold_convert_loc (loc, size_type_node, len),
9093 build_int_cst (size_type_node, 1));
9094 /* We use dest twice in building our expression. Save it from
9095 multiple expansions. */
9096 dest = builtin_save_expr (dest);
9097 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9099 type = TREE_TYPE (TREE_TYPE (fndecl));
9100 dest = fold_build_pointer_plus_loc (loc, dest, len);
9101 dest = fold_convert_loc (loc, type, dest);
9102 dest = omit_one_operand_loc (loc, type, dest, call);
9103 return dest;
9106 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9107 If SLEN is not NULL, it represents the length of the source string.
9108 Return NULL_TREE if no simplification can be made. */
9110 tree
9111 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9112 tree src, tree len, tree slen)
9114 tree fn;
9116 if (!validate_arg (dest, POINTER_TYPE)
9117 || !validate_arg (src, POINTER_TYPE)
9118 || !validate_arg (len, INTEGER_TYPE))
9119 return NULL_TREE;
9121 /* If the LEN parameter is zero, return DEST. */
9122 if (integer_zerop (len))
9123 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9125 /* We can't compare slen with len as constants below if len is not a
9126 constant. */
9127 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9128 return NULL_TREE;
9130 if (!slen)
9131 slen = c_strlen (src, 1);
9133 /* Now, we must be passed a constant src ptr parameter. */
9134 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9135 return NULL_TREE;
9137 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9139 /* We do not support simplification of this case, though we do
9140 support it when expanding trees into RTL. */
9141 /* FIXME: generate a call to __builtin_memset. */
9142 if (tree_int_cst_lt (slen, len))
9143 return NULL_TREE;
9145 /* OK transform into builtin memcpy. */
9146 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9147 if (!fn)
9148 return NULL_TREE;
9150 len = fold_convert_loc (loc, size_type_node, len);
9151 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9152 build_call_expr_loc (loc, fn, 3, dest, src, len));
9155 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9156 arguments to the call, and TYPE is its return type.
9157 Return NULL_TREE if no simplification can be made. */
9159 static tree
9160 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9162 if (!validate_arg (arg1, POINTER_TYPE)
9163 || !validate_arg (arg2, INTEGER_TYPE)
9164 || !validate_arg (len, INTEGER_TYPE))
9165 return NULL_TREE;
9166 else
9168 const char *p1;
9170 if (TREE_CODE (arg2) != INTEGER_CST
9171 || !host_integerp (len, 1))
9172 return NULL_TREE;
9174 p1 = c_getstr (arg1);
9175 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9177 char c;
9178 const char *r;
9179 tree tem;
9181 if (target_char_cast (arg2, &c))
9182 return NULL_TREE;
9184 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9186 if (r == NULL)
9187 return build_int_cst (TREE_TYPE (arg1), 0);
9189 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9190 return fold_convert_loc (loc, type, tem);
9192 return NULL_TREE;
9196 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9197 Return NULL_TREE if no simplification can be made. */
9199 static tree
9200 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9202 const char *p1, *p2;
9204 if (!validate_arg (arg1, POINTER_TYPE)
9205 || !validate_arg (arg2, POINTER_TYPE)
9206 || !validate_arg (len, INTEGER_TYPE))
9207 return NULL_TREE;
9209 /* If the LEN parameter is zero, return zero. */
9210 if (integer_zerop (len))
9211 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9212 arg1, arg2);
9214 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9215 if (operand_equal_p (arg1, arg2, 0))
9216 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9218 p1 = c_getstr (arg1);
9219 p2 = c_getstr (arg2);
9221 /* If all arguments are constant, and the value of len is not greater
9222 than the lengths of arg1 and arg2, evaluate at compile-time. */
9223 if (host_integerp (len, 1) && p1 && p2
9224 && compare_tree_int (len, strlen (p1) + 1) <= 0
9225 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9227 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9229 if (r > 0)
9230 return integer_one_node;
9231 else if (r < 0)
9232 return integer_minus_one_node;
9233 else
9234 return integer_zero_node;
9237 /* If len parameter is one, return an expression corresponding to
9238 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9239 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9241 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9242 tree cst_uchar_ptr_node
9243 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9245 tree ind1
9246 = fold_convert_loc (loc, integer_type_node,
9247 build1 (INDIRECT_REF, cst_uchar_node,
9248 fold_convert_loc (loc,
9249 cst_uchar_ptr_node,
9250 arg1)));
9251 tree ind2
9252 = fold_convert_loc (loc, integer_type_node,
9253 build1 (INDIRECT_REF, cst_uchar_node,
9254 fold_convert_loc (loc,
9255 cst_uchar_ptr_node,
9256 arg2)));
9257 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9260 return NULL_TREE;
9263 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9264 Return NULL_TREE if no simplification can be made. */
9266 static tree
9267 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9269 const char *p1, *p2;
9271 if (!validate_arg (arg1, POINTER_TYPE)
9272 || !validate_arg (arg2, POINTER_TYPE))
9273 return NULL_TREE;
9275 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9276 if (operand_equal_p (arg1, arg2, 0))
9277 return integer_zero_node;
9279 p1 = c_getstr (arg1);
9280 p2 = c_getstr (arg2);
9282 if (p1 && p2)
9284 const int i = strcmp (p1, p2);
9285 if (i < 0)
9286 return integer_minus_one_node;
9287 else if (i > 0)
9288 return integer_one_node;
9289 else
9290 return integer_zero_node;
9293 /* If the second arg is "", return *(const unsigned char*)arg1. */
9294 if (p2 && *p2 == '\0')
9296 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9297 tree cst_uchar_ptr_node
9298 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9300 return fold_convert_loc (loc, integer_type_node,
9301 build1 (INDIRECT_REF, cst_uchar_node,
9302 fold_convert_loc (loc,
9303 cst_uchar_ptr_node,
9304 arg1)));
9307 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9308 if (p1 && *p1 == '\0')
9310 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9311 tree cst_uchar_ptr_node
9312 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9314 tree temp
9315 = fold_convert_loc (loc, integer_type_node,
9316 build1 (INDIRECT_REF, cst_uchar_node,
9317 fold_convert_loc (loc,
9318 cst_uchar_ptr_node,
9319 arg2)));
9320 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9323 return NULL_TREE;
9326 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9327 Return NULL_TREE if no simplification can be made. */
9329 static tree
9330 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9332 const char *p1, *p2;
9334 if (!validate_arg (arg1, POINTER_TYPE)
9335 || !validate_arg (arg2, POINTER_TYPE)
9336 || !validate_arg (len, INTEGER_TYPE))
9337 return NULL_TREE;
9339 /* If the LEN parameter is zero, return zero. */
9340 if (integer_zerop (len))
9341 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9342 arg1, arg2);
9344 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9345 if (operand_equal_p (arg1, arg2, 0))
9346 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9348 p1 = c_getstr (arg1);
9349 p2 = c_getstr (arg2);
9351 if (host_integerp (len, 1) && p1 && p2)
9353 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9354 if (i > 0)
9355 return integer_one_node;
9356 else if (i < 0)
9357 return integer_minus_one_node;
9358 else
9359 return integer_zero_node;
9362 /* If the second arg is "", and the length is greater than zero,
9363 return *(const unsigned char*)arg1. */
9364 if (p2 && *p2 == '\0'
9365 && TREE_CODE (len) == INTEGER_CST
9366 && tree_int_cst_sgn (len) == 1)
9368 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9369 tree cst_uchar_ptr_node
9370 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9372 return fold_convert_loc (loc, integer_type_node,
9373 build1 (INDIRECT_REF, cst_uchar_node,
9374 fold_convert_loc (loc,
9375 cst_uchar_ptr_node,
9376 arg1)));
9379 /* If the first arg is "", and the length is greater than zero,
9380 return -*(const unsigned char*)arg2. */
9381 if (p1 && *p1 == '\0'
9382 && TREE_CODE (len) == INTEGER_CST
9383 && tree_int_cst_sgn (len) == 1)
9385 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9386 tree cst_uchar_ptr_node
9387 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9389 tree temp = fold_convert_loc (loc, integer_type_node,
9390 build1 (INDIRECT_REF, cst_uchar_node,
9391 fold_convert_loc (loc,
9392 cst_uchar_ptr_node,
9393 arg2)));
9394 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9397 /* If len parameter is one, return an expression corresponding to
9398 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9399 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9401 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9402 tree cst_uchar_ptr_node
9403 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9405 tree ind1 = fold_convert_loc (loc, integer_type_node,
9406 build1 (INDIRECT_REF, cst_uchar_node,
9407 fold_convert_loc (loc,
9408 cst_uchar_ptr_node,
9409 arg1)));
9410 tree ind2 = fold_convert_loc (loc, integer_type_node,
9411 build1 (INDIRECT_REF, cst_uchar_node,
9412 fold_convert_loc (loc,
9413 cst_uchar_ptr_node,
9414 arg2)));
9415 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9418 return NULL_TREE;
9421 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9422 ARG. Return NULL_TREE if no simplification can be made. */
9424 static tree
9425 fold_builtin_signbit (location_t loc, tree arg, tree type)
9427 if (!validate_arg (arg, REAL_TYPE))
9428 return NULL_TREE;
9430 /* If ARG is a compile-time constant, determine the result. */
9431 if (TREE_CODE (arg) == REAL_CST
9432 && !TREE_OVERFLOW (arg))
9434 REAL_VALUE_TYPE c;
9436 c = TREE_REAL_CST (arg);
9437 return (REAL_VALUE_NEGATIVE (c)
9438 ? build_one_cst (type)
9439 : build_zero_cst (type));
9442 /* If ARG is non-negative, the result is always zero. */
9443 if (tree_expr_nonnegative_p (arg))
9444 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9446 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9447 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9448 return fold_convert (type,
9449 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9450 build_real (TREE_TYPE (arg), dconst0)));
9452 return NULL_TREE;
9455 /* Fold function call to builtin copysign, copysignf or copysignl with
9456 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9457 be made. */
9459 static tree
9460 fold_builtin_copysign (location_t loc, tree fndecl,
9461 tree arg1, tree arg2, tree type)
9463 tree tem;
9465 if (!validate_arg (arg1, REAL_TYPE)
9466 || !validate_arg (arg2, REAL_TYPE))
9467 return NULL_TREE;
9469 /* copysign(X,X) is X. */
9470 if (operand_equal_p (arg1, arg2, 0))
9471 return fold_convert_loc (loc, type, arg1);
9473 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9474 if (TREE_CODE (arg1) == REAL_CST
9475 && TREE_CODE (arg2) == REAL_CST
9476 && !TREE_OVERFLOW (arg1)
9477 && !TREE_OVERFLOW (arg2))
9479 REAL_VALUE_TYPE c1, c2;
9481 c1 = TREE_REAL_CST (arg1);
9482 c2 = TREE_REAL_CST (arg2);
9483 /* c1.sign := c2.sign. */
9484 real_copysign (&c1, &c2);
9485 return build_real (type, c1);
9488 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9489 Remember to evaluate Y for side-effects. */
9490 if (tree_expr_nonnegative_p (arg2))
9491 return omit_one_operand_loc (loc, type,
9492 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9493 arg2);
9495 /* Strip sign changing operations for the first argument. */
9496 tem = fold_strip_sign_ops (arg1);
9497 if (tem)
9498 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9500 return NULL_TREE;
9503 /* Fold a call to builtin isascii with argument ARG. */
9505 static tree
9506 fold_builtin_isascii (location_t loc, tree arg)
9508 if (!validate_arg (arg, INTEGER_TYPE))
9509 return NULL_TREE;
9510 else
9512 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9513 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9514 build_int_cst (integer_type_node,
9515 ~ (unsigned HOST_WIDE_INT) 0x7f));
9516 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9517 arg, integer_zero_node);
9521 /* Fold a call to builtin toascii with argument ARG. */
9523 static tree
9524 fold_builtin_toascii (location_t loc, tree arg)
9526 if (!validate_arg (arg, INTEGER_TYPE))
9527 return NULL_TREE;
9529 /* Transform toascii(c) -> (c & 0x7f). */
9530 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9531 build_int_cst (integer_type_node, 0x7f));
9534 /* Fold a call to builtin isdigit with argument ARG. */
9536 static tree
9537 fold_builtin_isdigit (location_t loc, tree arg)
9539 if (!validate_arg (arg, INTEGER_TYPE))
9540 return NULL_TREE;
9541 else
9543 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9544 /* According to the C standard, isdigit is unaffected by locale.
9545 However, it definitely is affected by the target character set. */
9546 unsigned HOST_WIDE_INT target_digit0
9547 = lang_hooks.to_target_charset ('0');
9549 if (target_digit0 == 0)
9550 return NULL_TREE;
9552 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9553 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9554 build_int_cst (unsigned_type_node, target_digit0));
9555 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9556 build_int_cst (unsigned_type_node, 9));
9560 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9562 static tree
9563 fold_builtin_fabs (location_t loc, tree arg, tree type)
9565 if (!validate_arg (arg, REAL_TYPE))
9566 return NULL_TREE;
9568 arg = fold_convert_loc (loc, type, arg);
9569 if (TREE_CODE (arg) == REAL_CST)
9570 return fold_abs_const (arg, type);
9571 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9574 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9576 static tree
9577 fold_builtin_abs (location_t loc, tree arg, tree type)
9579 if (!validate_arg (arg, INTEGER_TYPE))
9580 return NULL_TREE;
9582 arg = fold_convert_loc (loc, type, arg);
9583 if (TREE_CODE (arg) == INTEGER_CST)
9584 return fold_abs_const (arg, type);
9585 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9588 /* Fold a fma operation with arguments ARG[012]. */
9590 tree
9591 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9592 tree type, tree arg0, tree arg1, tree arg2)
9594 if (TREE_CODE (arg0) == REAL_CST
9595 && TREE_CODE (arg1) == REAL_CST
9596 && TREE_CODE (arg2) == REAL_CST)
9597 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9599 return NULL_TREE;
9602 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9604 static tree
9605 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9607 if (validate_arg (arg0, REAL_TYPE)
9608 && validate_arg(arg1, REAL_TYPE)
9609 && validate_arg(arg2, REAL_TYPE))
9611 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9612 if (tem)
9613 return tem;
9615 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9616 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9617 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9619 return NULL_TREE;
9622 /* Fold a call to builtin fmin or fmax. */
9624 static tree
9625 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9626 tree type, bool max)
9628 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9630 /* Calculate the result when the argument is a constant. */
9631 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9633 if (res)
9634 return res;
9636 /* If either argument is NaN, return the other one. Avoid the
9637 transformation if we get (and honor) a signalling NaN. Using
9638 omit_one_operand() ensures we create a non-lvalue. */
9639 if (TREE_CODE (arg0) == REAL_CST
9640 && real_isnan (&TREE_REAL_CST (arg0))
9641 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9642 || ! TREE_REAL_CST (arg0).signalling))
9643 return omit_one_operand_loc (loc, type, arg1, arg0);
9644 if (TREE_CODE (arg1) == REAL_CST
9645 && real_isnan (&TREE_REAL_CST (arg1))
9646 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9647 || ! TREE_REAL_CST (arg1).signalling))
9648 return omit_one_operand_loc (loc, type, arg0, arg1);
9650 /* Transform fmin/fmax(x,x) -> x. */
9651 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9652 return omit_one_operand_loc (loc, type, arg0, arg1);
9654 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9655 functions to return the numeric arg if the other one is NaN.
9656 These tree codes don't honor that, so only transform if
9657 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9658 handled, so we don't have to worry about it either. */
9659 if (flag_finite_math_only)
9660 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9661 fold_convert_loc (loc, type, arg0),
9662 fold_convert_loc (loc, type, arg1));
9664 return NULL_TREE;
9667 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9669 static tree
9670 fold_builtin_carg (location_t loc, tree arg, tree type)
9672 if (validate_arg (arg, COMPLEX_TYPE)
9673 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9675 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9677 if (atan2_fn)
9679 tree new_arg = builtin_save_expr (arg);
9680 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9681 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9682 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9686 return NULL_TREE;
9689 /* Fold a call to builtin logb/ilogb. */
9691 static tree
9692 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9694 if (! validate_arg (arg, REAL_TYPE))
9695 return NULL_TREE;
9697 STRIP_NOPS (arg);
9699 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9701 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9703 switch (value->cl)
9705 case rvc_nan:
9706 case rvc_inf:
9707 /* If arg is Inf or NaN and we're logb, return it. */
9708 if (TREE_CODE (rettype) == REAL_TYPE)
9709 return fold_convert_loc (loc, rettype, arg);
9710 /* Fall through... */
9711 case rvc_zero:
9712 /* Zero may set errno and/or raise an exception for logb, also
9713 for ilogb we don't know FP_ILOGB0. */
9714 return NULL_TREE;
9715 case rvc_normal:
9716 /* For normal numbers, proceed iff radix == 2. In GCC,
9717 normalized significands are in the range [0.5, 1.0). We
9718 want the exponent as if they were [1.0, 2.0) so get the
9719 exponent and subtract 1. */
9720 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9721 return fold_convert_loc (loc, rettype,
9722 build_int_cst (integer_type_node,
9723 REAL_EXP (value)-1));
9724 break;
9728 return NULL_TREE;
9731 /* Fold a call to builtin significand, if radix == 2. */
9733 static tree
9734 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9736 if (! validate_arg (arg, REAL_TYPE))
9737 return NULL_TREE;
9739 STRIP_NOPS (arg);
9741 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9743 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9745 switch (value->cl)
9747 case rvc_zero:
9748 case rvc_nan:
9749 case rvc_inf:
9750 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9751 return fold_convert_loc (loc, rettype, arg);
9752 case rvc_normal:
9753 /* For normal numbers, proceed iff radix == 2. */
9754 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9756 REAL_VALUE_TYPE result = *value;
9757 /* In GCC, normalized significands are in the range [0.5,
9758 1.0). We want them to be [1.0, 2.0) so set the
9759 exponent to 1. */
9760 SET_REAL_EXP (&result, 1);
9761 return build_real (rettype, result);
9763 break;
9767 return NULL_TREE;
9770 /* Fold a call to builtin frexp, we can assume the base is 2. */
9772 static tree
9773 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9775 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9776 return NULL_TREE;
9778 STRIP_NOPS (arg0);
9780 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9781 return NULL_TREE;
9783 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9785 /* Proceed if a valid pointer type was passed in. */
9786 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9788 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9789 tree frac, exp;
9791 switch (value->cl)
9793 case rvc_zero:
9794 /* For +-0, return (*exp = 0, +-0). */
9795 exp = integer_zero_node;
9796 frac = arg0;
9797 break;
9798 case rvc_nan:
9799 case rvc_inf:
9800 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9801 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9802 case rvc_normal:
9804 /* Since the frexp function always expects base 2, and in
9805 GCC normalized significands are already in the range
9806 [0.5, 1.0), we have exactly what frexp wants. */
9807 REAL_VALUE_TYPE frac_rvt = *value;
9808 SET_REAL_EXP (&frac_rvt, 0);
9809 frac = build_real (rettype, frac_rvt);
9810 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9812 break;
9813 default:
9814 gcc_unreachable ();
9817 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9818 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9819 TREE_SIDE_EFFECTS (arg1) = 1;
9820 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9823 return NULL_TREE;
9826 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9827 then we can assume the base is two. If it's false, then we have to
9828 check the mode of the TYPE parameter in certain cases. */
9830 static tree
9831 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9832 tree type, bool ldexp)
9834 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9836 STRIP_NOPS (arg0);
9837 STRIP_NOPS (arg1);
9839 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9840 if (real_zerop (arg0) || integer_zerop (arg1)
9841 || (TREE_CODE (arg0) == REAL_CST
9842 && !real_isfinite (&TREE_REAL_CST (arg0))))
9843 return omit_one_operand_loc (loc, type, arg0, arg1);
9845 /* If both arguments are constant, then try to evaluate it. */
9846 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9847 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9848 && host_integerp (arg1, 0))
9850 /* Bound the maximum adjustment to twice the range of the
9851 mode's valid exponents. Use abs to ensure the range is
9852 positive as a sanity check. */
9853 const long max_exp_adj = 2 *
9854 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9855 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9857 /* Get the user-requested adjustment. */
9858 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9860 /* The requested adjustment must be inside this range. This
9861 is a preliminary cap to avoid things like overflow, we
9862 may still fail to compute the result for other reasons. */
9863 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9865 REAL_VALUE_TYPE initial_result;
9867 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9869 /* Ensure we didn't overflow. */
9870 if (! real_isinf (&initial_result))
9872 const REAL_VALUE_TYPE trunc_result
9873 = real_value_truncate (TYPE_MODE (type), initial_result);
9875 /* Only proceed if the target mode can hold the
9876 resulting value. */
9877 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9878 return build_real (type, trunc_result);
9884 return NULL_TREE;
9887 /* Fold a call to builtin modf. */
9889 static tree
9890 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9892 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9893 return NULL_TREE;
9895 STRIP_NOPS (arg0);
9897 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9898 return NULL_TREE;
9900 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9902 /* Proceed if a valid pointer type was passed in. */
9903 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9905 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9906 REAL_VALUE_TYPE trunc, frac;
9908 switch (value->cl)
9910 case rvc_nan:
9911 case rvc_zero:
9912 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9913 trunc = frac = *value;
9914 break;
9915 case rvc_inf:
9916 /* For +-Inf, return (*arg1 = arg0, +-0). */
9917 frac = dconst0;
9918 frac.sign = value->sign;
9919 trunc = *value;
9920 break;
9921 case rvc_normal:
9922 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9923 real_trunc (&trunc, VOIDmode, value);
9924 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9925 /* If the original number was negative and already
9926 integral, then the fractional part is -0.0. */
9927 if (value->sign && frac.cl == rvc_zero)
9928 frac.sign = value->sign;
9929 break;
9932 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9933 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9934 build_real (rettype, trunc));
9935 TREE_SIDE_EFFECTS (arg1) = 1;
9936 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9937 build_real (rettype, frac));
9940 return NULL_TREE;
9943 /* Given a location LOC, an interclass builtin function decl FNDECL
9944 and its single argument ARG, return an folded expression computing
9945 the same, or NULL_TREE if we either couldn't or didn't want to fold
9946 (the latter happen if there's an RTL instruction available). */
9948 static tree
9949 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9951 enum machine_mode mode;
9953 if (!validate_arg (arg, REAL_TYPE))
9954 return NULL_TREE;
9956 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9957 return NULL_TREE;
9959 mode = TYPE_MODE (TREE_TYPE (arg));
9961 /* If there is no optab, try generic code. */
9962 switch (DECL_FUNCTION_CODE (fndecl))
9964 tree result;
9966 CASE_FLT_FN (BUILT_IN_ISINF):
9968 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9969 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9970 tree const type = TREE_TYPE (arg);
9971 REAL_VALUE_TYPE r;
9972 char buf[128];
9974 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9975 real_from_string (&r, buf);
9976 result = build_call_expr (isgr_fn, 2,
9977 fold_build1_loc (loc, ABS_EXPR, type, arg),
9978 build_real (type, r));
9979 return result;
9981 CASE_FLT_FN (BUILT_IN_FINITE):
9982 case BUILT_IN_ISFINITE:
9984 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9985 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9986 tree const type = TREE_TYPE (arg);
9987 REAL_VALUE_TYPE r;
9988 char buf[128];
9990 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9991 real_from_string (&r, buf);
9992 result = build_call_expr (isle_fn, 2,
9993 fold_build1_loc (loc, ABS_EXPR, type, arg),
9994 build_real (type, r));
9995 /*result = fold_build2_loc (loc, UNGT_EXPR,
9996 TREE_TYPE (TREE_TYPE (fndecl)),
9997 fold_build1_loc (loc, ABS_EXPR, type, arg),
9998 build_real (type, r));
9999 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10000 TREE_TYPE (TREE_TYPE (fndecl)),
10001 result);*/
10002 return result;
10004 case BUILT_IN_ISNORMAL:
10006 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10007 islessequal(fabs(x),DBL_MAX). */
10008 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10009 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10010 tree const type = TREE_TYPE (arg);
10011 REAL_VALUE_TYPE rmax, rmin;
10012 char buf[128];
10014 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10015 real_from_string (&rmax, buf);
10016 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10017 real_from_string (&rmin, buf);
10018 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10019 result = build_call_expr (isle_fn, 2, arg,
10020 build_real (type, rmax));
10021 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10022 build_call_expr (isge_fn, 2, arg,
10023 build_real (type, rmin)));
10024 return result;
10026 default:
10027 break;
10030 return NULL_TREE;
10033 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10034 ARG is the argument for the call. */
10036 static tree
10037 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10040 REAL_VALUE_TYPE r;
10042 if (!validate_arg (arg, REAL_TYPE))
10043 return NULL_TREE;
10045 switch (builtin_index)
10047 case BUILT_IN_ISINF:
10048 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10049 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10051 if (TREE_CODE (arg) == REAL_CST)
10053 r = TREE_REAL_CST (arg);
10054 if (real_isinf (&r))
10055 return real_compare (GT_EXPR, &r, &dconst0)
10056 ? integer_one_node : integer_minus_one_node;
10057 else
10058 return integer_zero_node;
10061 return NULL_TREE;
10063 case BUILT_IN_ISINF_SIGN:
10065 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10066 /* In a boolean context, GCC will fold the inner COND_EXPR to
10067 1. So e.g. "if (isinf_sign(x))" would be folded to just
10068 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10069 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10070 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10071 tree tmp = NULL_TREE;
10073 arg = builtin_save_expr (arg);
10075 if (signbit_fn && isinf_fn)
10077 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10078 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10080 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10081 signbit_call, integer_zero_node);
10082 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10083 isinf_call, integer_zero_node);
10085 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10086 integer_minus_one_node, integer_one_node);
10087 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10088 isinf_call, tmp,
10089 integer_zero_node);
10092 return tmp;
10095 case BUILT_IN_ISFINITE:
10096 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10097 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10098 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10100 if (TREE_CODE (arg) == REAL_CST)
10102 r = TREE_REAL_CST (arg);
10103 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10106 return NULL_TREE;
10108 case BUILT_IN_ISNAN:
10109 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10110 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10112 if (TREE_CODE (arg) == REAL_CST)
10114 r = TREE_REAL_CST (arg);
10115 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10118 arg = builtin_save_expr (arg);
10119 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10121 default:
10122 gcc_unreachable ();
10126 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10127 This builtin will generate code to return the appropriate floating
10128 point classification depending on the value of the floating point
10129 number passed in. The possible return values must be supplied as
10130 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10131 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10132 one floating point argument which is "type generic". */
10134 static tree
10135 fold_builtin_fpclassify (location_t loc, tree exp)
10137 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10138 arg, type, res, tmp;
10139 enum machine_mode mode;
10140 REAL_VALUE_TYPE r;
10141 char buf[128];
10143 /* Verify the required arguments in the original call. */
10144 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10145 INTEGER_TYPE, INTEGER_TYPE,
10146 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10147 return NULL_TREE;
10149 fp_nan = CALL_EXPR_ARG (exp, 0);
10150 fp_infinite = CALL_EXPR_ARG (exp, 1);
10151 fp_normal = CALL_EXPR_ARG (exp, 2);
10152 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10153 fp_zero = CALL_EXPR_ARG (exp, 4);
10154 arg = CALL_EXPR_ARG (exp, 5);
10155 type = TREE_TYPE (arg);
10156 mode = TYPE_MODE (type);
10157 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10159 /* fpclassify(x) ->
10160 isnan(x) ? FP_NAN :
10161 (fabs(x) == Inf ? FP_INFINITE :
10162 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10163 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10165 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10166 build_real (type, dconst0));
10167 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10168 tmp, fp_zero, fp_subnormal);
10170 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10171 real_from_string (&r, buf);
10172 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10173 arg, build_real (type, r));
10174 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10176 if (HONOR_INFINITIES (mode))
10178 real_inf (&r);
10179 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10180 build_real (type, r));
10181 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10182 fp_infinite, res);
10185 if (HONOR_NANS (mode))
10187 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10188 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10191 return res;
10194 /* Fold a call to an unordered comparison function such as
10195 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10196 being called and ARG0 and ARG1 are the arguments for the call.
10197 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10198 the opposite of the desired result. UNORDERED_CODE is used
10199 for modes that can hold NaNs and ORDERED_CODE is used for
10200 the rest. */
10202 static tree
10203 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10204 enum tree_code unordered_code,
10205 enum tree_code ordered_code)
10207 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10208 enum tree_code code;
10209 tree type0, type1;
10210 enum tree_code code0, code1;
10211 tree cmp_type = NULL_TREE;
10213 type0 = TREE_TYPE (arg0);
10214 type1 = TREE_TYPE (arg1);
10216 code0 = TREE_CODE (type0);
10217 code1 = TREE_CODE (type1);
10219 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10220 /* Choose the wider of two real types. */
10221 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10222 ? type0 : type1;
10223 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10224 cmp_type = type0;
10225 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10226 cmp_type = type1;
10228 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10229 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10231 if (unordered_code == UNORDERED_EXPR)
10233 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10234 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10235 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10238 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10239 : ordered_code;
10240 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10241 fold_build2_loc (loc, code, type, arg0, arg1));
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The boolean distinguishes __builtin_inf from __builtin_huge_val;
	 see fold_builtin_inf for its exact semantics.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify the absent expression (NULL_TREE).  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
10273 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10274 IGNORE is true if the result of the function call is ignored. This
10275 function returns NULL_TREE if no simplification was possible. */
10277 static tree
10278 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10280 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10281 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10282 switch (fcode)
10284 case BUILT_IN_CONSTANT_P:
10286 tree val = fold_builtin_constant_p (arg0);
10288 /* Gimplification will pull the CALL_EXPR for the builtin out of
10289 an if condition. When not optimizing, we'll not CSE it back.
10290 To avoid link error types of regressions, return false now. */
10291 if (!val && !optimize)
10292 val = integer_zero_node;
10294 return val;
10297 case BUILT_IN_CLASSIFY_TYPE:
10298 return fold_builtin_classify_type (arg0);
10300 case BUILT_IN_STRLEN:
10301 return fold_builtin_strlen (loc, type, arg0);
10303 CASE_FLT_FN (BUILT_IN_FABS):
10304 return fold_builtin_fabs (loc, arg0, type);
10306 case BUILT_IN_ABS:
10307 case BUILT_IN_LABS:
10308 case BUILT_IN_LLABS:
10309 case BUILT_IN_IMAXABS:
10310 return fold_builtin_abs (loc, arg0, type);
10312 CASE_FLT_FN (BUILT_IN_CONJ):
10313 if (validate_arg (arg0, COMPLEX_TYPE)
10314 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10315 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10316 break;
10318 CASE_FLT_FN (BUILT_IN_CREAL):
10319 if (validate_arg (arg0, COMPLEX_TYPE)
10320 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10321 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10322 break;
10324 CASE_FLT_FN (BUILT_IN_CIMAG):
10325 if (validate_arg (arg0, COMPLEX_TYPE)
10326 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10327 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10328 break;
10330 CASE_FLT_FN (BUILT_IN_CCOS):
10331 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10333 CASE_FLT_FN (BUILT_IN_CCOSH):
10334 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10336 CASE_FLT_FN (BUILT_IN_CPROJ):
10337 return fold_builtin_cproj(loc, arg0, type);
10339 CASE_FLT_FN (BUILT_IN_CSIN):
10340 if (validate_arg (arg0, COMPLEX_TYPE)
10341 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10342 return do_mpc_arg1 (arg0, type, mpc_sin);
10343 break;
10345 CASE_FLT_FN (BUILT_IN_CSINH):
10346 if (validate_arg (arg0, COMPLEX_TYPE)
10347 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10348 return do_mpc_arg1 (arg0, type, mpc_sinh);
10349 break;
10351 CASE_FLT_FN (BUILT_IN_CTAN):
10352 if (validate_arg (arg0, COMPLEX_TYPE)
10353 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10354 return do_mpc_arg1 (arg0, type, mpc_tan);
10355 break;
10357 CASE_FLT_FN (BUILT_IN_CTANH):
10358 if (validate_arg (arg0, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10360 return do_mpc_arg1 (arg0, type, mpc_tanh);
10361 break;
10363 CASE_FLT_FN (BUILT_IN_CLOG):
10364 if (validate_arg (arg0, COMPLEX_TYPE)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10366 return do_mpc_arg1 (arg0, type, mpc_log);
10367 break;
10369 CASE_FLT_FN (BUILT_IN_CSQRT):
10370 if (validate_arg (arg0, COMPLEX_TYPE)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10372 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10373 break;
10375 CASE_FLT_FN (BUILT_IN_CASIN):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10378 return do_mpc_arg1 (arg0, type, mpc_asin);
10379 break;
10381 CASE_FLT_FN (BUILT_IN_CACOS):
10382 if (validate_arg (arg0, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10384 return do_mpc_arg1 (arg0, type, mpc_acos);
10385 break;
10387 CASE_FLT_FN (BUILT_IN_CATAN):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return do_mpc_arg1 (arg0, type, mpc_atan);
10391 break;
10393 CASE_FLT_FN (BUILT_IN_CASINH):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return do_mpc_arg1 (arg0, type, mpc_asinh);
10397 break;
10399 CASE_FLT_FN (BUILT_IN_CACOSH):
10400 if (validate_arg (arg0, COMPLEX_TYPE)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10402 return do_mpc_arg1 (arg0, type, mpc_acosh);
10403 break;
10405 CASE_FLT_FN (BUILT_IN_CATANH):
10406 if (validate_arg (arg0, COMPLEX_TYPE)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10408 return do_mpc_arg1 (arg0, type, mpc_atanh);
10409 break;
10411 CASE_FLT_FN (BUILT_IN_CABS):
10412 return fold_builtin_cabs (loc, arg0, type, fndecl);
10414 CASE_FLT_FN (BUILT_IN_CARG):
10415 return fold_builtin_carg (loc, arg0, type);
10417 CASE_FLT_FN (BUILT_IN_SQRT):
10418 return fold_builtin_sqrt (loc, arg0, type);
10420 CASE_FLT_FN (BUILT_IN_CBRT):
10421 return fold_builtin_cbrt (loc, arg0, type);
10423 CASE_FLT_FN (BUILT_IN_ASIN):
10424 if (validate_arg (arg0, REAL_TYPE))
10425 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10426 &dconstm1, &dconst1, true);
10427 break;
10429 CASE_FLT_FN (BUILT_IN_ACOS):
10430 if (validate_arg (arg0, REAL_TYPE))
10431 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10432 &dconstm1, &dconst1, true);
10433 break;
10435 CASE_FLT_FN (BUILT_IN_ATAN):
10436 if (validate_arg (arg0, REAL_TYPE))
10437 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10438 break;
10440 CASE_FLT_FN (BUILT_IN_ASINH):
10441 if (validate_arg (arg0, REAL_TYPE))
10442 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10443 break;
10445 CASE_FLT_FN (BUILT_IN_ACOSH):
10446 if (validate_arg (arg0, REAL_TYPE))
10447 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10448 &dconst1, NULL, true);
10449 break;
10451 CASE_FLT_FN (BUILT_IN_ATANH):
10452 if (validate_arg (arg0, REAL_TYPE))
10453 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10454 &dconstm1, &dconst1, false);
10455 break;
10457 CASE_FLT_FN (BUILT_IN_SIN):
10458 if (validate_arg (arg0, REAL_TYPE))
10459 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10460 break;
10462 CASE_FLT_FN (BUILT_IN_COS):
10463 return fold_builtin_cos (loc, arg0, type, fndecl);
10465 CASE_FLT_FN (BUILT_IN_TAN):
10466 return fold_builtin_tan (arg0, type);
10468 CASE_FLT_FN (BUILT_IN_CEXP):
10469 return fold_builtin_cexp (loc, arg0, type);
10471 CASE_FLT_FN (BUILT_IN_CEXPI):
10472 if (validate_arg (arg0, REAL_TYPE))
10473 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10474 break;
10476 CASE_FLT_FN (BUILT_IN_SINH):
10477 if (validate_arg (arg0, REAL_TYPE))
10478 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10479 break;
10481 CASE_FLT_FN (BUILT_IN_COSH):
10482 return fold_builtin_cosh (loc, arg0, type, fndecl);
10484 CASE_FLT_FN (BUILT_IN_TANH):
10485 if (validate_arg (arg0, REAL_TYPE))
10486 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10487 break;
10489 CASE_FLT_FN (BUILT_IN_ERF):
10490 if (validate_arg (arg0, REAL_TYPE))
10491 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10492 break;
10494 CASE_FLT_FN (BUILT_IN_ERFC):
10495 if (validate_arg (arg0, REAL_TYPE))
10496 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10497 break;
10499 CASE_FLT_FN (BUILT_IN_TGAMMA):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10502 break;
10504 CASE_FLT_FN (BUILT_IN_EXP):
10505 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10507 CASE_FLT_FN (BUILT_IN_EXP2):
10508 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10510 CASE_FLT_FN (BUILT_IN_EXP10):
10511 CASE_FLT_FN (BUILT_IN_POW10):
10512 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10514 CASE_FLT_FN (BUILT_IN_EXPM1):
10515 if (validate_arg (arg0, REAL_TYPE))
10516 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10517 break;
10519 CASE_FLT_FN (BUILT_IN_LOG):
10520 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10522 CASE_FLT_FN (BUILT_IN_LOG2):
10523 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10525 CASE_FLT_FN (BUILT_IN_LOG10):
10526 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10528 CASE_FLT_FN (BUILT_IN_LOG1P):
10529 if (validate_arg (arg0, REAL_TYPE))
10530 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10531 &dconstm1, NULL, false);
10532 break;
10534 CASE_FLT_FN (BUILT_IN_J0):
10535 if (validate_arg (arg0, REAL_TYPE))
10536 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10537 NULL, NULL, 0);
10538 break;
10540 CASE_FLT_FN (BUILT_IN_J1):
10541 if (validate_arg (arg0, REAL_TYPE))
10542 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10543 NULL, NULL, 0);
10544 break;
10546 CASE_FLT_FN (BUILT_IN_Y0):
10547 if (validate_arg (arg0, REAL_TYPE))
10548 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10549 &dconst0, NULL, false);
10550 break;
10552 CASE_FLT_FN (BUILT_IN_Y1):
10553 if (validate_arg (arg0, REAL_TYPE))
10554 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10555 &dconst0, NULL, false);
10556 break;
10558 CASE_FLT_FN (BUILT_IN_NAN):
10559 case BUILT_IN_NAND32:
10560 case BUILT_IN_NAND64:
10561 case BUILT_IN_NAND128:
10562 return fold_builtin_nan (arg0, type, true);
10564 CASE_FLT_FN (BUILT_IN_NANS):
10565 return fold_builtin_nan (arg0, type, false);
10567 CASE_FLT_FN (BUILT_IN_FLOOR):
10568 return fold_builtin_floor (loc, fndecl, arg0);
10570 CASE_FLT_FN (BUILT_IN_CEIL):
10571 return fold_builtin_ceil (loc, fndecl, arg0);
10573 CASE_FLT_FN (BUILT_IN_TRUNC):
10574 return fold_builtin_trunc (loc, fndecl, arg0);
10576 CASE_FLT_FN (BUILT_IN_ROUND):
10577 return fold_builtin_round (loc, fndecl, arg0);
10579 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10580 CASE_FLT_FN (BUILT_IN_RINT):
10581 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10583 CASE_FLT_FN (BUILT_IN_ICEIL):
10584 CASE_FLT_FN (BUILT_IN_LCEIL):
10585 CASE_FLT_FN (BUILT_IN_LLCEIL):
10586 CASE_FLT_FN (BUILT_IN_LFLOOR):
10587 CASE_FLT_FN (BUILT_IN_IFLOOR):
10588 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10589 CASE_FLT_FN (BUILT_IN_IROUND):
10590 CASE_FLT_FN (BUILT_IN_LROUND):
10591 CASE_FLT_FN (BUILT_IN_LLROUND):
10592 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10594 CASE_FLT_FN (BUILT_IN_IRINT):
10595 CASE_FLT_FN (BUILT_IN_LRINT):
10596 CASE_FLT_FN (BUILT_IN_LLRINT):
10597 return fold_fixed_mathfn (loc, fndecl, arg0);
10599 case BUILT_IN_BSWAP16:
10600 case BUILT_IN_BSWAP32:
10601 case BUILT_IN_BSWAP64:
10602 return fold_builtin_bswap (fndecl, arg0);
10604 CASE_INT_FN (BUILT_IN_FFS):
10605 CASE_INT_FN (BUILT_IN_CLZ):
10606 CASE_INT_FN (BUILT_IN_CTZ):
10607 CASE_INT_FN (BUILT_IN_CLRSB):
10608 CASE_INT_FN (BUILT_IN_POPCOUNT):
10609 CASE_INT_FN (BUILT_IN_PARITY):
10610 return fold_builtin_bitop (fndecl, arg0);
10612 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10613 return fold_builtin_signbit (loc, arg0, type);
10615 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10616 return fold_builtin_significand (loc, arg0, type);
10618 CASE_FLT_FN (BUILT_IN_ILOGB):
10619 CASE_FLT_FN (BUILT_IN_LOGB):
10620 return fold_builtin_logb (loc, arg0, type);
10622 case BUILT_IN_ISASCII:
10623 return fold_builtin_isascii (loc, arg0);
10625 case BUILT_IN_TOASCII:
10626 return fold_builtin_toascii (loc, arg0);
10628 case BUILT_IN_ISDIGIT:
10629 return fold_builtin_isdigit (loc, arg0);
10631 CASE_FLT_FN (BUILT_IN_FINITE):
10632 case BUILT_IN_FINITED32:
10633 case BUILT_IN_FINITED64:
10634 case BUILT_IN_FINITED128:
10635 case BUILT_IN_ISFINITE:
10637 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10638 if (ret)
10639 return ret;
10640 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10643 CASE_FLT_FN (BUILT_IN_ISINF):
10644 case BUILT_IN_ISINFD32:
10645 case BUILT_IN_ISINFD64:
10646 case BUILT_IN_ISINFD128:
10648 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10649 if (ret)
10650 return ret;
10651 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10654 case BUILT_IN_ISNORMAL:
10655 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10657 case BUILT_IN_ISINF_SIGN:
10658 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10660 CASE_FLT_FN (BUILT_IN_ISNAN):
10661 case BUILT_IN_ISNAND32:
10662 case BUILT_IN_ISNAND64:
10663 case BUILT_IN_ISNAND128:
10664 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10666 case BUILT_IN_PRINTF:
10667 case BUILT_IN_PRINTF_UNLOCKED:
10668 case BUILT_IN_VPRINTF:
10669 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10671 case BUILT_IN_FREE:
10672 if (integer_zerop (arg0))
10673 return build_empty_stmt (loc);
10674 break;
10676 default:
10677 break;
10680 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integer order: constant-fold via MPFR.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    /* Reentrant variants take a pointer second argument (signgam out).  */
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy degrades to strcpy (which may
	 itself be a cheaper library call).  */
      if (ignore)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The comparison codes passed here are the INVERSES of the desired
       results; fold_builtin_unordered_cmp negates them.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument of the _chk variants is the checking flag;
	 it must be a side-effect-free integer before we can drop it.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10891 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10892 and ARG2. IGNORE is true if the result of the function call is ignored.
10893 This function returns NULL_TREE if no simplification was possible. */
10895 static tree
10896 fold_builtin_3 (location_t loc, tree fndecl,
10897 tree arg0, tree arg1, tree arg2, bool ignore)
10899 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10900 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10901 switch (fcode)
10904 CASE_FLT_FN (BUILT_IN_SINCOS):
10905 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10907 CASE_FLT_FN (BUILT_IN_FMA):
10908 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10909 break;
10911 CASE_FLT_FN (BUILT_IN_REMQUO):
10912 if (validate_arg (arg0, REAL_TYPE)
10913 && validate_arg(arg1, REAL_TYPE)
10914 && validate_arg(arg2, POINTER_TYPE))
10915 return do_mpfr_remquo (arg0, arg1, arg2);
10916 break;
10918 case BUILT_IN_MEMSET:
10919 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10921 case BUILT_IN_BCOPY:
10922 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10923 void_type_node, true, /*endp=*/3);
10925 case BUILT_IN_MEMCPY:
10926 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10927 type, ignore, /*endp=*/0);
10929 case BUILT_IN_MEMPCPY:
10930 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10931 type, ignore, /*endp=*/1);
10933 case BUILT_IN_MEMMOVE:
10934 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10935 type, ignore, /*endp=*/3);
10937 case BUILT_IN_STRNCAT:
10938 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10940 case BUILT_IN_STRNCPY:
10941 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10943 case BUILT_IN_STRNCMP:
10944 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10946 case BUILT_IN_MEMCHR:
10947 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10949 case BUILT_IN_BCMP:
10950 case BUILT_IN_MEMCMP:
10951 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10953 case BUILT_IN_SPRINTF:
10954 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10956 case BUILT_IN_SNPRINTF:
10957 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10959 case BUILT_IN_STRCPY_CHK:
10960 case BUILT_IN_STPCPY_CHK:
10961 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10962 ignore, fcode);
10964 case BUILT_IN_STRCAT_CHK:
10965 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10967 case BUILT_IN_PRINTF_CHK:
10968 case BUILT_IN_VPRINTF_CHK:
10969 if (!validate_arg (arg0, INTEGER_TYPE)
10970 || TREE_SIDE_EFFECTS (arg0))
10971 return NULL_TREE;
10972 else
10973 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10974 break;
10976 case BUILT_IN_FPRINTF:
10977 case BUILT_IN_FPRINTF_UNLOCKED:
10978 case BUILT_IN_VFPRINTF:
10979 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10980 ignore, fcode);
10982 case BUILT_IN_FPRINTF_CHK:
10983 case BUILT_IN_VFPRINTF_CHK:
10984 if (!validate_arg (arg1, INTEGER_TYPE)
10985 || TREE_SIDE_EFFECTS (arg1))
10986 return NULL_TREE;
10987 else
10988 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10989 ignore, fcode);
10991 default:
10992 break;
10994 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
				       ignore, fcode);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The second argument of these _chk variants is the checking flag;
	 it must be a side-effect-free integer before we can drop it.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on arity; each helper handles the builtins taking exactly
     that many arguments.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result in a NOP_EXPR carrying the call's location and
	 suppress warnings on it (TREE_NO_WARNING), since the original
	 call expression is being replaced.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same wrapping as fold_builtin_n: attach the location and
	 suppress warnings on the replacement expression.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
11134 /* Return true if FNDECL shouldn't be folded right now.
11135 If a built-in function has an inline attribute always_inline
11136 wrapper, defer folding it after always_inline functions have
11137 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11138 might not be performed. */
11140 bool
11141 avoid_folding_inline_builtin (tree fndecl)
11143 return (DECL_DECLARED_INLINE_P (fndecl)
11144 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11145 && cfun
11146 && !cfun->always_inline_functions_inlined
11147 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   EXP is the CALL_EXPR to fold, IGNORE is true if its value is unused.
   Returns the folded replacement tree, or NULL_TREE when folding must
   be deferred or is not possible.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are delegated to the target hook;
	 everything else goes through the generic arity dispatch.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  /* Fall back to the varargs foldings (sprintf_chk etc.).  */
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11204 /* Conveniently construct a function call expression. FNDECL names the
11205 function to be called and N arguments are passed in the array
11206 ARGARRAY. */
11208 tree
11209 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11211 tree fntype = TREE_TYPE (fndecl);
11212 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11214 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11217 /* Conveniently construct a function call expression. FNDECL names the
11218 function to be called and the arguments are passed in the vector
11219 VEC. */
11221 tree
11222 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11224 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11225 VEC_address (tree, vec));
11229 /* Conveniently construct a function call expression. FNDECL names the
11230 function to be called, N is the number of arguments, and the "..."
11231 parameters are the argument expressions. */
11233 tree
11234 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11236 va_list ap;
11237 tree *argarray = XALLOCAVEC (tree, n);
11238 int i;
11240 va_start (ap, n);
11241 for (i = 0; i < n; i++)
11242 argarray[i] = va_arg (ap, tree);
11243 va_end (ap);
11244 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11247 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11248 varargs macros aren't supported by all bootstrap compilers. */
11250 tree
11251 build_call_expr (tree fndecl, int n, ...)
11253 va_list ap;
11254 tree *argarray = XALLOCAVEC (tree, n);
11255 int i;
11257 va_start (ap, n);
11258 for (i = 0; i < n; i++)
11259 argarray[i] = va_arg (ap, tree);
11260 va_end (ap);
11261 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  The call is folded when
   FN is the address of a builtin and folding is currently permitted;
   otherwise a plain CALL_EXPR is returned.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  /* Machine-dependent builtins go through the target hook; an
	     unfolded MD builtin becomes an ordinary call.  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
11322 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11323 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11324 of arguments in ARGS to be omitted. OLDNARGS is the number of
11325 elements in ARGS. */
11327 static tree
11328 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11329 int skip, tree fndecl, int n, va_list newargs)
11331 int nargs = oldnargs - skip + n;
11332 tree *buffer;
11334 if (n > 0)
11336 int i, j;
11338 buffer = XALLOCAVEC (tree, nargs);
11339 for (i = 0; i < n; i++)
11340 buffer[i] = va_arg (newargs, tree);
11341 for (j = skip; j < oldnargs; j++, i++)
11342 buffer[i] = args[j];
11344 else
11345 buffer = args + skip;
11347 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11350 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11351 list ARGS along with N new arguments specified as the "..."
11352 parameters. SKIP is the number of arguments in ARGS to be omitted.
11353 OLDNARGS is the number of elements in ARGS. */
11355 static tree
11356 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11357 int skip, tree fndecl, int n, ...)
11359 va_list ap;
11360 tree t;
11362 va_start (ap, n);
11363 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11364 va_end (ap);
11366 return t;
11369 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11370 along with N new arguments specified as the "..." parameters. SKIP
11371 is the number of arguments in EXP to be omitted. This function is used
11372 to do varargs-to-varargs transformations. */
11374 static tree
11375 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11377 va_list ap;
11378 tree t;
11380 va_start (ap, n);
11381 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11382 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11383 va_end (ap);
11385 return t;
11388 /* Validate a single argument ARG against a tree code CODE representing
11389 a type. */
11391 static bool
11392 validate_arg (const_tree arg, enum tree_code code)
11394 if (!arg)
11395 return false;
11396 else if (code == POINTER_TYPE)
11397 return POINTER_TYPE_P (TREE_TYPE (arg));
11398 else if (code == INTEGER_TYPE)
11399 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11400 return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;			/* Index of the next call argument.  */

  va_start (ap, call);
  i = 0;

  do
    {
      /* tree_code is narrower than int; va_arg must read int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);
  return res;
}
11458 /* This function validates the types of a function call argument list
11459 against a specified list of tree_codes. If the last specifier is a 0,
11460 that represents an ellipses, otherwise the last specifier must be a
11461 VOID_TYPE. */
11463 bool
11464 validate_arglist (const_tree callexpr, ...)
11466 enum tree_code code;
11467 bool res = 0;
11468 va_list ap;
11469 const_call_expr_arg_iterator iter;
11470 const_tree arg;
11472 va_start (ap, callexpr);
11473 init_const_call_expr_arg_iterator (callexpr, &iter);
11477 code = (enum tree_code) va_arg (ap, int);
11478 switch (code)
11480 case 0:
11481 /* This signifies an ellipses, any further arguments are all ok. */
11482 res = true;
11483 goto end;
11484 case VOID_TYPE:
11485 /* This signifies an endlink, if no arguments remain, return
11486 true, otherwise return false. */
11487 res = !more_const_call_expr_args_p (&iter);
11488 goto end;
11489 default:
11490 /* If no parameters remain or the parameter's code does not
11491 match the specified code, return false. Otherwise continue
11492 checking any remaining arguments. */
11493 arg = next_const_call_expr_arg (&iter);
11494 if (!validate_arg (arg, code))
11495 goto end;
11496 break;
11499 while (1);
11501 /* We need gotos here since we can only have one VA_CLOSE in a
11502 function. */
11503 end: ;
11504 va_end (ap);
11506 return res;
11509 /* Default target-specific builtin expander that does nothing. */
11512 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11513 rtx target ATTRIBUTE_UNUSED,
11514 rtx subtarget ATTRIBUTE_UNUSED,
11515 enum machine_mode mode ATTRIBUTE_UNUSED,
11516 int ignore ATTRIBUTE_UNUSED)
11518 return NULL_RTX;
11521 /* Returns true is EXP represents data that would potentially reside
11522 in a readonly section. */
11524 static bool
11525 readonly_data_expr (tree exp)
11527 STRIP_NOPS (exp);
11529 if (TREE_CODE (exp) != ADDR_EXPR)
11530 return false;
11532 exp = get_base_address (TREE_OPERAND (exp, 0));
11533 if (!exp)
11534 return false;
11536 /* Make sure we call decl_readonly_section only for trees it
11537 can handle (since it returns true for everything it doesn't
11538 understand). */
11539 if (TREE_CODE (exp) == STRING_CST
11540 || TREE_CODE (exp) == CONSTRUCTOR
11541 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11542 return decl_readonly_section (exp, 0);
11543 else
11544 return false;
11547 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11548 to the call, and TYPE is its return type.
11550 Return NULL_TREE if no simplification was possible, otherwise return the
11551 simplified form of the call as a tree.
11553 The simplified form may be a constant or other expression which
11554 computes the same value, but in a more efficient manner (including
11555 calls to other builtin functions).
11557 The call may contain arguments which need to be evaluated, but
11558 which are not useful to determine the result of the call. In
11559 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11560 COMPOUND_EXPR will be an argument which must be evaluated.
11561 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11562 COMPOUND_EXPR in the chain will contain the tree for the simplified
11563 form of the builtin function call. */
11565 static tree
11566 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11568 if (!validate_arg (s1, POINTER_TYPE)
11569 || !validate_arg (s2, POINTER_TYPE))
11570 return NULL_TREE;
11571 else
11573 tree fn;
11574 const char *p1, *p2;
11576 p2 = c_getstr (s2);
11577 if (p2 == NULL)
11578 return NULL_TREE;
11580 p1 = c_getstr (s1);
11581 if (p1 != NULL)
11583 const char *r = strstr (p1, p2);
11584 tree tem;
11586 if (r == NULL)
11587 return build_int_cst (TREE_TYPE (s1), 0);
11589 /* Return an offset into the constant string argument. */
11590 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11591 return fold_convert_loc (loc, type, tem);
11594 /* The argument is const char *, and the result is char *, so we need
11595 a type conversion here to avoid a warning. */
11596 if (p2[0] == '\0')
11597 return fold_convert_loc (loc, type, s1);
11599 if (p2[1] != '\0')
11600 return NULL_TREE;
11602 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11603 if (!fn)
11604 return NULL_TREE;
11606 /* New argument list transforming strstr(s1, s2) to
11607 strchr(s1, s2[0]). */
11608 return build_call_expr_loc (loc, fn, 2, s1,
11609 build_int_cst (integer_type_node, p2[0]));
11613 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11614 the call, and TYPE is its return type.
11616 Return NULL_TREE if no simplification was possible, otherwise return the
11617 simplified form of the call as a tree.
11619 The simplified form may be a constant or other expression which
11620 computes the same value, but in a more efficient manner (including
11621 calls to other builtin functions).
11623 The call may contain arguments which need to be evaluated, but
11624 which are not useful to determine the result of the call. In
11625 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11626 COMPOUND_EXPR will be an argument which must be evaluated.
11627 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11628 COMPOUND_EXPR in the chain will contain the tree for the simplified
11629 form of the builtin function call. */
11631 static tree
11632 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11634 if (!validate_arg (s1, POINTER_TYPE)
11635 || !validate_arg (s2, INTEGER_TYPE))
11636 return NULL_TREE;
11637 else
11639 const char *p1;
11641 if (TREE_CODE (s2) != INTEGER_CST)
11642 return NULL_TREE;
11644 p1 = c_getstr (s1);
11645 if (p1 != NULL)
11647 char c;
11648 const char *r;
11649 tree tem;
11651 if (target_char_cast (s2, &c))
11652 return NULL_TREE;
11654 r = strchr (p1, c);
11656 if (r == NULL)
11657 return build_int_cst (TREE_TYPE (s1), 0);
11659 /* Return an offset into the constant string argument. */
11660 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11661 return fold_convert_loc (loc, type, tem);
11663 return NULL_TREE;
11667 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11668 the call, and TYPE is its return type.
11670 Return NULL_TREE if no simplification was possible, otherwise return the
11671 simplified form of the call as a tree.
11673 The simplified form may be a constant or other expression which
11674 computes the same value, but in a more efficient manner (including
11675 calls to other builtin functions).
11677 The call may contain arguments which need to be evaluated, but
11678 which are not useful to determine the result of the call. In
11679 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11680 COMPOUND_EXPR will be an argument which must be evaluated.
11681 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11682 COMPOUND_EXPR in the chain will contain the tree for the simplified
11683 form of the builtin function call. */
11685 static tree
11686 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11688 if (!validate_arg (s1, POINTER_TYPE)
11689 || !validate_arg (s2, INTEGER_TYPE))
11690 return NULL_TREE;
11691 else
11693 tree fn;
11694 const char *p1;
11696 if (TREE_CODE (s2) != INTEGER_CST)
11697 return NULL_TREE;
11699 p1 = c_getstr (s1);
11700 if (p1 != NULL)
11702 char c;
11703 const char *r;
11704 tree tem;
11706 if (target_char_cast (s2, &c))
11707 return NULL_TREE;
11709 r = strrchr (p1, c);
11711 if (r == NULL)
11712 return build_int_cst (TREE_TYPE (s1), 0);
11714 /* Return an offset into the constant string argument. */
11715 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11716 return fold_convert_loc (loc, type, tem);
11719 if (! integer_zerop (s2))
11720 return NULL_TREE;
11722 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11723 if (!fn)
11724 return NULL_TREE;
11726 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11727 return build_call_expr_loc (loc, fn, 2, s1, s2);
11731 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11732 to the call, and TYPE is its return type.
11734 Return NULL_TREE if no simplification was possible, otherwise return the
11735 simplified form of the call as a tree.
11737 The simplified form may be a constant or other expression which
11738 computes the same value, but in a more efficient manner (including
11739 calls to other builtin functions).
11741 The call may contain arguments which need to be evaluated, but
11742 which are not useful to determine the result of the call. In
11743 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11744 COMPOUND_EXPR will be an argument which must be evaluated.
11745 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11746 COMPOUND_EXPR in the chain will contain the tree for the simplified
11747 form of the builtin function call. */
11749 static tree
11750 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11752 if (!validate_arg (s1, POINTER_TYPE)
11753 || !validate_arg (s2, POINTER_TYPE))
11754 return NULL_TREE;
11755 else
11757 tree fn;
11758 const char *p1, *p2;
11760 p2 = c_getstr (s2);
11761 if (p2 == NULL)
11762 return NULL_TREE;
11764 p1 = c_getstr (s1);
11765 if (p1 != NULL)
11767 const char *r = strpbrk (p1, p2);
11768 tree tem;
11770 if (r == NULL)
11771 return build_int_cst (TREE_TYPE (s1), 0);
11773 /* Return an offset into the constant string argument. */
11774 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11775 return fold_convert_loc (loc, type, tem);
11778 if (p2[0] == '\0')
11779 /* strpbrk(x, "") == NULL.
11780 Evaluate and ignore s1 in case it had side-effects. */
11781 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11783 if (p2[1] != '\0')
11784 return NULL_TREE; /* Really call strpbrk. */
11786 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11787 if (!fn)
11788 return NULL_TREE;
11790 /* New argument list transforming strpbrk(s1, s2) to
11791 strchr(s1, s2[0]). */
11792 return build_call_expr_loc (loc, fn, 2, s1,
11793 build_int_cst (integer_type_node, p2[0]));
11797 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11798 to the call.
11800 Return NULL_TREE if no simplification was possible, otherwise return the
11801 simplified form of the call as a tree.
11803 The simplified form may be a constant or other expression which
11804 computes the same value, but in a more efficient manner (including
11805 calls to other builtin functions).
11807 The call may contain arguments which need to be evaluated, but
11808 which are not useful to determine the result of the call. In
11809 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11810 COMPOUND_EXPR will be an argument which must be evaluated.
11811 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11812 COMPOUND_EXPR in the chain will contain the tree for the simplified
11813 form of the builtin function call. */
11815 static tree
11816 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11818 if (!validate_arg (dst, POINTER_TYPE)
11819 || !validate_arg (src, POINTER_TYPE))
11820 return NULL_TREE;
11821 else
11823 const char *p = c_getstr (src);
11825 /* If the string length is zero, return the dst parameter. */
11826 if (p && *p == '\0')
11827 return dst;
11829 if (optimize_insn_for_speed_p ())
11831 /* See if we can store by pieces into (dst + strlen(dst)). */
11832 tree newdst, call;
11833 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11834 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11836 if (!strlen_fn || !strcpy_fn)
11837 return NULL_TREE;
11839 /* If we don't have a movstr we don't want to emit an strcpy
11840 call. We have to do that if the length of the source string
11841 isn't computable (in that case we can use memcpy probably
11842 later expanding to a sequence of mov instructions). If we
11843 have movstr instructions we can emit strcpy calls. */
11844 if (!HAVE_movstr)
11846 tree len = c_strlen (src, 1);
11847 if (! len || TREE_SIDE_EFFECTS (len))
11848 return NULL_TREE;
11851 /* Stabilize the argument list. */
11852 dst = builtin_save_expr (dst);
11854 /* Create strlen (dst). */
11855 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11856 /* Create (dst p+ strlen (dst)). */
11858 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11859 newdst = builtin_save_expr (newdst);
11861 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11862 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11864 return NULL_TREE;
11868 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11869 arguments to the call.
11871 Return NULL_TREE if no simplification was possible, otherwise return the
11872 simplified form of the call as a tree.
11874 The simplified form may be a constant or other expression which
11875 computes the same value, but in a more efficient manner (including
11876 calls to other builtin functions).
11878 The call may contain arguments which need to be evaluated, but
11879 which are not useful to determine the result of the call. In
11880 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11881 COMPOUND_EXPR will be an argument which must be evaluated.
11882 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11883 COMPOUND_EXPR in the chain will contain the tree for the simplified
11884 form of the builtin function call. */
11886 static tree
11887 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11889 if (!validate_arg (dst, POINTER_TYPE)
11890 || !validate_arg (src, POINTER_TYPE)
11891 || !validate_arg (len, INTEGER_TYPE))
11892 return NULL_TREE;
11893 else
11895 const char *p = c_getstr (src);
11897 /* If the requested length is zero, or the src parameter string
11898 length is zero, return the dst parameter. */
11899 if (integer_zerop (len) || (p && *p == '\0'))
11900 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11902 /* If the requested len is greater than or equal to the string
11903 length, call strcat. */
11904 if (TREE_CODE (len) == INTEGER_CST && p
11905 && compare_tree_int (len, strlen (p)) >= 0)
11907 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11909 /* If the replacement _DECL isn't initialized, don't do the
11910 transformation. */
11911 if (!fn)
11912 return NULL_TREE;
11914 return build_call_expr_loc (loc, fn, 2, dst, src);
11916 return NULL_TREE;
11920 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11921 to the call.
11923 Return NULL_TREE if no simplification was possible, otherwise return the
11924 simplified form of the call as a tree.
11926 The simplified form may be a constant or other expression which
11927 computes the same value, but in a more efficient manner (including
11928 calls to other builtin functions).
11930 The call may contain arguments which need to be evaluated, but
11931 which are not useful to determine the result of the call. In
11932 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11933 COMPOUND_EXPR will be an argument which must be evaluated.
11934 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11935 COMPOUND_EXPR in the chain will contain the tree for the simplified
11936 form of the builtin function call. */
11938 static tree
11939 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11941 if (!validate_arg (s1, POINTER_TYPE)
11942 || !validate_arg (s2, POINTER_TYPE))
11943 return NULL_TREE;
11944 else
11946 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11948 /* If both arguments are constants, evaluate at compile-time. */
11949 if (p1 && p2)
11951 const size_t r = strspn (p1, p2);
11952 return size_int (r);
11955 /* If either argument is "", return NULL_TREE. */
11956 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11957 /* Evaluate and ignore both arguments in case either one has
11958 side-effects. */
11959 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11960 s1, s2);
11961 return NULL_TREE;
11965 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11966 to the call.
11968 Return NULL_TREE if no simplification was possible, otherwise return the
11969 simplified form of the call as a tree.
11971 The simplified form may be a constant or other expression which
11972 computes the same value, but in a more efficient manner (including
11973 calls to other builtin functions).
11975 The call may contain arguments which need to be evaluated, but
11976 which are not useful to determine the result of the call. In
11977 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11978 COMPOUND_EXPR will be an argument which must be evaluated.
11979 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11980 COMPOUND_EXPR in the chain will contain the tree for the simplified
11981 form of the builtin function call. */
11983 static tree
11984 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11986 if (!validate_arg (s1, POINTER_TYPE)
11987 || !validate_arg (s2, POINTER_TYPE))
11988 return NULL_TREE;
11989 else
11991 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11993 /* If both arguments are constants, evaluate at compile-time. */
11994 if (p1 && p2)
11996 const size_t r = strcspn (p1, p2);
11997 return size_int (r);
12000 /* If the first argument is "", return NULL_TREE. */
12001 if (p1 && *p1 == '\0')
12003 /* Evaluate and ignore argument s2 in case it has
12004 side-effects. */
12005 return omit_one_operand_loc (loc, size_type_node,
12006 size_zero_node, s2);
12009 /* If the second argument is "", return __builtin_strlen(s1). */
12010 if (p2 && *p2 == '\0')
12012 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12014 /* If the replacement _DECL isn't initialized, don't do the
12015 transformation. */
12016 if (!fn)
12017 return NULL_TREE;
12019 return build_call_expr_loc (loc, fn, 1, s1);
12021 return NULL_TREE;
12025 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12026 to the call. IGNORE is true if the value returned
12027 by the builtin will be ignored. UNLOCKED is true is true if this
12028 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12029 the known length of the string. Return NULL_TREE if no simplification
12030 was possible. */
12032 tree
12033 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12034 bool ignore, bool unlocked, tree len)
12036 /* If we're using an unlocked function, assume the other unlocked
12037 functions exist explicitly. */
12038 tree const fn_fputc = (unlocked
12039 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12040 : builtin_decl_implicit (BUILT_IN_FPUTC));
12041 tree const fn_fwrite = (unlocked
12042 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12043 : builtin_decl_implicit (BUILT_IN_FWRITE));
12045 /* If the return value is used, don't do the transformation. */
12046 if (!ignore)
12047 return NULL_TREE;
12049 /* Verify the arguments in the original call. */
12050 if (!validate_arg (arg0, POINTER_TYPE)
12051 || !validate_arg (arg1, POINTER_TYPE))
12052 return NULL_TREE;
12054 if (! len)
12055 len = c_strlen (arg0, 0);
12057 /* Get the length of the string passed to fputs. If the length
12058 can't be determined, punt. */
12059 if (!len
12060 || TREE_CODE (len) != INTEGER_CST)
12061 return NULL_TREE;
12063 switch (compare_tree_int (len, 1))
12065 case -1: /* length is 0, delete the call entirely . */
12066 return omit_one_operand_loc (loc, integer_type_node,
12067 integer_zero_node, arg1);;
12069 case 0: /* length is 1, call fputc. */
12071 const char *p = c_getstr (arg0);
12073 if (p != NULL)
12075 if (fn_fputc)
12076 return build_call_expr_loc (loc, fn_fputc, 2,
12077 build_int_cst
12078 (integer_type_node, p[0]), arg1);
12079 else
12080 return NULL_TREE;
12083 /* FALLTHROUGH */
12084 case 1: /* length is greater than 1, call fwrite. */
12086 /* If optimizing for size keep fputs. */
12087 if (optimize_function_for_size_p (cfun))
12088 return NULL_TREE;
12089 /* New argument list transforming fputs(string, stream) to
12090 fwrite(string, 1, len, stream). */
12091 if (fn_fwrite)
12092 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12093 size_one_node, len, arg1);
12094 else
12095 return NULL_TREE;
12097 default:
12098 gcc_unreachable ();
12100 return NULL_TREE;
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  /* Look through an SSA name to the underlying parameter decl.  */
  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get the
	     not the last argument even though the user used the last
	     argument.  We just warn and set the arg to be the last
	     argument so that we will get wrong-code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* The target's percent/'s' characters are needed to recognize
     format directives below.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, which here
	 is simply the format string's length.  */
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is strlen (orig); only fold when that
	     length is a compile-time constant.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* The computed length must be returned with sprintf's declared
	 return type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12302 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12303 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12304 attempt to simplify calls with more than 4 arguments.
12306 Return NULL_TREE if no simplification was possible, otherwise return the
12307 simplified form of the call as a tree. If IGNORED is true, it means that
12308 the caller does not use the returned value of the function. */
12310 static tree
12311 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12312 tree orig, int ignored)
12314 tree call, retval;
12315 const char *fmt_str = NULL;
12316 unsigned HOST_WIDE_INT destlen;
12318 /* Verify the required arguments in the original call. We deal with two
12319 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12320 'snprintf (dest, cst, "%s", orig)'. */
12321 if (!validate_arg (dest, POINTER_TYPE)
12322 || !validate_arg (destsize, INTEGER_TYPE)
12323 || !validate_arg (fmt, POINTER_TYPE))
12324 return NULL_TREE;
12325 if (orig && !validate_arg (orig, POINTER_TYPE))
12326 return NULL_TREE;
12328 if (!host_integerp (destsize, 1))
12329 return NULL_TREE;
12331 /* Check whether the format is a literal string constant. */
12332 fmt_str = c_getstr (fmt);
12333 if (fmt_str == NULL)
12334 return NULL_TREE;
12336 call = NULL_TREE;
12337 retval = NULL_TREE;
12339 if (!init_target_chars ())
12340 return NULL_TREE;
12342 destlen = tree_low_cst (destsize, 1);
12344 /* If the format doesn't contain % args or %%, use strcpy. */
12345 if (strchr (fmt_str, target_percent) == NULL)
12347 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12348 size_t len = strlen (fmt_str);
12350 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12351 if (orig)
12352 return NULL_TREE;
12354 /* We could expand this as
12355 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12356 or to
12357 memcpy (str, fmt_with_nul_at_cstm1, cst);
12358 but in the former case that might increase code size
12359 and in the latter case grow .rodata section too much.
12360 So punt for now. */
12361 if (len >= destlen)
12362 return NULL_TREE;
12364 if (!fn)
12365 return NULL_TREE;
12367 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12368 'format' is known to contain no % formats and
12369 strlen (fmt) < cst. */
12370 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12372 if (!ignored)
12373 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12376 /* If the format is "%s", use strcpy if the result isn't used. */
12377 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12379 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12380 unsigned HOST_WIDE_INT origlen;
12382 /* Don't crash on snprintf (str1, cst, "%s"). */
12383 if (!orig)
12384 return NULL_TREE;
12386 retval = c_strlen (orig, 1);
12387 if (!retval || !host_integerp (retval, 1))
12388 return NULL_TREE;
12390 origlen = tree_low_cst (retval, 1);
12391 /* We could expand this as
12392 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12393 or to
12394 memcpy (str1, str2_with_nul_at_cstm1, cst);
12395 but in the former case that might increase code size
12396 and in the latter case grow .rodata section too much.
12397 So punt for now. */
12398 if (origlen >= destlen)
12399 return NULL_TREE;
12401 /* Convert snprintf (str1, cst, "%s", str2) into
12402 strcpy (str1, str2) if strlen (str2) < cst. */
12403 if (!fn)
12404 return NULL_TREE;
12406 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12408 if (ignored)
12409 retval = NULL_TREE;
12412 if (call && retval)
12414 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12415 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12416 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12418 else
12419 return call;
12422 /* Expand a call EXP to __builtin_object_size. */
12425 expand_builtin_object_size (tree exp)
12427 tree ost;
12428 int object_size_type;
12429 tree fndecl = get_callee_fndecl (exp);
12431 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12433 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12434 exp, fndecl);
12435 expand_builtin_trap ();
12436 return const0_rtx;
12439 ost = CALL_EXPR_ARG (exp, 1);
12440 STRIP_NOPS (ost);
12442 if (TREE_CODE (ost) != INTEGER_CST
12443 || tree_int_cst_sgn (ost) < 0
12444 || compare_tree_int (ost, 3) > 0)
12446 error ("%Klast argument of %D is not integer constant between 0 and 3",
12447 exp, fndecl);
12448 expand_builtin_trap ();
12449 return const0_rtx;
12452 object_size_type = tree_low_cst (ost, 0);
12454 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12457 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12458 FCODE is the BUILT_IN_* to use.
12459 Return NULL_RTX if we failed; the caller should emit a normal call,
12460 otherwise try to get the result in TARGET, if convenient (and in
12461 mode MODE if that's convenient). */
12463 static rtx
12464 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12465 enum built_in_function fcode)
12467 tree dest, src, len, size;
12469 if (!validate_arglist (exp,
12470 POINTER_TYPE,
12471 fcode == BUILT_IN_MEMSET_CHK
12472 ? INTEGER_TYPE : POINTER_TYPE,
12473 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12474 return NULL_RTX;
12476 dest = CALL_EXPR_ARG (exp, 0);
12477 src = CALL_EXPR_ARG (exp, 1);
12478 len = CALL_EXPR_ARG (exp, 2);
12479 size = CALL_EXPR_ARG (exp, 3);
12481 if (! host_integerp (size, 1))
12482 return NULL_RTX;
12484 if (host_integerp (len, 1) || integer_all_onesp (size))
12486 tree fn;
12488 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12490 warning_at (tree_nonartificial_location (exp),
12491 0, "%Kcall to %D will always overflow destination buffer",
12492 exp, get_callee_fndecl (exp));
12493 return NULL_RTX;
12496 fn = NULL_TREE;
12497 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12498 mem{cpy,pcpy,move,set} is available. */
12499 switch (fcode)
12501 case BUILT_IN_MEMCPY_CHK:
12502 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12503 break;
12504 case BUILT_IN_MEMPCPY_CHK:
12505 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12506 break;
12507 case BUILT_IN_MEMMOVE_CHK:
12508 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12509 break;
12510 case BUILT_IN_MEMSET_CHK:
12511 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12512 break;
12513 default:
12514 break;
12517 if (! fn)
12518 return NULL_RTX;
12520 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12521 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12522 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12523 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12525 else if (fcode == BUILT_IN_MEMSET_CHK)
12526 return NULL_RTX;
12527 else
12529 unsigned int dest_align = get_pointer_alignment (dest);
12531 /* If DEST is not a pointer type, call the normal function. */
12532 if (dest_align == 0)
12533 return NULL_RTX;
12535 /* If SRC and DEST are the same (and not volatile), do nothing. */
12536 if (operand_equal_p (src, dest, 0))
12538 tree expr;
12540 if (fcode != BUILT_IN_MEMPCPY_CHK)
12542 /* Evaluate and ignore LEN in case it has side-effects. */
12543 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12544 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12547 expr = fold_build_pointer_plus (dest, len);
12548 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12551 /* __memmove_chk special case. */
12552 if (fcode == BUILT_IN_MEMMOVE_CHK)
12554 unsigned int src_align = get_pointer_alignment (src);
12556 if (src_align == 0)
12557 return NULL_RTX;
12559 /* If src is categorized for a readonly section we can use
12560 normal __memcpy_chk. */
12561 if (readonly_data_expr (src))
12563 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12564 if (!fn)
12565 return NULL_RTX;
12566 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12567 dest, src, len, size);
12568 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12569 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12570 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12573 return NULL_RTX;
12577 /* Emit warning if a buffer overflow is detected at compile time. */
12579 static void
12580 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12582 int is_strlen = 0;
12583 tree len, size;
12584 location_t loc = tree_nonartificial_location (exp);
12586 switch (fcode)
12588 case BUILT_IN_STRCPY_CHK:
12589 case BUILT_IN_STPCPY_CHK:
12590 /* For __strcat_chk the warning will be emitted only if overflowing
12591 by at least strlen (dest) + 1 bytes. */
12592 case BUILT_IN_STRCAT_CHK:
12593 len = CALL_EXPR_ARG (exp, 1);
12594 size = CALL_EXPR_ARG (exp, 2);
12595 is_strlen = 1;
12596 break;
12597 case BUILT_IN_STRNCAT_CHK:
12598 case BUILT_IN_STRNCPY_CHK:
12599 case BUILT_IN_STPNCPY_CHK:
12600 len = CALL_EXPR_ARG (exp, 2);
12601 size = CALL_EXPR_ARG (exp, 3);
12602 break;
12603 case BUILT_IN_SNPRINTF_CHK:
12604 case BUILT_IN_VSNPRINTF_CHK:
12605 len = CALL_EXPR_ARG (exp, 1);
12606 size = CALL_EXPR_ARG (exp, 3);
12607 break;
12608 default:
12609 gcc_unreachable ();
12612 if (!len || !size)
12613 return;
12615 if (! host_integerp (size, 1) || integer_all_onesp (size))
12616 return;
12618 if (is_strlen)
12620 len = c_strlen (len, 1);
12621 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12622 return;
12624 else if (fcode == BUILT_IN_STRNCAT_CHK)
12626 tree src = CALL_EXPR_ARG (exp, 1);
12627 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12628 return;
12629 src = c_strlen (src, 1);
12630 if (! src || ! host_integerp (src, 1))
12632 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12633 exp, get_callee_fndecl (exp));
12634 return;
12636 else if (tree_int_cst_lt (src, size))
12637 return;
12639 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12640 return;
12642 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12643 exp, get_callee_fndecl (exp));
12646 /* Emit warning if a buffer overflow is detected at compile time
12647 in __sprintf_chk/__vsprintf_chk calls. */
12649 static void
12650 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12652 tree size, len, fmt;
12653 const char *fmt_str;
12654 int nargs = call_expr_nargs (exp);
12656 /* Verify the required arguments in the original call. */
12658 if (nargs < 4)
12659 return;
12660 size = CALL_EXPR_ARG (exp, 2);
12661 fmt = CALL_EXPR_ARG (exp, 3);
12663 if (! host_integerp (size, 1) || integer_all_onesp (size))
12664 return;
12666 /* Check whether the format is a literal string constant. */
12667 fmt_str = c_getstr (fmt);
12668 if (fmt_str == NULL)
12669 return;
12671 if (!init_target_chars ())
12672 return;
12674 /* If the format doesn't contain % args or %%, we know its size. */
12675 if (strchr (fmt_str, target_percent) == 0)
12676 len = build_int_cstu (size_type_node, strlen (fmt_str));
12677 /* If the format is "%s" and first ... argument is a string literal,
12678 we know it too. */
12679 else if (fcode == BUILT_IN_SPRINTF_CHK
12680 && strcmp (fmt_str, target_percent_s) == 0)
12682 tree arg;
12684 if (nargs < 5)
12685 return;
12686 arg = CALL_EXPR_ARG (exp, 4);
12687 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12688 return;
12690 len = c_strlen (arg, 1);
12691 if (!len || ! host_integerp (len, 1))
12692 return;
12694 else
12695 return;
12697 if (! tree_int_cst_lt (len, size))
12698 warning_at (tree_nonartificial_location (exp),
12699 0, "%Kcall to %D will always overflow destination buffer",
12700 exp, get_callee_fndecl (exp));
12703 /* Emit warning if a free is called with address of a variable. */
12705 static void
12706 maybe_emit_free_warning (tree exp)
12708 tree arg = CALL_EXPR_ARG (exp, 0);
12710 STRIP_NOPS (arg);
12711 if (TREE_CODE (arg) != ADDR_EXPR)
12712 return;
12714 arg = get_base_address (TREE_OPERAND (arg, 0));
12715 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12716 return;
12718 if (SSA_VAR_P (arg))
12719 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12720 "%Kattempt to free a non-heap object %qD", exp, arg);
12721 else
12722 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12723 "%Kattempt to free a non-heap object", exp);
12726 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12727 if possible. */
12729 tree
12730 fold_builtin_object_size (tree ptr, tree ost)
12732 unsigned HOST_WIDE_INT bytes;
12733 int object_size_type;
12735 if (!validate_arg (ptr, POINTER_TYPE)
12736 || !validate_arg (ost, INTEGER_TYPE))
12737 return NULL_TREE;
12739 STRIP_NOPS (ost);
12741 if (TREE_CODE (ost) != INTEGER_CST
12742 || tree_int_cst_sgn (ost) < 0
12743 || compare_tree_int (ost, 3) > 0)
12744 return NULL_TREE;
12746 object_size_type = tree_low_cst (ost, 0);
12748 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12749 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12750 and (size_t) 0 for types 2 and 3. */
12751 if (TREE_SIDE_EFFECTS (ptr))
12752 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12754 if (TREE_CODE (ptr) == ADDR_EXPR)
12756 bytes = compute_builtin_object_size (ptr, object_size_type);
12757 if (double_int_fits_to_tree_p (size_type_node,
12758 uhwi_to_double_int (bytes)))
12759 return build_int_cstu (size_type_node, bytes);
12761 else if (TREE_CODE (ptr) == SSA_NAME)
12763 /* If object size is not known yet, delay folding until
12764 later. Maybe subsequent passes will help determining
12765 it. */
12766 bytes = compute_builtin_object_size (ptr, object_size_type);
12767 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12768 && double_int_fits_to_tree_p (size_type_node,
12769 uhwi_to_double_int (bytes)))
12770 return build_int_cstu (size_type_node, bytes);
12773 return NULL_TREE;
12776 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12777 DEST, SRC, LEN, and SIZE are the arguments to the call.
12778 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12779 code of the builtin. If MAXLEN is not NULL, it is maximum length
12780 passed as third argument. */
12782 tree
12783 fold_builtin_memory_chk (location_t loc, tree fndecl,
12784 tree dest, tree src, tree len, tree size,
12785 tree maxlen, bool ignore,
12786 enum built_in_function fcode)
12788 tree fn;
12790 if (!validate_arg (dest, POINTER_TYPE)
12791 || !validate_arg (src,
12792 (fcode == BUILT_IN_MEMSET_CHK
12793 ? INTEGER_TYPE : POINTER_TYPE))
12794 || !validate_arg (len, INTEGER_TYPE)
12795 || !validate_arg (size, INTEGER_TYPE))
12796 return NULL_TREE;
12798 /* If SRC and DEST are the same (and not volatile), return DEST
12799 (resp. DEST+LEN for __mempcpy_chk). */
12800 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12802 if (fcode != BUILT_IN_MEMPCPY_CHK)
12803 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12804 dest, len);
12805 else
12807 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12808 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12812 if (! host_integerp (size, 1))
12813 return NULL_TREE;
12815 if (! integer_all_onesp (size))
12817 if (! host_integerp (len, 1))
12819 /* If LEN is not constant, try MAXLEN too.
12820 For MAXLEN only allow optimizing into non-_ocs function
12821 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12822 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12824 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12826 /* (void) __mempcpy_chk () can be optimized into
12827 (void) __memcpy_chk (). */
12828 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12829 if (!fn)
12830 return NULL_TREE;
12832 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12834 return NULL_TREE;
12837 else
12838 maxlen = len;
12840 if (tree_int_cst_lt (size, maxlen))
12841 return NULL_TREE;
12844 fn = NULL_TREE;
12845 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12846 mem{cpy,pcpy,move,set} is available. */
12847 switch (fcode)
12849 case BUILT_IN_MEMCPY_CHK:
12850 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12851 break;
12852 case BUILT_IN_MEMPCPY_CHK:
12853 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12854 break;
12855 case BUILT_IN_MEMMOVE_CHK:
12856 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12857 break;
12858 case BUILT_IN_MEMSET_CHK:
12859 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12860 break;
12861 default:
12862 break;
12865 if (!fn)
12866 return NULL_TREE;
12868 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12871 /* Fold a call to the __st[rp]cpy_chk builtin.
12872 DEST, SRC, and SIZE are the arguments to the call.
12873 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12874 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12875 strings passed as second argument. */
12877 tree
12878 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12879 tree src, tree size,
12880 tree maxlen, bool ignore,
12881 enum built_in_function fcode)
12883 tree len, fn;
12885 if (!validate_arg (dest, POINTER_TYPE)
12886 || !validate_arg (src, POINTER_TYPE)
12887 || !validate_arg (size, INTEGER_TYPE))
12888 return NULL_TREE;
12890 /* If SRC and DEST are the same (and not volatile), return DEST. */
12891 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12892 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12894 if (! host_integerp (size, 1))
12895 return NULL_TREE;
12897 if (! integer_all_onesp (size))
12899 len = c_strlen (src, 1);
12900 if (! len || ! host_integerp (len, 1))
12902 /* If LEN is not constant, try MAXLEN too.
12903 For MAXLEN only allow optimizing into non-_ocs function
12904 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12905 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12907 if (fcode == BUILT_IN_STPCPY_CHK)
12909 if (! ignore)
12910 return NULL_TREE;
12912 /* If return value of __stpcpy_chk is ignored,
12913 optimize into __strcpy_chk. */
12914 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12915 if (!fn)
12916 return NULL_TREE;
12918 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12921 if (! len || TREE_SIDE_EFFECTS (len))
12922 return NULL_TREE;
12924 /* If c_strlen returned something, but not a constant,
12925 transform __strcpy_chk into __memcpy_chk. */
12926 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12927 if (!fn)
12928 return NULL_TREE;
12930 len = fold_convert_loc (loc, size_type_node, len);
12931 len = size_binop_loc (loc, PLUS_EXPR, len,
12932 build_int_cst (size_type_node, 1));
12933 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12934 build_call_expr_loc (loc, fn, 4,
12935 dest, src, len, size));
12938 else
12939 maxlen = len;
12941 if (! tree_int_cst_lt (maxlen, size))
12942 return NULL_TREE;
12945 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12946 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12947 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12948 if (!fn)
12949 return NULL_TREE;
12951 return build_call_expr_loc (loc, fn, 2, dest, src);
12954 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12955 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12956 length passed as third argument. IGNORE is true if return value can be
12957 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12959 tree
12960 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12961 tree len, tree size, tree maxlen, bool ignore,
12962 enum built_in_function fcode)
12964 tree fn;
12966 if (!validate_arg (dest, POINTER_TYPE)
12967 || !validate_arg (src, POINTER_TYPE)
12968 || !validate_arg (len, INTEGER_TYPE)
12969 || !validate_arg (size, INTEGER_TYPE))
12970 return NULL_TREE;
12972 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12974 /* If return value of __stpncpy_chk is ignored,
12975 optimize into __strncpy_chk. */
12976 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12977 if (fn)
12978 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12981 if (! host_integerp (size, 1))
12982 return NULL_TREE;
12984 if (! integer_all_onesp (size))
12986 if (! host_integerp (len, 1))
12988 /* If LEN is not constant, try MAXLEN too.
12989 For MAXLEN only allow optimizing into non-_ocs function
12990 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12991 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12992 return NULL_TREE;
12994 else
12995 maxlen = len;
12997 if (tree_int_cst_lt (size, maxlen))
12998 return NULL_TREE;
13001 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13002 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13003 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13004 if (!fn)
13005 return NULL_TREE;
13007 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13010 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13011 are the arguments to the call. */
13013 static tree
13014 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13015 tree src, tree size)
13017 tree fn;
13018 const char *p;
13020 if (!validate_arg (dest, POINTER_TYPE)
13021 || !validate_arg (src, POINTER_TYPE)
13022 || !validate_arg (size, INTEGER_TYPE))
13023 return NULL_TREE;
13025 p = c_getstr (src);
13026 /* If the SRC parameter is "", return DEST. */
13027 if (p && *p == '\0')
13028 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13030 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13031 return NULL_TREE;
13033 /* If __builtin_strcat_chk is used, assume strcat is available. */
13034 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13035 if (!fn)
13036 return NULL_TREE;
13038 return build_call_expr_loc (loc, fn, 2, dest, src);
13041 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13042 LEN, and SIZE. */
13044 static tree
13045 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13046 tree dest, tree src, tree len, tree size)
13048 tree fn;
13049 const char *p;
13051 if (!validate_arg (dest, POINTER_TYPE)
13052 || !validate_arg (src, POINTER_TYPE)
13053 || !validate_arg (size, INTEGER_TYPE)
13054 || !validate_arg (size, INTEGER_TYPE))
13055 return NULL_TREE;
13057 p = c_getstr (src);
13058 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13059 if (p && *p == '\0')
13060 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13061 else if (integer_zerop (len))
13062 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13064 if (! host_integerp (size, 1))
13065 return NULL_TREE;
13067 if (! integer_all_onesp (size))
13069 tree src_len = c_strlen (src, 1);
13070 if (src_len
13071 && host_integerp (src_len, 1)
13072 && host_integerp (len, 1)
13073 && ! tree_int_cst_lt (len, src_len))
13075 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13076 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13077 if (!fn)
13078 return NULL_TREE;
13080 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13082 return NULL_TREE;
13085 /* If __builtin_strncat_chk is used, assume strncat is available. */
13086 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13087 if (!fn)
13088 return NULL_TREE;
13090 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13093 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13094 Return NULL_TREE if a normal call should be emitted rather than
13095 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13096 or BUILT_IN_VSPRINTF_CHK. */
13098 static tree
13099 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13100 enum built_in_function fcode)
13102 tree dest, size, len, fn, fmt, flag;
13103 const char *fmt_str;
13105 /* Verify the required arguments in the original call. */
13106 if (nargs < 4)
13107 return NULL_TREE;
13108 dest = args[0];
13109 if (!validate_arg (dest, POINTER_TYPE))
13110 return NULL_TREE;
13111 flag = args[1];
13112 if (!validate_arg (flag, INTEGER_TYPE))
13113 return NULL_TREE;
13114 size = args[2];
13115 if (!validate_arg (size, INTEGER_TYPE))
13116 return NULL_TREE;
13117 fmt = args[3];
13118 if (!validate_arg (fmt, POINTER_TYPE))
13119 return NULL_TREE;
13121 if (! host_integerp (size, 1))
13122 return NULL_TREE;
13124 len = NULL_TREE;
13126 if (!init_target_chars ())
13127 return NULL_TREE;
13129 /* Check whether the format is a literal string constant. */
13130 fmt_str = c_getstr (fmt);
13131 if (fmt_str != NULL)
13133 /* If the format doesn't contain % args or %%, we know the size. */
13134 if (strchr (fmt_str, target_percent) == 0)
13136 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13137 len = build_int_cstu (size_type_node, strlen (fmt_str));
13139 /* If the format is "%s" and first ... argument is a string literal,
13140 we know the size too. */
13141 else if (fcode == BUILT_IN_SPRINTF_CHK
13142 && strcmp (fmt_str, target_percent_s) == 0)
13144 tree arg;
13146 if (nargs == 5)
13148 arg = args[4];
13149 if (validate_arg (arg, POINTER_TYPE))
13151 len = c_strlen (arg, 1);
13152 if (! len || ! host_integerp (len, 1))
13153 len = NULL_TREE;
13159 if (! integer_all_onesp (size))
13161 if (! len || ! tree_int_cst_lt (len, size))
13162 return NULL_TREE;
13165 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13166 or if format doesn't contain % chars or is "%s". */
13167 if (! integer_zerop (flag))
13169 if (fmt_str == NULL)
13170 return NULL_TREE;
13171 if (strchr (fmt_str, target_percent) != NULL
13172 && strcmp (fmt_str, target_percent_s))
13173 return NULL_TREE;
13176 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13177 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13178 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13179 if (!fn)
13180 return NULL_TREE;
13182 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13185 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13186 a normal call should be emitted rather than expanding the function
13187 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13189 static tree
13190 fold_builtin_sprintf_chk (location_t loc, tree exp,
13191 enum built_in_function fcode)
13193 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13194 CALL_EXPR_ARGP (exp), fcode);
13197 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13198 NULL_TREE if a normal call should be emitted rather than expanding
13199 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13200 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13201 passed as second argument. */
13203 static tree
13204 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13205 tree maxlen, enum built_in_function fcode)
13207 tree dest, size, len, fn, fmt, flag;
13208 const char *fmt_str;
13210 /* Verify the required arguments in the original call. */
13211 if (nargs < 5)
13212 return NULL_TREE;
13213 dest = args[0];
13214 if (!validate_arg (dest, POINTER_TYPE))
13215 return NULL_TREE;
13216 len = args[1];
13217 if (!validate_arg (len, INTEGER_TYPE))
13218 return NULL_TREE;
13219 flag = args[2];
13220 if (!validate_arg (flag, INTEGER_TYPE))
13221 return NULL_TREE;
13222 size = args[3];
13223 if (!validate_arg (size, INTEGER_TYPE))
13224 return NULL_TREE;
13225 fmt = args[4];
13226 if (!validate_arg (fmt, POINTER_TYPE))
13227 return NULL_TREE;
13229 if (! host_integerp (size, 1))
13230 return NULL_TREE;
13232 if (! integer_all_onesp (size))
13234 if (! host_integerp (len, 1))
13236 /* If LEN is not constant, try MAXLEN too.
13237 For MAXLEN only allow optimizing into non-_ocs function
13238 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13239 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13240 return NULL_TREE;
13242 else
13243 maxlen = len;
13245 if (tree_int_cst_lt (size, maxlen))
13246 return NULL_TREE;
13249 if (!init_target_chars ())
13250 return NULL_TREE;
13252 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13253 or if format doesn't contain % chars or is "%s". */
13254 if (! integer_zerop (flag))
13256 fmt_str = c_getstr (fmt);
13257 if (fmt_str == NULL)
13258 return NULL_TREE;
13259 if (strchr (fmt_str, target_percent) != NULL
13260 && strcmp (fmt_str, target_percent_s))
13261 return NULL_TREE;
13264 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13265 available. */
13266 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13267 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13268 if (!fn)
13269 return NULL_TREE;
13271 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13274 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13275 a normal call should be emitted rather than expanding the function
13276 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13277 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13278 passed as second argument. */
13280 tree
13281 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13282 enum built_in_function fcode)
13284 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13285 CALL_EXPR_ARGP (exp), maxlen, fcode);
13288 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13289 FMT and ARG are the arguments to the call; we don't fold cases with
13290 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13292 Return NULL_TREE if no simplification was possible, otherwise return the
13293 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13294 code of the function to be simplified. */
13296 static tree
13297 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13298 tree arg, bool ignore,
13299 enum built_in_function fcode)
13301 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13302 const char *fmt_str = NULL;
13304 /* If the return value is used, don't do the transformation. */
13305 if (! ignore)
13306 return NULL_TREE;
13308 /* Verify the required arguments in the original call. */
13309 if (!validate_arg (fmt, POINTER_TYPE))
13310 return NULL_TREE;
13312 /* Check whether the format is a literal string constant. */
13313 fmt_str = c_getstr (fmt);
13314 if (fmt_str == NULL)
13315 return NULL_TREE;
13317 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13319 /* If we're using an unlocked function, assume the other
13320 unlocked functions exist explicitly. */
13321 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13322 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13324 else
13326 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13327 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13330 if (!init_target_chars ())
13331 return NULL_TREE;
13333 if (strcmp (fmt_str, target_percent_s) == 0
13334 || strchr (fmt_str, target_percent) == NULL)
13336 const char *str;
13338 if (strcmp (fmt_str, target_percent_s) == 0)
13340 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13341 return NULL_TREE;
13343 if (!arg || !validate_arg (arg, POINTER_TYPE))
13344 return NULL_TREE;
13346 str = c_getstr (arg);
13347 if (str == NULL)
13348 return NULL_TREE;
13350 else
13352 /* The format specifier doesn't contain any '%' characters. */
13353 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13354 && arg)
13355 return NULL_TREE;
13356 str = fmt_str;
13359 /* If the string was "", printf does nothing. */
13360 if (str[0] == '\0')
13361 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13363 /* If the string has length of 1, call putchar. */
13364 if (str[1] == '\0')
13366 /* Given printf("c"), (where c is any one character,)
13367 convert "c"[0] to an int and pass that to the replacement
13368 function. */
13369 newarg = build_int_cst (integer_type_node, str[0]);
13370 if (fn_putchar)
13371 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13373 else
13375 /* If the string was "string\n", call puts("string"). */
13376 size_t len = strlen (str);
13377 if ((unsigned char)str[len - 1] == target_newline
13378 && (size_t) (int) len == len
13379 && (int) len > 0)
13381 char *newstr;
13382 tree offset_node, string_cst;
13384 /* Create a NUL-terminated string that's one char shorter
13385 than the original, stripping off the trailing '\n'. */
13386 newarg = build_string_literal (len, str);
13387 string_cst = string_constant (newarg, &offset_node);
13388 gcc_checking_assert (string_cst
13389 && (TREE_STRING_LENGTH (string_cst)
13390 == (int) len)
13391 && integer_zerop (offset_node)
13392 && (unsigned char)
13393 TREE_STRING_POINTER (string_cst)[len - 1]
13394 == target_newline);
13395 /* build_string_literal creates a new STRING_CST,
13396 modify it in place to avoid double copying. */
13397 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13398 newstr[len - 1] = '\0';
13399 if (fn_puts)
13400 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13402 else
13403 /* We'd like to arrange to call fputs(string,stdout) here,
13404 but we need stdout and don't have a way to get it yet. */
13405 return NULL_TREE;
13409 /* The other optimizations can be done only on the non-va_list variants. */
13410 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13411 return NULL_TREE;
13413 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13414 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13416 if (!arg || !validate_arg (arg, POINTER_TYPE))
13417 return NULL_TREE;
13418 if (fn_puts)
13419 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13422 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13423 else if (strcmp (fmt_str, target_percent_c) == 0)
13425 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13426 return NULL_TREE;
13427 if (fn_putchar)
13428 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13431 if (!call)
13432 return NULL_TREE;
13434 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: fputs/fputc
     do not return the same value fprintf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  /* Initialize '%', 'c', 's', '\n' in the target character set.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument without a %-directive is only legal for
	 the va_list variants, where ARG is the va_list itself.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL is only set when a replacement function was available.  */
  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13536 /* Initialize format string characters in the target charset. */
13538 static bool
13539 init_target_chars (void)
13541 static bool init;
13542 if (!init)
13544 target_newline = lang_hooks.to_target_charset ('\n');
13545 target_percent = lang_hooks.to_target_charset ('%');
13546 target_c = lang_hooks.to_target_charset ('c');
13547 target_s = lang_hooks.to_target_charset ('s');
13548 if (target_newline == 0 || target_percent == 0 || target_c == 0
13549 || target_s == 0)
13550 return false;
13552 target_percent_c[0] = target_percent;
13553 target_percent_c[1] = target_c;
13554 target_percent_c[2] = '\0';
13556 target_percent_s[0] = target_percent;
13557 target_percent_s[1] = target_s;
13558 target_percent_s[2] = '\0';
13560 target_percent_s_newline[0] = target_percent;
13561 target_percent_s_newline[1] = target_s;
13562 target_percent_s_newline[2] = target_newline;
13563 target_percent_s_newline[3] = '\0';
13565 init = true;
13567 return true;
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value
	     (round-trip through the mode is lossless).  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its element type drives the conversion.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Only fold finite arguments that lie within the requested bounds.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear MPFR's sticky flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Clear MPFR's sticky flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Clear MPFR's sticky flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* Computes both results with a single call; INEXACT covers
	     both the sine and the cosine.  */
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* N must fit in a long since that is what MPFR takes for the
	 order argument.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma has poles there).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  /* MPC takes a single combined rounding mode for both parts.  */
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  /* DO_NONFINITE doubles as FORCE_CONVERT: skip the finiteness
	     checks in do_mpc_ckconv when folding non-finite values.  */
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
14155 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14156 a normal call should be emitted rather than expanding the function
14157 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14159 static tree
14160 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14162 int nargs = gimple_call_num_args (stmt);
14164 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14165 (nargs > 0
14166 ? gimple_call_arg_ptr (stmt, 0)
14167 : &error_mark_node), fcode);
14170 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14171 a normal call should be emitted rather than expanding the function
14172 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14173 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14174 passed as second argument. */
14176 tree
14177 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14178 enum built_in_function fcode)
14180 int nargs = gimple_call_num_args (stmt);
14182 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14183 (nargs > 0
14184 ? gimple_call_arg_ptr (stmt, 0)
14185 : &error_mark_node), maxlen, fcode);
14188 /* Builtins with folding operations that operate on "..." arguments
14189 need special handling; we need to store the arguments in a convenient
14190 data structure before attempting any folding. Fortunately there are
14191 only a few builtins that fall into this category. FNDECL is the
14192 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14193 result of the function call is ignored. */
14195 static tree
14196 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14197 bool ignore ATTRIBUTE_UNUSED)
14199 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14200 tree ret = NULL_TREE;
14202 switch (fcode)
14204 case BUILT_IN_SPRINTF_CHK:
14205 case BUILT_IN_VSPRINTF_CHK:
14206 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14207 break;
14209 case BUILT_IN_SNPRINTF_CHK:
14210 case BUILT_IN_VSNPRINTF_CHK:
14211 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14213 default:
14214 break;
14216 if (ret)
14218 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14219 TREE_NO_WARNING (ret) = 1;
14220 return ret;
14222 return NULL_TREE;
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the GIMPLE
   call; IGNORE is true if the call's value is unused.  Return the folded
   replacement expression or NULL_TREE if no folding was possible.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins; __builtin_va_arg_pack calls
     must survive until expansion.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the no-warning NOP wrapper added by
		     gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
14280 /* Look up the function in builtin_decl that corresponds to DECL
14281 and set ASMSPEC as its user assembler name. DECL must be a
14282 function decl that declares a builtin. */
14284 void
14285 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14287 tree builtin;
14288 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14289 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14290 && asmspec != 0);
14292 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14293 set_user_assembler_name (builtin, asmspec);
14294 switch (DECL_FUNCTION_CODE (decl))
14296 case BUILT_IN_MEMCPY:
14297 init_block_move_fn (asmspec);
14298 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14299 break;
14300 case BUILT_IN_MEMSET:
14301 init_block_clear_fn (asmspec);
14302 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14303 break;
14304 case BUILT_IN_MEMMOVE:
14305 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14306 break;
14307 case BUILT_IN_MEMCMP:
14308 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14309 break;
14310 case BUILT_IN_ABORT:
14311 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14312 break;
14313 case BUILT_IN_FFS:
14314 if (INT_TYPE_SIZE < BITS_PER_WORD)
14316 set_user_assembler_libfunc ("ffs", asmspec);
14317 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14318 MODE_INT, 0), "ffs");
14320 break;
14321 default:
14322 break;
14326 /* Return true if DECL is a builtin that expands to a constant or similarly
14327 simple code. */
14328 bool
14329 is_simple_builtin (tree decl)
14331 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14332 switch (DECL_FUNCTION_CODE (decl))
14334 /* Builtins that expand to constants. */
14335 case BUILT_IN_CONSTANT_P:
14336 case BUILT_IN_EXPECT:
14337 case BUILT_IN_OBJECT_SIZE:
14338 case BUILT_IN_UNREACHABLE:
14339 /* Simple register moves or loads from stack. */
14340 case BUILT_IN_ASSUME_ALIGNED:
14341 case BUILT_IN_RETURN_ADDRESS:
14342 case BUILT_IN_EXTRACT_RETURN_ADDR:
14343 case BUILT_IN_FROB_RETURN_ADDR:
14344 case BUILT_IN_RETURN:
14345 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14346 case BUILT_IN_FRAME_ADDRESS:
14347 case BUILT_IN_VA_END:
14348 case BUILT_IN_STACK_SAVE:
14349 case BUILT_IN_STACK_RESTORE:
14350 /* Exception state returns or moves registers around. */
14351 case BUILT_IN_EH_FILTER:
14352 case BUILT_IN_EH_POINTER:
14353 case BUILT_IN_EH_COPY_VALUES:
14354 return true;
14356 default:
14357 return false;
14360 return false;
14363 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14364 most probably expanded inline into reasonably simple code. This is a
14365 superset of is_simple_builtin. */
14366 bool
14367 is_inexpensive_builtin (tree decl)
14369 if (!decl)
14370 return false;
14371 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14372 return true;
14373 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14374 switch (DECL_FUNCTION_CODE (decl))
14376 case BUILT_IN_ABS:
14377 case BUILT_IN_ALLOCA:
14378 case BUILT_IN_ALLOCA_WITH_ALIGN:
14379 case BUILT_IN_BSWAP16:
14380 case BUILT_IN_BSWAP32:
14381 case BUILT_IN_BSWAP64:
14382 case BUILT_IN_CLZ:
14383 case BUILT_IN_CLZIMAX:
14384 case BUILT_IN_CLZL:
14385 case BUILT_IN_CLZLL:
14386 case BUILT_IN_CTZ:
14387 case BUILT_IN_CTZIMAX:
14388 case BUILT_IN_CTZL:
14389 case BUILT_IN_CTZLL:
14390 case BUILT_IN_FFS:
14391 case BUILT_IN_FFSIMAX:
14392 case BUILT_IN_FFSL:
14393 case BUILT_IN_FFSLL:
14394 case BUILT_IN_IMAXABS:
14395 case BUILT_IN_FINITE:
14396 case BUILT_IN_FINITEF:
14397 case BUILT_IN_FINITEL:
14398 case BUILT_IN_FINITED32:
14399 case BUILT_IN_FINITED64:
14400 case BUILT_IN_FINITED128:
14401 case BUILT_IN_FPCLASSIFY:
14402 case BUILT_IN_ISFINITE:
14403 case BUILT_IN_ISINF_SIGN:
14404 case BUILT_IN_ISINF:
14405 case BUILT_IN_ISINFF:
14406 case BUILT_IN_ISINFL:
14407 case BUILT_IN_ISINFD32:
14408 case BUILT_IN_ISINFD64:
14409 case BUILT_IN_ISINFD128:
14410 case BUILT_IN_ISNAN:
14411 case BUILT_IN_ISNANF:
14412 case BUILT_IN_ISNANL:
14413 case BUILT_IN_ISNAND32:
14414 case BUILT_IN_ISNAND64:
14415 case BUILT_IN_ISNAND128:
14416 case BUILT_IN_ISNORMAL:
14417 case BUILT_IN_ISGREATER:
14418 case BUILT_IN_ISGREATEREQUAL:
14419 case BUILT_IN_ISLESS:
14420 case BUILT_IN_ISLESSEQUAL:
14421 case BUILT_IN_ISLESSGREATER:
14422 case BUILT_IN_ISUNORDERED:
14423 case BUILT_IN_VA_ARG_PACK:
14424 case BUILT_IN_VA_ARG_PACK_LEN:
14425 case BUILT_IN_VA_COPY:
14426 case BUILT_IN_TRAP:
14427 case BUILT_IN_SAVEREGS:
14428 case BUILT_IN_POPCOUNTL:
14429 case BUILT_IN_POPCOUNTLL:
14430 case BUILT_IN_POPCOUNTIMAX:
14431 case BUILT_IN_POPCOUNT:
14432 case BUILT_IN_PARITYL:
14433 case BUILT_IN_PARITYLL:
14434 case BUILT_IN_PARITYIMAX:
14435 case BUILT_IN_PARITY:
14436 case BUILT_IN_LABS:
14437 case BUILT_IN_LLABS:
14438 case BUILT_IN_PREFETCH:
14439 return true;
14441 default:
14442 return is_simple_builtin (decl);
14445 return false;