* stmt.c (expand_nl_goto_receiver): Remove almost-copy of
[official-gcc.git] / gcc / builtins.c
blob6d501c47ad89df870bd4f71b16d1e4aa267515ae
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
74 #undef DEF_BUILTIN
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info;
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
230 static bool
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
234 return true;
235 if (strncmp (name, "__sync_", 7) == 0)
236 return true;
237 if (strncmp (name, "__atomic_", 9) == 0)
238 return true;
239 return false;
243 /* Return true if DECL is a function symbol representing a built-in. */
245 bool
246 is_builtin_fn (tree decl)
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
256 static bool
257 called_as_built_in (tree node)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
261 will have. */
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
266 /* Compute values M and N such that M divides (address of EXP - N) and such
267 that N < M. If these numbers can be determined, store M in alignp and N in
268 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
269 *alignp and any bit-offset to *bitposp.
271 Note that the address (and thus the alignment) computed here is based
272 on the address to which a symbol resolves, whereas DECL_ALIGN is based
273 on the address at which an object is actually located. These two
274 addresses are not always the same. For example, on ARM targets,
275 the address &foo of a Thumb function foo() has the lowest bit set,
276 whereas foo() itself starts on an even address.
278 If ADDR_P is true we are taking the address of the memory reference EXP
279 and thus cannot rely on the access taking place. */
281 static bool
282 get_object_alignment_2 (tree exp, unsigned int *alignp,
283 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
285 HOST_WIDE_INT bitsize, bitpos;
286 tree offset;
287 enum machine_mode mode;
288 int unsignedp, volatilep;
289 unsigned int inner, align = BITS_PER_UNIT;
290 bool known_alignment = false;
292 /* Get the innermost object and the constant (bitpos) and possibly
293 variable (offset) offset of the access. */
294 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
295 &mode, &unsignedp, &volatilep, true);
297 /* Extract alignment information from the innermost object and
298 possibly adjust bitpos and offset. */
299 if (TREE_CODE (exp) == FUNCTION_DECL)
301 /* Function addresses can encode extra information besides their
302 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
303 allows the low bit to be used as a virtual bit, we know
304 that the address itself must be at least 2-byte aligned. */
305 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
306 align = 2 * BITS_PER_UNIT;
308 else if (TREE_CODE (exp) == LABEL_DECL)
310 else if (TREE_CODE (exp) == CONST_DECL)
312 /* The alignment of a CONST_DECL is determined by its initializer. */
313 exp = DECL_INITIAL (exp);
314 align = TYPE_ALIGN (TREE_TYPE (exp));
315 #ifdef CONSTANT_ALIGNMENT
316 if (CONSTANT_CLASS_P (exp))
317 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
318 #endif
319 known_alignment = true;
321 else if (DECL_P (exp))
323 align = DECL_ALIGN (exp);
324 known_alignment = true;
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 align = TYPE_ALIGN (TREE_TYPE (exp));
330 else if (TREE_CODE (exp) == INDIRECT_REF
331 || TREE_CODE (exp) == MEM_REF
332 || TREE_CODE (exp) == TARGET_MEM_REF)
334 tree addr = TREE_OPERAND (exp, 0);
335 unsigned ptr_align;
336 unsigned HOST_WIDE_INT ptr_bitpos;
338 if (TREE_CODE (addr) == BIT_AND_EXPR
339 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
341 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
342 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
343 align *= BITS_PER_UNIT;
344 addr = TREE_OPERAND (addr, 0);
347 known_alignment
348 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
349 align = MAX (ptr_align, align);
351 /* The alignment of the pointer operand in a TARGET_MEM_REF
352 has to take the variable offset parts into account. */
353 if (TREE_CODE (exp) == TARGET_MEM_REF)
355 if (TMR_INDEX (exp))
357 unsigned HOST_WIDE_INT step = 1;
358 if (TMR_STEP (exp))
359 step = TREE_INT_CST_LOW (TMR_STEP (exp));
360 align = MIN (align, (step & -step) * BITS_PER_UNIT);
362 if (TMR_INDEX2 (exp))
363 align = BITS_PER_UNIT;
364 known_alignment = false;
367 /* When EXP is an actual memory reference then we can use
368 TYPE_ALIGN of a pointer indirection to derive alignment.
369 Do so only if get_pointer_alignment_1 did not reveal absolute
370 alignment knowledge and if using that alignment would
371 improve the situation. */
372 if (!addr_p && !known_alignment
373 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
374 align = TYPE_ALIGN (TREE_TYPE (exp));
375 else
377 /* Else adjust bitpos accordingly. */
378 bitpos += ptr_bitpos;
379 if (TREE_CODE (exp) == MEM_REF
380 || TREE_CODE (exp) == TARGET_MEM_REF)
381 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
384 else if (TREE_CODE (exp) == STRING_CST)
386 /* STRING_CST are the only constant objects we allow to be not
387 wrapped inside a CONST_DECL. */
388 align = TYPE_ALIGN (TREE_TYPE (exp));
389 #ifdef CONSTANT_ALIGNMENT
390 if (CONSTANT_CLASS_P (exp))
391 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
392 #endif
393 known_alignment = true;
396 /* If there is a non-constant offset part extract the maximum
397 alignment that can prevail. */
398 inner = ~0U;
399 while (offset)
401 tree next_offset;
403 if (TREE_CODE (offset) == PLUS_EXPR)
405 next_offset = TREE_OPERAND (offset, 0);
406 offset = TREE_OPERAND (offset, 1);
408 else
409 next_offset = NULL;
410 if (host_integerp (offset, 1))
412 /* Any overflow in calculating offset_bits won't change
413 the alignment. */
414 unsigned offset_bits
415 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
417 if (offset_bits)
418 inner = MIN (inner, (offset_bits & -offset_bits));
420 else if (TREE_CODE (offset) == MULT_EXPR
421 && host_integerp (TREE_OPERAND (offset, 1), 1))
423 /* Any overflow in calculating offset_factor won't change
424 the alignment. */
425 unsigned offset_factor
426 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
427 * BITS_PER_UNIT);
429 if (offset_factor)
430 inner = MIN (inner, (offset_factor & -offset_factor));
432 else
434 inner = MIN (inner, BITS_PER_UNIT);
435 break;
437 offset = next_offset;
439 /* Alignment is innermost object alignment adjusted by the constant
440 and non-constant offset parts. */
441 align = MIN (align, inner);
443 *alignp = align;
444 *bitposp = bitpos & (*alignp - 1);
445 return known_alignment;
448 /* For a memory reference expression EXP compute values M and N such that M
449 divides (&EXP - N) and such that N < M. If these numbers can be determined,
450 store M in alignp and N in *BITPOSP and return true. Otherwise return false
451 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
453 bool
454 get_object_alignment_1 (tree exp, unsigned int *alignp,
455 unsigned HOST_WIDE_INT *bitposp)
457 return get_object_alignment_2 (exp, alignp, bitposp, false);
460 /* Return the alignment in bits of EXP, an object. */
462 unsigned int
463 get_object_alignment (tree exp)
465 unsigned HOST_WIDE_INT bitpos = 0;
466 unsigned int align;
468 get_object_alignment_1 (exp, &align, &bitpos);
470 /* align and bitpos now specify known low bits of the pointer.
471 ptr & (align - 1) == bitpos. */
473 if (bitpos != 0)
474 align = (bitpos & -bitpos);
475 return align;
478 /* For a pointer valued expression EXP compute values M and N such that M
479 divides (EXP - N) and such that N < M. If these numbers can be determined,
480 store M in alignp and N in *BITPOSP and return true. Return false if
481 the results are just a conservative approximation.
483 If EXP is not a pointer, false is returned too. */
485 bool
486 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
487 unsigned HOST_WIDE_INT *bitposp)
489 STRIP_NOPS (exp);
491 if (TREE_CODE (exp) == ADDR_EXPR)
492 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
493 alignp, bitposp, true);
494 else if (TREE_CODE (exp) == SSA_NAME
495 && POINTER_TYPE_P (TREE_TYPE (exp)))
497 unsigned int ptr_align, ptr_misalign;
498 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
500 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
502 *bitposp = ptr_misalign * BITS_PER_UNIT;
503 *alignp = ptr_align * BITS_PER_UNIT;
504 /* We cannot really tell whether this result is an approximation. */
505 return true;
507 else
509 *bitposp = 0;
510 *alignp = BITS_PER_UNIT;
511 return false;
514 else if (TREE_CODE (exp) == INTEGER_CST)
516 *alignp = BIGGEST_ALIGNMENT;
517 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
518 & (BIGGEST_ALIGNMENT - 1));
519 return true;
522 *bitposp = 0;
523 *alignp = BITS_PER_UNIT;
524 return false;
527 /* Return the alignment in bits of EXP, a pointer valued expression.
528 The alignment returned is, by default, the alignment of the thing that
529 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
531 Otherwise, look at the expression to see if we can do better, i.e., if the
532 expression is actually pointing at an object whose alignment is tighter. */
534 unsigned int
535 get_pointer_alignment (tree exp)
537 unsigned HOST_WIDE_INT bitpos = 0;
538 unsigned int align;
540 get_pointer_alignment_1 (exp, &align, &bitpos);
542 /* align and bitpos now specify known low bits of the pointer.
543 ptr & (align - 1) == bitpos. */
545 if (bitpos != 0)
546 align = (bitpos & -bitpos);
548 return align;
551 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
552 way, because it could contain a zero byte in the middle.
553 TREE_STRING_LENGTH is the size of the character array, not the string.
555 ONLY_VALUE should be nonzero if the result is not going to be emitted
556 into the instruction stream and zero if it is going to be expanded.
557 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
558 is returned, otherwise NULL, since
559 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
560 evaluate the side-effects.
562 The value returned is of type `ssizetype'.
564 Unfortunately, string_constant can't access the values of const char
565 arrays with initializers, so neither can we do so here. */
567 tree
568 c_strlen (tree src, int only_value)
570 tree offset_node;
571 HOST_WIDE_INT offset;
572 int max;
573 const char *ptr;
574 location_t loc;
576 STRIP_NOPS (src);
577 if (TREE_CODE (src) == COND_EXPR
578 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
580 tree len1, len2;
582 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
583 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
584 if (tree_int_cst_equal (len1, len2))
585 return len1;
588 if (TREE_CODE (src) == COMPOUND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
590 return c_strlen (TREE_OPERAND (src, 1), only_value);
592 loc = EXPR_LOC_OR_HERE (src);
594 src = string_constant (src, &offset_node);
595 if (src == 0)
596 return NULL_TREE;
598 max = TREE_STRING_LENGTH (src) - 1;
599 ptr = TREE_STRING_POINTER (src);
601 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
603 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
604 compute the offset to the following null if we don't know where to
605 start searching for it. */
606 int i;
608 for (i = 0; i < max; i++)
609 if (ptr[i] == 0)
610 return NULL_TREE;
612 /* We don't know the starting offset, but we do know that the string
613 has no internal zero bytes. We can assume that the offset falls
614 within the bounds of the string; otherwise, the programmer deserves
615 what he gets. Subtract the offset from the length of the string,
616 and return that. This would perhaps not be valid if we were dealing
617 with named arrays in addition to literal string constants. */
619 return size_diffop_loc (loc, size_int (max), offset_node);
622 /* We have a known offset into the string. Start searching there for
623 a null character if we can represent it as a single HOST_WIDE_INT. */
624 if (offset_node == 0)
625 offset = 0;
626 else if (! host_integerp (offset_node, 0))
627 offset = -1;
628 else
629 offset = tree_low_cst (offset_node, 0);
631 /* If the offset is known to be out of bounds, warn, and call strlen at
632 runtime. */
633 if (offset < 0 || offset > max)
635 /* Suppress multiple warnings for propagated constant strings. */
636 if (! TREE_NO_WARNING (src))
638 warning_at (loc, 0, "offset outside bounds of constant string");
639 TREE_NO_WARNING (src) = 1;
641 return NULL_TREE;
644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
650 return ssize_int (strlen (ptr + offset));
653 /* Return a char pointer for a C string if it is a string constant
654 or sum of string constant and integer constant. */
656 static const char *
657 c_getstr (tree src)
659 tree offset_node;
661 src = string_constant (src, &offset_node);
662 if (src == 0)
663 return 0;
665 if (offset_node == 0)
666 return TREE_STRING_POINTER (src);
667 else if (!host_integerp (offset_node, 1)
668 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
669 return 0;
671 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
674 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
675 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
677 static rtx
678 c_readstr (const char *str, enum machine_mode mode)
680 HOST_WIDE_INT c[2];
681 HOST_WIDE_INT ch;
682 unsigned int i, j;
684 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
686 c[0] = 0;
687 c[1] = 0;
688 ch = 1;
689 for (i = 0; i < GET_MODE_SIZE (mode); i++)
691 j = i;
692 if (WORDS_BIG_ENDIAN)
693 j = GET_MODE_SIZE (mode) - i - 1;
694 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
695 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
696 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
697 j *= BITS_PER_UNIT;
698 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
700 if (ch)
701 ch = (unsigned char) str[i];
702 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
704 return immed_double_const (c[0], c[1], mode);
707 /* Cast a target constant CST to target CHAR and if that value fits into
708 host char type, return zero and put that value into variable pointed to by
709 P. */
711 static int
712 target_char_cast (tree cst, char *p)
714 unsigned HOST_WIDE_INT val, hostval;
716 if (TREE_CODE (cst) != INTEGER_CST
717 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
718 return 1;
720 val = TREE_INT_CST_LOW (cst);
721 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
722 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
724 hostval = val;
725 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
726 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
728 if (val != hostval)
729 return 1;
731 *p = hostval;
732 return 0;
735 /* Similar to save_expr, but assumes that arbitrary code is not executed
736 in between the multiple evaluations. In particular, we assume that a
737 non-addressable local variable will not be modified. */
739 static tree
740 builtin_save_expr (tree exp)
742 if (TREE_CODE (exp) == SSA_NAME
743 || (TREE_ADDRESSABLE (exp) == 0
744 && (TREE_CODE (exp) == PARM_DECL
745 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
746 return exp;
748 return save_expr (exp);
751 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
752 times to get the address of either a higher stack frame, or a return
753 address located within it (depending on FNDECL_CODE). */
755 static rtx
756 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
758 int i;
760 #ifdef INITIAL_FRAME_ADDRESS_RTX
761 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
762 #else
763 rtx tem;
765 /* For a zero count with __builtin_return_address, we don't care what
766 frame address we return, because target-specific definitions will
767 override us. Therefore frame pointer elimination is OK, and using
768 the soft frame pointer is OK.
770 For a nonzero count, or a zero count with __builtin_frame_address,
771 we require a stable offset from the current frame pointer to the
772 previous one, so we must use the hard frame pointer, and
773 we must disable frame pointer elimination. */
774 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
775 tem = frame_pointer_rtx;
776 else
778 tem = hard_frame_pointer_rtx;
780 /* Tell reload not to eliminate the frame pointer. */
781 crtl->accesses_prior_frames = 1;
783 #endif
785 /* Some machines need special handling before we can access
786 arbitrary frames. For example, on the SPARC, we must first flush
787 all register windows to the stack. */
788 #ifdef SETUP_FRAME_ADDRESSES
789 if (count > 0)
790 SETUP_FRAME_ADDRESSES ();
791 #endif
793 /* On the SPARC, the return address is not in the frame, it is in a
794 register. There is no way to access it off of the current frame
795 pointer, but it can be accessed off the previous frame pointer by
796 reading the value from the register window save area. */
797 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
798 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
799 count--;
800 #endif
802 /* Scan back COUNT frames to the specified frame. */
803 for (i = 0; i < count; i++)
805 /* Assume the dynamic chain pointer is in the word that the
806 frame address points to, unless otherwise specified. */
807 #ifdef DYNAMIC_CHAIN_ADDRESS
808 tem = DYNAMIC_CHAIN_ADDRESS (tem);
809 #endif
810 tem = memory_address (Pmode, tem);
811 tem = gen_frame_mem (Pmode, tem);
812 tem = copy_to_reg (tem);
815 /* For __builtin_frame_address, return what we've got. But, on
816 the SPARC for example, we may have to add a bias. */
817 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
818 #ifdef FRAME_ADDR_RTX
819 return FRAME_ADDR_RTX (tem);
820 #else
821 return tem;
822 #endif
824 /* For __builtin_return_address, get the return address from that frame. */
825 #ifdef RETURN_ADDR_RTX
826 tem = RETURN_ADDR_RTX (count, tem);
827 #else
828 tem = memory_address (Pmode,
829 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
830 tem = gen_frame_mem (Pmode, tem);
831 #endif
832 return tem;
835 /* Alias set used for setjmp buffer. */
836 static alias_set_type setjmp_alias_set = -1;
838 /* Construct the leading half of a __builtin_setjmp call. Control will
839 return to RECEIVER_LABEL. This is also called directly by the SJLJ
840 exception handling code. */
842 void
843 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
845 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
846 rtx stack_save;
847 rtx mem;
849 if (setjmp_alias_set == -1)
850 setjmp_alias_set = new_alias_set ();
852 buf_addr = convert_memory_address (Pmode, buf_addr);
854 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
856 /* We store the frame pointer and the address of receiver_label in
857 the buffer and use the rest of it for the stack save area, which
858 is machine-dependent. */
860 mem = gen_rtx_MEM (Pmode, buf_addr);
861 set_mem_alias_set (mem, setjmp_alias_set);
862 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
864 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
865 GET_MODE_SIZE (Pmode))),
866 set_mem_alias_set (mem, setjmp_alias_set);
868 emit_move_insn (validize_mem (mem),
869 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
871 stack_save = gen_rtx_MEM (sa_mode,
872 plus_constant (Pmode, buf_addr,
873 2 * GET_MODE_SIZE (Pmode)));
874 set_mem_alias_set (stack_save, setjmp_alias_set);
875 emit_stack_save (SAVE_NONLOCAL, &stack_save);
877 /* If there is further processing to do, do it. */
878 #ifdef HAVE_builtin_setjmp_setup
879 if (HAVE_builtin_setjmp_setup)
880 emit_insn (gen_builtin_setjmp_setup (buf_addr));
881 #endif
883 /* We have a nonlocal label. */
884 cfun->has_nonlocal_label = 1;
887 /* Construct the trailing part of a __builtin_setjmp call. This is
888 also called directly by the SJLJ exception handling code.
889 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
891 void
892 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
894 rtx chain;
896 /* Mark the FP as used when we get here, so we have to make sure it's
897 marked as used by this function. */
898 emit_use (hard_frame_pointer_rtx);
900 /* Mark the static chain as clobbered here so life information
901 doesn't get messed up for it. */
902 chain = targetm.calls.static_chain (current_function_decl, true);
903 if (chain && REG_P (chain))
904 emit_clobber (chain);
906 /* Now put in the code to restore the frame pointer, and argument
907 pointer, if needed. */
908 #ifdef HAVE_nonlocal_goto
909 if (! HAVE_nonlocal_goto)
910 #endif
911 /* First adjust our frame pointer to its actual value. It was
912 previously set to the start of the virtual area corresponding to
913 the stacked variables when we branched here and now needs to be
914 adjusted to the actual hardware fp value.
916 Assignments to virtual registers are converted by
917 instantiate_virtual_regs into the corresponding assignment
918 to the underlying register (fp in this case) that makes
919 the original assignment true.
920 So the following insn will actually be decrementing fp by
921 STARTING_FRAME_OFFSET. */
922 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
924 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
925 if (fixed_regs[ARG_POINTER_REGNUM])
927 #ifdef ELIMINABLE_REGS
928 /* If the argument pointer can be eliminated in favor of the
929 frame pointer, we don't need to restore it. We assume here
930 that if such an elimination is present, it can always be used.
931 This is the case on all known machines; if we don't make this
932 assumption, we do unnecessary saving on many machines. */
933 size_t i;
934 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
936 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
937 if (elim_regs[i].from == ARG_POINTER_REGNUM
938 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
939 break;
941 if (i == ARRAY_SIZE (elim_regs))
942 #endif
944 /* Now restore our arg pointer from the address at which it
945 was saved in our stack frame. */
946 emit_move_insn (crtl->args.internal_arg_pointer,
947 copy_to_reg (get_arg_pointer_save_area ()));
950 #endif
952 #ifdef HAVE_builtin_setjmp_receiver
953 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
954 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
955 else
956 #endif
957 #ifdef HAVE_nonlocal_goto_receiver
958 if (HAVE_nonlocal_goto_receiver)
959 emit_insn (gen_nonlocal_goto_receiver ());
960 else
961 #endif
962 { /* Nothing */ }
964 /* We must not allow the code we just generated to be reordered by
965 scheduling. Specifically, the update of the frame pointer must
966 happen immediately, not later. */
967 emit_insn (gen_blockage ());
970 /* __builtin_longjmp is passed a pointer to an array of five words (not
971 all will be used on all machines). It operates similarly to the C
972 library function of the same name, but is more efficient. Much of
973 the code below is copied from the handling of non-local gotos. */
975 static void
976 expand_builtin_longjmp (rtx buf_addr, rtx value)
978 rtx fp, lab, stack, insn, last;
979 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
981 /* DRAP is needed for stack realign if longjmp is expanded to current
982 function */
983 if (SUPPORTS_STACK_ALIGNMENT)
984 crtl->need_drap = true;
986 if (setjmp_alias_set == -1)
987 setjmp_alias_set = new_alias_set ();
989 buf_addr = convert_memory_address (Pmode, buf_addr);
991 buf_addr = force_reg (Pmode, buf_addr);
993 /* We require that the user must pass a second argument of 1, because
994 that is what builtin_setjmp will return. */
995 gcc_assert (value == const1_rtx);
997 last = get_last_insn ();
998 #ifdef HAVE_builtin_longjmp
999 if (HAVE_builtin_longjmp)
1000 emit_insn (gen_builtin_longjmp (buf_addr));
1001 else
1002 #endif
1004 fp = gen_rtx_MEM (Pmode, buf_addr);
1005 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1006 GET_MODE_SIZE (Pmode)));
1008 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1009 2 * GET_MODE_SIZE (Pmode)));
1010 set_mem_alias_set (fp, setjmp_alias_set);
1011 set_mem_alias_set (lab, setjmp_alias_set);
1012 set_mem_alias_set (stack, setjmp_alias_set);
1014 /* Pick up FP, label, and SP from the block and jump. This code is
1015 from expand_goto in stmt.c; see there for detailed comments. */
1016 #ifdef HAVE_nonlocal_goto
1017 if (HAVE_nonlocal_goto)
1018 /* We have to pass a value to the nonlocal_goto pattern that will
1019 get copied into the static_chain pointer, but it does not matter
1020 what that value is, because builtin_setjmp does not use it. */
1021 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1022 else
1023 #endif
1025 lab = copy_to_reg (lab);
1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1030 emit_move_insn (hard_frame_pointer_rtx, fp);
1031 emit_stack_restore (SAVE_NONLOCAL, stack);
1033 emit_use (hard_frame_pointer_rtx);
1034 emit_use (stack_pointer_rtx);
1035 emit_indirect_jump (lab);
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function. However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only. */
1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1046 gcc_assert (insn != last);
1048 if (JUMP_P (insn))
1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1051 break;
1053 else if (CALL_P (insn))
1054 break;
1058 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1059 and the address of the save area. */
1061 static rtx
1062 expand_builtin_nonlocal_goto (tree exp)
1064 tree t_label, t_save_area;
1065 rtx r_label, r_save_area, r_fp, r_sp, insn;
1067 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1068 return NULL_RTX;
1070 t_label = CALL_EXPR_ARG (exp, 0);
1071 t_save_area = CALL_EXPR_ARG (exp, 1);
1073 r_label = expand_normal (t_label);
1074 r_label = convert_memory_address (Pmode, r_label);
1075 r_save_area = expand_normal (t_save_area);
1076 r_save_area = convert_memory_address (Pmode, r_save_area);
1077 /* Copy the address of the save location to a register just in case it was
1078 based on the frame pointer. */
1079 r_save_area = copy_to_reg (r_save_area);
1080 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1081 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1082 plus_constant (Pmode, r_save_area,
1083 GET_MODE_SIZE (Pmode)));
1085 crtl->has_nonlocal_goto = 1;
1087 #ifdef HAVE_nonlocal_goto
1088 /* ??? We no longer need to pass the static chain value, afaik. */
1089 if (HAVE_nonlocal_goto)
1090 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1091 else
1092 #endif
1094 r_label = copy_to_reg (r_label);
1096 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1097 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1099 /* Restore frame pointer for containing function. */
1100 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1101 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1103 /* USE of hard_frame_pointer_rtx added for consistency;
1104 not clear if really needed. */
1105 emit_use (hard_frame_pointer_rtx);
1106 emit_use (stack_pointer_rtx);
1108 /* If the architecture is using a GP register, we must
1109 conservatively assume that the target function makes use of it.
1110 The prologue of functions with nonlocal gotos must therefore
1111 initialize the GP register to the appropriate value, and we
1112 must then make sure that this value is live at the point
1113 of the jump. (Note that this doesn't necessarily apply
1114 to targets with a nonlocal_goto pattern; they are free
1115 to implement it in their own way. Note also that this is
1116 a no-op if the GP register is a global invariant.) */
1117 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1118 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1119 emit_use (pic_offset_table_rtx);
1121 emit_indirect_jump (r_label);
1124 /* Search backwards to the jump insn and mark it as a
1125 non-local goto. */
1126 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1128 if (JUMP_P (insn))
1130 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1131 break;
1133 else if (CALL_P (insn))
1134 break;
1137 return const0_rtx;
1140 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1141 (not all will be used on all machines) that was passed to __builtin_setjmp.
1142 It updates the stack pointer in that block to correspond to the current
1143 stack pointer. */
1145 static void
1146 expand_builtin_update_setjmp_buf (rtx buf_addr)
1148 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1149 rtx stack_save
1150 = gen_rtx_MEM (sa_mode,
1151 memory_address
1152 (sa_mode,
1153 plus_constant (Pmode, buf_addr,
1154 2 * GET_MODE_SIZE (Pmode))));
1156 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1159 /* Expand a call to __builtin_prefetch. For a target that does not support
1160 data prefetch, evaluate the memory address argument in case it has side
1161 effects. */
1163 static void
1164 expand_builtin_prefetch (tree exp)
1166 tree arg0, arg1, arg2;
1167 int nargs;
1168 rtx op0, op1, op2;
1170 if (!validate_arglist (exp, POINTER_TYPE, 0))
1171 return;
1173 arg0 = CALL_EXPR_ARG (exp, 0);
1175 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1176 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1177 locality). */
1178 nargs = call_expr_nargs (exp);
1179 if (nargs > 1)
1180 arg1 = CALL_EXPR_ARG (exp, 1);
1181 else
1182 arg1 = integer_zero_node;
1183 if (nargs > 2)
1184 arg2 = CALL_EXPR_ARG (exp, 2);
1185 else
1186 arg2 = integer_three_node;
1188 /* Argument 0 is an address. */
1189 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1191 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1192 if (TREE_CODE (arg1) != INTEGER_CST)
1194 error ("second argument to %<__builtin_prefetch%> must be a constant");
1195 arg1 = integer_zero_node;
1197 op1 = expand_normal (arg1);
1198 /* Argument 1 must be either zero or one. */
1199 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1201 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1202 " using zero");
1203 op1 = const0_rtx;
1206 /* Argument 2 (locality) must be a compile-time constant int. */
1207 if (TREE_CODE (arg2) != INTEGER_CST)
1209 error ("third argument to %<__builtin_prefetch%> must be a constant");
1210 arg2 = integer_zero_node;
1212 op2 = expand_normal (arg2);
1213 /* Argument 2 must be 0, 1, 2, or 3. */
1214 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1216 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1217 op2 = const0_rtx;
1220 #ifdef HAVE_prefetch
1221 if (HAVE_prefetch)
1223 struct expand_operand ops[3];
1225 create_address_operand (&ops[0], op0);
1226 create_integer_operand (&ops[1], INTVAL (op1));
1227 create_integer_operand (&ops[2], INTVAL (op2));
1228 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1229 return;
1231 #endif
1233 /* Don't do anything with direct references to volatile memory, but
1234 generate code to handle other side effects. */
1235 if (!MEM_P (op0) && side_effects_p (op0))
1236 emit_insn (op0);
1239 /* Get a MEM rtx for expression EXP which is the address of an operand
1240 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1241 the maximum length of the block of memory that might be accessed or
1242 NULL if unknown. */
1244 static rtx
1245 get_memory_rtx (tree exp, tree len)
1247 tree orig_exp = exp;
1248 rtx addr, mem;
1250 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1251 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1252 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1253 exp = TREE_OPERAND (exp, 0);
1255 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1256 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1258 /* Get an expression we can use to find the attributes to assign to MEM.
1259 First remove any nops. */
1260 while (CONVERT_EXPR_P (exp)
1261 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1262 exp = TREE_OPERAND (exp, 0);
1264 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1265 (as builtin stringops may alias with anything). */
1266 exp = fold_build2 (MEM_REF,
1267 build_array_type (char_type_node,
1268 build_range_type (sizetype,
1269 size_one_node, len)),
1270 exp, build_int_cst (ptr_type_node, 0));
1272 /* If the MEM_REF has no acceptable address, try to get the base object
1273 from the original address we got, and build an all-aliasing
1274 unknown-sized access to that one. */
1275 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1276 set_mem_attributes (mem, exp, 0);
1277 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1278 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1279 0))))
1281 exp = build_fold_addr_expr (exp);
1282 exp = fold_build2 (MEM_REF,
1283 build_array_type (char_type_node,
1284 build_range_type (sizetype,
1285 size_zero_node,
1286 NULL)),
1287 exp, build_int_cst (ptr_type_node, 0));
1288 set_mem_attributes (mem, exp, 0);
1290 set_mem_alias_set (mem, 0);
1291 return mem;
1294 /* Built-in functions to perform an untyped call and return. */
1296 #define apply_args_mode \
1297 (this_target_builtins->x_apply_args_mode)
1298 #define apply_result_mode \
1299 (this_target_builtins->x_apply_result_mode)
1301 /* Return the size required for the block returned by __builtin_apply_args,
1302 and initialize apply_args_mode. */
1304 static int
1305 apply_args_size (void)
1307 static int size = -1;
1308 int align;
1309 unsigned int regno;
1310 enum machine_mode mode;
1312 /* The values computed by this function never change. */
1313 if (size < 0)
1315 /* The first value is the incoming arg-pointer. */
1316 size = GET_MODE_SIZE (Pmode);
1318 /* The second value is the structure value address unless this is
1319 passed as an "invisible" first argument. */
1320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1321 size += GET_MODE_SIZE (Pmode);
1323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1324 if (FUNCTION_ARG_REGNO_P (regno))
1326 mode = targetm.calls.get_raw_arg_mode (regno);
1328 gcc_assert (mode != VOIDmode);
1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1331 if (size % align != 0)
1332 size = CEIL (size, align) * align;
1333 size += GET_MODE_SIZE (mode);
1334 apply_args_mode[regno] = mode;
1336 else
1338 apply_args_mode[regno] = VOIDmode;
1341 return size;
1344 /* Return the size required for the block returned by __builtin_apply,
1345 and initialize apply_result_mode. */
1347 static int
1348 apply_result_size (void)
1350 static int size = -1;
1351 int align, regno;
1352 enum machine_mode mode;
1354 /* The values computed by this function never change. */
1355 if (size < 0)
1357 size = 0;
1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1360 if (targetm.calls.function_value_regno_p (regno))
1362 mode = targetm.calls.get_raw_result_mode (regno);
1364 gcc_assert (mode != VOIDmode);
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 size += GET_MODE_SIZE (mode);
1370 apply_result_mode[regno] = mode;
1372 else
1373 apply_result_mode[regno] = VOIDmode;
1375 /* Allow targets that use untyped_call and untyped_return to override
1376 the size so that machine-specific information can be stored here. */
1377 #ifdef APPLY_RESULT_SIZE
1378 size = APPLY_RESULT_SIZE;
1379 #endif
1381 return size;
1384 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1385 /* Create a vector describing the result block RESULT. If SAVEP is true,
1386 the result block is used to save the values; otherwise it is used to
1387 restore the values. */
1389 static rtx
1390 result_vector (int savep, rtx result)
1392 int regno, size, align, nelts;
1393 enum machine_mode mode;
1394 rtx reg, mem;
1395 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1397 size = nelts = 0;
1398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1399 if ((mode = apply_result_mode[regno]) != VOIDmode)
1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1402 if (size % align != 0)
1403 size = CEIL (size, align) * align;
1404 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1405 mem = adjust_address (result, mode, size);
1406 savevec[nelts++] = (savep
1407 ? gen_rtx_SET (VOIDmode, mem, reg)
1408 : gen_rtx_SET (VOIDmode, reg, mem));
1409 size += GET_MODE_SIZE (mode);
1411 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1413 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1415 /* Save the state required to perform an untyped call with the same
1416 arguments as were passed to the current function. */
1418 static rtx
1419 expand_builtin_apply_args_1 (void)
1421 rtx registers, tem;
1422 int size, align, regno;
1423 enum machine_mode mode;
1424 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1426 /* Create a block where the arg-pointer, structure value address,
1427 and argument registers can be saved. */
1428 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1430 /* Walk past the arg-pointer and structure value address. */
1431 size = GET_MODE_SIZE (Pmode);
1432 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1433 size += GET_MODE_SIZE (Pmode);
1435 /* Save each register used in calling a function to the block. */
1436 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1437 if ((mode = apply_args_mode[regno]) != VOIDmode)
1439 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1440 if (size % align != 0)
1441 size = CEIL (size, align) * align;
1443 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1445 emit_move_insn (adjust_address (registers, mode, size), tem);
1446 size += GET_MODE_SIZE (mode);
1449 /* Save the arg pointer to the block. */
1450 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1451 #ifdef STACK_GROWS_DOWNWARD
1452 /* We need the pointer as the caller actually passed them to us, not
1453 as we might have pretended they were passed. Make sure it's a valid
1454 operand, as emit_move_insn isn't expected to handle a PLUS. */
1456 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1457 NULL_RTX);
1458 #endif
1459 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1461 size = GET_MODE_SIZE (Pmode);
1463 /* Save the structure value address unless this is passed as an
1464 "invisible" first argument. */
1465 if (struct_incoming_value)
1467 emit_move_insn (adjust_address (registers, Pmode, size),
1468 copy_to_reg (struct_incoming_value));
1469 size += GET_MODE_SIZE (Pmode);
1472 /* Return the address of the block. */
1473 return copy_addr_to_reg (XEXP (registers, 0));
1476 /* __builtin_apply_args returns block of memory allocated on
1477 the stack into which is stored the arg pointer, structure
1478 value address, static chain, and all the registers that might
1479 possibly be used in performing a function call. The code is
1480 moved to the start of the function so the incoming values are
1481 saved. */
1483 static rtx
1484 expand_builtin_apply_args (void)
1486 /* Don't do __builtin_apply_args more than once in a function.
1487 Save the result of the first call and reuse it. */
1488 if (apply_args_value != 0)
1489 return apply_args_value;
1491 /* When this function is called, it means that registers must be
1492 saved on entry to this function. So we migrate the
1493 call to the first insn of this function. */
1494 rtx temp;
1495 rtx seq;
1497 start_sequence ();
1498 temp = expand_builtin_apply_args_1 ();
1499 seq = get_insns ();
1500 end_sequence ();
1502 apply_args_value = temp;
1504 /* Put the insns after the NOTE that starts the function.
1505 If this is inside a start_sequence, make the outer-level insn
1506 chain current, so the code is placed at the start of the
1507 function. If internal_arg_pointer is a non-virtual pseudo,
1508 it needs to be placed after the function that initializes
1509 that pseudo. */
1510 push_topmost_sequence ();
1511 if (REG_P (crtl->args.internal_arg_pointer)
1512 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1513 emit_insn_before (seq, parm_birth_insn);
1514 else
1515 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1516 pop_topmost_sequence ();
1517 return temp;
1521 /* Perform an untyped call and save the state required to perform an
1522 untyped return of whatever value was returned by the given function. */
1524 static rtx
1525 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1527 int size, align, regno;
1528 enum machine_mode mode;
1529 rtx incoming_args, result, reg, dest, src, call_insn;
1530 rtx old_stack_level = 0;
1531 rtx call_fusage = 0;
1532 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1534 arguments = convert_memory_address (Pmode, arguments);
1536 /* Create a block where the return registers can be saved. */
1537 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1539 /* Fetch the arg pointer from the ARGUMENTS block. */
1540 incoming_args = gen_reg_rtx (Pmode);
1541 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1542 #ifndef STACK_GROWS_DOWNWARD
1543 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1544 incoming_args, 0, OPTAB_LIB_WIDEN);
1545 #endif
1547 /* Push a new argument block and copy the arguments. Do not allow
1548 the (potential) memcpy call below to interfere with our stack
1549 manipulations. */
1550 do_pending_stack_adjust ();
1551 NO_DEFER_POP;
1553 /* Save the stack with nonlocal if available. */
1554 #ifdef HAVE_save_stack_nonlocal
1555 if (HAVE_save_stack_nonlocal)
1556 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1557 else
1558 #endif
1559 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1561 /* Allocate a block of memory onto the stack and copy the memory
1562 arguments to the outgoing arguments address. We can pass TRUE
1563 as the 4th argument because we just saved the stack pointer
1564 and will restore it right after the call. */
1565 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1567 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1568 may have already set current_function_calls_alloca to true.
1569 current_function_calls_alloca won't be set if argsize is zero,
1570 so we have to guarantee need_drap is true here. */
1571 if (SUPPORTS_STACK_ALIGNMENT)
1572 crtl->need_drap = true;
1574 dest = virtual_outgoing_args_rtx;
1575 #ifndef STACK_GROWS_DOWNWARD
1576 if (CONST_INT_P (argsize))
1577 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1578 else
1579 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1580 #endif
1581 dest = gen_rtx_MEM (BLKmode, dest);
1582 set_mem_align (dest, PARM_BOUNDARY);
1583 src = gen_rtx_MEM (BLKmode, incoming_args);
1584 set_mem_align (src, PARM_BOUNDARY);
1585 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1587 /* Refer to the argument block. */
1588 apply_args_size ();
1589 arguments = gen_rtx_MEM (BLKmode, arguments);
1590 set_mem_align (arguments, PARM_BOUNDARY);
1592 /* Walk past the arg-pointer and structure value address. */
1593 size = GET_MODE_SIZE (Pmode);
1594 if (struct_value)
1595 size += GET_MODE_SIZE (Pmode);
1597 /* Restore each of the registers previously saved. Make USE insns
1598 for each of these registers for use in making the call. */
1599 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1600 if ((mode = apply_args_mode[regno]) != VOIDmode)
1602 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1603 if (size % align != 0)
1604 size = CEIL (size, align) * align;
1605 reg = gen_rtx_REG (mode, regno);
1606 emit_move_insn (reg, adjust_address (arguments, mode, size));
1607 use_reg (&call_fusage, reg);
1608 size += GET_MODE_SIZE (mode);
1611 /* Restore the structure value address unless this is passed as an
1612 "invisible" first argument. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1616 rtx value = gen_reg_rtx (Pmode);
1617 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1618 emit_move_insn (struct_value, value);
1619 if (REG_P (struct_value))
1620 use_reg (&call_fusage, struct_value);
1621 size += GET_MODE_SIZE (Pmode);
1624 /* All arguments and registers used for the call are set up by now! */
1625 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1627 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1628 and we don't want to load it into a register as an optimization,
1629 because prepare_call_address already did it if it should be done. */
1630 if (GET_CODE (function) != SYMBOL_REF)
1631 function = memory_address (FUNCTION_MODE, function);
1633 /* Generate the actual call instruction and save the return value. */
1634 #ifdef HAVE_untyped_call
1635 if (HAVE_untyped_call)
1636 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1637 result, result_vector (1, result)));
1638 else
1639 #endif
1640 #ifdef HAVE_call_value
1641 if (HAVE_call_value)
1643 rtx valreg = 0;
1645 /* Locate the unique return register. It is not possible to
1646 express a call that sets more than one return register using
1647 call_value; use untyped_call for that. In fact, untyped_call
1648 only needs to save the return registers in the given block. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_result_mode[regno]) != VOIDmode)
1652 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1654 valreg = gen_rtx_REG (mode, regno);
1657 emit_call_insn (GEN_CALL_VALUE (valreg,
1658 gen_rtx_MEM (FUNCTION_MODE, function),
1659 const0_rtx, NULL_RTX, const0_rtx));
1661 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1663 else
1664 #endif
1665 gcc_unreachable ();
1667 /* Find the CALL insn we just emitted, and attach the register usage
1668 information. */
1669 call_insn = last_call_insn ();
1670 add_function_usage_to (call_insn, call_fusage);
1672 /* Restore the stack. */
1673 #ifdef HAVE_save_stack_nonlocal
1674 if (HAVE_save_stack_nonlocal)
1675 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1676 else
1677 #endif
1678 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1679 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1681 OK_DEFER_POP;
1683 /* Return the address of the result block. */
1684 result = copy_addr_to_reg (XEXP (result, 0));
1685 return convert_memory_address (ptr_mode, result);
1688 /* Perform an untyped return. */
1690 static void
1691 expand_builtin_return (rtx result)
1693 int size, align, regno;
1694 enum machine_mode mode;
1695 rtx reg;
1696 rtx call_fusage = 0;
1698 result = convert_memory_address (Pmode, result);
1700 apply_result_size ();
1701 result = gen_rtx_MEM (BLKmode, result);
1703 #ifdef HAVE_untyped_return
1704 if (HAVE_untyped_return)
1706 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1707 emit_barrier ();
1708 return;
1710 #endif
1712 /* Restore the return value and note that each value is used. */
1713 size = 0;
1714 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1715 if ((mode = apply_result_mode[regno]) != VOIDmode)
1717 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1718 if (size % align != 0)
1719 size = CEIL (size, align) * align;
1720 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1721 emit_move_insn (reg, adjust_address (result, mode, size));
1723 push_to_sequence (call_fusage);
1724 emit_use (reg);
1725 call_fusage = get_insns ();
1726 end_sequence ();
1727 size += GET_MODE_SIZE (mode);
1730 /* Put the USE insns before the return. */
1731 emit_insn (call_fusage);
1733 /* Return whatever values was restored by jumping directly to the end
1734 of the function. */
1735 expand_naked_return ();
1738 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1740 static enum type_class
1741 type_to_class (tree type)
1743 switch (TREE_CODE (type))
1745 case VOID_TYPE: return void_type_class;
1746 case INTEGER_TYPE: return integer_type_class;
1747 case ENUMERAL_TYPE: return enumeral_type_class;
1748 case BOOLEAN_TYPE: return boolean_type_class;
1749 case POINTER_TYPE: return pointer_type_class;
1750 case REFERENCE_TYPE: return reference_type_class;
1751 case OFFSET_TYPE: return offset_type_class;
1752 case REAL_TYPE: return real_type_class;
1753 case COMPLEX_TYPE: return complex_type_class;
1754 case FUNCTION_TYPE: return function_type_class;
1755 case METHOD_TYPE: return method_type_class;
1756 case RECORD_TYPE: return record_type_class;
1757 case UNION_TYPE:
1758 case QUAL_UNION_TYPE: return union_type_class;
1759 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1760 ? string_type_class : array_type_class);
1761 case LANG_TYPE: return lang_type_class;
1762 default: return no_type_class;
1766 /* Expand a call EXP to __builtin_classify_type. */
1768 static rtx
1769 expand_builtin_classify_type (tree exp)
1771 if (call_expr_nargs (exp))
1772 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1773 return GEN_INT (no_type_class);
1776 /* This helper macro, meant to be used in mathfn_built_in below,
1777 determines which among a set of three builtin math functions is
1778 appropriate for a given type mode. The `F' and `L' cases are
1779 automatically generated from the `double' case. */
1780 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1781 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1782 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1783 fcodel = BUILT_IN_MATHFN##L ; break;
1784 /* Similar to above, but appends _R after any F/L suffix. */
1785 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1786 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1787 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1788 fcodel = BUILT_IN_MATHFN##L_R ; break;
1790 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1791 if available. If IMPLICIT is true use the implicit builtin declaration,
1792 otherwise use the explicit declaration. If we can't do the conversion,
1793 return zero. */
1795 static tree
1796 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1798 enum built_in_function fcode, fcodef, fcodel, fcode2;
1800 switch (fn)
1802 CASE_MATHFN (BUILT_IN_ACOS)
1803 CASE_MATHFN (BUILT_IN_ACOSH)
1804 CASE_MATHFN (BUILT_IN_ASIN)
1805 CASE_MATHFN (BUILT_IN_ASINH)
1806 CASE_MATHFN (BUILT_IN_ATAN)
1807 CASE_MATHFN (BUILT_IN_ATAN2)
1808 CASE_MATHFN (BUILT_IN_ATANH)
1809 CASE_MATHFN (BUILT_IN_CBRT)
1810 CASE_MATHFN (BUILT_IN_CEIL)
1811 CASE_MATHFN (BUILT_IN_CEXPI)
1812 CASE_MATHFN (BUILT_IN_COPYSIGN)
1813 CASE_MATHFN (BUILT_IN_COS)
1814 CASE_MATHFN (BUILT_IN_COSH)
1815 CASE_MATHFN (BUILT_IN_DREM)
1816 CASE_MATHFN (BUILT_IN_ERF)
1817 CASE_MATHFN (BUILT_IN_ERFC)
1818 CASE_MATHFN (BUILT_IN_EXP)
1819 CASE_MATHFN (BUILT_IN_EXP10)
1820 CASE_MATHFN (BUILT_IN_EXP2)
1821 CASE_MATHFN (BUILT_IN_EXPM1)
1822 CASE_MATHFN (BUILT_IN_FABS)
1823 CASE_MATHFN (BUILT_IN_FDIM)
1824 CASE_MATHFN (BUILT_IN_FLOOR)
1825 CASE_MATHFN (BUILT_IN_FMA)
1826 CASE_MATHFN (BUILT_IN_FMAX)
1827 CASE_MATHFN (BUILT_IN_FMIN)
1828 CASE_MATHFN (BUILT_IN_FMOD)
1829 CASE_MATHFN (BUILT_IN_FREXP)
1830 CASE_MATHFN (BUILT_IN_GAMMA)
1831 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1832 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1833 CASE_MATHFN (BUILT_IN_HYPOT)
1834 CASE_MATHFN (BUILT_IN_ILOGB)
1835 CASE_MATHFN (BUILT_IN_ICEIL)
1836 CASE_MATHFN (BUILT_IN_IFLOOR)
1837 CASE_MATHFN (BUILT_IN_INF)
1838 CASE_MATHFN (BUILT_IN_IRINT)
1839 CASE_MATHFN (BUILT_IN_IROUND)
1840 CASE_MATHFN (BUILT_IN_ISINF)
1841 CASE_MATHFN (BUILT_IN_J0)
1842 CASE_MATHFN (BUILT_IN_J1)
1843 CASE_MATHFN (BUILT_IN_JN)
1844 CASE_MATHFN (BUILT_IN_LCEIL)
1845 CASE_MATHFN (BUILT_IN_LDEXP)
1846 CASE_MATHFN (BUILT_IN_LFLOOR)
1847 CASE_MATHFN (BUILT_IN_LGAMMA)
1848 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1849 CASE_MATHFN (BUILT_IN_LLCEIL)
1850 CASE_MATHFN (BUILT_IN_LLFLOOR)
1851 CASE_MATHFN (BUILT_IN_LLRINT)
1852 CASE_MATHFN (BUILT_IN_LLROUND)
1853 CASE_MATHFN (BUILT_IN_LOG)
1854 CASE_MATHFN (BUILT_IN_LOG10)
1855 CASE_MATHFN (BUILT_IN_LOG1P)
1856 CASE_MATHFN (BUILT_IN_LOG2)
1857 CASE_MATHFN (BUILT_IN_LOGB)
1858 CASE_MATHFN (BUILT_IN_LRINT)
1859 CASE_MATHFN (BUILT_IN_LROUND)
1860 CASE_MATHFN (BUILT_IN_MODF)
1861 CASE_MATHFN (BUILT_IN_NAN)
1862 CASE_MATHFN (BUILT_IN_NANS)
1863 CASE_MATHFN (BUILT_IN_NEARBYINT)
1864 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1865 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1866 CASE_MATHFN (BUILT_IN_POW)
1867 CASE_MATHFN (BUILT_IN_POWI)
1868 CASE_MATHFN (BUILT_IN_POW10)
1869 CASE_MATHFN (BUILT_IN_REMAINDER)
1870 CASE_MATHFN (BUILT_IN_REMQUO)
1871 CASE_MATHFN (BUILT_IN_RINT)
1872 CASE_MATHFN (BUILT_IN_ROUND)
1873 CASE_MATHFN (BUILT_IN_SCALB)
1874 CASE_MATHFN (BUILT_IN_SCALBLN)
1875 CASE_MATHFN (BUILT_IN_SCALBN)
1876 CASE_MATHFN (BUILT_IN_SIGNBIT)
1877 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1878 CASE_MATHFN (BUILT_IN_SIN)
1879 CASE_MATHFN (BUILT_IN_SINCOS)
1880 CASE_MATHFN (BUILT_IN_SINH)
1881 CASE_MATHFN (BUILT_IN_SQRT)
1882 CASE_MATHFN (BUILT_IN_TAN)
1883 CASE_MATHFN (BUILT_IN_TANH)
1884 CASE_MATHFN (BUILT_IN_TGAMMA)
1885 CASE_MATHFN (BUILT_IN_TRUNC)
1886 CASE_MATHFN (BUILT_IN_Y0)
1887 CASE_MATHFN (BUILT_IN_Y1)
1888 CASE_MATHFN (BUILT_IN_YN)
1890 default:
1891 return NULL_TREE;
1894 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1895 fcode2 = fcode;
1896 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1897 fcode2 = fcodef;
1898 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1899 fcode2 = fcodel;
1900 else
1901 return NULL_TREE;
1903 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1904 return NULL_TREE;
1906 return builtin_decl_explicit (fcode2);
1909 /* Like mathfn_built_in_1(), but always use the implicit array. */
1911 tree
1912 mathfn_built_in (tree type, enum built_in_function fn)
1914 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1917 /* If errno must be maintained, expand the RTL to check if the result,
1918 TARGET, of a built-in function call, EXP, is NaN, and if so set
1919 errno to EDOM. */
1921 static void
1922 expand_errno_check (tree exp, rtx target)
1924 rtx lab = gen_label_rtx ();
1926 /* Test the result; if it is NaN, set errno=EDOM because
1927 the argument was not in the domain. */
1928 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1929 NULL_RTX, NULL_RTX, lab,
1930 /* The jump is very likely. */
1931 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1933 #ifdef TARGET_EDOM
1934 /* If this built-in doesn't throw an exception, set errno directly. */
1935 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1937 #ifdef GEN_ERRNO_RTX
1938 rtx errno_rtx = GEN_ERRNO_RTX;
1939 #else
1940 rtx errno_rtx
1941 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1942 #endif
1943 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1944 emit_label (lab);
1945 return;
1947 #endif
1949 /* Make sure the library call isn't expanded as a tail call. */
1950 CALL_EXPR_TAILCALL (exp) = 0;
1952 /* We can't set errno=EDOM directly; let the library call do it.
1953 Pop the arguments right away in case the call gets deleted. */
1954 NO_DEFER_POP;
1955 expand_call (exp, target, 0);
1956 OK_DEFER_POP;
1957 emit_label (lab);
1960 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1961 Return NULL_RTX if a normal call should be emitted rather than expanding
1962 the function in-line. EXP is the expression that is a call to the builtin
1963 function; if convenient, the result should be placed in TARGET.
1964 SUBTARGET may be used as the target for computing one of EXP's operands. */
1966 static rtx
1967 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1969 optab builtin_optab;
1970 rtx op0, insns;
1971 tree fndecl = get_callee_fndecl (exp);
1972 enum machine_mode mode;
1973 bool errno_set = false;
1974 tree arg;
1976 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1977 return NULL_RTX;
1979 arg = CALL_EXPR_ARG (exp, 0);
1981 switch (DECL_FUNCTION_CODE (fndecl))
1983 CASE_FLT_FN (BUILT_IN_SQRT):
1984 errno_set = ! tree_expr_nonnegative_p (arg);
1985 builtin_optab = sqrt_optab;
1986 break;
1987 CASE_FLT_FN (BUILT_IN_EXP):
1988 errno_set = true; builtin_optab = exp_optab; break;
1989 CASE_FLT_FN (BUILT_IN_EXP10):
1990 CASE_FLT_FN (BUILT_IN_POW10):
1991 errno_set = true; builtin_optab = exp10_optab; break;
1992 CASE_FLT_FN (BUILT_IN_EXP2):
1993 errno_set = true; builtin_optab = exp2_optab; break;
1994 CASE_FLT_FN (BUILT_IN_EXPM1):
1995 errno_set = true; builtin_optab = expm1_optab; break;
1996 CASE_FLT_FN (BUILT_IN_LOGB):
1997 errno_set = true; builtin_optab = logb_optab; break;
1998 CASE_FLT_FN (BUILT_IN_LOG):
1999 errno_set = true; builtin_optab = log_optab; break;
2000 CASE_FLT_FN (BUILT_IN_LOG10):
2001 errno_set = true; builtin_optab = log10_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOG2):
2003 errno_set = true; builtin_optab = log2_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOG1P):
2005 errno_set = true; builtin_optab = log1p_optab; break;
2006 CASE_FLT_FN (BUILT_IN_ASIN):
2007 builtin_optab = asin_optab; break;
2008 CASE_FLT_FN (BUILT_IN_ACOS):
2009 builtin_optab = acos_optab; break;
2010 CASE_FLT_FN (BUILT_IN_TAN):
2011 builtin_optab = tan_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ATAN):
2013 builtin_optab = atan_optab; break;
2014 CASE_FLT_FN (BUILT_IN_FLOOR):
2015 builtin_optab = floor_optab; break;
2016 CASE_FLT_FN (BUILT_IN_CEIL):
2017 builtin_optab = ceil_optab; break;
2018 CASE_FLT_FN (BUILT_IN_TRUNC):
2019 builtin_optab = btrunc_optab; break;
2020 CASE_FLT_FN (BUILT_IN_ROUND):
2021 builtin_optab = round_optab; break;
2022 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2023 builtin_optab = nearbyint_optab;
2024 if (flag_trapping_math)
2025 break;
2026 /* Else fallthrough and expand as rint. */
2027 CASE_FLT_FN (BUILT_IN_RINT):
2028 builtin_optab = rint_optab; break;
2029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2030 builtin_optab = significand_optab; break;
2031 default:
2032 gcc_unreachable ();
2035 /* Make a suitable register to place result in. */
2036 mode = TYPE_MODE (TREE_TYPE (exp));
2038 if (! flag_errno_math || ! HONOR_NANS (mode))
2039 errno_set = false;
2041 /* Before working hard, check whether the instruction is available. */
2042 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2043 && (!errno_set || !optimize_insn_for_size_p ()))
2045 target = gen_reg_rtx (mode);
2047 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2048 need to expand the argument again. This way, we will not perform
2049 side-effects more the once. */
2050 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2052 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2054 start_sequence ();
2056 /* Compute into TARGET.
2057 Set TARGET to wherever the result comes back. */
2058 target = expand_unop (mode, builtin_optab, op0, target, 0);
2060 if (target != 0)
2062 if (errno_set)
2063 expand_errno_check (exp, target);
2065 /* Output the entire sequence. */
2066 insns = get_insns ();
2067 end_sequence ();
2068 emit_insn (insns);
2069 return target;
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2075 end_sequence ();
2078 return expand_call (exp, target, target == const0_rtx);
2081 /* Expand a call to the builtin binary math functions (pow and atan2).
2082 Return NULL_RTX if a normal call should be emitted rather than expanding the
2083 function in-line. EXP is the expression that is a call to the builtin
2084 function; if convenient, the result should be placed in TARGET.
2085 SUBTARGET may be used as the target for computing one of EXP's
2086 operands. */
2088 static rtx
2089 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2091 optab builtin_optab;
2092 rtx op0, op1, insns;
2093 int op1_type = REAL_TYPE;
2094 tree fndecl = get_callee_fndecl (exp);
2095 tree arg0, arg1;
2096 enum machine_mode mode;
2097 bool errno_set = true;
2099 switch (DECL_FUNCTION_CODE (fndecl))
2101 CASE_FLT_FN (BUILT_IN_SCALBN):
2102 CASE_FLT_FN (BUILT_IN_SCALBLN):
2103 CASE_FLT_FN (BUILT_IN_LDEXP):
2104 op1_type = INTEGER_TYPE;
2105 default:
2106 break;
2109 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2110 return NULL_RTX;
2112 arg0 = CALL_EXPR_ARG (exp, 0);
2113 arg1 = CALL_EXPR_ARG (exp, 1);
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_POW):
2118 builtin_optab = pow_optab; break;
2119 CASE_FLT_FN (BUILT_IN_ATAN2):
2120 builtin_optab = atan2_optab; break;
2121 CASE_FLT_FN (BUILT_IN_SCALB):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2123 return 0;
2124 builtin_optab = scalb_optab; break;
2125 CASE_FLT_FN (BUILT_IN_SCALBN):
2126 CASE_FLT_FN (BUILT_IN_SCALBLN):
2127 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2128 return 0;
2129 /* Fall through... */
2130 CASE_FLT_FN (BUILT_IN_LDEXP):
2131 builtin_optab = ldexp_optab; break;
2132 CASE_FLT_FN (BUILT_IN_FMOD):
2133 builtin_optab = fmod_optab; break;
2134 CASE_FLT_FN (BUILT_IN_REMAINDER):
2135 CASE_FLT_FN (BUILT_IN_DREM):
2136 builtin_optab = remainder_optab; break;
2137 default:
2138 gcc_unreachable ();
2141 /* Make a suitable register to place result in. */
2142 mode = TYPE_MODE (TREE_TYPE (exp));
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2146 return NULL_RTX;
2148 target = gen_reg_rtx (mode);
2150 if (! flag_errno_math || ! HONOR_NANS (mode))
2151 errno_set = false;
2153 if (errno_set && optimize_insn_for_size_p ())
2154 return 0;
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2158 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2160 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2161 op1 = expand_normal (arg1);
2163 start_sequence ();
2165 /* Compute into TARGET.
2166 Set TARGET to wherever the result comes back. */
2167 target = expand_binop (mode, builtin_optab, op0, op1,
2168 target, 0, OPTAB_DIRECT);
2170 /* If we were unable to expand via the builtin, stop the sequence
2171 (without outputting the insns) and call to the library function
2172 with the stabilized argument list. */
2173 if (target == 0)
2175 end_sequence ();
2176 return expand_call (exp, target, target == const0_rtx);
2179 if (errno_set)
2180 expand_errno_check (exp, target);
2182 /* Output the entire sequence. */
2183 insns = get_insns ();
2184 end_sequence ();
2185 emit_insn (insns);
2187 return target;
2190 /* Expand a call to the builtin trinary math functions (fma).
2191 Return NULL_RTX if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's
2195 operands. */
2197 static rtx
2198 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2200 optab builtin_optab;
2201 rtx op0, op1, op2, insns;
2202 tree fndecl = get_callee_fndecl (exp);
2203 tree arg0, arg1, arg2;
2204 enum machine_mode mode;
2206 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2207 return NULL_RTX;
2209 arg0 = CALL_EXPR_ARG (exp, 0);
2210 arg1 = CALL_EXPR_ARG (exp, 1);
2211 arg2 = CALL_EXPR_ARG (exp, 2);
2213 switch (DECL_FUNCTION_CODE (fndecl))
2215 CASE_FLT_FN (BUILT_IN_FMA):
2216 builtin_optab = fma_optab; break;
2217 default:
2218 gcc_unreachable ();
2221 /* Make a suitable register to place result in. */
2222 mode = TYPE_MODE (TREE_TYPE (exp));
2224 /* Before working hard, check whether the instruction is available. */
2225 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2226 return NULL_RTX;
2228 target = gen_reg_rtx (mode);
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2232 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2233 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2235 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2236 op1 = expand_normal (arg1);
2237 op2 = expand_normal (arg2);
2239 start_sequence ();
2241 /* Compute into TARGET.
2242 Set TARGET to wherever the result comes back. */
2243 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2244 target, 0);
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call to the library function
2248 with the stabilized argument list. */
2249 if (target == 0)
2251 end_sequence ();
2252 return expand_call (exp, target, target == const0_rtx);
2255 /* Output the entire sequence. */
2256 insns = get_insns ();
2257 end_sequence ();
2258 emit_insn (insns);
2260 return target;
2263 /* Expand a call to the builtin sin and cos math functions.
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2268 operands. */
2270 static rtx
2271 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2273 optab builtin_optab;
2274 rtx op0, insns;
2275 tree fndecl = get_callee_fndecl (exp);
2276 enum machine_mode mode;
2277 tree arg;
2279 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2282 arg = CALL_EXPR_ARG (exp, 0);
2284 switch (DECL_FUNCTION_CODE (fndecl))
2286 CASE_FLT_FN (BUILT_IN_SIN):
2287 CASE_FLT_FN (BUILT_IN_COS):
2288 builtin_optab = sincos_optab; break;
2289 default:
2290 gcc_unreachable ();
2293 /* Make a suitable register to place result in. */
2294 mode = TYPE_MODE (TREE_TYPE (exp));
2296 /* Check if sincos insn is available, otherwise fallback
2297 to sin or cos insn. */
2298 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2299 switch (DECL_FUNCTION_CODE (fndecl))
2301 CASE_FLT_FN (BUILT_IN_SIN):
2302 builtin_optab = sin_optab; break;
2303 CASE_FLT_FN (BUILT_IN_COS):
2304 builtin_optab = cos_optab; break;
2305 default:
2306 gcc_unreachable ();
2309 /* Before working hard, check whether the instruction is available. */
2310 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2312 target = gen_reg_rtx (mode);
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2319 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 start_sequence ();
2323 /* Compute into TARGET.
2324 Set TARGET to wherever the result comes back. */
2325 if (builtin_optab == sincos_optab)
2327 int result;
2329 switch (DECL_FUNCTION_CODE (fndecl))
2331 CASE_FLT_FN (BUILT_IN_SIN):
2332 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2333 break;
2334 CASE_FLT_FN (BUILT_IN_COS):
2335 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2336 break;
2337 default:
2338 gcc_unreachable ();
2340 gcc_assert (result);
2342 else
2344 target = expand_unop (mode, builtin_optab, op0, target, 0);
2347 if (target != 0)
2349 /* Output the entire sequence. */
2350 insns = get_insns ();
2351 end_sequence ();
2352 emit_insn (insns);
2353 return target;
2356 /* If we were unable to expand via the builtin, stop the sequence
2357 (without outputting the insns) and call to the library function
2358 with the stabilized argument list. */
2359 end_sequence ();
2362 target = expand_call (exp, target, target == const0_rtx);
2364 return target;
2367 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2368 return an RTL instruction code that implements the functionality.
2369 If that isn't possible or available return CODE_FOR_nothing. */
2371 static enum insn_code
2372 interclass_mathfn_icode (tree arg, tree fndecl)
2374 bool errno_set = false;
2375 optab builtin_optab = unknown_optab;
2376 enum machine_mode mode;
2378 switch (DECL_FUNCTION_CODE (fndecl))
2380 CASE_FLT_FN (BUILT_IN_ILOGB):
2381 errno_set = true; builtin_optab = ilogb_optab; break;
2382 CASE_FLT_FN (BUILT_IN_ISINF):
2383 builtin_optab = isinf_optab; break;
2384 case BUILT_IN_ISNORMAL:
2385 case BUILT_IN_ISFINITE:
2386 CASE_FLT_FN (BUILT_IN_FINITE):
2387 case BUILT_IN_FINITED32:
2388 case BUILT_IN_FINITED64:
2389 case BUILT_IN_FINITED128:
2390 case BUILT_IN_ISINFD32:
2391 case BUILT_IN_ISINFD64:
2392 case BUILT_IN_ISINFD128:
2393 /* These builtins have no optabs (yet). */
2394 break;
2395 default:
2396 gcc_unreachable ();
2399 /* There's no easy way to detect the case we need to set EDOM. */
2400 if (flag_errno_math && errno_set)
2401 return CODE_FOR_nothing;
2403 /* Optab mode depends on the mode of the input argument. */
2404 mode = TYPE_MODE (TREE_TYPE (arg));
2406 if (builtin_optab)
2407 return optab_handler (builtin_optab, mode);
2408 return CODE_FOR_nothing;
2411 /* Expand a call to one of the builtin math functions that operate on
2412 floating point argument and output an integer result (ilogb, isinf,
2413 isnan, etc).
2414 Return 0 if a normal call should be emitted rather than expanding the
2415 function in-line. EXP is the expression that is a call to the builtin
2416 function; if convenient, the result should be placed in TARGET. */
2418 static rtx
2419 expand_builtin_interclass_mathfn (tree exp, rtx target)
2421 enum insn_code icode = CODE_FOR_nothing;
2422 rtx op0;
2423 tree fndecl = get_callee_fndecl (exp);
2424 enum machine_mode mode;
2425 tree arg;
2427 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 return NULL_RTX;
2430 arg = CALL_EXPR_ARG (exp, 0);
2431 icode = interclass_mathfn_icode (arg, fndecl);
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 if (icode != CODE_FOR_nothing)
2436 struct expand_operand ops[1];
2437 rtx last = get_last_insn ();
2438 tree orig_arg = arg;
2440 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2441 need to expand the argument again. This way, we will not perform
2442 side-effects more the once. */
2443 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2445 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2447 if (mode != GET_MODE (op0))
2448 op0 = convert_to_mode (mode, op0, 0);
2450 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2451 if (maybe_legitimize_operands (icode, 0, 1, ops)
2452 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2453 return ops[0].value;
2455 delete_insns_since (last);
2456 CALL_EXPR_ARG (exp, 0) = orig_arg;
2459 return NULL_RTX;
2462 /* Expand a call to the builtin sincos math function.
2463 Return NULL_RTX if a normal call should be emitted rather than expanding the
2464 function in-line. EXP is the expression that is a call to the builtin
2465 function. */
2467 static rtx
2468 expand_builtin_sincos (tree exp)
2470 rtx op0, op1, op2, target1, target2;
2471 enum machine_mode mode;
2472 tree arg, sinp, cosp;
2473 int result;
2474 location_t loc = EXPR_LOCATION (exp);
2475 tree alias_type, alias_off;
2477 if (!validate_arglist (exp, REAL_TYPE,
2478 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2479 return NULL_RTX;
2481 arg = CALL_EXPR_ARG (exp, 0);
2482 sinp = CALL_EXPR_ARG (exp, 1);
2483 cosp = CALL_EXPR_ARG (exp, 2);
2485 /* Make a suitable register to place result in. */
2486 mode = TYPE_MODE (TREE_TYPE (arg));
2488 /* Check if sincos insn is available, otherwise emit the call. */
2489 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2490 return NULL_RTX;
2492 target1 = gen_reg_rtx (mode);
2493 target2 = gen_reg_rtx (mode);
2495 op0 = expand_normal (arg);
2496 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2497 alias_off = build_int_cst (alias_type, 0);
2498 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2499 sinp, alias_off));
2500 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2501 cosp, alias_off));
2503 /* Compute into target1 and target2.
2504 Set TARGET to wherever the result comes back. */
2505 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2506 gcc_assert (result);
2508 /* Move target1 and target2 to the memory locations indicated
2509 by op1 and op2. */
2510 emit_move_insn (op1, target1);
2511 emit_move_insn (op2, target2);
2513 return const0_rtx;
2516 /* Expand a call to the internal cexpi builtin to the sincos math function.
2517 EXP is the expression that is a call to the builtin function; if convenient,
2518 the result should be placed in TARGET. */
2520 static rtx
2521 expand_builtin_cexpi (tree exp, rtx target)
2523 tree fndecl = get_callee_fndecl (exp);
2524 tree arg, type;
2525 enum machine_mode mode;
2526 rtx op0, op1, op2;
2527 location_t loc = EXPR_LOCATION (exp);
2529 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2530 return NULL_RTX;
2532 arg = CALL_EXPR_ARG (exp, 0);
2533 type = TREE_TYPE (arg);
2534 mode = TYPE_MODE (TREE_TYPE (arg));
2536 /* Try expanding via a sincos optab, fall back to emitting a libcall
2537 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2538 is only generated from sincos, cexp or if we have either of them. */
2539 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2541 op1 = gen_reg_rtx (mode);
2542 op2 = gen_reg_rtx (mode);
2544 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2546 /* Compute into op1 and op2. */
2547 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2549 else if (TARGET_HAS_SINCOS)
2551 tree call, fn = NULL_TREE;
2552 tree top1, top2;
2553 rtx op1a, op2a;
2555 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2556 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2557 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2558 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2559 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2560 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2561 else
2562 gcc_unreachable ();
2564 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2565 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2566 op1a = copy_addr_to_reg (XEXP (op1, 0));
2567 op2a = copy_addr_to_reg (XEXP (op2, 0));
2568 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2569 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2571 /* Make sure not to fold the sincos call again. */
2572 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2573 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2574 call, 3, arg, top1, top2));
2576 else
2578 tree call, fn = NULL_TREE, narg;
2579 tree ctype = build_complex_type (type);
2581 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2582 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2583 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2584 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2586 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2587 else
2588 gcc_unreachable ();
2590 /* If we don't have a decl for cexp create one. This is the
2591 friendliest fallback if the user calls __builtin_cexpi
2592 without full target C99 function support. */
2593 if (fn == NULL_TREE)
2595 tree fntype;
2596 const char *name = NULL;
2598 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2599 name = "cexpf";
2600 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2601 name = "cexp";
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2603 name = "cexpl";
2605 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2606 fn = build_fn_decl (name, fntype);
2609 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2610 build_real (type, dconst0), arg);
2612 /* Make sure not to fold the cexp call again. */
2613 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2614 return expand_expr (build_call_nary (ctype, call, 1, narg),
2615 target, VOIDmode, EXPAND_NORMAL);
2618 /* Now build the proper return type. */
2619 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2620 make_tree (TREE_TYPE (arg), op2),
2621 make_tree (TREE_TYPE (arg), op1)),
2622 target, VOIDmode, EXPAND_NORMAL);
2625 /* Conveniently construct a function call expression. FNDECL names the
2626 function to be called, N is the number of arguments, and the "..."
2627 parameters are the argument expressions. Unlike build_call_exr
2628 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2630 static tree
2631 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2633 va_list ap;
2634 tree fntype = TREE_TYPE (fndecl);
2635 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2637 va_start (ap, n);
2638 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2639 va_end (ap);
2640 SET_EXPR_LOCATION (fn, loc);
2641 return fn;
2644 /* Expand a call to one of the builtin rounding functions gcc defines
2645 as an extension (lfloor and lceil). As these are gcc extensions we
2646 do not need to worry about setting errno to EDOM.
2647 If expanding via optab fails, lower expression to (int)(floor(x)).
2648 EXP is the expression that is a call to the builtin function;
2649 if convenient, the result should be placed in TARGET. */
2651 static rtx
2652 expand_builtin_int_roundingfn (tree exp, rtx target)
2654 convert_optab builtin_optab;
2655 rtx op0, insns, tmp;
2656 tree fndecl = get_callee_fndecl (exp);
2657 enum built_in_function fallback_fn;
2658 tree fallback_fndecl;
2659 enum machine_mode mode;
2660 tree arg;
2662 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2663 gcc_unreachable ();
2665 arg = CALL_EXPR_ARG (exp, 0);
2667 switch (DECL_FUNCTION_CODE (fndecl))
2669 CASE_FLT_FN (BUILT_IN_ICEIL):
2670 CASE_FLT_FN (BUILT_IN_LCEIL):
2671 CASE_FLT_FN (BUILT_IN_LLCEIL):
2672 builtin_optab = lceil_optab;
2673 fallback_fn = BUILT_IN_CEIL;
2674 break;
2676 CASE_FLT_FN (BUILT_IN_IFLOOR):
2677 CASE_FLT_FN (BUILT_IN_LFLOOR):
2678 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2679 builtin_optab = lfloor_optab;
2680 fallback_fn = BUILT_IN_FLOOR;
2681 break;
2683 default:
2684 gcc_unreachable ();
2687 /* Make a suitable register to place result in. */
2688 mode = TYPE_MODE (TREE_TYPE (exp));
2690 target = gen_reg_rtx (mode);
2692 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2693 need to expand the argument again. This way, we will not perform
2694 side-effects more the once. */
2695 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2697 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2699 start_sequence ();
2701 /* Compute into TARGET. */
2702 if (expand_sfix_optab (target, op0, builtin_optab))
2704 /* Output the entire sequence. */
2705 insns = get_insns ();
2706 end_sequence ();
2707 emit_insn (insns);
2708 return target;
2711 /* If we were unable to expand via the builtin, stop the sequence
2712 (without outputting the insns). */
2713 end_sequence ();
2715 /* Fall back to floating point rounding optab. */
2716 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2718 /* For non-C99 targets we may end up without a fallback fndecl here
2719 if the user called __builtin_lfloor directly. In this case emit
2720 a call to the floor/ceil variants nevertheless. This should result
2721 in the best user experience for not full C99 targets. */
2722 if (fallback_fndecl == NULL_TREE)
2724 tree fntype;
2725 const char *name = NULL;
2727 switch (DECL_FUNCTION_CODE (fndecl))
2729 case BUILT_IN_ICEIL:
2730 case BUILT_IN_LCEIL:
2731 case BUILT_IN_LLCEIL:
2732 name = "ceil";
2733 break;
2734 case BUILT_IN_ICEILF:
2735 case BUILT_IN_LCEILF:
2736 case BUILT_IN_LLCEILF:
2737 name = "ceilf";
2738 break;
2739 case BUILT_IN_ICEILL:
2740 case BUILT_IN_LCEILL:
2741 case BUILT_IN_LLCEILL:
2742 name = "ceill";
2743 break;
2744 case BUILT_IN_IFLOOR:
2745 case BUILT_IN_LFLOOR:
2746 case BUILT_IN_LLFLOOR:
2747 name = "floor";
2748 break;
2749 case BUILT_IN_IFLOORF:
2750 case BUILT_IN_LFLOORF:
2751 case BUILT_IN_LLFLOORF:
2752 name = "floorf";
2753 break;
2754 case BUILT_IN_IFLOORL:
2755 case BUILT_IN_LFLOORL:
2756 case BUILT_IN_LLFLOORL:
2757 name = "floorl";
2758 break;
2759 default:
2760 gcc_unreachable ();
2763 fntype = build_function_type_list (TREE_TYPE (arg),
2764 TREE_TYPE (arg), NULL_TREE);
2765 fallback_fndecl = build_fn_decl (name, fntype);
2768 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2770 tmp = expand_normal (exp);
2772 /* Truncate the result of floating point optab to integer
2773 via expand_fix (). */
2774 target = gen_reg_rtx (mode);
2775 expand_fix (target, tmp, 0);
2777 return target;
2780 /* Expand a call to one of the builtin math functions doing integer
2781 conversion (lrint).
2782 Return 0 if a normal call should be emitted rather than expanding the
2783 function in-line. EXP is the expression that is a call to the builtin
2784 function; if convenient, the result should be placed in TARGET. */
2786 static rtx
2787 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2789 convert_optab builtin_optab;
2790 rtx op0, insns;
2791 tree fndecl = get_callee_fndecl (exp);
2792 tree arg;
2793 enum machine_mode mode;
2794 enum built_in_function fallback_fn = BUILT_IN_NONE;
2796 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2797 gcc_unreachable ();
2799 arg = CALL_EXPR_ARG (exp, 0);
2801 switch (DECL_FUNCTION_CODE (fndecl))
2803 CASE_FLT_FN (BUILT_IN_IRINT):
2804 fallback_fn = BUILT_IN_LRINT;
2805 /* FALLTHRU */
2806 CASE_FLT_FN (BUILT_IN_LRINT):
2807 CASE_FLT_FN (BUILT_IN_LLRINT):
2808 builtin_optab = lrint_optab;
2809 break;
2811 CASE_FLT_FN (BUILT_IN_IROUND):
2812 fallback_fn = BUILT_IN_LROUND;
2813 /* FALLTHRU */
2814 CASE_FLT_FN (BUILT_IN_LROUND):
2815 CASE_FLT_FN (BUILT_IN_LLROUND):
2816 builtin_optab = lround_optab;
2817 break;
2819 default:
2820 gcc_unreachable ();
2823 /* There's no easy way to detect the case we need to set EDOM. */
2824 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2825 return NULL_RTX;
2827 /* Make a suitable register to place result in. */
2828 mode = TYPE_MODE (TREE_TYPE (exp));
2830 /* There's no easy way to detect the case we need to set EDOM. */
2831 if (!flag_errno_math)
2833 target = gen_reg_rtx (mode);
2835 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2836 need to expand the argument again. This way, we will not perform
2837 side-effects more the once. */
2838 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2840 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2842 start_sequence ();
2844 if (expand_sfix_optab (target, op0, builtin_optab))
2846 /* Output the entire sequence. */
2847 insns = get_insns ();
2848 end_sequence ();
2849 emit_insn (insns);
2850 return target;
2853 /* If we were unable to expand via the builtin, stop the sequence
2854 (without outputting the insns) and call to the library function
2855 with the stabilized argument list. */
2856 end_sequence ();
2859 if (fallback_fn != BUILT_IN_NONE)
2861 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2862 targets, (int) round (x) should never be transformed into
2863 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2864 a call to lround in the hope that the target provides at least some
2865 C99 functions. This should result in the best user experience for
2866 not full C99 targets. */
2867 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2868 fallback_fn, 0);
2870 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2871 fallback_fndecl, 1, arg);
2873 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2874 return convert_to_mode (mode, target, 0);
2877 target = expand_call (exp, target, target == const0_rtx);
2879 return target;
2882 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2883 a normal call should be emitted rather than expanding the function
2884 in-line. EXP is the expression that is a call to the builtin
2885 function; if convenient, the result should be placed in TARGET. */
2887 static rtx
2888 expand_builtin_powi (tree exp, rtx target)
2890 tree arg0, arg1;
2891 rtx op0, op1;
2892 enum machine_mode mode;
2893 enum machine_mode mode2;
2895 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2896 return NULL_RTX;
2898 arg0 = CALL_EXPR_ARG (exp, 0);
2899 arg1 = CALL_EXPR_ARG (exp, 1);
2900 mode = TYPE_MODE (TREE_TYPE (exp));
2902 /* Emit a libcall to libgcc. */
2904 /* Mode of the 2nd argument must match that of an int. */
2905 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2907 if (target == NULL_RTX)
2908 target = gen_reg_rtx (mode);
2910 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2911 if (GET_MODE (op0) != mode)
2912 op0 = convert_to_mode (mode, op0, 0);
2913 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2914 if (GET_MODE (op1) != mode2)
2915 op1 = convert_to_mode (mode2, op1, 0);
2917 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2918 target, LCT_CONST, mode, 2,
2919 op0, mode, op1, mode2);
2921 return target;
2924 /* Expand expression EXP which is a call to the strlen builtin. Return
2925 NULL_RTX if we failed the caller should emit a normal call, otherwise
2926 try to get the result in TARGET, if convenient. */
2928 static rtx
2929 expand_builtin_strlen (tree exp, rtx target,
2930 enum machine_mode target_mode)
2932 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2933 return NULL_RTX;
2934 else
2936 struct expand_operand ops[4];
2937 rtx pat;
2938 tree len;
2939 tree src = CALL_EXPR_ARG (exp, 0);
2940 rtx src_reg, before_strlen;
2941 enum machine_mode insn_mode = target_mode;
2942 enum insn_code icode = CODE_FOR_nothing;
2943 unsigned int align;
2945 /* If the length can be computed at compile-time, return it. */
2946 len = c_strlen (src, 0);
2947 if (len)
2948 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2950 /* If the length can be computed at compile-time and is constant
2951 integer, but there are side-effects in src, evaluate
2952 src for side-effects, then return len.
2953 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2954 can be optimized into: i++; x = 3; */
2955 len = c_strlen (src, 1);
2956 if (len && TREE_CODE (len) == INTEGER_CST)
2958 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2959 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2962 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2964 /* If SRC is not a pointer type, don't do this operation inline. */
2965 if (align == 0)
2966 return NULL_RTX;
2968 /* Bail out if we can't compute strlen in the right mode. */
2969 while (insn_mode != VOIDmode)
2971 icode = optab_handler (strlen_optab, insn_mode);
2972 if (icode != CODE_FOR_nothing)
2973 break;
2975 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2977 if (insn_mode == VOIDmode)
2978 return NULL_RTX;
2980 /* Make a place to hold the source address. We will not expand
2981 the actual source until we are sure that the expansion will
2982 not fail -- there are trees that cannot be expanded twice. */
2983 src_reg = gen_reg_rtx (Pmode);
2985 /* Mark the beginning of the strlen sequence so we can emit the
2986 source operand later. */
2987 before_strlen = get_last_insn ();
2989 create_output_operand (&ops[0], target, insn_mode);
2990 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2991 create_integer_operand (&ops[2], 0);
2992 create_integer_operand (&ops[3], align);
2993 if (!maybe_expand_insn (icode, 4, ops))
2994 return NULL_RTX;
2996 /* Now that we are assured of success, expand the source. */
2997 start_sequence ();
2998 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2999 if (pat != src_reg)
3001 #ifdef POINTERS_EXTEND_UNSIGNED
3002 if (GET_MODE (pat) != Pmode)
3003 pat = convert_to_mode (Pmode, pat,
3004 POINTERS_EXTEND_UNSIGNED);
3005 #endif
3006 emit_move_insn (src_reg, pat);
3008 pat = get_insns ();
3009 end_sequence ();
3011 if (before_strlen)
3012 emit_insn_after (pat, before_strlen);
3013 else
3014 emit_insn_before (pat, get_insns ());
3016 /* Return the value in the proper mode for this function. */
3017 if (GET_MODE (ops[0].value) == target_mode)
3018 target = ops[0].value;
3019 else if (target != 0)
3020 convert_move (target, ops[0].value, 0);
3021 else
3022 target = convert_to_mode (target_mode, ops[0].value, 0);
3024 return target;
3028 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3029 bytes from constant string DATA + OFFSET and return it as target
3030 constant. */
3032 static rtx
3033 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3034 enum machine_mode mode)
3036 const char *str = (const char *) data;
3038 gcc_assert (offset >= 0
3039 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3040 <= strlen (str) + 1));
3042 return c_readstr (str + offset, mode);
3045 /* Expand a call EXP to the memcpy builtin.
3046 Return NULL_RTX if we failed, the caller should emit a normal call,
3047 otherwise try to get the result in TARGET, if convenient (and in
3048 mode MODE if that's convenient). */
3050 static rtx
3051 expand_builtin_memcpy (tree exp, rtx target)
3053 if (!validate_arglist (exp,
3054 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3055 return NULL_RTX;
3056 else
3058 tree dest = CALL_EXPR_ARG (exp, 0);
3059 tree src = CALL_EXPR_ARG (exp, 1);
3060 tree len = CALL_EXPR_ARG (exp, 2);
3061 const char *src_str;
3062 unsigned int src_align = get_pointer_alignment (src);
3063 unsigned int dest_align = get_pointer_alignment (dest);
3064 rtx dest_mem, src_mem, dest_addr, len_rtx;
3065 HOST_WIDE_INT expected_size = -1;
3066 unsigned int expected_align = 0;
3068 /* If DEST is not a pointer type, call the normal function. */
3069 if (dest_align == 0)
3070 return NULL_RTX;
3072 /* If either SRC is not a pointer type, don't do this
3073 operation in-line. */
3074 if (src_align == 0)
3075 return NULL_RTX;
3077 if (currently_expanding_gimple_stmt)
3078 stringop_block_profile (currently_expanding_gimple_stmt,
3079 &expected_align, &expected_size);
3081 if (expected_align < dest_align)
3082 expected_align = dest_align;
3083 dest_mem = get_memory_rtx (dest, len);
3084 set_mem_align (dest_mem, dest_align);
3085 len_rtx = expand_normal (len);
3086 src_str = c_getstr (src);
3088 /* If SRC is a string constant and block move would be done
3089 by pieces, we can avoid loading the string from memory
3090 and only stored the computed constants. */
3091 if (src_str
3092 && CONST_INT_P (len_rtx)
3093 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3094 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3095 CONST_CAST (char *, src_str),
3096 dest_align, false))
3098 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3099 builtin_memcpy_read_str,
3100 CONST_CAST (char *, src_str),
3101 dest_align, false, 0);
3102 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3103 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3104 return dest_mem;
3107 src_mem = get_memory_rtx (src, len);
3108 set_mem_align (src_mem, src_align);
3110 /* Copy word part most expediently. */
3111 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3112 CALL_EXPR_TAILCALL (exp)
3113 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3114 expected_align, expected_size);
3116 if (dest_addr == 0)
3118 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3119 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3121 return dest_addr;
3125 /* Expand a call EXP to the mempcpy builtin.
3126 Return NULL_RTX if we failed; the caller should emit a normal call,
3127 otherwise try to get the result in TARGET, if convenient (and in
3128 mode MODE if that's convenient). If ENDP is 0 return the
3129 destination pointer, if ENDP is 1 return the end pointer ala
3130 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3131 stpcpy. */
3133 static rtx
3134 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3136 if (!validate_arglist (exp,
3137 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3138 return NULL_RTX;
3139 else
3141 tree dest = CALL_EXPR_ARG (exp, 0);
3142 tree src = CALL_EXPR_ARG (exp, 1);
3143 tree len = CALL_EXPR_ARG (exp, 2);
3144 return expand_builtin_mempcpy_args (dest, src, len,
3145 target, mode, /*endp=*/ 1);
3149 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3150 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3151 so that this can also be called without constructing an actual CALL_EXPR.
3152 The other arguments and return value are the same as for
3153 expand_builtin_mempcpy. */
3155 static rtx
3156 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3157 rtx target, enum machine_mode mode, int endp)
3159 /* If return value is ignored, transform mempcpy into memcpy. */
3160 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3162 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3163 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3164 dest, src, len);
3165 return expand_expr (result, target, mode, EXPAND_NORMAL);
3167 else
3169 const char *src_str;
3170 unsigned int src_align = get_pointer_alignment (src);
3171 unsigned int dest_align = get_pointer_alignment (dest);
3172 rtx dest_mem, src_mem, len_rtx;
3174 /* If either SRC or DEST is not a pointer type, don't do this
3175 operation in-line. */
3176 if (dest_align == 0 || src_align == 0)
3177 return NULL_RTX;
3179 /* If LEN is not constant, call the normal function. */
3180 if (! host_integerp (len, 1))
3181 return NULL_RTX;
3183 len_rtx = expand_normal (len);
3184 src_str = c_getstr (src);
3186 /* If SRC is a string constant and block move would be done
3187 by pieces, we can avoid loading the string from memory
3188 and only stored the computed constants. */
3189 if (src_str
3190 && CONST_INT_P (len_rtx)
3191 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3192 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3193 CONST_CAST (char *, src_str),
3194 dest_align, false))
3196 dest_mem = get_memory_rtx (dest, len);
3197 set_mem_align (dest_mem, dest_align);
3198 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3199 builtin_memcpy_read_str,
3200 CONST_CAST (char *, src_str),
3201 dest_align, false, endp);
3202 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3203 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3204 return dest_mem;
3207 if (CONST_INT_P (len_rtx)
3208 && can_move_by_pieces (INTVAL (len_rtx),
3209 MIN (dest_align, src_align)))
3211 dest_mem = get_memory_rtx (dest, len);
3212 set_mem_align (dest_mem, dest_align);
3213 src_mem = get_memory_rtx (src, len);
3214 set_mem_align (src_mem, src_align);
3215 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3216 MIN (dest_align, src_align), endp);
3217 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3218 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3219 return dest_mem;
3222 return NULL_RTX;
3226 #ifndef HAVE_movstr
3227 # define HAVE_movstr 0
3228 # define CODE_FOR_movstr CODE_FOR_nothing
3229 #endif
3231 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3232 we failed, the caller should emit a normal call, otherwise try to
3233 get the result in TARGET, if convenient. If ENDP is 0 return the
3234 destination pointer, if ENDP is 1 return the end pointer ala
3235 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3236 stpcpy. */
3238 static rtx
3239 expand_movstr (tree dest, tree src, rtx target, int endp)
3241 struct expand_operand ops[3];
3242 rtx dest_mem;
3243 rtx src_mem;
3245 if (!HAVE_movstr)
3246 return NULL_RTX;
3248 dest_mem = get_memory_rtx (dest, NULL);
3249 src_mem = get_memory_rtx (src, NULL);
3250 if (!endp)
3252 target = force_reg (Pmode, XEXP (dest_mem, 0));
3253 dest_mem = replace_equiv_address (dest_mem, target);
3256 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3257 create_fixed_operand (&ops[1], dest_mem);
3258 create_fixed_operand (&ops[2], src_mem);
3259 expand_insn (CODE_FOR_movstr, 3, ops);
3261 if (endp && target != const0_rtx)
3263 target = ops[0].value;
3264 /* movstr is supposed to set end to the address of the NUL
3265 terminator. If the caller requested a mempcpy-like return value,
3266 adjust it. */
3267 if (endp == 1)
3269 rtx tem = plus_constant (GET_MODE (target),
3270 gen_lowpart (GET_MODE (target), target), 1);
3271 emit_move_insn (target, force_operand (tem, NULL_RTX));
3274 return target;
3277 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3278 NULL_RTX if we failed the caller should emit a normal call, otherwise
3279 try to get the result in TARGET, if convenient (and in mode MODE if that's
3280 convenient). */
3282 static rtx
3283 expand_builtin_strcpy (tree exp, rtx target)
3285 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 1);
3289 return expand_builtin_strcpy_args (dest, src, target);
3291 return NULL_RTX;
3294 /* Helper function to do the actual work for expand_builtin_strcpy. The
3295 arguments to the builtin_strcpy call DEST and SRC are broken out
3296 so that this can also be called without constructing an actual CALL_EXPR.
3297 The other arguments and return value are the same as for
3298 expand_builtin_strcpy. */
3300 static rtx
3301 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3303 return expand_movstr (dest, src, target, /*endp=*/0);
3306 /* Expand a call EXP to the stpcpy builtin.
3307 Return NULL_RTX if we failed the caller should emit a normal call,
3308 otherwise try to get the result in TARGET, if convenient (and in
3309 mode MODE if that's convenient). */
3311 static rtx
3312 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3314 tree dst, src;
3315 location_t loc = EXPR_LOCATION (exp);
3317 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3318 return NULL_RTX;
3320 dst = CALL_EXPR_ARG (exp, 0);
3321 src = CALL_EXPR_ARG (exp, 1);
3323 /* If return value is ignored, transform stpcpy into strcpy. */
3324 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3326 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3327 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3328 return expand_expr (result, target, mode, EXPAND_NORMAL);
3330 else
3332 tree len, lenp1;
3333 rtx ret;
3335 /* Ensure we get an actual string whose length can be evaluated at
3336 compile-time, not an expression containing a string. This is
3337 because the latter will potentially produce pessimized code
3338 when used to produce the return value. */
3339 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3340 return expand_movstr (dst, src, target, /*endp=*/2);
3342 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3343 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3344 target, mode, /*endp=*/2);
3346 if (ret)
3347 return ret;
3349 if (TREE_CODE (len) == INTEGER_CST)
3351 rtx len_rtx = expand_normal (len);
3353 if (CONST_INT_P (len_rtx))
3355 ret = expand_builtin_strcpy_args (dst, src, target);
3357 if (ret)
3359 if (! target)
3361 if (mode != VOIDmode)
3362 target = gen_reg_rtx (mode);
3363 else
3364 target = gen_reg_rtx (GET_MODE (ret));
3366 if (GET_MODE (target) != GET_MODE (ret))
3367 ret = gen_lowpart (GET_MODE (target), ret);
3369 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3370 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3371 gcc_assert (ret);
3373 return target;
3378 return expand_movstr (dst, src, target, /*endp=*/2);
3382 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3383 bytes from constant string DATA + OFFSET and return it as target
3384 constant. */
3387 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3388 enum machine_mode mode)
3390 const char *str = (const char *) data;
3392 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3393 return const0_rtx;
3395 return c_readstr (str + offset, mode);
3398 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3399 NULL_RTX if we failed the caller should emit a normal call. */
3401 static rtx
3402 expand_builtin_strncpy (tree exp, rtx target)
3404 location_t loc = EXPR_LOCATION (exp);
3406 if (validate_arglist (exp,
3407 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3409 tree dest = CALL_EXPR_ARG (exp, 0);
3410 tree src = CALL_EXPR_ARG (exp, 1);
3411 tree len = CALL_EXPR_ARG (exp, 2);
3412 tree slen = c_strlen (src, 1);
3414 /* We must be passed a constant len and src parameter. */
3415 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3416 return NULL_RTX;
3418 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3420 /* We're required to pad with trailing zeros if the requested
3421 len is greater than strlen(s2)+1. In that case try to
3422 use store_by_pieces, if it fails, punt. */
3423 if (tree_int_cst_lt (slen, len))
3425 unsigned int dest_align = get_pointer_alignment (dest);
3426 const char *p = c_getstr (src);
3427 rtx dest_mem;
3429 if (!p || dest_align == 0 || !host_integerp (len, 1)
3430 || !can_store_by_pieces (tree_low_cst (len, 1),
3431 builtin_strncpy_read_str,
3432 CONST_CAST (char *, p),
3433 dest_align, false))
3434 return NULL_RTX;
3436 dest_mem = get_memory_rtx (dest, len);
3437 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3438 builtin_strncpy_read_str,
3439 CONST_CAST (char *, p), dest_align, false, 0);
3440 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3441 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3442 return dest_mem;
3445 return NULL_RTX;
3448 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3449 bytes from constant string DATA + OFFSET and return it as target
3450 constant. */
3453 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3454 enum machine_mode mode)
3456 const char *c = (const char *) data;
3457 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3459 memset (p, *c, GET_MODE_SIZE (mode));
3461 return c_readstr (p, mode);
3464 /* Callback routine for store_by_pieces. Return the RTL of a register
3465 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3466 char value given in the RTL register data. For example, if mode is
3467 4 bytes wide, return the RTL for 0x01010101*data. */
3469 static rtx
3470 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3471 enum machine_mode mode)
3473 rtx target, coeff;
3474 size_t size;
3475 char *p;
3477 size = GET_MODE_SIZE (mode);
3478 if (size == 1)
3479 return (rtx) data;
3481 p = XALLOCAVEC (char, size);
3482 memset (p, 1, size);
3483 coeff = c_readstr (p, mode);
3485 target = convert_to_mode (mode, (rtx) data, 1);
3486 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3487 return force_reg (mode, target);
3490 /* Expand expression EXP, which is a call to the memset builtin. Return
3491 NULL_RTX if we failed the caller should emit a normal call, otherwise
3492 try to get the result in TARGET, if convenient (and in mode MODE if that's
3493 convenient). */
3495 static rtx
3496 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3498 if (!validate_arglist (exp,
3499 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3500 return NULL_RTX;
3501 else
3503 tree dest = CALL_EXPR_ARG (exp, 0);
3504 tree val = CALL_EXPR_ARG (exp, 1);
3505 tree len = CALL_EXPR_ARG (exp, 2);
3506 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3510 /* Helper function to do the actual work for expand_builtin_memset. The
3511 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3512 so that this can also be called without constructing an actual CALL_EXPR.
3513 The other arguments and return value are the same as for
3514 expand_builtin_memset. */
3516 static rtx
3517 expand_builtin_memset_args (tree dest, tree val, tree len,
3518 rtx target, enum machine_mode mode, tree orig_exp)
3520 tree fndecl, fn;
3521 enum built_in_function fcode;
3522 enum machine_mode val_mode;
3523 char c;
3524 unsigned int dest_align;
3525 rtx dest_mem, dest_addr, len_rtx;
3526 HOST_WIDE_INT expected_size = -1;
3527 unsigned int expected_align = 0;
3529 dest_align = get_pointer_alignment (dest);
3531 /* If DEST is not a pointer type, don't do this operation in-line. */
3532 if (dest_align == 0)
3533 return NULL_RTX;
3535 if (currently_expanding_gimple_stmt)
3536 stringop_block_profile (currently_expanding_gimple_stmt,
3537 &expected_align, &expected_size);
3539 if (expected_align < dest_align)
3540 expected_align = dest_align;
3542 /* If the LEN parameter is zero, return DEST. */
3543 if (integer_zerop (len))
3545 /* Evaluate and ignore VAL in case it has side-effects. */
3546 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3547 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3550 /* Stabilize the arguments in case we fail. */
3551 dest = builtin_save_expr (dest);
3552 val = builtin_save_expr (val);
3553 len = builtin_save_expr (len);
3555 len_rtx = expand_normal (len);
3556 dest_mem = get_memory_rtx (dest, len);
3557 val_mode = TYPE_MODE (unsigned_char_type_node);
3559 if (TREE_CODE (val) != INTEGER_CST)
3561 rtx val_rtx;
3563 val_rtx = expand_normal (val);
3564 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3566 /* Assume that we can memset by pieces if we can store
3567 * the coefficients by pieces (in the required modes).
3568 * We can't pass builtin_memset_gen_str as that emits RTL. */
3569 c = 1;
3570 if (host_integerp (len, 1)
3571 && can_store_by_pieces (tree_low_cst (len, 1),
3572 builtin_memset_read_str, &c, dest_align,
3573 true))
3575 val_rtx = force_reg (val_mode, val_rtx);
3576 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3577 builtin_memset_gen_str, val_rtx, dest_align,
3578 true, 0);
3580 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3581 dest_align, expected_align,
3582 expected_size))
3583 goto do_libcall;
3585 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3587 return dest_mem;
3590 if (target_char_cast (val, &c))
3591 goto do_libcall;
3593 if (c)
3595 if (host_integerp (len, 1)
3596 && can_store_by_pieces (tree_low_cst (len, 1),
3597 builtin_memset_read_str, &c, dest_align,
3598 true))
3599 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3600 builtin_memset_read_str, &c, dest_align, true, 0);
3601 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3602 gen_int_mode (c, val_mode),
3603 dest_align, expected_align,
3604 expected_size))
3605 goto do_libcall;
3607 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3608 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3609 return dest_mem;
3612 set_mem_align (dest_mem, dest_align);
3613 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3614 CALL_EXPR_TAILCALL (orig_exp)
3615 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3616 expected_align, expected_size);
3618 if (dest_addr == 0)
3620 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3621 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3624 return dest_addr;
3626 do_libcall:
3627 fndecl = get_callee_fndecl (orig_exp);
3628 fcode = DECL_FUNCTION_CODE (fndecl);
3629 if (fcode == BUILT_IN_MEMSET)
3630 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3631 dest, val, len);
3632 else if (fcode == BUILT_IN_BZERO)
3633 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3634 dest, len);
3635 else
3636 gcc_unreachable ();
3637 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3638 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3639 return expand_call (fn, target, target == const0_rtx);
3642 /* Expand expression EXP, which is a call to the bzero builtin. Return
3643 NULL_RTX if we failed the caller should emit a normal call. */
3645 static rtx
3646 expand_builtin_bzero (tree exp)
3648 tree dest, size;
3649 location_t loc = EXPR_LOCATION (exp);
3651 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3652 return NULL_RTX;
3654 dest = CALL_EXPR_ARG (exp, 0);
3655 size = CALL_EXPR_ARG (exp, 1);
3657 /* New argument list transforming bzero(ptr x, int y) to
3658 memset(ptr x, int 0, size_t y). This is done this way
3659 so that if it isn't expanded inline, we fallback to
3660 calling bzero instead of memset. */
3662 return expand_builtin_memset_args (dest, integer_zero_node,
3663 fold_convert_loc (loc,
3664 size_type_node, size),
3665 const0_rtx, VOIDmode, exp);
3668 /* Expand expression EXP, which is a call to the memcmp built-in function.
3669 Return NULL_RTX if we failed and the caller should emit a normal call,
3670 otherwise try to get the result in TARGET, if convenient (and in mode
3671 MODE, if that's convenient). */
3673 static rtx
3674 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3675 ATTRIBUTE_UNUSED enum machine_mode mode)
3677 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3679 if (!validate_arglist (exp,
3680 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3681 return NULL_RTX;
3683 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3684 implementing memcmp because it will stop if it encounters two
3685 zero bytes. */
3686 #if defined HAVE_cmpmemsi
3688 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3689 rtx result;
3690 rtx insn;
3691 tree arg1 = CALL_EXPR_ARG (exp, 0);
3692 tree arg2 = CALL_EXPR_ARG (exp, 1);
3693 tree len = CALL_EXPR_ARG (exp, 2);
3695 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3696 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3697 enum machine_mode insn_mode;
3699 if (HAVE_cmpmemsi)
3700 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3701 else
3702 return NULL_RTX;
3704 /* If we don't have POINTER_TYPE, call the function. */
3705 if (arg1_align == 0 || arg2_align == 0)
3706 return NULL_RTX;
3708 /* Make a place to write the result of the instruction. */
3709 result = target;
3710 if (! (result != 0
3711 && REG_P (result) && GET_MODE (result) == insn_mode
3712 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3713 result = gen_reg_rtx (insn_mode);
3715 arg1_rtx = get_memory_rtx (arg1, len);
3716 arg2_rtx = get_memory_rtx (arg2, len);
3717 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3719 /* Set MEM_SIZE as appropriate. */
3720 if (CONST_INT_P (arg3_rtx))
3722 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3723 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3726 if (HAVE_cmpmemsi)
3727 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3728 GEN_INT (MIN (arg1_align, arg2_align)));
3729 else
3730 gcc_unreachable ();
3732 if (insn)
3733 emit_insn (insn);
3734 else
3735 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3736 TYPE_MODE (integer_type_node), 3,
3737 XEXP (arg1_rtx, 0), Pmode,
3738 XEXP (arg2_rtx, 0), Pmode,
3739 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3740 TYPE_UNSIGNED (sizetype)),
3741 TYPE_MODE (sizetype));
3743 /* Return the value in the proper mode for this function. */
3744 mode = TYPE_MODE (TREE_TYPE (exp));
3745 if (GET_MODE (result) == mode)
3746 return result;
3747 else if (target != 0)
3749 convert_move (target, result, 0);
3750 return target;
3752 else
3753 return convert_to_mode (mode, result, 0);
3755 #endif /* HAVE_cmpmemsi. */
3757 return NULL_RTX;
3760 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3761 if we failed the caller should emit a normal call, otherwise try to get
3762 the result in TARGET, if convenient. */
3764 static rtx
3765 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3767 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
3770 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3771 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3772 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3774 rtx arg1_rtx, arg2_rtx;
3775 rtx result, insn = NULL_RTX;
3776 tree fndecl, fn;
3777 tree arg1 = CALL_EXPR_ARG (exp, 0);
3778 tree arg2 = CALL_EXPR_ARG (exp, 1);
3780 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3781 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3783 /* If we don't have POINTER_TYPE, call the function. */
3784 if (arg1_align == 0 || arg2_align == 0)
3785 return NULL_RTX;
3787 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3788 arg1 = builtin_save_expr (arg1);
3789 arg2 = builtin_save_expr (arg2);
3791 arg1_rtx = get_memory_rtx (arg1, NULL);
3792 arg2_rtx = get_memory_rtx (arg2, NULL);
3794 #ifdef HAVE_cmpstrsi
3795 /* Try to call cmpstrsi. */
3796 if (HAVE_cmpstrsi)
3798 enum machine_mode insn_mode
3799 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3801 /* Make a place to write the result of the instruction. */
3802 result = target;
3803 if (! (result != 0
3804 && REG_P (result) && GET_MODE (result) == insn_mode
3805 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3806 result = gen_reg_rtx (insn_mode);
3808 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3809 GEN_INT (MIN (arg1_align, arg2_align)));
3811 #endif
3812 #ifdef HAVE_cmpstrnsi
3813 /* Try to determine at least one length and call cmpstrnsi. */
3814 if (!insn && HAVE_cmpstrnsi)
3816 tree len;
3817 rtx arg3_rtx;
3819 enum machine_mode insn_mode
3820 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3821 tree len1 = c_strlen (arg1, 1);
3822 tree len2 = c_strlen (arg2, 1);
3824 if (len1)
3825 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3826 if (len2)
3827 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3829 /* If we don't have a constant length for the first, use the length
3830 of the second, if we know it. We don't require a constant for
3831 this case; some cost analysis could be done if both are available
3832 but neither is constant. For now, assume they're equally cheap,
3833 unless one has side effects. If both strings have constant lengths,
3834 use the smaller. */
3836 if (!len1)
3837 len = len2;
3838 else if (!len2)
3839 len = len1;
3840 else if (TREE_SIDE_EFFECTS (len1))
3841 len = len2;
3842 else if (TREE_SIDE_EFFECTS (len2))
3843 len = len1;
3844 else if (TREE_CODE (len1) != INTEGER_CST)
3845 len = len2;
3846 else if (TREE_CODE (len2) != INTEGER_CST)
3847 len = len1;
3848 else if (tree_int_cst_lt (len1, len2))
3849 len = len1;
3850 else
3851 len = len2;
3853 /* If both arguments have side effects, we cannot optimize. */
3854 if (!len || TREE_SIDE_EFFECTS (len))
3855 goto do_libcall;
3857 arg3_rtx = expand_normal (len);
3859 /* Make a place to write the result of the instruction. */
3860 result = target;
3861 if (! (result != 0
3862 && REG_P (result) && GET_MODE (result) == insn_mode
3863 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3864 result = gen_reg_rtx (insn_mode);
3866 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3867 GEN_INT (MIN (arg1_align, arg2_align)));
3869 #endif
3871 if (insn)
3873 enum machine_mode mode;
3874 emit_insn (insn);
3876 /* Return the value in the proper mode for this function. */
3877 mode = TYPE_MODE (TREE_TYPE (exp));
3878 if (GET_MODE (result) == mode)
3879 return result;
3880 if (target == 0)
3881 return convert_to_mode (mode, result, 0);
3882 convert_move (target, result, 0);
3883 return target;
3886 /* Expand the library call ourselves using a stabilized argument
3887 list to avoid re-evaluating the function's arguments twice. */
3888 #ifdef HAVE_cmpstrnsi
3889 do_libcall:
3890 #endif
3891 fndecl = get_callee_fndecl (exp);
3892 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3893 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3894 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3895 return expand_call (fn, target, target == const0_rtx);
3897 #endif
3898 return NULL_RTX;
3901 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3902 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3903 the result in TARGET, if convenient. */
3905 static rtx
3906 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3907 ATTRIBUTE_UNUSED enum machine_mode mode)
3909 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3911 if (!validate_arglist (exp,
3912 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3915 /* If c_strlen can determine an expression for one of the string
3916 lengths, and it doesn't have side effects, then emit cmpstrnsi
3917 using length MIN(strlen(string)+1, arg3). */
3918 #ifdef HAVE_cmpstrnsi
3919 if (HAVE_cmpstrnsi)
3921 tree len, len1, len2;
3922 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3923 rtx result, insn;
3924 tree fndecl, fn;
3925 tree arg1 = CALL_EXPR_ARG (exp, 0);
3926 tree arg2 = CALL_EXPR_ARG (exp, 1);
3927 tree arg3 = CALL_EXPR_ARG (exp, 2);
3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3931 enum machine_mode insn_mode
3932 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3934 len1 = c_strlen (arg1, 1);
3935 len2 = c_strlen (arg2, 1);
3937 if (len1)
3938 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3939 if (len2)
3940 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3942 /* If we don't have a constant length for the first, use the length
3943 of the second, if we know it. We don't require a constant for
3944 this case; some cost analysis could be done if both are available
3945 but neither is constant. For now, assume they're equally cheap,
3946 unless one has side effects. If both strings have constant lengths,
3947 use the smaller. */
3949 if (!len1)
3950 len = len2;
3951 else if (!len2)
3952 len = len1;
3953 else if (TREE_SIDE_EFFECTS (len1))
3954 len = len2;
3955 else if (TREE_SIDE_EFFECTS (len2))
3956 len = len1;
3957 else if (TREE_CODE (len1) != INTEGER_CST)
3958 len = len2;
3959 else if (TREE_CODE (len2) != INTEGER_CST)
3960 len = len1;
3961 else if (tree_int_cst_lt (len1, len2))
3962 len = len1;
3963 else
3964 len = len2;
3966 /* If both arguments have side effects, we cannot optimize. */
3967 if (!len || TREE_SIDE_EFFECTS (len))
3968 return NULL_RTX;
3970 /* The actual new length parameter is MIN(len,arg3). */
3971 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3972 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3974 /* If we don't have POINTER_TYPE, call the function. */
3975 if (arg1_align == 0 || arg2_align == 0)
3976 return NULL_RTX;
3978 /* Make a place to write the result of the instruction. */
3979 result = target;
3980 if (! (result != 0
3981 && REG_P (result) && GET_MODE (result) == insn_mode
3982 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3983 result = gen_reg_rtx (insn_mode);
3985 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3986 arg1 = builtin_save_expr (arg1);
3987 arg2 = builtin_save_expr (arg2);
3988 len = builtin_save_expr (len);
3990 arg1_rtx = get_memory_rtx (arg1, len);
3991 arg2_rtx = get_memory_rtx (arg2, len);
3992 arg3_rtx = expand_normal (len);
3993 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3994 GEN_INT (MIN (arg1_align, arg2_align)));
3995 if (insn)
3997 emit_insn (insn);
3999 /* Return the value in the proper mode for this function. */
4000 mode = TYPE_MODE (TREE_TYPE (exp));
4001 if (GET_MODE (result) == mode)
4002 return result;
4003 if (target == 0)
4004 return convert_to_mode (mode, result, 0);
4005 convert_move (target, result, 0);
4006 return target;
4009 /* Expand the library call ourselves using a stabilized argument
4010 list to avoid re-evaluating the function's arguments twice. */
4011 fndecl = get_callee_fndecl (exp);
4012 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4013 arg1, arg2, len);
4014 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4015 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4016 return expand_call (fn, target, target == const0_rtx);
4018 #endif
4019 return NULL_RTX;
4022 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4023 if that's convenient. */
4026 expand_builtin_saveregs (void)
4028 rtx val, seq;
4030 /* Don't do __builtin_saveregs more than once in a function.
4031 Save the result of the first call and reuse it. */
4032 if (saveregs_value != 0)
4033 return saveregs_value;
4035 /* When this function is called, it means that registers must be
4036 saved on entry to this function. So we migrate the call to the
4037 first insn of this function. */
4039 start_sequence ();
4041 /* Do whatever the machine needs done in this case. */
4042 val = targetm.calls.expand_builtin_saveregs ();
4044 seq = get_insns ();
4045 end_sequence ();
4047 saveregs_value = val;
4049 /* Put the insns after the NOTE that starts the function. If this
4050 is inside a start_sequence, make the outer-level insn chain current, so
4051 the code is placed at the start of the function. */
4052 push_topmost_sequence ();
4053 emit_insn_after (seq, entry_of_function ());
4054 pop_topmost_sequence ();
4056 return val;
4059 /* Expand a call to __builtin_next_arg. */
4061 static rtx
4062 expand_builtin_next_arg (void)
4064 /* Checking arguments is already done in fold_builtin_next_arg
4065 that must be called before this function. */
4066 return expand_binop (ptr_mode, add_optab,
4067 crtl->args.internal_arg_pointer,
4068 crtl->args.arg_offset_rtx,
4069 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4072 /* Make it easier for the backends by protecting the valist argument
4073 from multiple evaluations. */
4075 static tree
4076 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4078 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4080 /* The current way of determining the type of valist is completely
4081 bogus. We should have the information on the va builtin instead. */
4082 if (!vatype)
4083 vatype = targetm.fn_abi_va_list (cfun->decl);
4085 if (TREE_CODE (vatype) == ARRAY_TYPE)
4087 if (TREE_SIDE_EFFECTS (valist))
4088 valist = save_expr (valist);
4090 /* For this case, the backends will be expecting a pointer to
4091 vatype, but it's possible we've actually been given an array
4092 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4093 So fix it. */
4094 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4096 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4097 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4100 else
4102 tree pt = build_pointer_type (vatype);
4104 if (! needs_lvalue)
4106 if (! TREE_SIDE_EFFECTS (valist))
4107 return valist;
4109 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4110 TREE_SIDE_EFFECTS (valist) = 1;
4113 if (TREE_SIDE_EFFECTS (valist))
4114 valist = save_expr (valist);
4115 valist = fold_build2_loc (loc, MEM_REF,
4116 vatype, valist, build_int_cst (pt, 0));
4119 return valist;
4122 /* The "standard" definition of va_list is void*. */
4124 tree
4125 std_build_builtin_va_list (void)
4127 return ptr_type_node;
4130 /* The "standard" abi va_list is va_list_type_node. */
4132 tree
4133 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4135 return va_list_type_node;
4138 /* The "standard" type of va_list is va_list_type_node. */
4140 tree
4141 std_canonical_va_list_type (tree type)
4143 tree wtype, htype;
4145 if (INDIRECT_REF_P (type))
4146 type = TREE_TYPE (type);
4147 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4148 type = TREE_TYPE (type);
4149 wtype = va_list_type_node;
4150 htype = type;
4151 /* Treat structure va_list types. */
4152 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4153 htype = TREE_TYPE (htype);
4154 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4156 /* If va_list is an array type, the argument may have decayed
4157 to a pointer type, e.g. by being passed to another function.
4158 In that case, unwrap both types so that we can compare the
4159 underlying records. */
4160 if (TREE_CODE (htype) == ARRAY_TYPE
4161 || POINTER_TYPE_P (htype))
4163 wtype = TREE_TYPE (wtype);
4164 htype = TREE_TYPE (htype);
4167 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4168 return va_list_type_node;
4170 return NULL_TREE;
4173 /* The "standard" implementation of va_start: just assign `nextarg' to
4174 the variable. */
4176 void
4177 std_expand_builtin_va_start (tree valist, rtx nextarg)
4179 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4180 convert_move (va_r, nextarg, 0);
4183 /* Expand EXP, a call to __builtin_va_start. */
4185 static rtx
4186 expand_builtin_va_start (tree exp)
4188 rtx nextarg;
4189 tree valist;
4190 location_t loc = EXPR_LOCATION (exp);
4192 if (call_expr_nargs (exp) < 2)
4194 error_at (loc, "too few arguments to function %<va_start%>");
4195 return const0_rtx;
4198 if (fold_builtin_next_arg (exp, true))
4199 return const0_rtx;
4201 nextarg = expand_builtin_next_arg ();
4202 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4204 if (targetm.expand_builtin_va_start)
4205 targetm.expand_builtin_va_start (valist, nextarg);
4206 else
4207 std_expand_builtin_va_start (valist, nextarg);
4209 return const0_rtx;
4212 /* The "standard" implementation of va_arg: read the value from the
4213 current (padded) address and increment by the (padded) size. */
4215 tree
4216 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4217 gimple_seq *post_p)
4219 tree addr, t, type_size, rounded_size, valist_tmp;
4220 unsigned HOST_WIDE_INT align, boundary;
4221 bool indirect;
4223 #ifdef ARGS_GROW_DOWNWARD
4224 /* All of the alignment and movement below is for args-grow-up machines.
4225 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4226 implement their own specialized gimplify_va_arg_expr routines. */
4227 gcc_unreachable ();
4228 #endif
4230 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4231 if (indirect)
4232 type = build_pointer_type (type);
4234 align = PARM_BOUNDARY / BITS_PER_UNIT;
4235 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4237 /* When we align parameter on stack for caller, if the parameter
4238 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4239 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4240 here with caller. */
4241 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4242 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4244 boundary /= BITS_PER_UNIT;
4246 /* Hoist the valist value into a temporary for the moment. */
4247 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4249 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4250 requires greater alignment, we must perform dynamic alignment. */
4251 if (boundary > align
4252 && !integer_zerop (TYPE_SIZE (type)))
4254 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4255 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4256 gimplify_and_add (t, pre_p);
4258 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4259 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4260 valist_tmp,
4261 build_int_cst (TREE_TYPE (valist), -boundary)));
4262 gimplify_and_add (t, pre_p);
4264 else
4265 boundary = align;
4267 /* If the actual alignment is less than the alignment of the type,
4268 adjust the type accordingly so that we don't assume strict alignment
4269 when dereferencing the pointer. */
4270 boundary *= BITS_PER_UNIT;
4271 if (boundary < TYPE_ALIGN (type))
4273 type = build_variant_type_copy (type);
4274 TYPE_ALIGN (type) = boundary;
4277 /* Compute the rounded size of the type. */
4278 type_size = size_in_bytes (type);
4279 rounded_size = round_up (type_size, align);
4281 /* Reduce rounded_size so it's sharable with the postqueue. */
4282 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4284 /* Get AP. */
4285 addr = valist_tmp;
4286 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4288 /* Small args are padded downward. */
4289 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4290 rounded_size, size_int (align));
4291 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4292 size_binop (MINUS_EXPR, rounded_size, type_size));
4293 addr = fold_build_pointer_plus (addr, t);
4296 /* Compute new value for AP. */
4297 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4298 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4299 gimplify_and_add (t, pre_p);
4301 addr = fold_convert (build_pointer_type (type), addr);
4303 if (indirect)
4304 addr = build_va_arg_indirect_ref (addr);
4306 return build_va_arg_indirect_ref (addr);
4309 /* Build an indirect-ref expression over the given TREE, which represents a
4310 piece of a va_arg() expansion. */
4311 tree
4312 build_va_arg_indirect_ref (tree addr)
4314 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4316 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4317 mf_mark (addr);
4319 return addr;
4322 /* Return a dummy expression of type TYPE in order to keep going after an
4323 error. */
4325 static tree
4326 dummy_object (tree type)
4328 tree t = build_int_cst (build_pointer_type (type), 0);
4329 return build2 (MEM_REF, type, t, t);
4332 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4333 builtin function, but a very special sort of operator. */
4335 enum gimplify_status
4336 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4338 tree promoted_type, have_va_type;
4339 tree valist = TREE_OPERAND (*expr_p, 0);
4340 tree type = TREE_TYPE (*expr_p);
4341 tree t;
4342 location_t loc = EXPR_LOCATION (*expr_p);
4344 /* Verify that valist is of the proper type. */
4345 have_va_type = TREE_TYPE (valist);
4346 if (have_va_type == error_mark_node)
4347 return GS_ERROR;
4348 have_va_type = targetm.canonical_va_list_type (have_va_type);
4350 if (have_va_type == NULL_TREE)
4352 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4353 return GS_ERROR;
4356 /* Generate a diagnostic for requesting data of a type that cannot
4357 be passed through `...' due to type promotion at the call site. */
4358 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4359 != type)
4361 static bool gave_help;
4362 bool warned;
4364 /* Unfortunately, this is merely undefined, rather than a constraint
4365 violation, so we cannot make this an error. If this call is never
4366 executed, the program is still strictly conforming. */
4367 warned = warning_at (loc, 0,
4368 "%qT is promoted to %qT when passed through %<...%>",
4369 type, promoted_type);
4370 if (!gave_help && warned)
4372 gave_help = true;
4373 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4374 promoted_type, type);
4377 /* We can, however, treat "undefined" any way we please.
4378 Call abort to encourage the user to fix the program. */
4379 if (warned)
4380 inform (loc, "if this code is reached, the program will abort");
4381 /* Before the abort, allow the evaluation of the va_list
4382 expression to exit or longjmp. */
4383 gimplify_and_add (valist, pre_p);
4384 t = build_call_expr_loc (loc,
4385 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4386 gimplify_and_add (t, pre_p);
4388 /* This is dead code, but go ahead and finish so that the
4389 mode of the result comes out right. */
4390 *expr_p = dummy_object (type);
4391 return GS_ALL_DONE;
4393 else
4395 /* Make it easier for the backends by protecting the valist argument
4396 from multiple evaluations. */
4397 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4399 /* For this case, the backends will be expecting a pointer to
4400 TREE_TYPE (abi), but it's possible we've
4401 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4402 So fix it. */
4403 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4405 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4406 valist = fold_convert_loc (loc, p1,
4407 build_fold_addr_expr_loc (loc, valist));
4410 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4412 else
4413 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4415 if (!targetm.gimplify_va_arg_expr)
4416 /* FIXME: Once most targets are converted we should merely
4417 assert this is non-null. */
4418 return GS_ALL_DONE;
4420 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4421 return GS_OK;
4425 /* Expand EXP, a call to __builtin_va_end. */
4427 static rtx
4428 expand_builtin_va_end (tree exp)
4430 tree valist = CALL_EXPR_ARG (exp, 0);
4432 /* Evaluate for side effects, if needed. I hate macros that don't
4433 do that. */
4434 if (TREE_SIDE_EFFECTS (valist))
4435 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4437 return const0_rtx;
4440 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4441 builtin rather than just as an assignment in stdarg.h because of the
4442 nastiness of array-type va_list types. */
4444 static rtx
4445 expand_builtin_va_copy (tree exp)
4447 tree dst, src, t;
4448 location_t loc = EXPR_LOCATION (exp);
4450 dst = CALL_EXPR_ARG (exp, 0);
4451 src = CALL_EXPR_ARG (exp, 1);
4453 dst = stabilize_va_list_loc (loc, dst, 1);
4454 src = stabilize_va_list_loc (loc, src, 0);
4456 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4458 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4460 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4461 TREE_SIDE_EFFECTS (t) = 1;
4462 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4464 else
4466 rtx dstb, srcb, size;
4468 /* Evaluate to pointers. */
4469 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4470 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4471 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4472 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4474 dstb = convert_memory_address (Pmode, dstb);
4475 srcb = convert_memory_address (Pmode, srcb);
4477 /* "Dereference" to BLKmode memories. */
4478 dstb = gen_rtx_MEM (BLKmode, dstb);
4479 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4480 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4481 srcb = gen_rtx_MEM (BLKmode, srcb);
4482 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4483 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4485 /* Copy. */
4486 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4489 return const0_rtx;
4492 /* Expand a call to one of the builtin functions __builtin_frame_address or
4493 __builtin_return_address. */
4495 static rtx
4496 expand_builtin_frame_address (tree fndecl, tree exp)
4498 /* The argument must be a nonnegative integer constant.
4499 It counts the number of frames to scan up the stack.
4500 The value is the return address saved in that frame. */
4501 if (call_expr_nargs (exp) == 0)
4502 /* Warning about missing arg was already issued. */
4503 return const0_rtx;
4504 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4506 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4507 error ("invalid argument to %<__builtin_frame_address%>");
4508 else
4509 error ("invalid argument to %<__builtin_return_address%>");
4510 return const0_rtx;
4512 else
4514 rtx tem
4515 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4516 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4518 /* Some ports cannot access arbitrary stack frames. */
4519 if (tem == NULL)
4521 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4522 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4523 else
4524 warning (0, "unsupported argument to %<__builtin_return_address%>");
4525 return const0_rtx;
4528 /* For __builtin_frame_address, return what we've got. */
4529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4530 return tem;
4532 if (!REG_P (tem)
4533 && ! CONSTANT_P (tem))
4534 tem = copy_addr_to_reg (tem);
4535 return tem;
4539 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4540 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4541 is the same as for allocate_dynamic_stack_space. */
4543 static rtx
4544 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4546 rtx op0;
4547 rtx result;
4548 bool valid_arglist;
4549 unsigned int align;
4550 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4551 == BUILT_IN_ALLOCA_WITH_ALIGN);
4553 /* Emit normal call if we use mudflap. */
4554 if (flag_mudflap)
4555 return NULL_RTX;
4557 valid_arglist
4558 = (alloca_with_align
4559 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4560 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4562 if (!valid_arglist)
4563 return NULL_RTX;
4565 /* Compute the argument. */
4566 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4568 /* Compute the alignment. */
4569 align = (alloca_with_align
4570 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4571 : BIGGEST_ALIGNMENT);
4573 /* Allocate the desired space. */
4574 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4575 result = convert_memory_address (ptr_mode, result);
4577 return result;
4580 /* Expand a call to bswap builtin in EXP.
4581 Return NULL_RTX if a normal call should be emitted rather than expanding the
4582 function in-line. If convenient, the result should be placed in TARGET.
4583 SUBTARGET may be used as the target for computing one of EXP's operands. */
4585 static rtx
4586 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4587 rtx subtarget)
4589 tree arg;
4590 rtx op0;
4592 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4593 return NULL_RTX;
4595 arg = CALL_EXPR_ARG (exp, 0);
4596 op0 = expand_expr (arg,
4597 subtarget && GET_MODE (subtarget) == target_mode
4598 ? subtarget : NULL_RTX,
4599 target_mode, EXPAND_NORMAL);
4600 if (GET_MODE (op0) != target_mode)
4601 op0 = convert_to_mode (target_mode, op0, 1);
4603 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4605 gcc_assert (target);
4607 return convert_to_mode (target_mode, target, 1);
4610 /* Expand a call to a unary builtin in EXP.
4611 Return NULL_RTX if a normal call should be emitted rather than expanding the
4612 function in-line. If convenient, the result should be placed in TARGET.
4613 SUBTARGET may be used as the target for computing one of EXP's operands. */
4615 static rtx
4616 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4617 rtx subtarget, optab op_optab)
4619 rtx op0;
4621 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4622 return NULL_RTX;
4624 /* Compute the argument. */
4625 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4626 (subtarget
4627 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4628 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4629 VOIDmode, EXPAND_NORMAL);
4630 /* Compute op, into TARGET if possible.
4631 Set TARGET to wherever the result comes back. */
4632 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4633 op_optab, op0, target, op_optab != clrsb_optab);
4634 gcc_assert (target);
4636 return convert_to_mode (target_mode, target, 0);
4639 /* Expand a call to __builtin_expect. We just return our argument
4640 as the builtin_expect semantic should've been already executed by
4641 tree branch prediction pass. */
4643 static rtx
4644 expand_builtin_expect (tree exp, rtx target)
4646 tree arg;
4648 if (call_expr_nargs (exp) < 2)
4649 return const0_rtx;
4650 arg = CALL_EXPR_ARG (exp, 0);
4652 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4653 /* When guessing was done, the hints should be already stripped away. */
4654 gcc_assert (!flag_guess_branch_prob
4655 || optimize == 0 || seen_error ());
4656 return target;
4659 /* Expand a call to __builtin_assume_aligned. We just return our first
4660 argument as the builtin_assume_aligned semantic should've been already
4661 executed by CCP. */
4663 static rtx
4664 expand_builtin_assume_aligned (tree exp, rtx target)
4666 if (call_expr_nargs (exp) < 2)
4667 return const0_rtx;
4668 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4669 EXPAND_NORMAL);
4670 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4671 && (call_expr_nargs (exp) < 3
4672 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4673 return target;
4676 void
4677 expand_builtin_trap (void)
4679 #ifdef HAVE_trap
4680 if (HAVE_trap)
4681 emit_insn (gen_trap ());
4682 else
4683 #endif
4684 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4685 emit_barrier ();
4688 /* Expand a call to __builtin_unreachable. We do nothing except emit
4689 a barrier saying that control flow will not pass here.
4691 It is the responsibility of the program being compiled to ensure
4692 that control flow does never reach __builtin_unreachable. */
4693 static void
4694 expand_builtin_unreachable (void)
4696 emit_barrier ();
4699 /* Expand EXP, a call to fabs, fabsf or fabsl.
4700 Return NULL_RTX if a normal call should be emitted rather than expanding
4701 the function inline. If convenient, the result should be placed
4702 in TARGET. SUBTARGET may be used as the target for computing
4703 the operand. */
4705 static rtx
4706 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4708 enum machine_mode mode;
4709 tree arg;
4710 rtx op0;
4712 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4713 return NULL_RTX;
4715 arg = CALL_EXPR_ARG (exp, 0);
4716 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4717 mode = TYPE_MODE (TREE_TYPE (arg));
4718 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4719 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4722 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4723 Return NULL is a normal call should be emitted rather than expanding the
4724 function inline. If convenient, the result should be placed in TARGET.
4725 SUBTARGET may be used as the target for computing the operand. */
4727 static rtx
4728 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4730 rtx op0, op1;
4731 tree arg;
4733 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4734 return NULL_RTX;
4736 arg = CALL_EXPR_ARG (exp, 0);
4737 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4739 arg = CALL_EXPR_ARG (exp, 1);
4740 op1 = expand_normal (arg);
4742 return expand_copysign (op0, op1, target);
4745 /* Create a new constant string literal and return a char* pointer to it.
4746 The STRING_CST value is the LEN characters at STR. */
4747 tree
4748 build_string_literal (int len, const char *str)
4750 tree t, elem, index, type;
4752 t = build_string (len, str);
4753 elem = build_type_variant (char_type_node, 1, 0);
4754 index = build_index_type (size_int (len - 1));
4755 type = build_array_type (elem, index);
4756 TREE_TYPE (t) = type;
4757 TREE_CONSTANT (t) = 1;
4758 TREE_READONLY (t) = 1;
4759 TREE_STATIC (t) = 1;
4761 type = build_pointer_type (elem);
4762 t = build1 (ADDR_EXPR, type,
4763 build4 (ARRAY_REF, elem,
4764 t, integer_zero_node, NULL_TREE, NULL_TREE));
4765 return t;
4768 /* Expand a call to __builtin___clear_cache. */
4770 static rtx
4771 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4773 #ifndef HAVE_clear_cache
4774 #ifdef CLEAR_INSN_CACHE
4775 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4776 does something. Just do the default expansion to a call to
4777 __clear_cache(). */
4778 return NULL_RTX;
4779 #else
4780 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4781 does nothing. There is no need to call it. Do nothing. */
4782 return const0_rtx;
4783 #endif /* CLEAR_INSN_CACHE */
4784 #else
4785 /* We have a "clear_cache" insn, and it will handle everything. */
4786 tree begin, end;
4787 rtx begin_rtx, end_rtx;
4789 /* We must not expand to a library call. If we did, any
4790 fallback library function in libgcc that might contain a call to
4791 __builtin___clear_cache() would recurse infinitely. */
4792 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4794 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4795 return const0_rtx;
4798 if (HAVE_clear_cache)
4800 struct expand_operand ops[2];
4802 begin = CALL_EXPR_ARG (exp, 0);
4803 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4805 end = CALL_EXPR_ARG (exp, 1);
4806 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4808 create_address_operand (&ops[0], begin_rtx);
4809 create_address_operand (&ops[1], end_rtx);
4810 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4811 return const0_rtx;
4813 return const0_rtx;
4814 #endif /* HAVE_clear_cache */
4817 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4819 static rtx
4820 round_trampoline_addr (rtx tramp)
4822 rtx temp, addend, mask;
4824 /* If we don't need too much alignment, we'll have been guaranteed
4825 proper alignment by get_trampoline_type. */
4826 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4827 return tramp;
4829 /* Round address up to desired boundary. */
4830 temp = gen_reg_rtx (Pmode);
4831 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4832 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4834 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4835 temp, 0, OPTAB_LIB_WIDEN);
4836 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4837 temp, 0, OPTAB_LIB_WIDEN);
4839 return tramp;
4842 static rtx
4843 expand_builtin_init_trampoline (tree exp, bool onstack)
4845 tree t_tramp, t_func, t_chain;
4846 rtx m_tramp, r_tramp, r_chain, tmp;
4848 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4849 POINTER_TYPE, VOID_TYPE))
4850 return NULL_RTX;
4852 t_tramp = CALL_EXPR_ARG (exp, 0);
4853 t_func = CALL_EXPR_ARG (exp, 1);
4854 t_chain = CALL_EXPR_ARG (exp, 2);
4856 r_tramp = expand_normal (t_tramp);
4857 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4858 MEM_NOTRAP_P (m_tramp) = 1;
4860 /* If ONSTACK, the TRAMP argument should be the address of a field
4861 within the local function's FRAME decl. Either way, let's see if
4862 we can fill in the MEM_ATTRs for this memory. */
4863 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4864 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4865 true, 0);
4867 /* Creator of a heap trampoline is responsible for making sure the
4868 address is aligned to at least STACK_BOUNDARY. Normally malloc
4869 will ensure this anyhow. */
4870 tmp = round_trampoline_addr (r_tramp);
4871 if (tmp != r_tramp)
4873 m_tramp = change_address (m_tramp, BLKmode, tmp);
4874 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4875 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4878 /* The FUNC argument should be the address of the nested function.
4879 Extract the actual function decl to pass to the hook. */
4880 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4881 t_func = TREE_OPERAND (t_func, 0);
4882 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4884 r_chain = expand_normal (t_chain);
4886 /* Generate insns to initialize the trampoline. */
4887 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4889 if (onstack)
4891 trampolines_created = 1;
4893 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4894 "trampoline generated for nested function %qD", t_func);
4897 return const0_rtx;
4900 static rtx
4901 expand_builtin_adjust_trampoline (tree exp)
4903 rtx tramp;
4905 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4906 return NULL_RTX;
4908 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4909 tramp = round_trampoline_addr (tramp);
4910 if (targetm.calls.trampoline_adjust_address)
4911 tramp = targetm.calls.trampoline_adjust_address (tramp);
4913 return tramp;
4916 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4917 function. The function first checks whether the back end provides
4918 an insn to implement signbit for the respective mode. If not, it
4919 checks whether the floating point format of the value is such that
4920 the sign bit can be extracted. If that is not the case, the
4921 function returns NULL_RTX to indicate that a normal call should be
4922 emitted rather than expanding the function in-line. EXP is the
4923 expression that is a call to the builtin function; if convenient,
4924 the result should be placed in TARGET. */
4925 static rtx
4926 expand_builtin_signbit (tree exp, rtx target)
4928 const struct real_format *fmt;
4929 enum machine_mode fmode, imode, rmode;
4930 tree arg;
4931 int word, bitpos;
4932 enum insn_code icode;
4933 rtx temp;
4934 location_t loc = EXPR_LOCATION (exp);
4936 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4937 return NULL_RTX;
4939 arg = CALL_EXPR_ARG (exp, 0);
4940 fmode = TYPE_MODE (TREE_TYPE (arg));
4941 rmode = TYPE_MODE (TREE_TYPE (exp));
4942 fmt = REAL_MODE_FORMAT (fmode);
4944 arg = builtin_save_expr (arg);
4946 /* Expand the argument yielding a RTX expression. */
4947 temp = expand_normal (arg);
4949 /* Check if the back end provides an insn that handles signbit for the
4950 argument's mode. */
4951 icode = optab_handler (signbit_optab, fmode);
4952 if (icode != CODE_FOR_nothing)
4954 rtx last = get_last_insn ();
4955 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4956 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4957 return target;
4958 delete_insns_since (last);
4961 /* For floating point formats without a sign bit, implement signbit
4962 as "ARG < 0.0". */
4963 bitpos = fmt->signbit_ro;
4964 if (bitpos < 0)
4966 /* But we can't do this if the format supports signed zero. */
4967 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4968 return NULL_RTX;
4970 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4971 build_real (TREE_TYPE (arg), dconst0));
4972 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4975 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4977 imode = int_mode_for_mode (fmode);
4978 if (imode == BLKmode)
4979 return NULL_RTX;
4980 temp = gen_lowpart (imode, temp);
4982 else
4984 imode = word_mode;
4985 /* Handle targets with different FP word orders. */
4986 if (FLOAT_WORDS_BIG_ENDIAN)
4987 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4988 else
4989 word = bitpos / BITS_PER_WORD;
4990 temp = operand_subword_force (temp, word, fmode);
4991 bitpos = bitpos % BITS_PER_WORD;
4994 /* Force the intermediate word_mode (or narrower) result into a
4995 register. This avoids attempting to create paradoxical SUBREGs
4996 of floating point modes below. */
4997 temp = force_reg (imode, temp);
4999 /* If the bitpos is within the "result mode" lowpart, the operation
5000 can be implement with a single bitwise AND. Otherwise, we need
5001 a right shift and an AND. */
5003 if (bitpos < GET_MODE_BITSIZE (rmode))
5005 double_int mask = double_int_zero.set_bit (bitpos);
5007 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5008 temp = gen_lowpart (rmode, temp);
5009 temp = expand_binop (rmode, and_optab, temp,
5010 immed_double_int_const (mask, rmode),
5011 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5013 else
5015 /* Perform a logical right shift to place the signbit in the least
5016 significant bit, then truncate the result to the desired mode
5017 and mask just this bit. */
5018 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5019 temp = gen_lowpart (rmode, temp);
5020 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5021 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5024 return temp;
5027 /* Expand fork or exec calls. TARGET is the desired target of the
5028 call. EXP is the call. FN is the
5029 identificator of the actual function. IGNORE is nonzero if the
5030 value is to be ignored. */
5032 static rtx
5033 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5035 tree id, decl;
5036 tree call;
5038 /* If we are not profiling, just call the function. */
5039 if (!profile_arc_flag)
5040 return NULL_RTX;
5042 /* Otherwise call the wrapper. This should be equivalent for the rest of
5043 compiler, so the code does not diverge, and the wrapper may run the
5044 code necessary for keeping the profiling sane. */
5046 switch (DECL_FUNCTION_CODE (fn))
5048 case BUILT_IN_FORK:
5049 id = get_identifier ("__gcov_fork");
5050 break;
5052 case BUILT_IN_EXECL:
5053 id = get_identifier ("__gcov_execl");
5054 break;
5056 case BUILT_IN_EXECV:
5057 id = get_identifier ("__gcov_execv");
5058 break;
5060 case BUILT_IN_EXECLP:
5061 id = get_identifier ("__gcov_execlp");
5062 break;
5064 case BUILT_IN_EXECLE:
5065 id = get_identifier ("__gcov_execle");
5066 break;
5068 case BUILT_IN_EXECVP:
5069 id = get_identifier ("__gcov_execvp");
5070 break;
5072 case BUILT_IN_EXECVE:
5073 id = get_identifier ("__gcov_execve");
5074 break;
5076 default:
5077 gcc_unreachable ();
5080 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5081 FUNCTION_DECL, id, TREE_TYPE (fn));
5082 DECL_EXTERNAL (decl) = 1;
5083 TREE_PUBLIC (decl) = 1;
5084 DECL_ARTIFICIAL (decl) = 1;
5085 TREE_NOTHROW (decl) = 1;
5086 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5087 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5088 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5089 return expand_call (call, target, ignore);
5094 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5095 the pointer in these functions is void*, the tree optimizers may remove
5096 casts. The mode computed in expand_builtin isn't reliable either, due
5097 to __sync_bool_compare_and_swap.
5099 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5100 group of builtins. This gives us log2 of the mode size. */
5102 static inline enum machine_mode
5103 get_builtin_sync_mode (int fcode_diff)
5105 /* The size is not negotiable, so ask not to get BLKmode in return
5106 if the target indicates that a smaller size would be better. */
5107 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5110 /* Expand the memory expression LOC and return the appropriate memory operand
5111 for the builtin_sync operations. */
5113 static rtx
5114 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5116 rtx addr, mem;
5118 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5119 addr = convert_memory_address (Pmode, addr);
5121 /* Note that we explicitly do not want any alias information for this
5122 memory, so that we kill all other live memories. Otherwise we don't
5123 satisfy the full barrier semantics of the intrinsic. */
5124 mem = validize_mem (gen_rtx_MEM (mode, addr));
5126 /* The alignment needs to be at least according to that of the mode. */
5127 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5128 get_pointer_alignment (loc)));
5129 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5130 MEM_VOLATILE_P (mem) = 1;
5132 return mem;
5135 /* Make sure an argument is in the right mode.
5136 EXP is the tree argument.
5137 MODE is the mode it should be in. */
5139 static rtx
5140 expand_expr_force_mode (tree exp, enum machine_mode mode)
5142 rtx val;
5143 enum machine_mode old_mode;
5145 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5146 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5147 of CONST_INTs, where we know the old_mode only from the call argument. */
5149 old_mode = GET_MODE (val);
5150 if (old_mode == VOIDmode)
5151 old_mode = TYPE_MODE (TREE_TYPE (exp));
5152 val = convert_modes (mode, old_mode, val, 1);
5153 return val;
5157 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5158 EXP is the CALL_EXPR. CODE is the rtx code
5159 that corresponds to the arithmetic or logical operation from the name;
5160 an exception here is that NOT actually means NAND. TARGET is an optional
5161 place for us to store the results; AFTER is true if this is the
5162 fetch_and_xxx form. */
5164 static rtx
5165 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5166 enum rtx_code code, bool after,
5167 rtx target)
5169 rtx val, mem;
5170 location_t loc = EXPR_LOCATION (exp);
5172 if (code == NOT && warn_sync_nand)
5174 tree fndecl = get_callee_fndecl (exp);
5175 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5177 static bool warned_f_a_n, warned_n_a_f;
5179 switch (fcode)
5181 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5182 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5183 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5184 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5185 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5186 if (warned_f_a_n)
5187 break;
5189 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5190 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5191 warned_f_a_n = true;
5192 break;
5194 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5195 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5196 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5197 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5198 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5199 if (warned_n_a_f)
5200 break;
5202 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5203 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5204 warned_n_a_f = true;
5205 break;
5207 default:
5208 gcc_unreachable ();
5212 /* Expand the operands. */
5213 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5214 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5216 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5217 after);
5220 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5221 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5222 true if this is the boolean form. TARGET is a place for us to store the
5223 results; this is NOT optional if IS_BOOL is true. */
5225 static rtx
5226 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5227 bool is_bool, rtx target)
5229 rtx old_val, new_val, mem;
5230 rtx *pbool, *poval;
5232 /* Expand the operands. */
5233 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5234 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5235 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5237 pbool = poval = NULL;
5238 if (target != const0_rtx)
5240 if (is_bool)
5241 pbool = &target;
5242 else
5243 poval = &target;
5245 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5246 false, MEMMODEL_SEQ_CST,
5247 MEMMODEL_SEQ_CST))
5248 return NULL_RTX;
5250 return target;
5253 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5254 general form is actually an atomic exchange, and some targets only
5255 support a reduced form with the second argument being a constant 1.
5256 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5257 the results. */
5259 static rtx
5260 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5261 rtx target)
5263 rtx val, mem;
5265 /* Expand the operands. */
5266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5269 return expand_sync_lock_test_and_set (target, mem, val);
5272 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5274 static void
5275 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5277 rtx mem;
5279 /* Expand the operands. */
5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5282 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5285 /* Given an integer representing an ``enum memmodel'', verify its
5286 correctness and return the memory model enum. */
5288 static enum memmodel
5289 get_memmodel (tree exp)
5291 rtx op;
5292 unsigned HOST_WIDE_INT val;
5294 /* If the parameter is not a constant, it's a run time value so we'll just
5295 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5296 if (TREE_CODE (exp) != INTEGER_CST)
5297 return MEMMODEL_SEQ_CST;
5299 op = expand_normal (exp);
5301 val = INTVAL (op);
5302 if (targetm.memmodel_check)
5303 val = targetm.memmodel_check (val);
5304 else if (val & ~MEMMODEL_MASK)
5306 warning (OPT_Winvalid_memory_model,
5307 "Unknown architecture specifier in memory model to builtin.");
5308 return MEMMODEL_SEQ_CST;
5311 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5313 warning (OPT_Winvalid_memory_model,
5314 "invalid memory model argument to builtin");
5315 return MEMMODEL_SEQ_CST;
5318 return (enum memmodel) val;
5321 /* Expand the __atomic_exchange intrinsic:
5322 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5323 EXP is the CALL_EXPR.
5324 TARGET is an optional place for us to store the results. */
5326 static rtx
5327 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5329 rtx val, mem;
5330 enum memmodel model;
5332 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5333 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5335 error ("invalid memory model for %<__atomic_exchange%>");
5336 return NULL_RTX;
5339 if (!flag_inline_atomics)
5340 return NULL_RTX;
5342 /* Expand the operands. */
5343 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5344 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5346 return expand_atomic_exchange (target, mem, val, model);
5349 /* Expand the __atomic_compare_exchange intrinsic:
5350 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5351 TYPE desired, BOOL weak,
5352 enum memmodel success,
5353 enum memmodel failure)
5354 EXP is the CALL_EXPR.
5355 TARGET is an optional place for us to store the results. */
5357 static rtx
5358 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5359 rtx target)
5361 rtx expect, desired, mem, oldval;
5362 enum memmodel success, failure;
5363 tree weak;
5364 bool is_weak;
5366 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5367 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5369 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5370 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5372 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5373 return NULL_RTX;
5376 if (failure > success)
5378 error ("failure memory model cannot be stronger than success "
5379 "memory model for %<__atomic_compare_exchange%>");
5380 return NULL_RTX;
5383 if (!flag_inline_atomics)
5384 return NULL_RTX;
5386 /* Expand the operands. */
5387 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5390 expect = convert_memory_address (Pmode, expect);
5391 expect = gen_rtx_MEM (mode, expect);
5392 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5394 weak = CALL_EXPR_ARG (exp, 3);
5395 is_weak = false;
5396 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5397 is_weak = true;
5399 oldval = expect;
5400 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5401 &oldval, mem, oldval, desired,
5402 is_weak, success, failure))
5403 return NULL_RTX;
5405 if (oldval != expect)
5406 emit_move_insn (expect, oldval);
5408 return target;
5411 /* Expand the __atomic_load intrinsic:
5412 TYPE __atomic_load (TYPE *object, enum memmodel)
5413 EXP is the CALL_EXPR.
5414 TARGET is an optional place for us to store the results. */
5416 static rtx
5417 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5419 rtx mem;
5420 enum memmodel model;
5422 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5423 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5424 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5426 error ("invalid memory model for %<__atomic_load%>");
5427 return NULL_RTX;
5430 if (!flag_inline_atomics)
5431 return NULL_RTX;
5433 /* Expand the operand. */
5434 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5436 return expand_atomic_load (target, mem, model);
5440 /* Expand the __atomic_store intrinsic:
5441 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5442 EXP is the CALL_EXPR.
5443 TARGET is an optional place for us to store the results. */
5445 static rtx
5446 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5448 rtx mem, val;
5449 enum memmodel model;
5451 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5452 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5453 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5454 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5456 error ("invalid memory model for %<__atomic_store%>");
5457 return NULL_RTX;
5460 if (!flag_inline_atomics)
5461 return NULL_RTX;
5463 /* Expand the operands. */
5464 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5465 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5467 return expand_atomic_store (mem, val, model, false);
5470 /* Expand the __atomic_fetch_XXX intrinsic:
5471 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5472 EXP is the CALL_EXPR.
5473 TARGET is an optional place for us to store the results.
5474 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5475 FETCH_AFTER is true if returning the result of the operation.
5476 FETCH_AFTER is false if returning the value before the operation.
5477 IGNORE is true if the result is not used.
5478 EXT_CALL is the correct builtin for an external call if this cannot be
5479 resolved to an instruction sequence. */
5481 static rtx
5482 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5483 enum rtx_code code, bool fetch_after,
5484 bool ignore, enum built_in_function ext_call)
5486 rtx val, mem, ret;
5487 enum memmodel model;
5488 tree fndecl;
5489 tree addr;
5491 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5493 /* Expand the operands. */
5494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5495 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497 /* Only try generating instructions if inlining is turned on. */
5498 if (flag_inline_atomics)
5500 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5501 if (ret)
5502 return ret;
5505 /* Return if a different routine isn't needed for the library call. */
5506 if (ext_call == BUILT_IN_NONE)
5507 return NULL_RTX;
5509 /* Change the call to the specified function. */
5510 fndecl = get_callee_fndecl (exp);
5511 addr = CALL_EXPR_FN (exp);
5512 STRIP_NOPS (addr);
5514 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5515 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5517 /* Expand the call here so we can emit trailing code. */
5518 ret = expand_call (exp, target, ignore);
5520 /* Replace the original function just in case it matters. */
5521 TREE_OPERAND (addr, 0) = fndecl;
5523 /* Then issue the arithmetic correction to return the right result. */
5524 if (!ignore)
5526 if (code == NOT)
5528 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5529 OPTAB_LIB_WIDEN);
5530 ret = expand_simple_unop (mode, NOT, ret, target, true);
5532 else
5533 ret = expand_simple_binop (mode, code, ret, val, target, true,
5534 OPTAB_LIB_WIDEN);
5536 return ret;
5540 #ifndef HAVE_atomic_clear
5541 # define HAVE_atomic_clear 0
5542 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5543 #endif
5545 /* Expand an atomic clear operation.
5546 void _atomic_clear (BOOL *obj, enum memmodel)
5547 EXP is the call expression. */
5549 static rtx
5550 expand_builtin_atomic_clear (tree exp)
5552 enum machine_mode mode;
5553 rtx mem, ret;
5554 enum memmodel model;
5556 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5558 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5560 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5561 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5563 error ("invalid memory model for %<__atomic_store%>");
5564 return const0_rtx;
5567 if (HAVE_atomic_clear)
5569 emit_insn (gen_atomic_clear (mem, model));
5570 return const0_rtx;
5573 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5574 Failing that, a store is issued by __atomic_store. The only way this can
5575 fail is if the bool type is larger than a word size. Unlikely, but
5576 handle it anyway for completeness. Assume a single threaded model since
5577 there is no atomic support in this case, and no barriers are required. */
5578 ret = expand_atomic_store (mem, const0_rtx, model, true);
5579 if (!ret)
5580 emit_move_insn (mem, const0_rtx);
5581 return const0_rtx;
5584 /* Expand an atomic test_and_set operation.
5585 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5586 EXP is the call expression. */
5588 static rtx
5589 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5591 rtx mem;
5592 enum memmodel model;
5593 enum machine_mode mode;
5595 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5596 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5597 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5599 return expand_atomic_test_and_set (target, mem, model);
5603 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5604 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5606 static tree
5607 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5609 int size;
5610 enum machine_mode mode;
5611 unsigned int mode_align, type_align;
5613 if (TREE_CODE (arg0) != INTEGER_CST)
5614 return NULL_TREE;
5616 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5617 mode = mode_for_size (size, MODE_INT, 0);
5618 mode_align = GET_MODE_ALIGNMENT (mode);
5620 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5621 type_align = mode_align;
5622 else
5624 tree ttype = TREE_TYPE (arg1);
5626 /* This function is usually invoked and folded immediately by the front
5627 end before anything else has a chance to look at it. The pointer
5628 parameter at this point is usually cast to a void *, so check for that
5629 and look past the cast. */
5630 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5631 && VOID_TYPE_P (TREE_TYPE (ttype)))
5632 arg1 = TREE_OPERAND (arg1, 0);
5634 ttype = TREE_TYPE (arg1);
5635 gcc_assert (POINTER_TYPE_P (ttype));
5637 /* Get the underlying type of the object. */
5638 ttype = TREE_TYPE (ttype);
5639 type_align = TYPE_ALIGN (ttype);
5642 /* If the object has smaller alignment, the the lock free routines cannot
5643 be used. */
5644 if (type_align < mode_align)
5645 return boolean_false_node;
5647 /* Check if a compare_and_swap pattern exists for the mode which represents
5648 the required size. The pattern is not allowed to fail, so the existence
5649 of the pattern indicates support is present. */
5650 if (can_compare_and_swap_p (mode, true))
5651 return boolean_true_node;
5652 else
5653 return boolean_false_node;
5656 /* Return true if the parameters to call EXP represent an object which will
5657 always generate lock free instructions. The first argument represents the
5658 size of the object, and the second parameter is a pointer to the object
5659 itself. If NULL is passed for the object, then the result is based on
5660 typical alignment for an object of the specified size. Otherwise return
5661 false. */
5663 static rtx
5664 expand_builtin_atomic_always_lock_free (tree exp)
5666 tree size;
5667 tree arg0 = CALL_EXPR_ARG (exp, 0);
5668 tree arg1 = CALL_EXPR_ARG (exp, 1);
5670 if (TREE_CODE (arg0) != INTEGER_CST)
5672 error ("non-constant argument 1 to __atomic_always_lock_free");
5673 return const0_rtx;
5676 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5677 if (size == boolean_true_node)
5678 return const1_rtx;
5679 return const0_rtx;
5682 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5683 is lock free on this architecture. */
5685 static tree
5686 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5688 if (!flag_inline_atomics)
5689 return NULL_TREE;
5691 /* If it isn't always lock free, don't generate a result. */
5692 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5693 return boolean_true_node;
5695 return NULL_TREE;
5698 /* Return true if the parameters to call EXP represent an object which will
5699 always generate lock free instructions. The first argument represents the
5700 size of the object, and the second parameter is a pointer to the object
5701 itself. If NULL is passed for the object, then the result is based on
5702 typical alignment for an object of the specified size. Otherwise return
5703 NULL*/
5705 static rtx
5706 expand_builtin_atomic_is_lock_free (tree exp)
5708 tree size;
5709 tree arg0 = CALL_EXPR_ARG (exp, 0);
5710 tree arg1 = CALL_EXPR_ARG (exp, 1);
5712 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5714 error ("non-integer argument 1 to __atomic_is_lock_free");
5715 return NULL_RTX;
5718 if (!flag_inline_atomics)
5719 return NULL_RTX;
5721 /* If the value is known at compile time, return the RTX for it. */
5722 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5723 if (size == boolean_true_node)
5724 return const1_rtx;
5726 return NULL_RTX;
5729 /* Expand the __atomic_thread_fence intrinsic:
5730 void __atomic_thread_fence (enum memmodel)
5731 EXP is the CALL_EXPR. */
5733 static void
5734 expand_builtin_atomic_thread_fence (tree exp)
5736 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5737 expand_mem_thread_fence (model);
5740 /* Expand the __atomic_signal_fence intrinsic:
5741 void __atomic_signal_fence (enum memmodel)
5742 EXP is the CALL_EXPR. */
5744 static void
5745 expand_builtin_atomic_signal_fence (tree exp)
5747 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5748 expand_mem_signal_fence (model);
5751 /* Expand the __sync_synchronize intrinsic. */
5753 static void
5754 expand_builtin_sync_synchronize (void)
5756 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5759 static rtx
5760 expand_builtin_thread_pointer (tree exp, rtx target)
5762 enum insn_code icode;
5763 if (!validate_arglist (exp, VOID_TYPE))
5764 return const0_rtx;
5765 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5766 if (icode != CODE_FOR_nothing)
5768 struct expand_operand op;
5769 if (!REG_P (target) || GET_MODE (target) != Pmode)
5770 target = gen_reg_rtx (Pmode);
5771 create_output_operand (&op, target, Pmode);
5772 expand_insn (icode, 1, &op);
5773 return target;
5775 error ("__builtin_thread_pointer is not supported on this target");
5776 return const0_rtx;
5779 static void
5780 expand_builtin_set_thread_pointer (tree exp)
5782 enum insn_code icode;
5783 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5784 return;
5785 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5786 if (icode != CODE_FOR_nothing)
5788 struct expand_operand op;
5789 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5790 Pmode, EXPAND_NORMAL);
5791 create_input_operand (&op, val, Pmode);
5792 expand_insn (icode, 1, &op);
5793 return;
5795 error ("__builtin_set_thread_pointer is not supported on this target");
5799 /* Expand an expression EXP that calls a built-in function,
5800 with result going to TARGET if that's convenient
5801 (and in mode MODE if that's convenient).
5802 SUBTARGET may be used as the target for computing one of EXP's operands.
5803 IGNORE is nonzero if the value is to be ignored. */
5806 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5807 int ignore)
5809 tree fndecl = get_callee_fndecl (exp);
5810 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5811 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5812 int flags;
5814 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5815 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5817 /* When not optimizing, generate calls to library functions for a certain
5818 set of builtins. */
5819 if (!optimize
5820 && !called_as_built_in (fndecl)
5821 && fcode != BUILT_IN_ALLOCA
5822 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5823 && fcode != BUILT_IN_FREE)
5824 return expand_call (exp, target, ignore);
5826 /* The built-in function expanders test for target == const0_rtx
5827 to determine whether the function's result will be ignored. */
5828 if (ignore)
5829 target = const0_rtx;
5831 /* If the result of a pure or const built-in function is ignored, and
5832 none of its arguments are volatile, we can avoid expanding the
5833 built-in call and just evaluate the arguments for side-effects. */
5834 if (target == const0_rtx
5835 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5836 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5838 bool volatilep = false;
5839 tree arg;
5840 call_expr_arg_iterator iter;
5842 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5843 if (TREE_THIS_VOLATILE (arg))
5845 volatilep = true;
5846 break;
5849 if (! volatilep)
5851 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5852 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5853 return const0_rtx;
5857 switch (fcode)
5859 CASE_FLT_FN (BUILT_IN_FABS):
5860 target = expand_builtin_fabs (exp, target, subtarget);
5861 if (target)
5862 return target;
5863 break;
5865 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5866 target = expand_builtin_copysign (exp, target, subtarget);
5867 if (target)
5868 return target;
5869 break;
5871 /* Just do a normal library call if we were unable to fold
5872 the values. */
5873 CASE_FLT_FN (BUILT_IN_CABS):
5874 break;
5876 CASE_FLT_FN (BUILT_IN_EXP):
5877 CASE_FLT_FN (BUILT_IN_EXP10):
5878 CASE_FLT_FN (BUILT_IN_POW10):
5879 CASE_FLT_FN (BUILT_IN_EXP2):
5880 CASE_FLT_FN (BUILT_IN_EXPM1):
5881 CASE_FLT_FN (BUILT_IN_LOGB):
5882 CASE_FLT_FN (BUILT_IN_LOG):
5883 CASE_FLT_FN (BUILT_IN_LOG10):
5884 CASE_FLT_FN (BUILT_IN_LOG2):
5885 CASE_FLT_FN (BUILT_IN_LOG1P):
5886 CASE_FLT_FN (BUILT_IN_TAN):
5887 CASE_FLT_FN (BUILT_IN_ASIN):
5888 CASE_FLT_FN (BUILT_IN_ACOS):
5889 CASE_FLT_FN (BUILT_IN_ATAN):
5890 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5891 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5892 because of possible accuracy problems. */
5893 if (! flag_unsafe_math_optimizations)
5894 break;
5895 CASE_FLT_FN (BUILT_IN_SQRT):
5896 CASE_FLT_FN (BUILT_IN_FLOOR):
5897 CASE_FLT_FN (BUILT_IN_CEIL):
5898 CASE_FLT_FN (BUILT_IN_TRUNC):
5899 CASE_FLT_FN (BUILT_IN_ROUND):
5900 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5901 CASE_FLT_FN (BUILT_IN_RINT):
5902 target = expand_builtin_mathfn (exp, target, subtarget);
5903 if (target)
5904 return target;
5905 break;
5907 CASE_FLT_FN (BUILT_IN_FMA):
5908 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5909 if (target)
5910 return target;
5911 break;
5913 CASE_FLT_FN (BUILT_IN_ILOGB):
5914 if (! flag_unsafe_math_optimizations)
5915 break;
5916 CASE_FLT_FN (BUILT_IN_ISINF):
5917 CASE_FLT_FN (BUILT_IN_FINITE):
5918 case BUILT_IN_ISFINITE:
5919 case BUILT_IN_ISNORMAL:
5920 target = expand_builtin_interclass_mathfn (exp, target);
5921 if (target)
5922 return target;
5923 break;
5925 CASE_FLT_FN (BUILT_IN_ICEIL):
5926 CASE_FLT_FN (BUILT_IN_LCEIL):
5927 CASE_FLT_FN (BUILT_IN_LLCEIL):
5928 CASE_FLT_FN (BUILT_IN_LFLOOR):
5929 CASE_FLT_FN (BUILT_IN_IFLOOR):
5930 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5931 target = expand_builtin_int_roundingfn (exp, target);
5932 if (target)
5933 return target;
5934 break;
5936 CASE_FLT_FN (BUILT_IN_IRINT):
5937 CASE_FLT_FN (BUILT_IN_LRINT):
5938 CASE_FLT_FN (BUILT_IN_LLRINT):
5939 CASE_FLT_FN (BUILT_IN_IROUND):
5940 CASE_FLT_FN (BUILT_IN_LROUND):
5941 CASE_FLT_FN (BUILT_IN_LLROUND):
5942 target = expand_builtin_int_roundingfn_2 (exp, target);
5943 if (target)
5944 return target;
5945 break;
5947 CASE_FLT_FN (BUILT_IN_POWI):
5948 target = expand_builtin_powi (exp, target);
5949 if (target)
5950 return target;
5951 break;
5953 CASE_FLT_FN (BUILT_IN_ATAN2):
5954 CASE_FLT_FN (BUILT_IN_LDEXP):
5955 CASE_FLT_FN (BUILT_IN_SCALB):
5956 CASE_FLT_FN (BUILT_IN_SCALBN):
5957 CASE_FLT_FN (BUILT_IN_SCALBLN):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5961 CASE_FLT_FN (BUILT_IN_FMOD):
5962 CASE_FLT_FN (BUILT_IN_REMAINDER):
5963 CASE_FLT_FN (BUILT_IN_DREM):
5964 CASE_FLT_FN (BUILT_IN_POW):
5965 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5966 if (target)
5967 return target;
5968 break;
5970 CASE_FLT_FN (BUILT_IN_CEXPI):
5971 target = expand_builtin_cexpi (exp, target);
5972 gcc_assert (target);
5973 return target;
5975 CASE_FLT_FN (BUILT_IN_SIN):
5976 CASE_FLT_FN (BUILT_IN_COS):
5977 if (! flag_unsafe_math_optimizations)
5978 break;
5979 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5980 if (target)
5981 return target;
5982 break;
5984 CASE_FLT_FN (BUILT_IN_SINCOS):
5985 if (! flag_unsafe_math_optimizations)
5986 break;
5987 target = expand_builtin_sincos (exp);
5988 if (target)
5989 return target;
5990 break;
5992 case BUILT_IN_APPLY_ARGS:
5993 return expand_builtin_apply_args ();
5995 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5996 FUNCTION with a copy of the parameters described by
5997 ARGUMENTS, and ARGSIZE. It returns a block of memory
5998 allocated on the stack into which is stored all the registers
5999 that might possibly be used for returning the result of a
6000 function. ARGUMENTS is the value returned by
6001 __builtin_apply_args. ARGSIZE is the number of bytes of
6002 arguments that must be copied. ??? How should this value be
6003 computed? We'll also need a safe worst case value for varargs
6004 functions. */
6005 case BUILT_IN_APPLY:
6006 if (!validate_arglist (exp, POINTER_TYPE,
6007 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6008 && !validate_arglist (exp, REFERENCE_TYPE,
6009 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6010 return const0_rtx;
6011 else
6013 rtx ops[3];
6015 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6016 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6017 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6019 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6022 /* __builtin_return (RESULT) causes the function to return the
6023 value described by RESULT. RESULT is address of the block of
6024 memory returned by __builtin_apply. */
6025 case BUILT_IN_RETURN:
6026 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6027 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6028 return const0_rtx;
6030 case BUILT_IN_SAVEREGS:
6031 return expand_builtin_saveregs ();
6033 case BUILT_IN_VA_ARG_PACK:
6034 /* All valid uses of __builtin_va_arg_pack () are removed during
6035 inlining. */
6036 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6037 return const0_rtx;
6039 case BUILT_IN_VA_ARG_PACK_LEN:
6040 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6041 inlining. */
6042 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6043 return const0_rtx;
6045 /* Return the address of the first anonymous stack arg. */
6046 case BUILT_IN_NEXT_ARG:
6047 if (fold_builtin_next_arg (exp, false))
6048 return const0_rtx;
6049 return expand_builtin_next_arg ();
6051 case BUILT_IN_CLEAR_CACHE:
6052 target = expand_builtin___clear_cache (exp);
6053 if (target)
6054 return target;
6055 break;
6057 case BUILT_IN_CLASSIFY_TYPE:
6058 return expand_builtin_classify_type (exp);
6060 case BUILT_IN_CONSTANT_P:
6061 return const0_rtx;
6063 case BUILT_IN_FRAME_ADDRESS:
6064 case BUILT_IN_RETURN_ADDRESS:
6065 return expand_builtin_frame_address (fndecl, exp);
6067 /* Returns the address of the area where the structure is returned.
6068 0 otherwise. */
6069 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6070 if (call_expr_nargs (exp) != 0
6071 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6072 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6073 return const0_rtx;
6074 else
6075 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6077 case BUILT_IN_ALLOCA:
6078 case BUILT_IN_ALLOCA_WITH_ALIGN:
6079 /* If the allocation stems from the declaration of a variable-sized
6080 object, it cannot accumulate. */
6081 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6082 if (target)
6083 return target;
6084 break;
6086 case BUILT_IN_STACK_SAVE:
6087 return expand_stack_save ();
6089 case BUILT_IN_STACK_RESTORE:
6090 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6091 return const0_rtx;
6093 case BUILT_IN_BSWAP16:
6094 case BUILT_IN_BSWAP32:
6095 case BUILT_IN_BSWAP64:
6096 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6097 if (target)
6098 return target;
6099 break;
6101 CASE_INT_FN (BUILT_IN_FFS):
6102 case BUILT_IN_FFSIMAX:
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, ffs_optab);
6105 if (target)
6106 return target;
6107 break;
6109 CASE_INT_FN (BUILT_IN_CLZ):
6110 case BUILT_IN_CLZIMAX:
6111 target = expand_builtin_unop (target_mode, exp, target,
6112 subtarget, clz_optab);
6113 if (target)
6114 return target;
6115 break;
6117 CASE_INT_FN (BUILT_IN_CTZ):
6118 case BUILT_IN_CTZIMAX:
6119 target = expand_builtin_unop (target_mode, exp, target,
6120 subtarget, ctz_optab);
6121 if (target)
6122 return target;
6123 break;
6125 CASE_INT_FN (BUILT_IN_CLRSB):
6126 case BUILT_IN_CLRSBIMAX:
6127 target = expand_builtin_unop (target_mode, exp, target,
6128 subtarget, clrsb_optab);
6129 if (target)
6130 return target;
6131 break;
6133 CASE_INT_FN (BUILT_IN_POPCOUNT):
6134 case BUILT_IN_POPCOUNTIMAX:
6135 target = expand_builtin_unop (target_mode, exp, target,
6136 subtarget, popcount_optab);
6137 if (target)
6138 return target;
6139 break;
6141 CASE_INT_FN (BUILT_IN_PARITY):
6142 case BUILT_IN_PARITYIMAX:
6143 target = expand_builtin_unop (target_mode, exp, target,
6144 subtarget, parity_optab);
6145 if (target)
6146 return target;
6147 break;
6149 case BUILT_IN_STRLEN:
6150 target = expand_builtin_strlen (exp, target, target_mode);
6151 if (target)
6152 return target;
6153 break;
6155 case BUILT_IN_STRCPY:
6156 target = expand_builtin_strcpy (exp, target);
6157 if (target)
6158 return target;
6159 break;
6161 case BUILT_IN_STRNCPY:
6162 target = expand_builtin_strncpy (exp, target);
6163 if (target)
6164 return target;
6165 break;
6167 case BUILT_IN_STPCPY:
6168 target = expand_builtin_stpcpy (exp, target, mode);
6169 if (target)
6170 return target;
6171 break;
6173 case BUILT_IN_MEMCPY:
6174 target = expand_builtin_memcpy (exp, target);
6175 if (target)
6176 return target;
6177 break;
6179 case BUILT_IN_MEMPCPY:
6180 target = expand_builtin_mempcpy (exp, target, mode);
6181 if (target)
6182 return target;
6183 break;
6185 case BUILT_IN_MEMSET:
6186 target = expand_builtin_memset (exp, target, mode);
6187 if (target)
6188 return target;
6189 break;
6191 case BUILT_IN_BZERO:
6192 target = expand_builtin_bzero (exp);
6193 if (target)
6194 return target;
6195 break;
6197 case BUILT_IN_STRCMP:
6198 target = expand_builtin_strcmp (exp, target);
6199 if (target)
6200 return target;
6201 break;
6203 case BUILT_IN_STRNCMP:
6204 target = expand_builtin_strncmp (exp, target, mode);
6205 if (target)
6206 return target;
6207 break;
6209 case BUILT_IN_BCMP:
6210 case BUILT_IN_MEMCMP:
6211 target = expand_builtin_memcmp (exp, target, mode);
6212 if (target)
6213 return target;
6214 break;
6216 case BUILT_IN_SETJMP:
6217 /* This should have been lowered to the builtins below. */
6218 gcc_unreachable ();
6220 case BUILT_IN_SETJMP_SETUP:
6221 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6222 and the receiver label. */
6223 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6225 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6226 VOIDmode, EXPAND_NORMAL);
6227 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6228 rtx label_r = label_rtx (label);
6230 /* This is copied from the handling of non-local gotos. */
6231 expand_builtin_setjmp_setup (buf_addr, label_r);
6232 nonlocal_goto_handler_labels
6233 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6234 nonlocal_goto_handler_labels);
6235 /* ??? Do not let expand_label treat us as such since we would
6236 not want to be both on the list of non-local labels and on
6237 the list of forced labels. */
6238 FORCED_LABEL (label) = 0;
6239 return const0_rtx;
6241 break;
6243 case BUILT_IN_SETJMP_DISPATCHER:
6244 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6245 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6247 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6248 rtx label_r = label_rtx (label);
6250 /* Remove the dispatcher label from the list of non-local labels
6251 since the receiver labels have been added to it above. */
6252 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6253 return const0_rtx;
6255 break;
6257 case BUILT_IN_SETJMP_RECEIVER:
6258 /* __builtin_setjmp_receiver is passed the receiver label. */
6259 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6261 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6262 rtx label_r = label_rtx (label);
6264 expand_builtin_setjmp_receiver (label_r);
6265 return const0_rtx;
6267 break;
6269 /* __builtin_longjmp is passed a pointer to an array of five words.
6270 It's similar to the C library longjmp function but works with
6271 __builtin_setjmp above. */
6272 case BUILT_IN_LONGJMP:
6273 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6275 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6276 VOIDmode, EXPAND_NORMAL);
6277 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6279 if (value != const1_rtx)
6281 error ("%<__builtin_longjmp%> second argument must be 1");
6282 return const0_rtx;
6285 expand_builtin_longjmp (buf_addr, value);
6286 return const0_rtx;
6288 break;
6290 case BUILT_IN_NONLOCAL_GOTO:
6291 target = expand_builtin_nonlocal_goto (exp);
6292 if (target)
6293 return target;
6294 break;
6296 /* This updates the setjmp buffer that is its argument with the value
6297 of the current stack pointer. */
6298 case BUILT_IN_UPDATE_SETJMP_BUF:
6299 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6301 rtx buf_addr
6302 = expand_normal (CALL_EXPR_ARG (exp, 0));
6304 expand_builtin_update_setjmp_buf (buf_addr);
6305 return const0_rtx;
6307 break;
6309 case BUILT_IN_TRAP:
6310 expand_builtin_trap ();
6311 return const0_rtx;
6313 case BUILT_IN_UNREACHABLE:
6314 expand_builtin_unreachable ();
6315 return const0_rtx;
6317 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6318 case BUILT_IN_SIGNBITD32:
6319 case BUILT_IN_SIGNBITD64:
6320 case BUILT_IN_SIGNBITD128:
6321 target = expand_builtin_signbit (exp, target);
6322 if (target)
6323 return target;
6324 break;
6326 /* Various hooks for the DWARF 2 __throw routine. */
6327 case BUILT_IN_UNWIND_INIT:
6328 expand_builtin_unwind_init ();
6329 return const0_rtx;
6330 case BUILT_IN_DWARF_CFA:
6331 return virtual_cfa_rtx;
6332 #ifdef DWARF2_UNWIND_INFO
6333 case BUILT_IN_DWARF_SP_COLUMN:
6334 return expand_builtin_dwarf_sp_column ();
6335 case BUILT_IN_INIT_DWARF_REG_SIZES:
6336 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6337 return const0_rtx;
6338 #endif
6339 case BUILT_IN_FROB_RETURN_ADDR:
6340 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6341 case BUILT_IN_EXTRACT_RETURN_ADDR:
6342 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6343 case BUILT_IN_EH_RETURN:
6344 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6345 CALL_EXPR_ARG (exp, 1));
6346 return const0_rtx;
6347 #ifdef EH_RETURN_DATA_REGNO
6348 case BUILT_IN_EH_RETURN_DATA_REGNO:
6349 return expand_builtin_eh_return_data_regno (exp);
6350 #endif
6351 case BUILT_IN_EXTEND_POINTER:
6352 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6353 case BUILT_IN_EH_POINTER:
6354 return expand_builtin_eh_pointer (exp);
6355 case BUILT_IN_EH_FILTER:
6356 return expand_builtin_eh_filter (exp);
6357 case BUILT_IN_EH_COPY_VALUES:
6358 return expand_builtin_eh_copy_values (exp);
6360 case BUILT_IN_VA_START:
6361 return expand_builtin_va_start (exp);
6362 case BUILT_IN_VA_END:
6363 return expand_builtin_va_end (exp);
6364 case BUILT_IN_VA_COPY:
6365 return expand_builtin_va_copy (exp);
6366 case BUILT_IN_EXPECT:
6367 return expand_builtin_expect (exp, target);
6368 case BUILT_IN_ASSUME_ALIGNED:
6369 return expand_builtin_assume_aligned (exp, target);
6370 case BUILT_IN_PREFETCH:
6371 expand_builtin_prefetch (exp);
6372 return const0_rtx;
6374 case BUILT_IN_INIT_TRAMPOLINE:
6375 return expand_builtin_init_trampoline (exp, true);
6376 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6377 return expand_builtin_init_trampoline (exp, false);
6378 case BUILT_IN_ADJUST_TRAMPOLINE:
6379 return expand_builtin_adjust_trampoline (exp);
6381 case BUILT_IN_FORK:
6382 case BUILT_IN_EXECL:
6383 case BUILT_IN_EXECV:
6384 case BUILT_IN_EXECLP:
6385 case BUILT_IN_EXECLE:
6386 case BUILT_IN_EXECVP:
6387 case BUILT_IN_EXECVE:
6388 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6389 if (target)
6390 return target;
6391 break;
6393 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6394 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6395 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6396 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6397 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6398 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6399 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6400 if (target)
6401 return target;
6402 break;
6404 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6405 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6406 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6407 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6408 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6409 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6410 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6411 if (target)
6412 return target;
6413 break;
6415 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6416 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6417 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6418 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6419 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6420 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6421 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6422 if (target)
6423 return target;
6424 break;
6426 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6427 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6428 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6429 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6430 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6431 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6432 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6433 if (target)
6434 return target;
6435 break;
6437 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6438 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6439 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6440 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6441 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6442 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6443 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6449 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6450 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6451 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6452 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6454 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6455 if (target)
6456 return target;
6457 break;
6459 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6460 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6461 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6462 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6463 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6465 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6466 if (target)
6467 return target;
6468 break;
6470 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6471 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6472 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6473 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6474 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6476 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6482 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6483 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6484 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6485 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6487 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6488 if (target)
6489 return target;
6490 break;
6492 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6493 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6494 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6495 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6496 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6497 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6498 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6499 if (target)
6500 return target;
6501 break;
6503 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6504 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6505 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6506 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6507 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6508 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6509 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6510 if (target)
6511 return target;
6512 break;
6514 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6515 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6516 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6517 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6518 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6520 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6521 if (target)
6522 return target;
6523 break;
6525 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6526 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6527 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6528 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6529 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6530 if (mode == VOIDmode)
6531 mode = TYPE_MODE (boolean_type_node);
6532 if (!target || !register_operand (target, mode))
6533 target = gen_reg_rtx (mode);
6535 mode = get_builtin_sync_mode
6536 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6537 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6538 if (target)
6539 return target;
6540 break;
6542 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6543 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6544 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6545 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6546 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6547 mode = get_builtin_sync_mode
6548 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6549 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6550 if (target)
6551 return target;
6552 break;
6554 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6555 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6556 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6557 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6558 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6560 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6561 if (target)
6562 return target;
6563 break;
6565 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6566 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6567 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6568 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6569 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6571 expand_builtin_sync_lock_release (mode, exp);
6572 return const0_rtx;
6574 case BUILT_IN_SYNC_SYNCHRONIZE:
6575 expand_builtin_sync_synchronize ();
6576 return const0_rtx;
6578 case BUILT_IN_ATOMIC_EXCHANGE_1:
6579 case BUILT_IN_ATOMIC_EXCHANGE_2:
6580 case BUILT_IN_ATOMIC_EXCHANGE_4:
6581 case BUILT_IN_ATOMIC_EXCHANGE_8:
6582 case BUILT_IN_ATOMIC_EXCHANGE_16:
6583 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6584 target = expand_builtin_atomic_exchange (mode, exp, target);
6585 if (target)
6586 return target;
6587 break;
6589 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6590 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6591 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6592 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6593 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6595 unsigned int nargs, z;
6596 VEC(tree,gc) *vec;
6598 mode =
6599 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6600 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6601 if (target)
6602 return target;
6604 /* If this is turned into an external library call, the weak parameter
6605 must be dropped to match the expected parameter list. */
6606 nargs = call_expr_nargs (exp);
6607 vec = VEC_alloc (tree, gc, nargs - 1);
6608 for (z = 0; z < 3; z++)
6609 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6610 /* Skip the boolean weak parameter. */
6611 for (z = 4; z < 6; z++)
6612 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6613 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6614 break;
6617 case BUILT_IN_ATOMIC_LOAD_1:
6618 case BUILT_IN_ATOMIC_LOAD_2:
6619 case BUILT_IN_ATOMIC_LOAD_4:
6620 case BUILT_IN_ATOMIC_LOAD_8:
6621 case BUILT_IN_ATOMIC_LOAD_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6623 target = expand_builtin_atomic_load (mode, exp, target);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_ATOMIC_STORE_1:
6629 case BUILT_IN_ATOMIC_STORE_2:
6630 case BUILT_IN_ATOMIC_STORE_4:
6631 case BUILT_IN_ATOMIC_STORE_8:
6632 case BUILT_IN_ATOMIC_STORE_16:
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6634 target = expand_builtin_atomic_store (mode, exp);
6635 if (target)
6636 return const0_rtx;
6637 break;
6639 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6640 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6641 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6642 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6643 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6645 enum built_in_function lib;
6646 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6647 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6648 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6649 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6650 ignore, lib);
6651 if (target)
6652 return target;
6653 break;
6655 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6656 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6657 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6658 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6659 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6661 enum built_in_function lib;
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6663 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6664 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6665 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6666 ignore, lib);
6667 if (target)
6668 return target;
6669 break;
6671 case BUILT_IN_ATOMIC_AND_FETCH_1:
6672 case BUILT_IN_ATOMIC_AND_FETCH_2:
6673 case BUILT_IN_ATOMIC_AND_FETCH_4:
6674 case BUILT_IN_ATOMIC_AND_FETCH_8:
6675 case BUILT_IN_ATOMIC_AND_FETCH_16:
6677 enum built_in_function lib;
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6679 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6680 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6681 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6682 ignore, lib);
6683 if (target)
6684 return target;
6685 break;
6687 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6688 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6689 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6690 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6691 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6693 enum built_in_function lib;
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6695 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6696 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6698 ignore, lib);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6704 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6705 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6706 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6707 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6709 enum built_in_function lib;
6710 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6711 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6712 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6714 ignore, lib);
6715 if (target)
6716 return target;
6717 break;
6719 case BUILT_IN_ATOMIC_OR_FETCH_1:
6720 case BUILT_IN_ATOMIC_OR_FETCH_2:
6721 case BUILT_IN_ATOMIC_OR_FETCH_4:
6722 case BUILT_IN_ATOMIC_OR_FETCH_8:
6723 case BUILT_IN_ATOMIC_OR_FETCH_16:
6725 enum built_in_function lib;
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6727 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6728 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6729 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6730 ignore, lib);
6731 if (target)
6732 return target;
6733 break;
6735 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6736 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6737 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6738 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6739 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6741 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6742 ignore, BUILT_IN_NONE);
6743 if (target)
6744 return target;
6745 break;
6747 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6748 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6749 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6750 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6751 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6752 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6753 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6754 ignore, BUILT_IN_NONE);
6755 if (target)
6756 return target;
6757 break;
6759 case BUILT_IN_ATOMIC_FETCH_AND_1:
6760 case BUILT_IN_ATOMIC_FETCH_AND_2:
6761 case BUILT_IN_ATOMIC_FETCH_AND_4:
6762 case BUILT_IN_ATOMIC_FETCH_AND_8:
6763 case BUILT_IN_ATOMIC_FETCH_AND_16:
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6765 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6766 ignore, BUILT_IN_NONE);
6767 if (target)
6768 return target;
6769 break;
6771 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6772 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6773 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6774 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6775 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6777 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6778 ignore, BUILT_IN_NONE);
6779 if (target)
6780 return target;
6781 break;
6783 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6784 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6785 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6786 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6787 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6790 ignore, BUILT_IN_NONE);
6791 if (target)
6792 return target;
6793 break;
6795 case BUILT_IN_ATOMIC_FETCH_OR_1:
6796 case BUILT_IN_ATOMIC_FETCH_OR_2:
6797 case BUILT_IN_ATOMIC_FETCH_OR_4:
6798 case BUILT_IN_ATOMIC_FETCH_OR_8:
6799 case BUILT_IN_ATOMIC_FETCH_OR_16:
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6801 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6802 ignore, BUILT_IN_NONE);
6803 if (target)
6804 return target;
6805 break;
6807 case BUILT_IN_ATOMIC_TEST_AND_SET:
6808 return expand_builtin_atomic_test_and_set (exp, target);
6810 case BUILT_IN_ATOMIC_CLEAR:
6811 return expand_builtin_atomic_clear (exp);
6813 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6814 return expand_builtin_atomic_always_lock_free (exp);
6816 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6817 target = expand_builtin_atomic_is_lock_free (exp);
6818 if (target)
6819 return target;
6820 break;
6822 case BUILT_IN_ATOMIC_THREAD_FENCE:
6823 expand_builtin_atomic_thread_fence (exp);
6824 return const0_rtx;
6826 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6827 expand_builtin_atomic_signal_fence (exp);
6828 return const0_rtx;
6830 case BUILT_IN_OBJECT_SIZE:
6831 return expand_builtin_object_size (exp);
6833 case BUILT_IN_MEMCPY_CHK:
6834 case BUILT_IN_MEMPCPY_CHK:
6835 case BUILT_IN_MEMMOVE_CHK:
6836 case BUILT_IN_MEMSET_CHK:
6837 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6838 if (target)
6839 return target;
6840 break;
6842 case BUILT_IN_STRCPY_CHK:
6843 case BUILT_IN_STPCPY_CHK:
6844 case BUILT_IN_STRNCPY_CHK:
6845 case BUILT_IN_STPNCPY_CHK:
6846 case BUILT_IN_STRCAT_CHK:
6847 case BUILT_IN_STRNCAT_CHK:
6848 case BUILT_IN_SNPRINTF_CHK:
6849 case BUILT_IN_VSNPRINTF_CHK:
6850 maybe_emit_chk_warning (exp, fcode);
6851 break;
6853 case BUILT_IN_SPRINTF_CHK:
6854 case BUILT_IN_VSPRINTF_CHK:
6855 maybe_emit_sprintf_chk_warning (exp, fcode);
6856 break;
6858 case BUILT_IN_FREE:
6859 if (warn_free_nonheap_object)
6860 maybe_emit_free_warning (exp);
6861 break;
6863 case BUILT_IN_THREAD_POINTER:
6864 return expand_builtin_thread_pointer (exp, target);
6866 case BUILT_IN_SET_THREAD_POINTER:
6867 expand_builtin_set_thread_pointer (exp);
6868 return const0_rtx;
6870 default: /* just do library call, if unknown builtin */
6871 break;
6874 /* The switch statement above can drop through to cause the function
6875 to be called normally. */
6876 return expand_call (exp, target, ignore);
6879 /* Determine whether a tree node represents a call to a built-in
6880 function. If the tree T is a call to a built-in function with
6881 the right number of arguments of the appropriate types, return
6882 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6883 Otherwise the return value is END_BUILTINS. */
6885 enum built_in_function
6886 builtin_mathfn_code (const_tree t)
6888 const_tree fndecl, arg, parmlist;
6889 const_tree argtype, parmtype;
6890 const_call_expr_arg_iterator iter;
6892 if (TREE_CODE (t) != CALL_EXPR
6893 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6894 return END_BUILTINS;
6896 fndecl = get_callee_fndecl (t);
6897 if (fndecl == NULL_TREE
6898 || TREE_CODE (fndecl) != FUNCTION_DECL
6899 || ! DECL_BUILT_IN (fndecl)
6900 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6901 return END_BUILTINS;
6903 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6904 init_const_call_expr_arg_iterator (t, &iter);
6905 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6907 /* If a function doesn't take a variable number of arguments,
6908 the last element in the list will have type `void'. */
6909 parmtype = TREE_VALUE (parmlist);
6910 if (VOID_TYPE_P (parmtype))
6912 if (more_const_call_expr_args_p (&iter))
6913 return END_BUILTINS;
6914 return DECL_FUNCTION_CODE (fndecl);
6917 if (! more_const_call_expr_args_p (&iter))
6918 return END_BUILTINS;
6920 arg = next_const_call_expr_arg (&iter);
6921 argtype = TREE_TYPE (arg);
6923 if (SCALAR_FLOAT_TYPE_P (parmtype))
6925 if (! SCALAR_FLOAT_TYPE_P (argtype))
6926 return END_BUILTINS;
6928 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6930 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6931 return END_BUILTINS;
6933 else if (POINTER_TYPE_P (parmtype))
6935 if (! POINTER_TYPE_P (argtype))
6936 return END_BUILTINS;
6938 else if (INTEGRAL_TYPE_P (parmtype))
6940 if (! INTEGRAL_TYPE_P (argtype))
6941 return END_BUILTINS;
6943 else
6944 return END_BUILTINS;
6947 /* Variable-length argument list. */
6948 return DECL_FUNCTION_CODE (fndecl);
6951 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6952 evaluate to a constant. */
6954 static tree
6955 fold_builtin_constant_p (tree arg)
6957 /* We return 1 for a numeric type that's known to be a constant
6958 value at compile-time or for an aggregate type that's a
6959 literal constant. */
6960 STRIP_NOPS (arg);
6962 /* If we know this is a constant, emit the constant of one. */
6963 if (CONSTANT_CLASS_P (arg)
6964 || (TREE_CODE (arg) == CONSTRUCTOR
6965 && TREE_CONSTANT (arg)))
6966 return integer_one_node;
6967 if (TREE_CODE (arg) == ADDR_EXPR)
6969 tree op = TREE_OPERAND (arg, 0);
6970 if (TREE_CODE (op) == STRING_CST
6971 || (TREE_CODE (op) == ARRAY_REF
6972 && integer_zerop (TREE_OPERAND (op, 1))
6973 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6974 return integer_one_node;
6977 /* If this expression has side effects, show we don't know it to be a
6978 constant. Likewise if it's a pointer or aggregate type since in
6979 those case we only want literals, since those are only optimized
6980 when generating RTL, not later.
6981 And finally, if we are compiling an initializer, not code, we
6982 need to return a definite result now; there's not going to be any
6983 more optimization done. */
6984 if (TREE_SIDE_EFFECTS (arg)
6985 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6986 || POINTER_TYPE_P (TREE_TYPE (arg))
6987 || cfun == 0
6988 || folding_initializer)
6989 return integer_zero_node;
6991 return NULL_TREE;
6994 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6995 return it as a truthvalue. */
6997 static tree
6998 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7000 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7002 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7003 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7004 ret_type = TREE_TYPE (TREE_TYPE (fn));
7005 pred_type = TREE_VALUE (arg_types);
7006 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7008 pred = fold_convert_loc (loc, pred_type, pred);
7009 expected = fold_convert_loc (loc, expected_type, expected);
7010 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7012 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7013 build_int_cst (ret_type, 0));
7016 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7017 NULL_TREE if no simplification is possible. */
7019 static tree
7020 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7022 tree inner, fndecl, inner_arg0;
7023 enum tree_code code;
7025 /* Distribute the expected value over short-circuiting operators.
7026 See through the cast from truthvalue_type_node to long. */
7027 inner_arg0 = arg0;
7028 while (TREE_CODE (inner_arg0) == NOP_EXPR
7029 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7030 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7031 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7033 /* If this is a builtin_expect within a builtin_expect keep the
7034 inner one. See through a comparison against a constant. It
7035 might have been added to create a thruthvalue. */
7036 inner = inner_arg0;
7038 if (COMPARISON_CLASS_P (inner)
7039 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7040 inner = TREE_OPERAND (inner, 0);
7042 if (TREE_CODE (inner) == CALL_EXPR
7043 && (fndecl = get_callee_fndecl (inner))
7044 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7045 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7046 return arg0;
7048 inner = inner_arg0;
7049 code = TREE_CODE (inner);
7050 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7052 tree op0 = TREE_OPERAND (inner, 0);
7053 tree op1 = TREE_OPERAND (inner, 1);
7055 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7056 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7057 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7059 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7062 /* If the argument isn't invariant then there's nothing else we can do. */
7063 if (!TREE_CONSTANT (inner_arg0))
7064 return NULL_TREE;
7066 /* If we expect that a comparison against the argument will fold to
7067 a constant return the constant. In practice, this means a true
7068 constant or the address of a non-weak symbol. */
7069 inner = inner_arg0;
7070 STRIP_NOPS (inner);
7071 if (TREE_CODE (inner) == ADDR_EXPR)
7075 inner = TREE_OPERAND (inner, 0);
7077 while (TREE_CODE (inner) == COMPONENT_REF
7078 || TREE_CODE (inner) == ARRAY_REF);
7079 if ((TREE_CODE (inner) == VAR_DECL
7080 || TREE_CODE (inner) == FUNCTION_DECL)
7081 && DECL_WEAK (inner))
7082 return NULL_TREE;
7085 /* Otherwise, ARG0 already has the proper type for the return value. */
7086 return arg0;
7089 /* Fold a call to __builtin_classify_type with argument ARG. */
7091 static tree
7092 fold_builtin_classify_type (tree arg)
7094 if (arg == 0)
7095 return build_int_cst (integer_type_node, no_type_class);
7097 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7100 /* Fold a call to __builtin_strlen with argument ARG. */
7102 static tree
7103 fold_builtin_strlen (location_t loc, tree type, tree arg)
7105 if (!validate_arg (arg, POINTER_TYPE))
7106 return NULL_TREE;
7107 else
7109 tree len = c_strlen (arg, 0);
7111 if (len)
7112 return fold_convert_loc (loc, type, len);
7114 return NULL_TREE;
7118 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7120 static tree
7121 fold_builtin_inf (location_t loc, tree type, int warn)
7123 REAL_VALUE_TYPE real;
7125 /* __builtin_inff is intended to be usable to define INFINITY on all
7126 targets. If an infinity is not available, INFINITY expands "to a
7127 positive constant of type float that overflows at translation
7128 time", footnote "In this case, using INFINITY will violate the
7129 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7130 Thus we pedwarn to ensure this constraint violation is
7131 diagnosed. */
7132 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7133 pedwarn (loc, 0, "target format does not support infinity");
7135 real_inf (&real);
7136 return build_real (type, real);
7139 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7141 static tree
7142 fold_builtin_nan (tree arg, tree type, int quiet)
7144 REAL_VALUE_TYPE real;
7145 const char *str;
7147 if (!validate_arg (arg, POINTER_TYPE))
7148 return NULL_TREE;
7149 str = c_getstr (arg);
7150 if (!str)
7151 return NULL_TREE;
7153 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7154 return NULL_TREE;
7156 return build_real (type, real);
7159 /* Return true if the floating point expression T has an integer value.
7160 We also allow +Inf, -Inf and NaN to be considered integer values. */
7162 static bool
7163 integer_valued_real_p (tree t)
7165 switch (TREE_CODE (t))
7167 case FLOAT_EXPR:
7168 return true;
7170 case ABS_EXPR:
7171 case SAVE_EXPR:
7172 return integer_valued_real_p (TREE_OPERAND (t, 0));
7174 case COMPOUND_EXPR:
7175 case MODIFY_EXPR:
7176 case BIND_EXPR:
7177 return integer_valued_real_p (TREE_OPERAND (t, 1));
7179 case PLUS_EXPR:
7180 case MINUS_EXPR:
7181 case MULT_EXPR:
7182 case MIN_EXPR:
7183 case MAX_EXPR:
7184 return integer_valued_real_p (TREE_OPERAND (t, 0))
7185 && integer_valued_real_p (TREE_OPERAND (t, 1));
7187 case COND_EXPR:
7188 return integer_valued_real_p (TREE_OPERAND (t, 1))
7189 && integer_valued_real_p (TREE_OPERAND (t, 2));
7191 case REAL_CST:
7192 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7194 case NOP_EXPR:
7196 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7197 if (TREE_CODE (type) == INTEGER_TYPE)
7198 return true;
7199 if (TREE_CODE (type) == REAL_TYPE)
7200 return integer_valued_real_p (TREE_OPERAND (t, 0));
7201 break;
7204 case CALL_EXPR:
7205 switch (builtin_mathfn_code (t))
7207 CASE_FLT_FN (BUILT_IN_CEIL):
7208 CASE_FLT_FN (BUILT_IN_FLOOR):
7209 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7210 CASE_FLT_FN (BUILT_IN_RINT):
7211 CASE_FLT_FN (BUILT_IN_ROUND):
7212 CASE_FLT_FN (BUILT_IN_TRUNC):
7213 return true;
7215 CASE_FLT_FN (BUILT_IN_FMIN):
7216 CASE_FLT_FN (BUILT_IN_FMAX):
7217 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7218 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7220 default:
7221 break;
7223 break;
7225 default:
7226 break;
7228 return false;
7231 /* FNDECL is assumed to be a builtin where truncation can be propagated
7232 across (for instance floor((double)f) == (double)floorf (f).
7233 Do the transformation for a call with argument ARG. */
7235 static tree
7236 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7238 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7240 if (!validate_arg (arg, REAL_TYPE))
7241 return NULL_TREE;
7243 /* Integer rounding functions are idempotent. */
7244 if (fcode == builtin_mathfn_code (arg))
7245 return arg;
7247 /* If argument is already integer valued, and we don't need to worry
7248 about setting errno, there's no need to perform rounding. */
7249 if (! flag_errno_math && integer_valued_real_p (arg))
7250 return arg;
7252 if (optimize)
7254 tree arg0 = strip_float_extensions (arg);
7255 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7256 tree newtype = TREE_TYPE (arg0);
7257 tree decl;
7259 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7260 && (decl = mathfn_built_in (newtype, fcode)))
7261 return fold_convert_loc (loc, ftype,
7262 build_call_expr_loc (loc, decl, 1,
7263 fold_convert_loc (loc,
7264 newtype,
7265 arg0)));
7267 return NULL_TREE;
7270 /* FNDECL is assumed to be builtin which can narrow the FP type of
7271 the argument, for instance lround((double)f) -> lroundf (f).
7272 Do the transformation for a call with argument ARG. */
7274 static tree
7275 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7277 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7279 if (!validate_arg (arg, REAL_TYPE))
7280 return NULL_TREE;
7282 /* If argument is already integer valued, and we don't need to worry
7283 about setting errno, there's no need to perform rounding. */
7284 if (! flag_errno_math && integer_valued_real_p (arg))
7285 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7286 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7288 if (optimize)
7290 tree ftype = TREE_TYPE (arg);
7291 tree arg0 = strip_float_extensions (arg);
7292 tree newtype = TREE_TYPE (arg0);
7293 tree decl;
7295 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7296 && (decl = mathfn_built_in (newtype, fcode)))
7297 return build_call_expr_loc (loc, decl, 1,
7298 fold_convert_loc (loc, newtype, arg0));
7301 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7302 sizeof (int) == sizeof (long). */
7303 if (TYPE_PRECISION (integer_type_node)
7304 == TYPE_PRECISION (long_integer_type_node))
7306 tree newfn = NULL_TREE;
7307 switch (fcode)
7309 CASE_FLT_FN (BUILT_IN_ICEIL):
7310 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7311 break;
7313 CASE_FLT_FN (BUILT_IN_IFLOOR):
7314 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7315 break;
7317 CASE_FLT_FN (BUILT_IN_IROUND):
7318 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7319 break;
7321 CASE_FLT_FN (BUILT_IN_IRINT):
7322 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7323 break;
7325 default:
7326 break;
7329 if (newfn)
7331 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7332 return fold_convert_loc (loc,
7333 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7337 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7338 sizeof (long long) == sizeof (long). */
7339 if (TYPE_PRECISION (long_long_integer_type_node)
7340 == TYPE_PRECISION (long_integer_type_node))
7342 tree newfn = NULL_TREE;
7343 switch (fcode)
7345 CASE_FLT_FN (BUILT_IN_LLCEIL):
7346 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7347 break;
7349 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7350 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7351 break;
7353 CASE_FLT_FN (BUILT_IN_LLROUND):
7354 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7355 break;
7357 CASE_FLT_FN (BUILT_IN_LLRINT):
7358 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7359 break;
7361 default:
7362 break;
7365 if (newfn)
7367 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7368 return fold_convert_loc (loc,
7369 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7373 return NULL_TREE;
7376 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7377 return type. Return NULL_TREE if no simplification can be made. */
7379 static tree
7380 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7382 tree res;
7384 if (!validate_arg (arg, COMPLEX_TYPE)
7385 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7386 return NULL_TREE;
7388 /* Calculate the result when the argument is a constant. */
7389 if (TREE_CODE (arg) == COMPLEX_CST
7390 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7391 type, mpfr_hypot)))
7392 return res;
7394 if (TREE_CODE (arg) == COMPLEX_EXPR)
7396 tree real = TREE_OPERAND (arg, 0);
7397 tree imag = TREE_OPERAND (arg, 1);
7399 /* If either part is zero, cabs is fabs of the other. */
7400 if (real_zerop (real))
7401 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7402 if (real_zerop (imag))
7403 return fold_build1_loc (loc, ABS_EXPR, type, real);
7405 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7406 if (flag_unsafe_math_optimizations
7407 && operand_equal_p (real, imag, OEP_PURE_SAME))
7409 const REAL_VALUE_TYPE sqrt2_trunc
7410 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7411 STRIP_NOPS (real);
7412 return fold_build2_loc (loc, MULT_EXPR, type,
7413 fold_build1_loc (loc, ABS_EXPR, type, real),
7414 build_real (type, sqrt2_trunc));
7418 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7419 if (TREE_CODE (arg) == NEGATE_EXPR
7420 || TREE_CODE (arg) == CONJ_EXPR)
7421 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7423 /* Don't do this when optimizing for size. */
7424 if (flag_unsafe_math_optimizations
7425 && optimize && optimize_function_for_speed_p (cfun))
7427 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7429 if (sqrtfn != NULL_TREE)
7431 tree rpart, ipart, result;
7433 arg = builtin_save_expr (arg);
7435 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7436 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7438 rpart = builtin_save_expr (rpart);
7439 ipart = builtin_save_expr (ipart);
7441 result = fold_build2_loc (loc, PLUS_EXPR, type,
7442 fold_build2_loc (loc, MULT_EXPR, type,
7443 rpart, rpart),
7444 fold_build2_loc (loc, MULT_EXPR, type,
7445 ipart, ipart));
7447 return build_call_expr_loc (loc, sqrtfn, 1, result);
7451 return NULL_TREE;
7454 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7455 complex tree type of the result. If NEG is true, the imaginary
7456 zero is negative. */
7458 static tree
7459 build_complex_cproj (tree type, bool neg)
7461 REAL_VALUE_TYPE rinf, rzero = dconst0;
7463 real_inf (&rinf);
7464 rzero.sign = neg;
7465 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7466 build_real (TREE_TYPE (type), rzero));
7469 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7470 return type. Return NULL_TREE if no simplification can be made. */
7472 static tree
7473 fold_builtin_cproj (location_t loc, tree arg, tree type)
7475 if (!validate_arg (arg, COMPLEX_TYPE)
7476 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7477 return NULL_TREE;
7479 /* If there are no infinities, return arg. */
7480 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7481 return non_lvalue_loc (loc, arg);
7483 /* Calculate the result when the argument is a constant. */
7484 if (TREE_CODE (arg) == COMPLEX_CST)
7486 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7487 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7489 if (real_isinf (real) || real_isinf (imag))
7490 return build_complex_cproj (type, imag->sign);
7491 else
7492 return arg;
7494 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7496 tree real = TREE_OPERAND (arg, 0);
7497 tree imag = TREE_OPERAND (arg, 1);
7499 STRIP_NOPS (real);
7500 STRIP_NOPS (imag);
7502 /* If the real part is inf and the imag part is known to be
7503 nonnegative, return (inf + 0i). Remember side-effects are
7504 possible in the imag part. */
7505 if (TREE_CODE (real) == REAL_CST
7506 && real_isinf (TREE_REAL_CST_PTR (real))
7507 && tree_expr_nonnegative_p (imag))
7508 return omit_one_operand_loc (loc, type,
7509 build_complex_cproj (type, false),
7510 arg);
7512 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7513 Remember side-effects are possible in the real part. */
7514 if (TREE_CODE (imag) == REAL_CST
7515 && real_isinf (TREE_REAL_CST_PTR (imag)))
7516 return
7517 omit_one_operand_loc (loc, type,
7518 build_complex_cproj (type, TREE_REAL_CST_PTR
7519 (imag)->sign), arg);
7522 return NULL_TREE;
7525 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7526 Return NULL_TREE if no simplification can be made. */
7528 static tree
7529 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7532 enum built_in_function fcode;
7533 tree res;
7535 if (!validate_arg (arg, REAL_TYPE))
7536 return NULL_TREE;
7538 /* Calculate the result when the argument is a constant. */
7539 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7540 return res;
7542 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7543 fcode = builtin_mathfn_code (arg);
7544 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7546 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7547 arg = fold_build2_loc (loc, MULT_EXPR, type,
7548 CALL_EXPR_ARG (arg, 0),
7549 build_real (type, dconsthalf));
7550 return build_call_expr_loc (loc, expfn, 1, arg);
7553 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7554 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7556 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7558 if (powfn)
7560 tree arg0 = CALL_EXPR_ARG (arg, 0);
7561 tree tree_root;
7562 /* The inner root was either sqrt or cbrt. */
7563 /* This was a conditional expression but it triggered a bug
7564 in Sun C 5.5. */
7565 REAL_VALUE_TYPE dconstroot;
7566 if (BUILTIN_SQRT_P (fcode))
7567 dconstroot = dconsthalf;
7568 else
7569 dconstroot = dconst_third ();
7571 /* Adjust for the outer root. */
7572 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7573 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7574 tree_root = build_real (type, dconstroot);
7575 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7579 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7580 if (flag_unsafe_math_optimizations
7581 && (fcode == BUILT_IN_POW
7582 || fcode == BUILT_IN_POWF
7583 || fcode == BUILT_IN_POWL))
7585 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7586 tree arg0 = CALL_EXPR_ARG (arg, 0);
7587 tree arg1 = CALL_EXPR_ARG (arg, 1);
7588 tree narg1;
7589 if (!tree_expr_nonnegative_p (arg0))
7590 arg0 = build1 (ABS_EXPR, type, arg0);
7591 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7592 build_real (type, dconsthalf));
7593 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7596 return NULL_TREE;
7599 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7602 static tree
7603 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7605 const enum built_in_function fcode = builtin_mathfn_code (arg);
7606 tree res;
7608 if (!validate_arg (arg, REAL_TYPE))
7609 return NULL_TREE;
7611 /* Calculate the result when the argument is a constant. */
7612 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7613 return res;
7615 if (flag_unsafe_math_optimizations)
7617 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7618 if (BUILTIN_EXPONENT_P (fcode))
7620 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7621 const REAL_VALUE_TYPE third_trunc =
7622 real_value_truncate (TYPE_MODE (type), dconst_third ());
7623 arg = fold_build2_loc (loc, MULT_EXPR, type,
7624 CALL_EXPR_ARG (arg, 0),
7625 build_real (type, third_trunc));
7626 return build_call_expr_loc (loc, expfn, 1, arg);
7629 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7630 if (BUILTIN_SQRT_P (fcode))
7632 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7634 if (powfn)
7636 tree arg0 = CALL_EXPR_ARG (arg, 0);
7637 tree tree_root;
7638 REAL_VALUE_TYPE dconstroot = dconst_third ();
7640 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7641 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7642 tree_root = build_real (type, dconstroot);
7643 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7647 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7648 if (BUILTIN_CBRT_P (fcode))
7650 tree arg0 = CALL_EXPR_ARG (arg, 0);
7651 if (tree_expr_nonnegative_p (arg0))
7653 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7655 if (powfn)
7657 tree tree_root;
7658 REAL_VALUE_TYPE dconstroot;
7660 real_arithmetic (&dconstroot, MULT_EXPR,
7661 dconst_third_ptr (), dconst_third_ptr ());
7662 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7663 tree_root = build_real (type, dconstroot);
7664 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7669 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7670 if (fcode == BUILT_IN_POW
7671 || fcode == BUILT_IN_POWF
7672 || fcode == BUILT_IN_POWL)
7674 tree arg00 = CALL_EXPR_ARG (arg, 0);
7675 tree arg01 = CALL_EXPR_ARG (arg, 1);
7676 if (tree_expr_nonnegative_p (arg00))
7678 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7679 const REAL_VALUE_TYPE dconstroot
7680 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7681 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7682 build_real (type, dconstroot));
7683 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7687 return NULL_TREE;
7690 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7691 TYPE is the type of the return value. Return NULL_TREE if no
7692 simplification can be made. */
7694 static tree
7695 fold_builtin_cos (location_t loc,
7696 tree arg, tree type, tree fndecl)
7698 tree res, narg;
7700 if (!validate_arg (arg, REAL_TYPE))
7701 return NULL_TREE;
7703 /* Calculate the result when the argument is a constant. */
7704 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7705 return res;
7707 /* Optimize cos(-x) into cos (x). */
7708 if ((narg = fold_strip_sign_ops (arg)))
7709 return build_call_expr_loc (loc, fndecl, 1, narg);
7711 return NULL_TREE;
7714 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7715 Return NULL_TREE if no simplification can be made. */
7717 static tree
7718 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7720 if (validate_arg (arg, REAL_TYPE))
7722 tree res, narg;
7724 /* Calculate the result when the argument is a constant. */
7725 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7726 return res;
7728 /* Optimize cosh(-x) into cosh (x). */
7729 if ((narg = fold_strip_sign_ops (arg)))
7730 return build_call_expr_loc (loc, fndecl, 1, narg);
7733 return NULL_TREE;
7736 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7737 argument ARG. TYPE is the type of the return value. Return
7738 NULL_TREE if no simplification can be made. */
7740 static tree
7741 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7742 bool hyper)
7744 if (validate_arg (arg, COMPLEX_TYPE)
7745 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7747 tree tmp;
7749 /* Calculate the result when the argument is a constant. */
7750 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7751 return tmp;
7753 /* Optimize fn(-x) into fn(x). */
7754 if ((tmp = fold_strip_sign_ops (arg)))
7755 return build_call_expr_loc (loc, fndecl, 1, tmp);
7758 return NULL_TREE;
7761 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7762 Return NULL_TREE if no simplification can be made. */
7764 static tree
7765 fold_builtin_tan (tree arg, tree type)
7767 enum built_in_function fcode;
7768 tree res;
7770 if (!validate_arg (arg, REAL_TYPE))
7771 return NULL_TREE;
7773 /* Calculate the result when the argument is a constant. */
7774 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7775 return res;
7777 /* Optimize tan(atan(x)) = x. */
7778 fcode = builtin_mathfn_code (arg);
7779 if (flag_unsafe_math_optimizations
7780 && (fcode == BUILT_IN_ATAN
7781 || fcode == BUILT_IN_ATANF
7782 || fcode == BUILT_IN_ATANL))
7783 return CALL_EXPR_ARG (arg, 0);
7785 return NULL_TREE;
7788 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7789 NULL_TREE if no simplification can be made. */
7791 static tree
7792 fold_builtin_sincos (location_t loc,
7793 tree arg0, tree arg1, tree arg2)
7795 tree type;
7796 tree res, fn, call;
7798 if (!validate_arg (arg0, REAL_TYPE)
7799 || !validate_arg (arg1, POINTER_TYPE)
7800 || !validate_arg (arg2, POINTER_TYPE))
7801 return NULL_TREE;
7803 type = TREE_TYPE (arg0);
7805 /* Calculate the result when the argument is a constant. */
7806 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7807 return res;
7809 /* Canonicalize sincos to cexpi. */
7810 if (!TARGET_C99_FUNCTIONS)
7811 return NULL_TREE;
7812 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7813 if (!fn)
7814 return NULL_TREE;
7816 call = build_call_expr_loc (loc, fn, 1, arg0);
7817 call = builtin_save_expr (call);
7819 return build2 (COMPOUND_EXPR, void_type_node,
7820 build2 (MODIFY_EXPR, void_type_node,
7821 build_fold_indirect_ref_loc (loc, arg1),
7822 build1 (IMAGPART_EXPR, type, call)),
7823 build2 (MODIFY_EXPR, void_type_node,
7824 build_fold_indirect_ref_loc (loc, arg2),
7825 build1 (REALPART_EXPR, type, call)));
7828 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7829 NULL_TREE if no simplification can be made. */
7831 static tree
7832 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7834 tree rtype;
7835 tree realp, imagp, ifn;
7836 tree res;
7838 if (!validate_arg (arg0, COMPLEX_TYPE)
7839 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7840 return NULL_TREE;
7842 /* Calculate the result when the argument is a constant. */
7843 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7844 return res;
7846 rtype = TREE_TYPE (TREE_TYPE (arg0));
7848 /* In case we can figure out the real part of arg0 and it is constant zero
7849 fold to cexpi. */
7850 if (!TARGET_C99_FUNCTIONS)
7851 return NULL_TREE;
7852 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7853 if (!ifn)
7854 return NULL_TREE;
7856 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7857 && real_zerop (realp))
7859 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7860 return build_call_expr_loc (loc, ifn, 1, narg);
7863 /* In case we can easily decompose real and imaginary parts split cexp
7864 to exp (r) * cexpi (i). */
7865 if (flag_unsafe_math_optimizations
7866 && realp)
7868 tree rfn, rcall, icall;
7870 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7871 if (!rfn)
7872 return NULL_TREE;
7874 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7875 if (!imagp)
7876 return NULL_TREE;
7878 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7879 icall = builtin_save_expr (icall);
7880 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7881 rcall = builtin_save_expr (rcall);
7882 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7883 fold_build2_loc (loc, MULT_EXPR, rtype,
7884 rcall,
7885 fold_build1_loc (loc, REALPART_EXPR,
7886 rtype, icall)),
7887 fold_build2_loc (loc, MULT_EXPR, rtype,
7888 rcall,
7889 fold_build1_loc (loc, IMAGPART_EXPR,
7890 rtype, icall)));
7893 return NULL_TREE;
7896 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7897 Return NULL_TREE if no simplification can be made. */
7899 static tree
7900 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7902 if (!validate_arg (arg, REAL_TYPE))
7903 return NULL_TREE;
7905 /* Optimize trunc of constant value. */
7906 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7908 REAL_VALUE_TYPE r, x;
7909 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7911 x = TREE_REAL_CST (arg);
7912 real_trunc (&r, TYPE_MODE (type), &x);
7913 return build_real (type, r);
7916 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7919 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7920 Return NULL_TREE if no simplification can be made. */
7922 static tree
7923 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7925 if (!validate_arg (arg, REAL_TYPE))
7926 return NULL_TREE;
7928 /* Optimize floor of constant value. */
7929 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7931 REAL_VALUE_TYPE x;
7933 x = TREE_REAL_CST (arg);
7934 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7937 REAL_VALUE_TYPE r;
7939 real_floor (&r, TYPE_MODE (type), &x);
7940 return build_real (type, r);
7944 /* Fold floor (x) where x is nonnegative to trunc (x). */
7945 if (tree_expr_nonnegative_p (arg))
7947 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7948 if (truncfn)
7949 return build_call_expr_loc (loc, truncfn, 1, arg);
7952 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7955 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7956 Return NULL_TREE if no simplification can be made. */
7958 static tree
7959 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7961 if (!validate_arg (arg, REAL_TYPE))
7962 return NULL_TREE;
7964 /* Optimize ceil of constant value. */
7965 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7967 REAL_VALUE_TYPE x;
7969 x = TREE_REAL_CST (arg);
7970 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7972 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7973 REAL_VALUE_TYPE r;
7975 real_ceil (&r, TYPE_MODE (type), &x);
7976 return build_real (type, r);
7980 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7983 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7984 Return NULL_TREE if no simplification can be made. */
7986 static tree
7987 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7989 if (!validate_arg (arg, REAL_TYPE))
7990 return NULL_TREE;
7992 /* Optimize round of constant value. */
7993 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7995 REAL_VALUE_TYPE x;
7997 x = TREE_REAL_CST (arg);
7998 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8000 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8001 REAL_VALUE_TYPE r;
8003 real_round (&r, TYPE_MODE (type), &x);
8004 return build_real (type, r);
8008 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8011 /* Fold function call to builtin lround, lroundf or lroundl (or the
8012 corresponding long long versions) and other rounding functions. ARG
8013 is the argument to the call. Return NULL_TREE if no simplification
8014 can be made. */
8016 static tree
8017 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8019 if (!validate_arg (arg, REAL_TYPE))
8020 return NULL_TREE;
8022 /* Optimize lround of constant value. */
8023 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8025 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8027 if (real_isfinite (&x))
8029 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8030 tree ftype = TREE_TYPE (arg);
8031 double_int val;
8032 REAL_VALUE_TYPE r;
8034 switch (DECL_FUNCTION_CODE (fndecl))
8036 CASE_FLT_FN (BUILT_IN_IFLOOR):
8037 CASE_FLT_FN (BUILT_IN_LFLOOR):
8038 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8039 real_floor (&r, TYPE_MODE (ftype), &x);
8040 break;
8042 CASE_FLT_FN (BUILT_IN_ICEIL):
8043 CASE_FLT_FN (BUILT_IN_LCEIL):
8044 CASE_FLT_FN (BUILT_IN_LLCEIL):
8045 real_ceil (&r, TYPE_MODE (ftype), &x);
8046 break;
8048 CASE_FLT_FN (BUILT_IN_IROUND):
8049 CASE_FLT_FN (BUILT_IN_LROUND):
8050 CASE_FLT_FN (BUILT_IN_LLROUND):
8051 real_round (&r, TYPE_MODE (ftype), &x);
8052 break;
8054 default:
8055 gcc_unreachable ();
8058 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8059 if (double_int_fits_to_tree_p (itype, val))
8060 return double_int_to_tree (itype, val);
8064 switch (DECL_FUNCTION_CODE (fndecl))
8066 CASE_FLT_FN (BUILT_IN_LFLOOR):
8067 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8068 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8069 if (tree_expr_nonnegative_p (arg))
8070 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8071 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8072 break;
8073 default:;
8076 return fold_fixed_mathfn (loc, fndecl, arg);
8079 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8080 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8081 the argument to the call. Return NULL_TREE if no simplification can
8082 be made. */
8084 static tree
8085 fold_builtin_bitop (tree fndecl, tree arg)
8087 if (!validate_arg (arg, INTEGER_TYPE))
8088 return NULL_TREE;
8090 /* Optimize for constant argument. */
8091 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8093 HOST_WIDE_INT hi, width, result;
8094 unsigned HOST_WIDE_INT lo;
8095 tree type;
8097 type = TREE_TYPE (arg);
8098 width = TYPE_PRECISION (type);
8099 lo = TREE_INT_CST_LOW (arg);
8101 /* Clear all the bits that are beyond the type's precision. */
8102 if (width > HOST_BITS_PER_WIDE_INT)
8104 hi = TREE_INT_CST_HIGH (arg);
8105 if (width < HOST_BITS_PER_DOUBLE_INT)
8106 hi &= ~((unsigned HOST_WIDE_INT) (-1)
8107 << (width - HOST_BITS_PER_WIDE_INT));
8109 else
8111 hi = 0;
8112 if (width < HOST_BITS_PER_WIDE_INT)
8113 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8116 switch (DECL_FUNCTION_CODE (fndecl))
8118 CASE_INT_FN (BUILT_IN_FFS):
8119 if (lo != 0)
8120 result = ffs_hwi (lo);
8121 else if (hi != 0)
8122 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8123 else
8124 result = 0;
8125 break;
8127 CASE_INT_FN (BUILT_IN_CLZ):
8128 if (hi != 0)
8129 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8130 else if (lo != 0)
8131 result = width - floor_log2 (lo) - 1;
8132 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8133 result = width;
8134 break;
8136 CASE_INT_FN (BUILT_IN_CTZ):
8137 if (lo != 0)
8138 result = ctz_hwi (lo);
8139 else if (hi != 0)
8140 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8141 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8142 result = width;
8143 break;
8145 CASE_INT_FN (BUILT_IN_CLRSB):
8146 if (width > HOST_BITS_PER_WIDE_INT
8147 && (hi & ((unsigned HOST_WIDE_INT) 1
8148 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8150 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
8151 << (width - HOST_BITS_PER_WIDE_INT - 1));
8152 lo = ~lo;
8154 else if (width <= HOST_BITS_PER_WIDE_INT
8155 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8156 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
8157 if (hi != 0)
8158 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8159 else if (lo != 0)
8160 result = width - floor_log2 (lo) - 2;
8161 else
8162 result = width - 1;
8163 break;
8165 CASE_INT_FN (BUILT_IN_POPCOUNT):
8166 result = 0;
8167 while (lo)
8168 result++, lo &= lo - 1;
8169 while (hi)
8170 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8171 break;
8173 CASE_INT_FN (BUILT_IN_PARITY):
8174 result = 0;
8175 while (lo)
8176 result++, lo &= lo - 1;
8177 while (hi)
8178 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8179 result &= 1;
8180 break;
8182 default:
8183 gcc_unreachable ();
8186 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8189 return NULL_TREE;
8192 /* Fold function call to builtin_bswap and the short, long and long long
8193 variants. Return NULL_TREE if no simplification can be made. */
8194 static tree
8195 fold_builtin_bswap (tree fndecl, tree arg)
8197 if (! validate_arg (arg, INTEGER_TYPE))
8198 return NULL_TREE;
8200 /* Optimize constant value. */
8201 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8203 HOST_WIDE_INT hi, width, r_hi = 0;
8204 unsigned HOST_WIDE_INT lo, r_lo = 0;
8205 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8207 width = TYPE_PRECISION (type);
8208 lo = TREE_INT_CST_LOW (arg);
8209 hi = TREE_INT_CST_HIGH (arg);
8211 switch (DECL_FUNCTION_CODE (fndecl))
8213 case BUILT_IN_BSWAP16:
8214 case BUILT_IN_BSWAP32:
8215 case BUILT_IN_BSWAP64:
8217 int s;
8219 for (s = 0; s < width; s += 8)
8221 int d = width - s - 8;
8222 unsigned HOST_WIDE_INT byte;
8224 if (s < HOST_BITS_PER_WIDE_INT)
8225 byte = (lo >> s) & 0xff;
8226 else
8227 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8229 if (d < HOST_BITS_PER_WIDE_INT)
8230 r_lo |= byte << d;
8231 else
8232 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8236 break;
8238 default:
8239 gcc_unreachable ();
8242 if (width < HOST_BITS_PER_WIDE_INT)
8243 return build_int_cst (type, r_lo);
8244 else
8245 return build_int_cst_wide (type, r_lo, r_hi);
8248 return NULL_TREE;
8251 /* A subroutine of fold_builtin to fold the various logarithmic
8252 functions. Return NULL_TREE if no simplification can me made.
8253 FUNC is the corresponding MPFR logarithm function. */
8255 static tree
8256 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8257 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8259 if (validate_arg (arg, REAL_TYPE))
8261 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8262 tree res;
8263 const enum built_in_function fcode = builtin_mathfn_code (arg);
8265 /* Calculate the result when the argument is a constant. */
8266 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8267 return res;
8269 /* Special case, optimize logN(expN(x)) = x. */
8270 if (flag_unsafe_math_optimizations
8271 && ((func == mpfr_log
8272 && (fcode == BUILT_IN_EXP
8273 || fcode == BUILT_IN_EXPF
8274 || fcode == BUILT_IN_EXPL))
8275 || (func == mpfr_log2
8276 && (fcode == BUILT_IN_EXP2
8277 || fcode == BUILT_IN_EXP2F
8278 || fcode == BUILT_IN_EXP2L))
8279 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8280 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8282 /* Optimize logN(func()) for various exponential functions. We
8283 want to determine the value "x" and the power "exponent" in
8284 order to transform logN(x**exponent) into exponent*logN(x). */
8285 if (flag_unsafe_math_optimizations)
8287 tree exponent = 0, x = 0;
8289 switch (fcode)
8291 CASE_FLT_FN (BUILT_IN_EXP):
8292 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8293 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8294 dconst_e ()));
8295 exponent = CALL_EXPR_ARG (arg, 0);
8296 break;
8297 CASE_FLT_FN (BUILT_IN_EXP2):
8298 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8299 x = build_real (type, dconst2);
8300 exponent = CALL_EXPR_ARG (arg, 0);
8301 break;
8302 CASE_FLT_FN (BUILT_IN_EXP10):
8303 CASE_FLT_FN (BUILT_IN_POW10):
8304 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8306 REAL_VALUE_TYPE dconst10;
8307 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8308 x = build_real (type, dconst10);
8310 exponent = CALL_EXPR_ARG (arg, 0);
8311 break;
8312 CASE_FLT_FN (BUILT_IN_SQRT):
8313 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8314 x = CALL_EXPR_ARG (arg, 0);
8315 exponent = build_real (type, dconsthalf);
8316 break;
8317 CASE_FLT_FN (BUILT_IN_CBRT):
8318 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8319 x = CALL_EXPR_ARG (arg, 0);
8320 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8321 dconst_third ()));
8322 break;
8323 CASE_FLT_FN (BUILT_IN_POW):
8324 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8325 x = CALL_EXPR_ARG (arg, 0);
8326 exponent = CALL_EXPR_ARG (arg, 1);
8327 break;
8328 default:
8329 break;
8332 /* Now perform the optimization. */
8333 if (x && exponent)
8335 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8336 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8341 return NULL_TREE;
8344 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8345 NULL_TREE if no simplification can be made. */
8347 static tree
8348 fold_builtin_hypot (location_t loc, tree fndecl,
8349 tree arg0, tree arg1, tree type)
8351 tree res, narg0, narg1;
8353 if (!validate_arg (arg0, REAL_TYPE)
8354 || !validate_arg (arg1, REAL_TYPE))
8355 return NULL_TREE;
8357 /* Calculate the result when the argument is a constant. */
8358 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8359 return res;
8361 /* If either argument to hypot has a negate or abs, strip that off.
8362 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8363 narg0 = fold_strip_sign_ops (arg0);
8364 narg1 = fold_strip_sign_ops (arg1);
8365 if (narg0 || narg1)
8367 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8368 narg1 ? narg1 : arg1);
8371 /* If either argument is zero, hypot is fabs of the other. */
8372 if (real_zerop (arg0))
8373 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8374 else if (real_zerop (arg1))
8375 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8377 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8378 if (flag_unsafe_math_optimizations
8379 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8381 const REAL_VALUE_TYPE sqrt2_trunc
8382 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8383 return fold_build2_loc (loc, MULT_EXPR, type,
8384 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8385 build_real (type, sqrt2_trunc));
8388 return NULL_TREE;
8392 /* Fold a builtin function call to pow, powf, or powl. Return
8393 NULL_TREE if no simplification can be made. */
8394 static tree
8395 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8397 tree res;
8399 if (!validate_arg (arg0, REAL_TYPE)
8400 || !validate_arg (arg1, REAL_TYPE))
8401 return NULL_TREE;
8403 /* Calculate the result when the argument is a constant. */
8404 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8405 return res;
8407 /* Optimize pow(1.0,y) = 1.0. */
8408 if (real_onep (arg0))
8409 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8411 if (TREE_CODE (arg1) == REAL_CST
8412 && !TREE_OVERFLOW (arg1))
8414 REAL_VALUE_TYPE cint;
8415 REAL_VALUE_TYPE c;
8416 HOST_WIDE_INT n;
8418 c = TREE_REAL_CST (arg1);
8420 /* Optimize pow(x,0.0) = 1.0. */
8421 if (REAL_VALUES_EQUAL (c, dconst0))
8422 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8423 arg0);
8425 /* Optimize pow(x,1.0) = x. */
8426 if (REAL_VALUES_EQUAL (c, dconst1))
8427 return arg0;
8429 /* Optimize pow(x,-1.0) = 1.0/x. */
8430 if (REAL_VALUES_EQUAL (c, dconstm1))
8431 return fold_build2_loc (loc, RDIV_EXPR, type,
8432 build_real (type, dconst1), arg0);
8434 /* Optimize pow(x,0.5) = sqrt(x). */
8435 if (flag_unsafe_math_optimizations
8436 && REAL_VALUES_EQUAL (c, dconsthalf))
8438 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8440 if (sqrtfn != NULL_TREE)
8441 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8444 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8445 if (flag_unsafe_math_optimizations)
8447 const REAL_VALUE_TYPE dconstroot
8448 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8450 if (REAL_VALUES_EQUAL (c, dconstroot))
8452 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8453 if (cbrtfn != NULL_TREE)
8454 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8458 /* Check for an integer exponent. */
8459 n = real_to_integer (&c);
8460 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8461 if (real_identical (&c, &cint))
8463 /* Attempt to evaluate pow at compile-time, unless this should
8464 raise an exception. */
8465 if (TREE_CODE (arg0) == REAL_CST
8466 && !TREE_OVERFLOW (arg0)
8467 && (n > 0
8468 || (!flag_trapping_math && !flag_errno_math)
8469 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8471 REAL_VALUE_TYPE x;
8472 bool inexact;
8474 x = TREE_REAL_CST (arg0);
8475 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8476 if (flag_unsafe_math_optimizations || !inexact)
8477 return build_real (type, x);
8480 /* Strip sign ops from even integer powers. */
8481 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8483 tree narg0 = fold_strip_sign_ops (arg0);
8484 if (narg0)
8485 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8490 if (flag_unsafe_math_optimizations)
8492 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8494 /* Optimize pow(expN(x),y) = expN(x*y). */
8495 if (BUILTIN_EXPONENT_P (fcode))
8497 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8498 tree arg = CALL_EXPR_ARG (arg0, 0);
8499 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8500 return build_call_expr_loc (loc, expfn, 1, arg);
8503 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8504 if (BUILTIN_SQRT_P (fcode))
8506 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8507 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8508 build_real (type, dconsthalf));
8509 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8512 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8513 if (BUILTIN_CBRT_P (fcode))
8515 tree arg = CALL_EXPR_ARG (arg0, 0);
8516 if (tree_expr_nonnegative_p (arg))
8518 const REAL_VALUE_TYPE dconstroot
8519 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8520 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8521 build_real (type, dconstroot));
8522 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8526 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8527 if (fcode == BUILT_IN_POW
8528 || fcode == BUILT_IN_POWF
8529 || fcode == BUILT_IN_POWL)
8531 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8532 if (tree_expr_nonnegative_p (arg00))
8534 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8535 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8536 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8541 return NULL_TREE;
8544 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8545 Return NULL_TREE if no simplification can be made. */
8546 static tree
8547 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8548 tree arg0, tree arg1, tree type)
8550 if (!validate_arg (arg0, REAL_TYPE)
8551 || !validate_arg (arg1, INTEGER_TYPE))
8552 return NULL_TREE;
8554 /* Optimize pow(1.0,y) = 1.0. */
8555 if (real_onep (arg0))
8556 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8558 if (host_integerp (arg1, 0))
8560 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8562 /* Evaluate powi at compile-time. */
8563 if (TREE_CODE (arg0) == REAL_CST
8564 && !TREE_OVERFLOW (arg0))
8566 REAL_VALUE_TYPE x;
8567 x = TREE_REAL_CST (arg0);
8568 real_powi (&x, TYPE_MODE (type), &x, c);
8569 return build_real (type, x);
8572 /* Optimize pow(x,0) = 1.0. */
8573 if (c == 0)
8574 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8575 arg0);
8577 /* Optimize pow(x,1) = x. */
8578 if (c == 1)
8579 return arg0;
8581 /* Optimize pow(x,-1) = 1.0/x. */
8582 if (c == -1)
8583 return fold_build2_loc (loc, RDIV_EXPR, type,
8584 build_real (type, dconst1), arg0);
8587 return NULL_TREE;
8590 /* A subroutine of fold_builtin to fold the various exponent
8591 functions. Return NULL_TREE if no simplification can be made.
8592 FUNC is the corresponding MPFR exponent function. */
8594 static tree
8595 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8596 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8598 if (validate_arg (arg, REAL_TYPE))
8600 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8601 tree res;
8603 /* Calculate the result when the argument is a constant. */
8604 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8605 return res;
8607 /* Optimize expN(logN(x)) = x. */
8608 if (flag_unsafe_math_optimizations)
8610 const enum built_in_function fcode = builtin_mathfn_code (arg);
8612 if ((func == mpfr_exp
8613 && (fcode == BUILT_IN_LOG
8614 || fcode == BUILT_IN_LOGF
8615 || fcode == BUILT_IN_LOGL))
8616 || (func == mpfr_exp2
8617 && (fcode == BUILT_IN_LOG2
8618 || fcode == BUILT_IN_LOG2F
8619 || fcode == BUILT_IN_LOG2L))
8620 || (func == mpfr_exp10
8621 && (fcode == BUILT_IN_LOG10
8622 || fcode == BUILT_IN_LOG10F
8623 || fcode == BUILT_IN_LOG10L)))
8624 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8628 return NULL_TREE;
8631 /* Return true if VAR is a VAR_DECL or a component thereof. */
8633 static bool
8634 var_decl_component_p (tree var)
8636 tree inner = var;
8637 while (handled_component_p (inner))
8638 inner = TREE_OPERAND (inner, 0);
8639 return SSA_VAR_P (inner);
8642 /* Fold function call to builtin memset. Return
8643 NULL_TREE if no simplification can be made. */
8645 static tree
8646 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8647 tree type, bool ignore)
8649 tree var, ret, etype;
8650 unsigned HOST_WIDE_INT length, cval;
8652 if (! validate_arg (dest, POINTER_TYPE)
8653 || ! validate_arg (c, INTEGER_TYPE)
8654 || ! validate_arg (len, INTEGER_TYPE))
8655 return NULL_TREE;
8657 if (! host_integerp (len, 1))
8658 return NULL_TREE;
8660 /* If the LEN parameter is zero, return DEST. */
8661 if (integer_zerop (len))
8662 return omit_one_operand_loc (loc, type, dest, c);
8664 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8665 return NULL_TREE;
8667 var = dest;
8668 STRIP_NOPS (var);
8669 if (TREE_CODE (var) != ADDR_EXPR)
8670 return NULL_TREE;
8672 var = TREE_OPERAND (var, 0);
8673 if (TREE_THIS_VOLATILE (var))
8674 return NULL_TREE;
8676 etype = TREE_TYPE (var);
8677 if (TREE_CODE (etype) == ARRAY_TYPE)
8678 etype = TREE_TYPE (etype);
8680 if (!INTEGRAL_TYPE_P (etype)
8681 && !POINTER_TYPE_P (etype))
8682 return NULL_TREE;
8684 if (! var_decl_component_p (var))
8685 return NULL_TREE;
8687 length = tree_low_cst (len, 1);
8688 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8689 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8690 return NULL_TREE;
8692 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8693 return NULL_TREE;
8695 if (integer_zerop (c))
8696 cval = 0;
8697 else
8699 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8700 return NULL_TREE;
8702 cval = TREE_INT_CST_LOW (c);
8703 cval &= 0xff;
8704 cval |= cval << 8;
8705 cval |= cval << 16;
8706 cval |= (cval << 31) << 1;
8709 ret = build_int_cst_type (etype, cval);
8710 var = build_fold_indirect_ref_loc (loc,
8711 fold_convert_loc (loc,
8712 build_pointer_type (etype),
8713 dest));
8714 ret = build2 (MODIFY_EXPR, etype, var, ret);
8715 if (ignore)
8716 return ret;
8718 return omit_one_operand_loc (loc, type, dest, ret);
8721 /* Fold function call to builtin memset. Return
8722 NULL_TREE if no simplification can be made. */
8724 static tree
8725 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8727 if (! validate_arg (dest, POINTER_TYPE)
8728 || ! validate_arg (size, INTEGER_TYPE))
8729 return NULL_TREE;
8731 if (!ignore)
8732 return NULL_TREE;
8734 /* New argument list transforming bzero(ptr x, int y) to
8735 memset(ptr x, int 0, size_t y). This is done this way
8736 so that if it isn't expanded inline, we fallback to
8737 calling bzero instead of memset. */
8739 return fold_builtin_memset (loc, dest, integer_zero_node,
8740 fold_convert_loc (loc, size_type_node, size),
8741 void_type_node, ignore);
8744 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8745 NULL_TREE if no simplification can be made.
8746 If ENDP is 0, return DEST (like memcpy).
8747 If ENDP is 1, return DEST+LEN (like mempcpy).
8748 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8749 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8750 (memmove). */
8752 static tree
8753 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8754 tree len, tree type, bool ignore, int endp)
8756 tree destvar, srcvar, expr;
8758 if (! validate_arg (dest, POINTER_TYPE)
8759 || ! validate_arg (src, POINTER_TYPE)
8760 || ! validate_arg (len, INTEGER_TYPE))
8761 return NULL_TREE;
8763 /* If the LEN parameter is zero, return DEST. */
8764 if (integer_zerop (len))
8765 return omit_one_operand_loc (loc, type, dest, src);
8767 /* If SRC and DEST are the same (and not volatile), return
8768 DEST{,+LEN,+LEN-1}. */
8769 if (operand_equal_p (src, dest, 0))
8770 expr = len;
8771 else
8773 tree srctype, desttype;
8774 unsigned int src_align, dest_align;
8775 tree off0;
8777 if (endp == 3)
8779 src_align = get_pointer_alignment (src);
8780 dest_align = get_pointer_alignment (dest);
8782 /* Both DEST and SRC must be pointer types.
8783 ??? This is what old code did. Is the testing for pointer types
8784 really mandatory?
8786 If either SRC is readonly or length is 1, we can use memcpy. */
8787 if (!dest_align || !src_align)
8788 return NULL_TREE;
8789 if (readonly_data_expr (src)
8790 || (host_integerp (len, 1)
8791 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8792 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8794 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8795 if (!fn)
8796 return NULL_TREE;
8797 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8800 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8801 if (TREE_CODE (src) == ADDR_EXPR
8802 && TREE_CODE (dest) == ADDR_EXPR)
8804 tree src_base, dest_base, fn;
8805 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8806 HOST_WIDE_INT size = -1;
8807 HOST_WIDE_INT maxsize = -1;
8809 srcvar = TREE_OPERAND (src, 0);
8810 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8811 &size, &maxsize);
8812 destvar = TREE_OPERAND (dest, 0);
8813 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8814 &size, &maxsize);
8815 if (host_integerp (len, 1))
8816 maxsize = tree_low_cst (len, 1);
8817 else
8818 maxsize = -1;
8819 src_offset /= BITS_PER_UNIT;
8820 dest_offset /= BITS_PER_UNIT;
8821 if (SSA_VAR_P (src_base)
8822 && SSA_VAR_P (dest_base))
8824 if (operand_equal_p (src_base, dest_base, 0)
8825 && ranges_overlap_p (src_offset, maxsize,
8826 dest_offset, maxsize))
8827 return NULL_TREE;
8829 else if (TREE_CODE (src_base) == MEM_REF
8830 && TREE_CODE (dest_base) == MEM_REF)
8832 double_int off;
8833 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8834 TREE_OPERAND (dest_base, 0), 0))
8835 return NULL_TREE;
8836 off = mem_ref_offset (src_base) +
8837 double_int::from_shwi (src_offset);
8838 if (!off.fits_shwi ())
8839 return NULL_TREE;
8840 src_offset = off.low;
8841 off = mem_ref_offset (dest_base) +
8842 double_int::from_shwi (dest_offset);
8843 if (!off.fits_shwi ())
8844 return NULL_TREE;
8845 dest_offset = off.low;
8846 if (ranges_overlap_p (src_offset, maxsize,
8847 dest_offset, maxsize))
8848 return NULL_TREE;
8850 else
8851 return NULL_TREE;
8853 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8854 if (!fn)
8855 return NULL_TREE;
8856 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8859 /* If the destination and source do not alias optimize into
8860 memcpy as well. */
8861 if ((is_gimple_min_invariant (dest)
8862 || TREE_CODE (dest) == SSA_NAME)
8863 && (is_gimple_min_invariant (src)
8864 || TREE_CODE (src) == SSA_NAME))
8866 ao_ref destr, srcr;
8867 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8868 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8869 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8871 tree fn;
8872 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8873 if (!fn)
8874 return NULL_TREE;
8875 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8879 return NULL_TREE;
8882 if (!host_integerp (len, 0))
8883 return NULL_TREE;
8884 /* FIXME:
8885 This logic lose for arguments like (type *)malloc (sizeof (type)),
8886 since we strip the casts of up to VOID return value from malloc.
8887 Perhaps we ought to inherit type from non-VOID argument here? */
8888 STRIP_NOPS (src);
8889 STRIP_NOPS (dest);
8890 if (!POINTER_TYPE_P (TREE_TYPE (src))
8891 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8892 return NULL_TREE;
8893 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8894 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8896 tree tem = TREE_OPERAND (src, 0);
8897 STRIP_NOPS (tem);
8898 if (tem != TREE_OPERAND (src, 0))
8899 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8901 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8903 tree tem = TREE_OPERAND (dest, 0);
8904 STRIP_NOPS (tem);
8905 if (tem != TREE_OPERAND (dest, 0))
8906 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8908 srctype = TREE_TYPE (TREE_TYPE (src));
8909 if (TREE_CODE (srctype) == ARRAY_TYPE
8910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8912 srctype = TREE_TYPE (srctype);
8913 STRIP_NOPS (src);
8914 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8916 desttype = TREE_TYPE (TREE_TYPE (dest));
8917 if (TREE_CODE (desttype) == ARRAY_TYPE
8918 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8920 desttype = TREE_TYPE (desttype);
8921 STRIP_NOPS (dest);
8922 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8924 if (TREE_ADDRESSABLE (srctype)
8925 || TREE_ADDRESSABLE (desttype))
8926 return NULL_TREE;
8928 src_align = get_pointer_alignment (src);
8929 dest_align = get_pointer_alignment (dest);
8930 if (dest_align < TYPE_ALIGN (desttype)
8931 || src_align < TYPE_ALIGN (srctype))
8932 return NULL_TREE;
8934 if (!ignore)
8935 dest = builtin_save_expr (dest);
8937 /* Build accesses at offset zero with a ref-all character type. */
8938 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8939 ptr_mode, true), 0);
8941 destvar = dest;
8942 STRIP_NOPS (destvar);
8943 if (TREE_CODE (destvar) == ADDR_EXPR
8944 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8945 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8946 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8947 else
8948 destvar = NULL_TREE;
8950 srcvar = src;
8951 STRIP_NOPS (srcvar);
8952 if (TREE_CODE (srcvar) == ADDR_EXPR
8953 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8954 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8956 if (!destvar
8957 || src_align >= TYPE_ALIGN (desttype))
8958 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8959 srcvar, off0);
8960 else if (!STRICT_ALIGNMENT)
8962 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8963 src_align);
8964 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8966 else
8967 srcvar = NULL_TREE;
8969 else
8970 srcvar = NULL_TREE;
8972 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8973 return NULL_TREE;
8975 if (srcvar == NULL_TREE)
8977 STRIP_NOPS (src);
8978 if (src_align >= TYPE_ALIGN (desttype))
8979 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8980 else
8982 if (STRICT_ALIGNMENT)
8983 return NULL_TREE;
8984 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8985 src_align);
8986 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8989 else if (destvar == NULL_TREE)
8991 STRIP_NOPS (dest);
8992 if (dest_align >= TYPE_ALIGN (srctype))
8993 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8994 else
8996 if (STRICT_ALIGNMENT)
8997 return NULL_TREE;
8998 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8999 dest_align);
9000 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9004 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9007 if (ignore)
9008 return expr;
9010 if (endp == 0 || endp == 3)
9011 return omit_one_operand_loc (loc, type, dest, expr);
9013 if (expr == len)
9014 expr = NULL_TREE;
9016 if (endp == 2)
9017 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9018 ssize_int (1));
9020 dest = fold_build_pointer_plus_loc (loc, dest, len);
9021 dest = fold_convert_loc (loc, type, dest);
9022 if (expr)
9023 dest = omit_one_operand_loc (loc, type, dest, expr);
9024 return dest;
9027 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9028 If LEN is not NULL, it represents the length of the string to be
9029 copied. Return NULL_TREE if no simplification can be made. */
9031 tree
9032 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9034 tree fn;
9036 if (!validate_arg (dest, POINTER_TYPE)
9037 || !validate_arg (src, POINTER_TYPE))
9038 return NULL_TREE;
9040 /* If SRC and DEST are the same (and not volatile), return DEST. */
9041 if (operand_equal_p (src, dest, 0))
9042 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9044 if (optimize_function_for_size_p (cfun))
9045 return NULL_TREE;
9047 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9048 if (!fn)
9049 return NULL_TREE;
9051 if (!len)
9053 len = c_strlen (src, 1);
9054 if (! len || TREE_SIDE_EFFECTS (len))
9055 return NULL_TREE;
9058 len = fold_convert_loc (loc, size_type_node, len);
9059 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9060 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9061 build_call_expr_loc (loc, fn, 3, dest, src, len));
9064 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9065 Return NULL_TREE if no simplification can be made. */
9067 static tree
9068 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9070 tree fn, len, lenp1, call, type;
9072 if (!validate_arg (dest, POINTER_TYPE)
9073 || !validate_arg (src, POINTER_TYPE))
9074 return NULL_TREE;
9076 len = c_strlen (src, 1);
9077 if (!len
9078 || TREE_CODE (len) != INTEGER_CST)
9079 return NULL_TREE;
9081 if (optimize_function_for_size_p (cfun)
9082 /* If length is zero it's small enough. */
9083 && !integer_zerop (len))
9084 return NULL_TREE;
9086 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9087 if (!fn)
9088 return NULL_TREE;
9090 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9091 fold_convert_loc (loc, size_type_node, len),
9092 build_int_cst (size_type_node, 1));
9093 /* We use dest twice in building our expression. Save it from
9094 multiple expansions. */
9095 dest = builtin_save_expr (dest);
9096 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9098 type = TREE_TYPE (TREE_TYPE (fndecl));
9099 dest = fold_build_pointer_plus_loc (loc, dest, len);
9100 dest = fold_convert_loc (loc, type, dest);
9101 dest = omit_one_operand_loc (loc, type, dest, call);
9102 return dest;
9105 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9106 If SLEN is not NULL, it represents the length of the source string.
9107 Return NULL_TREE if no simplification can be made. */
9109 tree
9110 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9111 tree src, tree len, tree slen)
9113 tree fn;
9115 if (!validate_arg (dest, POINTER_TYPE)
9116 || !validate_arg (src, POINTER_TYPE)
9117 || !validate_arg (len, INTEGER_TYPE))
9118 return NULL_TREE;
9120 /* If the LEN parameter is zero, return DEST. */
9121 if (integer_zerop (len))
9122 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9124 /* We can't compare slen with len as constants below if len is not a
9125 constant. */
9126 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9127 return NULL_TREE;
9129 if (!slen)
9130 slen = c_strlen (src, 1);
9132 /* Now, we must be passed a constant src ptr parameter. */
9133 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9134 return NULL_TREE;
9136 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9138 /* We do not support simplification of this case, though we do
9139 support it when expanding trees into RTL. */
9140 /* FIXME: generate a call to __builtin_memset. */
9141 if (tree_int_cst_lt (slen, len))
9142 return NULL_TREE;
9144 /* OK transform into builtin memcpy. */
9145 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9146 if (!fn)
9147 return NULL_TREE;
9149 len = fold_convert_loc (loc, size_type_node, len);
9150 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9151 build_call_expr_loc (loc, fn, 3, dest, src, len));
9154 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9155 arguments to the call, and TYPE is its return type.
9156 Return NULL_TREE if no simplification can be made. */
9158 static tree
9159 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9161 if (!validate_arg (arg1, POINTER_TYPE)
9162 || !validate_arg (arg2, INTEGER_TYPE)
9163 || !validate_arg (len, INTEGER_TYPE))
9164 return NULL_TREE;
9165 else
9167 const char *p1;
9169 if (TREE_CODE (arg2) != INTEGER_CST
9170 || !host_integerp (len, 1))
9171 return NULL_TREE;
9173 p1 = c_getstr (arg1);
9174 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9176 char c;
9177 const char *r;
9178 tree tem;
9180 if (target_char_cast (arg2, &c))
9181 return NULL_TREE;
9183 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9185 if (r == NULL)
9186 return build_int_cst (TREE_TYPE (arg1), 0);
9188 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9189 return fold_convert_loc (loc, type, tem);
9191 return NULL_TREE;
9195 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9196 Return NULL_TREE if no simplification can be made. */
9198 static tree
9199 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9201 const char *p1, *p2;
9203 if (!validate_arg (arg1, POINTER_TYPE)
9204 || !validate_arg (arg2, POINTER_TYPE)
9205 || !validate_arg (len, INTEGER_TYPE))
9206 return NULL_TREE;
9208 /* If the LEN parameter is zero, return zero. */
9209 if (integer_zerop (len))
9210 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9211 arg1, arg2);
9213 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9214 if (operand_equal_p (arg1, arg2, 0))
9215 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9217 p1 = c_getstr (arg1);
9218 p2 = c_getstr (arg2);
9220 /* If all arguments are constant, and the value of len is not greater
9221 than the lengths of arg1 and arg2, evaluate at compile-time. */
9222 if (host_integerp (len, 1) && p1 && p2
9223 && compare_tree_int (len, strlen (p1) + 1) <= 0
9224 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9226 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9228 if (r > 0)
9229 return integer_one_node;
9230 else if (r < 0)
9231 return integer_minus_one_node;
9232 else
9233 return integer_zero_node;
9236 /* If len parameter is one, return an expression corresponding to
9237 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9238 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9240 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9241 tree cst_uchar_ptr_node
9242 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9244 tree ind1
9245 = fold_convert_loc (loc, integer_type_node,
9246 build1 (INDIRECT_REF, cst_uchar_node,
9247 fold_convert_loc (loc,
9248 cst_uchar_ptr_node,
9249 arg1)));
9250 tree ind2
9251 = fold_convert_loc (loc, integer_type_node,
9252 build1 (INDIRECT_REF, cst_uchar_node,
9253 fold_convert_loc (loc,
9254 cst_uchar_ptr_node,
9255 arg2)));
9256 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9259 return NULL_TREE;
9262 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9263 Return NULL_TREE if no simplification can be made. */
9265 static tree
9266 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9268 const char *p1, *p2;
9270 if (!validate_arg (arg1, POINTER_TYPE)
9271 || !validate_arg (arg2, POINTER_TYPE))
9272 return NULL_TREE;
9274 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9275 if (operand_equal_p (arg1, arg2, 0))
9276 return integer_zero_node;
9278 p1 = c_getstr (arg1);
9279 p2 = c_getstr (arg2);
9281 if (p1 && p2)
9283 const int i = strcmp (p1, p2);
9284 if (i < 0)
9285 return integer_minus_one_node;
9286 else if (i > 0)
9287 return integer_one_node;
9288 else
9289 return integer_zero_node;
9292 /* If the second arg is "", return *(const unsigned char*)arg1. */
9293 if (p2 && *p2 == '\0')
9295 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9296 tree cst_uchar_ptr_node
9297 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9299 return fold_convert_loc (loc, integer_type_node,
9300 build1 (INDIRECT_REF, cst_uchar_node,
9301 fold_convert_loc (loc,
9302 cst_uchar_ptr_node,
9303 arg1)));
9306 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9307 if (p1 && *p1 == '\0')
9309 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9310 tree cst_uchar_ptr_node
9311 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9313 tree temp
9314 = fold_convert_loc (loc, integer_type_node,
9315 build1 (INDIRECT_REF, cst_uchar_node,
9316 fold_convert_loc (loc,
9317 cst_uchar_ptr_node,
9318 arg2)));
9319 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9322 return NULL_TREE;
9325 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9326 Return NULL_TREE if no simplification can be made. */
9328 static tree
9329 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9331 const char *p1, *p2;
9333 if (!validate_arg (arg1, POINTER_TYPE)
9334 || !validate_arg (arg2, POINTER_TYPE)
9335 || !validate_arg (len, INTEGER_TYPE))
9336 return NULL_TREE;
9338 /* If the LEN parameter is zero, return zero. */
9339 if (integer_zerop (len))
9340 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9341 arg1, arg2);
9343 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9344 if (operand_equal_p (arg1, arg2, 0))
9345 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9347 p1 = c_getstr (arg1);
9348 p2 = c_getstr (arg2);
9350 if (host_integerp (len, 1) && p1 && p2)
9352 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9353 if (i > 0)
9354 return integer_one_node;
9355 else if (i < 0)
9356 return integer_minus_one_node;
9357 else
9358 return integer_zero_node;
9361 /* If the second arg is "", and the length is greater than zero,
9362 return *(const unsigned char*)arg1. */
9363 if (p2 && *p2 == '\0'
9364 && TREE_CODE (len) == INTEGER_CST
9365 && tree_int_cst_sgn (len) == 1)
9367 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9368 tree cst_uchar_ptr_node
9369 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9371 return fold_convert_loc (loc, integer_type_node,
9372 build1 (INDIRECT_REF, cst_uchar_node,
9373 fold_convert_loc (loc,
9374 cst_uchar_ptr_node,
9375 arg1)));
9378 /* If the first arg is "", and the length is greater than zero,
9379 return -*(const unsigned char*)arg2. */
9380 if (p1 && *p1 == '\0'
9381 && TREE_CODE (len) == INTEGER_CST
9382 && tree_int_cst_sgn (len) == 1)
9384 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9385 tree cst_uchar_ptr_node
9386 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9388 tree temp = fold_convert_loc (loc, integer_type_node,
9389 build1 (INDIRECT_REF, cst_uchar_node,
9390 fold_convert_loc (loc,
9391 cst_uchar_ptr_node,
9392 arg2)));
9393 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9396 /* If len parameter is one, return an expression corresponding to
9397 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9398 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9400 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9401 tree cst_uchar_ptr_node
9402 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9404 tree ind1 = fold_convert_loc (loc, integer_type_node,
9405 build1 (INDIRECT_REF, cst_uchar_node,
9406 fold_convert_loc (loc,
9407 cst_uchar_ptr_node,
9408 arg1)));
9409 tree ind2 = fold_convert_loc (loc, integer_type_node,
9410 build1 (INDIRECT_REF, cst_uchar_node,
9411 fold_convert_loc (loc,
9412 cst_uchar_ptr_node,
9413 arg2)));
9414 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9417 return NULL_TREE;
9420 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9421 ARG. Return NULL_TREE if no simplification can be made. */
9423 static tree
9424 fold_builtin_signbit (location_t loc, tree arg, tree type)
9426 if (!validate_arg (arg, REAL_TYPE))
9427 return NULL_TREE;
9429 /* If ARG is a compile-time constant, determine the result. */
9430 if (TREE_CODE (arg) == REAL_CST
9431 && !TREE_OVERFLOW (arg))
9433 REAL_VALUE_TYPE c;
9435 c = TREE_REAL_CST (arg);
9436 return (REAL_VALUE_NEGATIVE (c)
9437 ? build_one_cst (type)
9438 : build_zero_cst (type));
9441 /* If ARG is non-negative, the result is always zero. */
9442 if (tree_expr_nonnegative_p (arg))
9443 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9445 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9446 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9447 return fold_convert (type,
9448 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9449 build_real (TREE_TYPE (arg), dconst0)));
9451 return NULL_TREE;
9454 /* Fold function call to builtin copysign, copysignf or copysignl with
9455 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9456 be made. */
9458 static tree
9459 fold_builtin_copysign (location_t loc, tree fndecl,
9460 tree arg1, tree arg2, tree type)
9462 tree tem;
9464 if (!validate_arg (arg1, REAL_TYPE)
9465 || !validate_arg (arg2, REAL_TYPE))
9466 return NULL_TREE;
9468 /* copysign(X,X) is X. */
9469 if (operand_equal_p (arg1, arg2, 0))
9470 return fold_convert_loc (loc, type, arg1);
9472 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9473 if (TREE_CODE (arg1) == REAL_CST
9474 && TREE_CODE (arg2) == REAL_CST
9475 && !TREE_OVERFLOW (arg1)
9476 && !TREE_OVERFLOW (arg2))
9478 REAL_VALUE_TYPE c1, c2;
9480 c1 = TREE_REAL_CST (arg1);
9481 c2 = TREE_REAL_CST (arg2);
9482 /* c1.sign := c2.sign. */
9483 real_copysign (&c1, &c2);
9484 return build_real (type, c1);
9487 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9488 Remember to evaluate Y for side-effects. */
9489 if (tree_expr_nonnegative_p (arg2))
9490 return omit_one_operand_loc (loc, type,
9491 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9492 arg2);
9494 /* Strip sign changing operations for the first argument. */
9495 tem = fold_strip_sign_ops (arg1);
9496 if (tem)
9497 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9499 return NULL_TREE;
9502 /* Fold a call to builtin isascii with argument ARG. */
9504 static tree
9505 fold_builtin_isascii (location_t loc, tree arg)
9507 if (!validate_arg (arg, INTEGER_TYPE))
9508 return NULL_TREE;
9509 else
9511 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9512 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9513 build_int_cst (integer_type_node,
9514 ~ (unsigned HOST_WIDE_INT) 0x7f));
9515 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9516 arg, integer_zero_node);
9520 /* Fold a call to builtin toascii with argument ARG. */
9522 static tree
9523 fold_builtin_toascii (location_t loc, tree arg)
9525 if (!validate_arg (arg, INTEGER_TYPE))
9526 return NULL_TREE;
9528 /* Transform toascii(c) -> (c & 0x7f). */
9529 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9530 build_int_cst (integer_type_node, 0x7f));
9533 /* Fold a call to builtin isdigit with argument ARG. */
9535 static tree
9536 fold_builtin_isdigit (location_t loc, tree arg)
9538 if (!validate_arg (arg, INTEGER_TYPE))
9539 return NULL_TREE;
9540 else
9542 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9543 /* According to the C standard, isdigit is unaffected by locale.
9544 However, it definitely is affected by the target character set. */
9545 unsigned HOST_WIDE_INT target_digit0
9546 = lang_hooks.to_target_charset ('0');
9548 if (target_digit0 == 0)
9549 return NULL_TREE;
9551 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9552 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9553 build_int_cst (unsigned_type_node, target_digit0));
9554 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9555 build_int_cst (unsigned_type_node, 9));
9559 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9561 static tree
9562 fold_builtin_fabs (location_t loc, tree arg, tree type)
9564 if (!validate_arg (arg, REAL_TYPE))
9565 return NULL_TREE;
9567 arg = fold_convert_loc (loc, type, arg);
9568 if (TREE_CODE (arg) == REAL_CST)
9569 return fold_abs_const (arg, type);
9570 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9573 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9575 static tree
9576 fold_builtin_abs (location_t loc, tree arg, tree type)
9578 if (!validate_arg (arg, INTEGER_TYPE))
9579 return NULL_TREE;
9581 arg = fold_convert_loc (loc, type, arg);
9582 if (TREE_CODE (arg) == INTEGER_CST)
9583 return fold_abs_const (arg, type);
9584 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9587 /* Fold a fma operation with arguments ARG[012]. */
9589 tree
9590 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9591 tree type, tree arg0, tree arg1, tree arg2)
9593 if (TREE_CODE (arg0) == REAL_CST
9594 && TREE_CODE (arg1) == REAL_CST
9595 && TREE_CODE (arg2) == REAL_CST)
9596 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9598 return NULL_TREE;
9601 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9603 static tree
9604 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9606 if (validate_arg (arg0, REAL_TYPE)
9607 && validate_arg(arg1, REAL_TYPE)
9608 && validate_arg(arg2, REAL_TYPE))
9610 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9611 if (tem)
9612 return tem;
9614 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9615 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9616 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9618 return NULL_TREE;
9621 /* Fold a call to builtin fmin or fmax. */
9623 static tree
9624 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9625 tree type, bool max)
9627 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9629 /* Calculate the result when the argument is a constant. */
9630 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9632 if (res)
9633 return res;
9635 /* If either argument is NaN, return the other one. Avoid the
9636 transformation if we get (and honor) a signalling NaN. Using
9637 omit_one_operand() ensures we create a non-lvalue. */
9638 if (TREE_CODE (arg0) == REAL_CST
9639 && real_isnan (&TREE_REAL_CST (arg0))
9640 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9641 || ! TREE_REAL_CST (arg0).signalling))
9642 return omit_one_operand_loc (loc, type, arg1, arg0);
9643 if (TREE_CODE (arg1) == REAL_CST
9644 && real_isnan (&TREE_REAL_CST (arg1))
9645 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9646 || ! TREE_REAL_CST (arg1).signalling))
9647 return omit_one_operand_loc (loc, type, arg0, arg1);
9649 /* Transform fmin/fmax(x,x) -> x. */
9650 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9651 return omit_one_operand_loc (loc, type, arg0, arg1);
9653 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9654 functions to return the numeric arg if the other one is NaN.
9655 These tree codes don't honor that, so only transform if
9656 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9657 handled, so we don't have to worry about it either. */
9658 if (flag_finite_math_only)
9659 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9660 fold_convert_loc (loc, type, arg0),
9661 fold_convert_loc (loc, type, arg1));
9663 return NULL_TREE;
9666 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9668 static tree
9669 fold_builtin_carg (location_t loc, tree arg, tree type)
9671 if (validate_arg (arg, COMPLEX_TYPE)
9672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9674 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9676 if (atan2_fn)
9678 tree new_arg = builtin_save_expr (arg);
9679 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9680 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9681 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9685 return NULL_TREE;
9688 /* Fold a call to builtin logb/ilogb. */
9690 static tree
9691 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9693 if (! validate_arg (arg, REAL_TYPE))
9694 return NULL_TREE;
9696 STRIP_NOPS (arg);
9698 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9700 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9702 switch (value->cl)
9704 case rvc_nan:
9705 case rvc_inf:
9706 /* If arg is Inf or NaN and we're logb, return it. */
9707 if (TREE_CODE (rettype) == REAL_TYPE)
9708 return fold_convert_loc (loc, rettype, arg);
9709 /* Fall through... */
9710 case rvc_zero:
9711 /* Zero may set errno and/or raise an exception for logb, also
9712 for ilogb we don't know FP_ILOGB0. */
9713 return NULL_TREE;
9714 case rvc_normal:
9715 /* For normal numbers, proceed iff radix == 2. In GCC,
9716 normalized significands are in the range [0.5, 1.0). We
9717 want the exponent as if they were [1.0, 2.0) so get the
9718 exponent and subtract 1. */
9719 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9720 return fold_convert_loc (loc, rettype,
9721 build_int_cst (integer_type_node,
9722 REAL_EXP (value)-1));
9723 break;
9727 return NULL_TREE;
9730 /* Fold a call to builtin significand, if radix == 2. */
9732 static tree
9733 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9735 if (! validate_arg (arg, REAL_TYPE))
9736 return NULL_TREE;
9738 STRIP_NOPS (arg);
9740 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9742 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9744 switch (value->cl)
9746 case rvc_zero:
9747 case rvc_nan:
9748 case rvc_inf:
9749 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9750 return fold_convert_loc (loc, rettype, arg);
9751 case rvc_normal:
9752 /* For normal numbers, proceed iff radix == 2. */
9753 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9755 REAL_VALUE_TYPE result = *value;
9756 /* In GCC, normalized significands are in the range [0.5,
9757 1.0). We want them to be [1.0, 2.0) so set the
9758 exponent to 1. */
9759 SET_REAL_EXP (&result, 1);
9760 return build_real (rettype, result);
9762 break;
9766 return NULL_TREE;
9769 /* Fold a call to builtin frexp, we can assume the base is 2. */
9771 static tree
9772 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9774 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9775 return NULL_TREE;
9777 STRIP_NOPS (arg0);
9779 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9780 return NULL_TREE;
9782 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9784 /* Proceed if a valid pointer type was passed in. */
9785 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9787 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9788 tree frac, exp;
9790 switch (value->cl)
9792 case rvc_zero:
9793 /* For +-0, return (*exp = 0, +-0). */
9794 exp = integer_zero_node;
9795 frac = arg0;
9796 break;
9797 case rvc_nan:
9798 case rvc_inf:
9799 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9800 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9801 case rvc_normal:
9803 /* Since the frexp function always expects base 2, and in
9804 GCC normalized significands are already in the range
9805 [0.5, 1.0), we have exactly what frexp wants. */
9806 REAL_VALUE_TYPE frac_rvt = *value;
9807 SET_REAL_EXP (&frac_rvt, 0);
9808 frac = build_real (rettype, frac_rvt);
9809 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9811 break;
9812 default:
9813 gcc_unreachable ();
9816 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9817 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9818 TREE_SIDE_EFFECTS (arg1) = 1;
9819 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9822 return NULL_TREE;
9825 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9826 then we can assume the base is two. If it's false, then we have to
9827 check the mode of the TYPE parameter in certain cases. */
9829 static tree
9830 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9831 tree type, bool ldexp)
9833 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9835 STRIP_NOPS (arg0);
9836 STRIP_NOPS (arg1);
9838 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9839 if (real_zerop (arg0) || integer_zerop (arg1)
9840 || (TREE_CODE (arg0) == REAL_CST
9841 && !real_isfinite (&TREE_REAL_CST (arg0))))
9842 return omit_one_operand_loc (loc, type, arg0, arg1);
9844 /* If both arguments are constant, then try to evaluate it. */
9845 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9846 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9847 && host_integerp (arg1, 0))
9849 /* Bound the maximum adjustment to twice the range of the
9850 mode's valid exponents. Use abs to ensure the range is
9851 positive as a sanity check. */
9852 const long max_exp_adj = 2 *
9853 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9854 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9856 /* Get the user-requested adjustment. */
9857 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9859 /* The requested adjustment must be inside this range. This
9860 is a preliminary cap to avoid things like overflow, we
9861 may still fail to compute the result for other reasons. */
9862 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9864 REAL_VALUE_TYPE initial_result;
9866 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9868 /* Ensure we didn't overflow. */
9869 if (! real_isinf (&initial_result))
9871 const REAL_VALUE_TYPE trunc_result
9872 = real_value_truncate (TYPE_MODE (type), initial_result);
9874 /* Only proceed if the target mode can hold the
9875 resulting value. */
9876 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9877 return build_real (type, trunc_result);
9883 return NULL_TREE;
9886 /* Fold a call to builtin modf. */
9888 static tree
9889 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9891 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9892 return NULL_TREE;
9894 STRIP_NOPS (arg0);
9896 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9897 return NULL_TREE;
9899 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9901 /* Proceed if a valid pointer type was passed in. */
9902 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9904 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9905 REAL_VALUE_TYPE trunc, frac;
9907 switch (value->cl)
9909 case rvc_nan:
9910 case rvc_zero:
9911 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9912 trunc = frac = *value;
9913 break;
9914 case rvc_inf:
9915 /* For +-Inf, return (*arg1 = arg0, +-0). */
9916 frac = dconst0;
9917 frac.sign = value->sign;
9918 trunc = *value;
9919 break;
9920 case rvc_normal:
9921 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9922 real_trunc (&trunc, VOIDmode, value);
9923 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9924 /* If the original number was negative and already
9925 integral, then the fractional part is -0.0. */
9926 if (value->sign && frac.cl == rvc_zero)
9927 frac.sign = value->sign;
9928 break;
9931 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9932 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9933 build_real (rettype, trunc));
9934 TREE_SIDE_EFFECTS (arg1) = 1;
9935 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9936 build_real (rettype, frac));
9939 return NULL_TREE;
9942 /* Given a location LOC, an interclass builtin function decl FNDECL
9943 and its single argument ARG, return an folded expression computing
9944 the same, or NULL_TREE if we either couldn't or didn't want to fold
9945 (the latter happen if there's an RTL instruction available). */
9947 static tree
9948 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9950 enum machine_mode mode;
9952 if (!validate_arg (arg, REAL_TYPE))
9953 return NULL_TREE;
9955 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9956 return NULL_TREE;
9958 mode = TYPE_MODE (TREE_TYPE (arg));
9960 /* If there is no optab, try generic code. */
9961 switch (DECL_FUNCTION_CODE (fndecl))
9963 tree result;
9965 CASE_FLT_FN (BUILT_IN_ISINF):
9967 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9968 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9969 tree const type = TREE_TYPE (arg);
9970 REAL_VALUE_TYPE r;
9971 char buf[128];
9973 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9974 real_from_string (&r, buf);
9975 result = build_call_expr (isgr_fn, 2,
9976 fold_build1_loc (loc, ABS_EXPR, type, arg),
9977 build_real (type, r));
9978 return result;
9980 CASE_FLT_FN (BUILT_IN_FINITE):
9981 case BUILT_IN_ISFINITE:
9983 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9984 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9985 tree const type = TREE_TYPE (arg);
9986 REAL_VALUE_TYPE r;
9987 char buf[128];
9989 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9990 real_from_string (&r, buf);
9991 result = build_call_expr (isle_fn, 2,
9992 fold_build1_loc (loc, ABS_EXPR, type, arg),
9993 build_real (type, r));
9994 /*result = fold_build2_loc (loc, UNGT_EXPR,
9995 TREE_TYPE (TREE_TYPE (fndecl)),
9996 fold_build1_loc (loc, ABS_EXPR, type, arg),
9997 build_real (type, r));
9998 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9999 TREE_TYPE (TREE_TYPE (fndecl)),
10000 result);*/
10001 return result;
10003 case BUILT_IN_ISNORMAL:
10005 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10006 islessequal(fabs(x),DBL_MAX). */
10007 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10008 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10009 tree const type = TREE_TYPE (arg);
10010 REAL_VALUE_TYPE rmax, rmin;
10011 char buf[128];
10013 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10014 real_from_string (&rmax, buf);
10015 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10016 real_from_string (&rmin, buf);
10017 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10018 result = build_call_expr (isle_fn, 2, arg,
10019 build_real (type, rmax));
10020 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10021 build_call_expr (isge_fn, 2, arg,
10022 build_real (type, rmin)));
10023 return result;
10025 default:
10026 break;
10029 return NULL_TREE;
10032 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10033 ARG is the argument for the call. */
10035 static tree
10036 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10038 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10039 REAL_VALUE_TYPE r;
10041 if (!validate_arg (arg, REAL_TYPE))
10042 return NULL_TREE;
10044 switch (builtin_index)
10046 case BUILT_IN_ISINF:
10047 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10048 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10050 if (TREE_CODE (arg) == REAL_CST)
10052 r = TREE_REAL_CST (arg);
10053 if (real_isinf (&r))
10054 return real_compare (GT_EXPR, &r, &dconst0)
10055 ? integer_one_node : integer_minus_one_node;
10056 else
10057 return integer_zero_node;
10060 return NULL_TREE;
10062 case BUILT_IN_ISINF_SIGN:
10064 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10065 /* In a boolean context, GCC will fold the inner COND_EXPR to
10066 1. So e.g. "if (isinf_sign(x))" would be folded to just
10067 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10068 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10069 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10070 tree tmp = NULL_TREE;
10072 arg = builtin_save_expr (arg);
10074 if (signbit_fn && isinf_fn)
10076 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10077 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10079 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10080 signbit_call, integer_zero_node);
10081 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10082 isinf_call, integer_zero_node);
10084 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10085 integer_minus_one_node, integer_one_node);
10086 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10087 isinf_call, tmp,
10088 integer_zero_node);
10091 return tmp;
10094 case BUILT_IN_ISFINITE:
10095 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10096 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10097 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10099 if (TREE_CODE (arg) == REAL_CST)
10101 r = TREE_REAL_CST (arg);
10102 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10105 return NULL_TREE;
10107 case BUILT_IN_ISNAN:
10108 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10109 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10111 if (TREE_CODE (arg) == REAL_CST)
10113 r = TREE_REAL_CST (arg);
10114 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10117 arg = builtin_save_expr (arg);
10118 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10120 default:
10121 gcc_unreachable ();
10125 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10126 This builtin will generate code to return the appropriate floating
10127 point classification depending on the value of the floating point
10128 number passed in. The possible return values must be supplied as
10129 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10130 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10131 one floating point argument which is "type generic". */
10133 static tree
10134 fold_builtin_fpclassify (location_t loc, tree exp)
10136 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10137 arg, type, res, tmp;
10138 enum machine_mode mode;
10139 REAL_VALUE_TYPE r;
10140 char buf[128];
10142 /* Verify the required arguments in the original call. */
10143 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10144 INTEGER_TYPE, INTEGER_TYPE,
10145 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10146 return NULL_TREE;
10148 fp_nan = CALL_EXPR_ARG (exp, 0);
10149 fp_infinite = CALL_EXPR_ARG (exp, 1);
10150 fp_normal = CALL_EXPR_ARG (exp, 2);
10151 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10152 fp_zero = CALL_EXPR_ARG (exp, 4);
10153 arg = CALL_EXPR_ARG (exp, 5);
10154 type = TREE_TYPE (arg);
10155 mode = TYPE_MODE (type);
10156 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10158 /* fpclassify(x) ->
10159 isnan(x) ? FP_NAN :
10160 (fabs(x) == Inf ? FP_INFINITE :
10161 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10162 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10164 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10165 build_real (type, dconst0));
10166 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10167 tmp, fp_zero, fp_subnormal);
10169 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10170 real_from_string (&r, buf);
10171 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10172 arg, build_real (type, r));
10173 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10175 if (HONOR_INFINITIES (mode))
10177 real_inf (&r);
10178 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10179 build_real (type, r));
10180 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10181 fp_infinite, res);
10184 if (HONOR_NANS (mode))
10186 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10187 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10190 return res;
10193 /* Fold a call to an unordered comparison function such as
10194 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10195 being called and ARG0 and ARG1 are the arguments for the call.
10196 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10197 the opposite of the desired result. UNORDERED_CODE is used
10198 for modes that can hold NaNs and ORDERED_CODE is used for
10199 the rest. */
10201 static tree
10202 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10203 enum tree_code unordered_code,
10204 enum tree_code ordered_code)
10206 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10207 enum tree_code code;
10208 tree type0, type1;
10209 enum tree_code code0, code1;
10210 tree cmp_type = NULL_TREE;
10212 type0 = TREE_TYPE (arg0);
10213 type1 = TREE_TYPE (arg1);
10215 code0 = TREE_CODE (type0);
10216 code1 = TREE_CODE (type1);
10218 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10219 /* Choose the wider of two real types. */
10220 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10221 ? type0 : type1;
10222 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10223 cmp_type = type0;
10224 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10225 cmp_type = type1;
10227 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10228 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10230 if (unordered_code == UNORDERED_EXPR)
10232 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10233 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10234 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10237 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10238 : ordered_code;
10239 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10240 fold_build2_loc (loc, code, type, arg0, arg1));
10243 /* Fold a call to built-in function FNDECL with 0 arguments.
10244 IGNORE is true if the result of the function call is ignored. This
10245 function returns NULL_TREE if no simplification was possible. */
10247 static tree
10248 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10250 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10252 switch (fcode)
10254 CASE_FLT_FN (BUILT_IN_INF):
10255 case BUILT_IN_INFD32:
10256 case BUILT_IN_INFD64:
10257 case BUILT_IN_INFD128:
10258 return fold_builtin_inf (loc, type, true);
10260 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10261 return fold_builtin_inf (loc, type, false);
10263 case BUILT_IN_CLASSIFY_TYPE:
10264 return fold_builtin_classify_type (NULL_TREE);
10266 default:
10267 break;
10269 return NULL_TREE;
10272 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10273 IGNORE is true if the result of the function call is ignored. This
10274 function returns NULL_TREE if no simplification was possible. */
10276 static tree
10277 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10279 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10281 switch (fcode)
10283 case BUILT_IN_CONSTANT_P:
10285 tree val = fold_builtin_constant_p (arg0);
10287 /* Gimplification will pull the CALL_EXPR for the builtin out of
10288 an if condition. When not optimizing, we'll not CSE it back.
10289 To avoid link error types of regressions, return false now. */
10290 if (!val && !optimize)
10291 val = integer_zero_node;
10293 return val;
10296 case BUILT_IN_CLASSIFY_TYPE:
10297 return fold_builtin_classify_type (arg0);
10299 case BUILT_IN_STRLEN:
10300 return fold_builtin_strlen (loc, type, arg0);
10302 CASE_FLT_FN (BUILT_IN_FABS):
10303 return fold_builtin_fabs (loc, arg0, type);
10305 case BUILT_IN_ABS:
10306 case BUILT_IN_LABS:
10307 case BUILT_IN_LLABS:
10308 case BUILT_IN_IMAXABS:
10309 return fold_builtin_abs (loc, arg0, type);
10311 CASE_FLT_FN (BUILT_IN_CONJ):
10312 if (validate_arg (arg0, COMPLEX_TYPE)
10313 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10314 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10315 break;
10317 CASE_FLT_FN (BUILT_IN_CREAL):
10318 if (validate_arg (arg0, COMPLEX_TYPE)
10319 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10320 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10321 break;
10323 CASE_FLT_FN (BUILT_IN_CIMAG):
10324 if (validate_arg (arg0, COMPLEX_TYPE)
10325 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10326 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10327 break;
10329 CASE_FLT_FN (BUILT_IN_CCOS):
10330 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10332 CASE_FLT_FN (BUILT_IN_CCOSH):
10333 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10335 CASE_FLT_FN (BUILT_IN_CPROJ):
10336 return fold_builtin_cproj(loc, arg0, type);
10338 CASE_FLT_FN (BUILT_IN_CSIN):
10339 if (validate_arg (arg0, COMPLEX_TYPE)
10340 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10341 return do_mpc_arg1 (arg0, type, mpc_sin);
10342 break;
10344 CASE_FLT_FN (BUILT_IN_CSINH):
10345 if (validate_arg (arg0, COMPLEX_TYPE)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10347 return do_mpc_arg1 (arg0, type, mpc_sinh);
10348 break;
10350 CASE_FLT_FN (BUILT_IN_CTAN):
10351 if (validate_arg (arg0, COMPLEX_TYPE)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10353 return do_mpc_arg1 (arg0, type, mpc_tan);
10354 break;
10356 CASE_FLT_FN (BUILT_IN_CTANH):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return do_mpc_arg1 (arg0, type, mpc_tanh);
10360 break;
10362 CASE_FLT_FN (BUILT_IN_CLOG):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return do_mpc_arg1 (arg0, type, mpc_log);
10366 break;
10368 CASE_FLT_FN (BUILT_IN_CSQRT):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10372 break;
10374 CASE_FLT_FN (BUILT_IN_CASIN):
10375 if (validate_arg (arg0, COMPLEX_TYPE)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10377 return do_mpc_arg1 (arg0, type, mpc_asin);
10378 break;
10380 CASE_FLT_FN (BUILT_IN_CACOS):
10381 if (validate_arg (arg0, COMPLEX_TYPE)
10382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10383 return do_mpc_arg1 (arg0, type, mpc_acos);
10384 break;
10386 CASE_FLT_FN (BUILT_IN_CATAN):
10387 if (validate_arg (arg0, COMPLEX_TYPE)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10389 return do_mpc_arg1 (arg0, type, mpc_atan);
10390 break;
10392 CASE_FLT_FN (BUILT_IN_CASINH):
10393 if (validate_arg (arg0, COMPLEX_TYPE)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10395 return do_mpc_arg1 (arg0, type, mpc_asinh);
10396 break;
10398 CASE_FLT_FN (BUILT_IN_CACOSH):
10399 if (validate_arg (arg0, COMPLEX_TYPE)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10401 return do_mpc_arg1 (arg0, type, mpc_acosh);
10402 break;
10404 CASE_FLT_FN (BUILT_IN_CATANH):
10405 if (validate_arg (arg0, COMPLEX_TYPE)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10407 return do_mpc_arg1 (arg0, type, mpc_atanh);
10408 break;
10410 CASE_FLT_FN (BUILT_IN_CABS):
10411 return fold_builtin_cabs (loc, arg0, type, fndecl);
10413 CASE_FLT_FN (BUILT_IN_CARG):
10414 return fold_builtin_carg (loc, arg0, type);
10416 CASE_FLT_FN (BUILT_IN_SQRT):
10417 return fold_builtin_sqrt (loc, arg0, type);
10419 CASE_FLT_FN (BUILT_IN_CBRT):
10420 return fold_builtin_cbrt (loc, arg0, type);
10422 CASE_FLT_FN (BUILT_IN_ASIN):
10423 if (validate_arg (arg0, REAL_TYPE))
10424 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10425 &dconstm1, &dconst1, true);
10426 break;
10428 CASE_FLT_FN (BUILT_IN_ACOS):
10429 if (validate_arg (arg0, REAL_TYPE))
10430 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10431 &dconstm1, &dconst1, true);
10432 break;
10434 CASE_FLT_FN (BUILT_IN_ATAN):
10435 if (validate_arg (arg0, REAL_TYPE))
10436 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10437 break;
10439 CASE_FLT_FN (BUILT_IN_ASINH):
10440 if (validate_arg (arg0, REAL_TYPE))
10441 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10442 break;
10444 CASE_FLT_FN (BUILT_IN_ACOSH):
10445 if (validate_arg (arg0, REAL_TYPE))
10446 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10447 &dconst1, NULL, true);
10448 break;
10450 CASE_FLT_FN (BUILT_IN_ATANH):
10451 if (validate_arg (arg0, REAL_TYPE))
10452 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10453 &dconstm1, &dconst1, false);
10454 break;
10456 CASE_FLT_FN (BUILT_IN_SIN):
10457 if (validate_arg (arg0, REAL_TYPE))
10458 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_COS):
10462 return fold_builtin_cos (loc, arg0, type, fndecl);
10464 CASE_FLT_FN (BUILT_IN_TAN):
10465 return fold_builtin_tan (arg0, type);
10467 CASE_FLT_FN (BUILT_IN_CEXP):
10468 return fold_builtin_cexp (loc, arg0, type);
10470 CASE_FLT_FN (BUILT_IN_CEXPI):
10471 if (validate_arg (arg0, REAL_TYPE))
10472 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10473 break;
10475 CASE_FLT_FN (BUILT_IN_SINH):
10476 if (validate_arg (arg0, REAL_TYPE))
10477 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10478 break;
10480 CASE_FLT_FN (BUILT_IN_COSH):
10481 return fold_builtin_cosh (loc, arg0, type, fndecl);
10483 CASE_FLT_FN (BUILT_IN_TANH):
10484 if (validate_arg (arg0, REAL_TYPE))
10485 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10486 break;
10488 CASE_FLT_FN (BUILT_IN_ERF):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10491 break;
10493 CASE_FLT_FN (BUILT_IN_ERFC):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10496 break;
10498 CASE_FLT_FN (BUILT_IN_TGAMMA):
10499 if (validate_arg (arg0, REAL_TYPE))
10500 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10501 break;
10503 CASE_FLT_FN (BUILT_IN_EXP):
10504 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10506 CASE_FLT_FN (BUILT_IN_EXP2):
10507 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10509 CASE_FLT_FN (BUILT_IN_EXP10):
10510 CASE_FLT_FN (BUILT_IN_POW10):
10511 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10513 CASE_FLT_FN (BUILT_IN_EXPM1):
10514 if (validate_arg (arg0, REAL_TYPE))
10515 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10516 break;
10518 CASE_FLT_FN (BUILT_IN_LOG):
10519 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10521 CASE_FLT_FN (BUILT_IN_LOG2):
10522 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10524 CASE_FLT_FN (BUILT_IN_LOG10):
10525 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10527 CASE_FLT_FN (BUILT_IN_LOG1P):
10528 if (validate_arg (arg0, REAL_TYPE))
10529 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10530 &dconstm1, NULL, false);
10531 break;
10533 CASE_FLT_FN (BUILT_IN_J0):
10534 if (validate_arg (arg0, REAL_TYPE))
10535 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10536 NULL, NULL, 0);
10537 break;
10539 CASE_FLT_FN (BUILT_IN_J1):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10542 NULL, NULL, 0);
10543 break;
10545 CASE_FLT_FN (BUILT_IN_Y0):
10546 if (validate_arg (arg0, REAL_TYPE))
10547 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10548 &dconst0, NULL, false);
10549 break;
10551 CASE_FLT_FN (BUILT_IN_Y1):
10552 if (validate_arg (arg0, REAL_TYPE))
10553 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10554 &dconst0, NULL, false);
10555 break;
10557 CASE_FLT_FN (BUILT_IN_NAN):
10558 case BUILT_IN_NAND32:
10559 case BUILT_IN_NAND64:
10560 case BUILT_IN_NAND128:
10561 return fold_builtin_nan (arg0, type, true);
10563 CASE_FLT_FN (BUILT_IN_NANS):
10564 return fold_builtin_nan (arg0, type, false);
10566 CASE_FLT_FN (BUILT_IN_FLOOR):
10567 return fold_builtin_floor (loc, fndecl, arg0);
10569 CASE_FLT_FN (BUILT_IN_CEIL):
10570 return fold_builtin_ceil (loc, fndecl, arg0);
10572 CASE_FLT_FN (BUILT_IN_TRUNC):
10573 return fold_builtin_trunc (loc, fndecl, arg0);
10575 CASE_FLT_FN (BUILT_IN_ROUND):
10576 return fold_builtin_round (loc, fndecl, arg0);
10578 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10579 CASE_FLT_FN (BUILT_IN_RINT):
10580 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10582 CASE_FLT_FN (BUILT_IN_ICEIL):
10583 CASE_FLT_FN (BUILT_IN_LCEIL):
10584 CASE_FLT_FN (BUILT_IN_LLCEIL):
10585 CASE_FLT_FN (BUILT_IN_LFLOOR):
10586 CASE_FLT_FN (BUILT_IN_IFLOOR):
10587 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10588 CASE_FLT_FN (BUILT_IN_IROUND):
10589 CASE_FLT_FN (BUILT_IN_LROUND):
10590 CASE_FLT_FN (BUILT_IN_LLROUND):
10591 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10593 CASE_FLT_FN (BUILT_IN_IRINT):
10594 CASE_FLT_FN (BUILT_IN_LRINT):
10595 CASE_FLT_FN (BUILT_IN_LLRINT):
10596 return fold_fixed_mathfn (loc, fndecl, arg0);
10598 case BUILT_IN_BSWAP16:
10599 case BUILT_IN_BSWAP32:
10600 case BUILT_IN_BSWAP64:
10601 return fold_builtin_bswap (fndecl, arg0);
10603 CASE_INT_FN (BUILT_IN_FFS):
10604 CASE_INT_FN (BUILT_IN_CLZ):
10605 CASE_INT_FN (BUILT_IN_CTZ):
10606 CASE_INT_FN (BUILT_IN_CLRSB):
10607 CASE_INT_FN (BUILT_IN_POPCOUNT):
10608 CASE_INT_FN (BUILT_IN_PARITY):
10609 return fold_builtin_bitop (fndecl, arg0);
10611 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10612 return fold_builtin_signbit (loc, arg0, type);
10614 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10615 return fold_builtin_significand (loc, arg0, type);
10617 CASE_FLT_FN (BUILT_IN_ILOGB):
10618 CASE_FLT_FN (BUILT_IN_LOGB):
10619 return fold_builtin_logb (loc, arg0, type);
10621 case BUILT_IN_ISASCII:
10622 return fold_builtin_isascii (loc, arg0);
10624 case BUILT_IN_TOASCII:
10625 return fold_builtin_toascii (loc, arg0);
10627 case BUILT_IN_ISDIGIT:
10628 return fold_builtin_isdigit (loc, arg0);
10630 CASE_FLT_FN (BUILT_IN_FINITE):
10631 case BUILT_IN_FINITED32:
10632 case BUILT_IN_FINITED64:
10633 case BUILT_IN_FINITED128:
10634 case BUILT_IN_ISFINITE:
10636 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10637 if (ret)
10638 return ret;
10639 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10642 CASE_FLT_FN (BUILT_IN_ISINF):
10643 case BUILT_IN_ISINFD32:
10644 case BUILT_IN_ISINFD64:
10645 case BUILT_IN_ISINFD128:
10647 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10648 if (ret)
10649 return ret;
10650 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10653 case BUILT_IN_ISNORMAL:
10654 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10656 case BUILT_IN_ISINF_SIGN:
10657 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10659 CASE_FLT_FN (BUILT_IN_ISNAN):
10660 case BUILT_IN_ISNAND32:
10661 case BUILT_IN_ISNAND64:
10662 case BUILT_IN_ISNAND128:
10663 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10665 case BUILT_IN_PRINTF:
10666 case BUILT_IN_PRINTF_UNLOCKED:
10667 case BUILT_IN_VPRINTF:
10668 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10670 case BUILT_IN_FREE:
10671 if (integer_zerop (arg0))
10672 return build_empty_stmt (loc);
10673 break;
10675 default:
10676 break;
10679 return NULL_TREE;
10683 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10684 IGNORE is true if the result of the function call is ignored. This
10685 function returns NULL_TREE if no simplification was possible. */
10687 static tree
10688 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10690 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10691 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10693 switch (fcode)
10695 CASE_FLT_FN (BUILT_IN_JN):
10696 if (validate_arg (arg0, INTEGER_TYPE)
10697 && validate_arg (arg1, REAL_TYPE))
10698 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10699 break;
10701 CASE_FLT_FN (BUILT_IN_YN):
10702 if (validate_arg (arg0, INTEGER_TYPE)
10703 && validate_arg (arg1, REAL_TYPE))
10704 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10705 &dconst0, false);
10706 break;
10708 CASE_FLT_FN (BUILT_IN_DREM):
10709 CASE_FLT_FN (BUILT_IN_REMAINDER):
10710 if (validate_arg (arg0, REAL_TYPE)
10711 && validate_arg(arg1, REAL_TYPE))
10712 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10713 break;
10715 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10716 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10717 if (validate_arg (arg0, REAL_TYPE)
10718 && validate_arg(arg1, POINTER_TYPE))
10719 return do_mpfr_lgamma_r (arg0, arg1, type);
10720 break;
10722 CASE_FLT_FN (BUILT_IN_ATAN2):
10723 if (validate_arg (arg0, REAL_TYPE)
10724 && validate_arg(arg1, REAL_TYPE))
10725 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10726 break;
10728 CASE_FLT_FN (BUILT_IN_FDIM):
10729 if (validate_arg (arg0, REAL_TYPE)
10730 && validate_arg(arg1, REAL_TYPE))
10731 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10732 break;
10734 CASE_FLT_FN (BUILT_IN_HYPOT):
10735 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10737 CASE_FLT_FN (BUILT_IN_CPOW):
10738 if (validate_arg (arg0, COMPLEX_TYPE)
10739 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10740 && validate_arg (arg1, COMPLEX_TYPE)
10741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10742 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10743 break;
10745 CASE_FLT_FN (BUILT_IN_LDEXP):
10746 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10747 CASE_FLT_FN (BUILT_IN_SCALBN):
10748 CASE_FLT_FN (BUILT_IN_SCALBLN):
10749 return fold_builtin_load_exponent (loc, arg0, arg1,
10750 type, /*ldexp=*/false);
10752 CASE_FLT_FN (BUILT_IN_FREXP):
10753 return fold_builtin_frexp (loc, arg0, arg1, type);
10755 CASE_FLT_FN (BUILT_IN_MODF):
10756 return fold_builtin_modf (loc, arg0, arg1, type);
10758 case BUILT_IN_BZERO:
10759 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10761 case BUILT_IN_FPUTS:
10762 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10764 case BUILT_IN_FPUTS_UNLOCKED:
10765 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10767 case BUILT_IN_STRSTR:
10768 return fold_builtin_strstr (loc, arg0, arg1, type);
10770 case BUILT_IN_STRCAT:
10771 return fold_builtin_strcat (loc, arg0, arg1);
10773 case BUILT_IN_STRSPN:
10774 return fold_builtin_strspn (loc, arg0, arg1);
10776 case BUILT_IN_STRCSPN:
10777 return fold_builtin_strcspn (loc, arg0, arg1);
10779 case BUILT_IN_STRCHR:
10780 case BUILT_IN_INDEX:
10781 return fold_builtin_strchr (loc, arg0, arg1, type);
10783 case BUILT_IN_STRRCHR:
10784 case BUILT_IN_RINDEX:
10785 return fold_builtin_strrchr (loc, arg0, arg1, type);
10787 case BUILT_IN_STRCPY:
10788 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10790 case BUILT_IN_STPCPY:
10791 if (ignore)
10793 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10794 if (!fn)
10795 break;
10797 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10799 else
10800 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10801 break;
10803 case BUILT_IN_STRCMP:
10804 return fold_builtin_strcmp (loc, arg0, arg1);
10806 case BUILT_IN_STRPBRK:
10807 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10809 case BUILT_IN_EXPECT:
10810 return fold_builtin_expect (loc, arg0, arg1);
10812 CASE_FLT_FN (BUILT_IN_POW):
10813 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10815 CASE_FLT_FN (BUILT_IN_POWI):
10816 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10818 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10819 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10821 CASE_FLT_FN (BUILT_IN_FMIN):
10822 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10824 CASE_FLT_FN (BUILT_IN_FMAX):
10825 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10827 case BUILT_IN_ISGREATER:
10828 return fold_builtin_unordered_cmp (loc, fndecl,
10829 arg0, arg1, UNLE_EXPR, LE_EXPR);
10830 case BUILT_IN_ISGREATEREQUAL:
10831 return fold_builtin_unordered_cmp (loc, fndecl,
10832 arg0, arg1, UNLT_EXPR, LT_EXPR);
10833 case BUILT_IN_ISLESS:
10834 return fold_builtin_unordered_cmp (loc, fndecl,
10835 arg0, arg1, UNGE_EXPR, GE_EXPR);
10836 case BUILT_IN_ISLESSEQUAL:
10837 return fold_builtin_unordered_cmp (loc, fndecl,
10838 arg0, arg1, UNGT_EXPR, GT_EXPR);
10839 case BUILT_IN_ISLESSGREATER:
10840 return fold_builtin_unordered_cmp (loc, fndecl,
10841 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10842 case BUILT_IN_ISUNORDERED:
10843 return fold_builtin_unordered_cmp (loc, fndecl,
10844 arg0, arg1, UNORDERED_EXPR,
10845 NOP_EXPR);
10847 /* We do the folding for va_start in the expander. */
10848 case BUILT_IN_VA_START:
10849 break;
10851 case BUILT_IN_SPRINTF:
10852 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10854 case BUILT_IN_OBJECT_SIZE:
10855 return fold_builtin_object_size (arg0, arg1);
10857 case BUILT_IN_PRINTF:
10858 case BUILT_IN_PRINTF_UNLOCKED:
10859 case BUILT_IN_VPRINTF:
10860 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10862 case BUILT_IN_PRINTF_CHK:
10863 case BUILT_IN_VPRINTF_CHK:
10864 if (!validate_arg (arg0, INTEGER_TYPE)
10865 || TREE_SIDE_EFFECTS (arg0))
10866 return NULL_TREE;
10867 else
10868 return fold_builtin_printf (loc, fndecl,
10869 arg1, NULL_TREE, ignore, fcode);
10870 break;
10872 case BUILT_IN_FPRINTF:
10873 case BUILT_IN_FPRINTF_UNLOCKED:
10874 case BUILT_IN_VFPRINTF:
10875 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10876 ignore, fcode);
10878 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10879 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10881 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10882 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10884 default:
10885 break;
10887 return NULL_TREE;
10890 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10891 and ARG2. IGNORE is true if the result of the function call is ignored.
10892 This function returns NULL_TREE if no simplification was possible. */
10894 static tree
10895 fold_builtin_3 (location_t loc, tree fndecl,
10896 tree arg0, tree arg1, tree arg2, bool ignore)
10898 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10899 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10900 switch (fcode)
10903 CASE_FLT_FN (BUILT_IN_SINCOS):
10904 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10906 CASE_FLT_FN (BUILT_IN_FMA):
10907 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10908 break;
10910 CASE_FLT_FN (BUILT_IN_REMQUO):
10911 if (validate_arg (arg0, REAL_TYPE)
10912 && validate_arg(arg1, REAL_TYPE)
10913 && validate_arg(arg2, POINTER_TYPE))
10914 return do_mpfr_remquo (arg0, arg1, arg2);
10915 break;
10917 case BUILT_IN_MEMSET:
10918 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10920 case BUILT_IN_BCOPY:
10921 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10922 void_type_node, true, /*endp=*/3);
10924 case BUILT_IN_MEMCPY:
10925 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10926 type, ignore, /*endp=*/0);
10928 case BUILT_IN_MEMPCPY:
10929 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10930 type, ignore, /*endp=*/1);
10932 case BUILT_IN_MEMMOVE:
10933 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10934 type, ignore, /*endp=*/3);
10936 case BUILT_IN_STRNCAT:
10937 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10939 case BUILT_IN_STRNCPY:
10940 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10942 case BUILT_IN_STRNCMP:
10943 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10945 case BUILT_IN_MEMCHR:
10946 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10948 case BUILT_IN_BCMP:
10949 case BUILT_IN_MEMCMP:
10950 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10952 case BUILT_IN_SPRINTF:
10953 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10955 case BUILT_IN_SNPRINTF:
10956 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10958 case BUILT_IN_STRCPY_CHK:
10959 case BUILT_IN_STPCPY_CHK:
10960 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10961 ignore, fcode);
10963 case BUILT_IN_STRCAT_CHK:
10964 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10966 case BUILT_IN_PRINTF_CHK:
10967 case BUILT_IN_VPRINTF_CHK:
10968 if (!validate_arg (arg0, INTEGER_TYPE)
10969 || TREE_SIDE_EFFECTS (arg0))
10970 return NULL_TREE;
10971 else
10972 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10973 break;
10975 case BUILT_IN_FPRINTF:
10976 case BUILT_IN_FPRINTF_UNLOCKED:
10977 case BUILT_IN_VFPRINTF:
10978 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10979 ignore, fcode);
10981 case BUILT_IN_FPRINTF_CHK:
10982 case BUILT_IN_VFPRINTF_CHK:
10983 if (!validate_arg (arg1, INTEGER_TYPE)
10984 || TREE_SIDE_EFFECTS (arg1))
10985 return NULL_TREE;
10986 else
10987 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10988 ignore, fcode);
10990 default:
10991 break;
10993 return NULL_TREE;
10996 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10997 ARG2, and ARG3. IGNORE is true if the result of the function call is
10998 ignored. This function returns NULL_TREE if no simplification was
10999 possible. */
11001 static tree
11002 fold_builtin_4 (location_t loc, tree fndecl,
11003 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11005 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11007 switch (fcode)
11009 case BUILT_IN_MEMCPY_CHK:
11010 case BUILT_IN_MEMPCPY_CHK:
11011 case BUILT_IN_MEMMOVE_CHK:
11012 case BUILT_IN_MEMSET_CHK:
11013 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11014 NULL_TREE, ignore,
11015 DECL_FUNCTION_CODE (fndecl));
11017 case BUILT_IN_STRNCPY_CHK:
11018 case BUILT_IN_STPNCPY_CHK:
11019 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11020 ignore, fcode);
11022 case BUILT_IN_STRNCAT_CHK:
11023 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11025 case BUILT_IN_SNPRINTF:
11026 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11028 case BUILT_IN_FPRINTF_CHK:
11029 case BUILT_IN_VFPRINTF_CHK:
11030 if (!validate_arg (arg1, INTEGER_TYPE)
11031 || TREE_SIDE_EFFECTS (arg1))
11032 return NULL_TREE;
11033 else
11034 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11035 ignore, fcode);
11036 break;
11038 default:
11039 break;
11041 return NULL_TREE;
11044 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11045 arguments, where NARGS <= 4. IGNORE is true if the result of the
11046 function call is ignored. This function returns NULL_TREE if no
11047 simplification was possible. Note that this only folds builtins with
11048 fixed argument patterns. Foldings that do varargs-to-varargs
11049 transformations, or that match calls with more than 4 arguments,
11050 need to be handled with fold_builtin_varargs instead. */
11052 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11054 static tree
11055 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11057 tree ret = NULL_TREE;
11059 switch (nargs)
11061 case 0:
11062 ret = fold_builtin_0 (loc, fndecl, ignore);
11063 break;
11064 case 1:
11065 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11066 break;
11067 case 2:
11068 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11069 break;
11070 case 3:
11071 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11072 break;
11073 case 4:
11074 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11075 ignore);
11076 break;
11077 default:
11078 break;
11080 if (ret)
11082 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11083 SET_EXPR_LOCATION (ret, loc);
11084 TREE_NO_WARNING (ret) = 1;
11085 return ret;
11087 return NULL_TREE;
11090 /* Builtins with folding operations that operate on "..." arguments
11091 need special handling; we need to store the arguments in a convenient
11092 data structure before attempting any folding. Fortunately there are
11093 only a few builtins that fall into this category. FNDECL is the
11094 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11095 result of the function call is ignored. */
11097 static tree
11098 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11099 bool ignore ATTRIBUTE_UNUSED)
11101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11102 tree ret = NULL_TREE;
11104 switch (fcode)
11106 case BUILT_IN_SPRINTF_CHK:
11107 case BUILT_IN_VSPRINTF_CHK:
11108 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11109 break;
11111 case BUILT_IN_SNPRINTF_CHK:
11112 case BUILT_IN_VSNPRINTF_CHK:
11113 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11114 break;
11116 case BUILT_IN_FPCLASSIFY:
11117 ret = fold_builtin_fpclassify (loc, exp);
11118 break;
11120 default:
11121 break;
11123 if (ret)
11125 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11126 SET_EXPR_LOCATION (ret, loc);
11127 TREE_NO_WARNING (ret) = 1;
11128 return ret;
11130 return NULL_TREE;
11133 /* Return true if FNDECL shouldn't be folded right now.
11134 If a built-in function has an inline attribute always_inline
11135 wrapper, defer folding it after always_inline functions have
11136 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11137 might not be performed. */
11139 bool
11140 avoid_folding_inline_builtin (tree fndecl)
11142 return (DECL_DECLARED_INLINE_P (fndecl)
11143 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11144 && cfun
11145 && !cfun->always_inline_functions_inlined
11146 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11149 /* A wrapper function for builtin folding that prevents warnings for
11150 "statement without effect" and the like, caused by removing the
11151 call node earlier than the warning is generated. */
11153 tree
11154 fold_call_expr (location_t loc, tree exp, bool ignore)
11156 tree ret = NULL_TREE;
11157 tree fndecl = get_callee_fndecl (exp);
11158 if (fndecl
11159 && TREE_CODE (fndecl) == FUNCTION_DECL
11160 && DECL_BUILT_IN (fndecl)
11161 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11162 yet. Defer folding until we see all the arguments
11163 (after inlining). */
11164 && !CALL_EXPR_VA_ARG_PACK (exp))
11166 int nargs = call_expr_nargs (exp);
11168 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11169 instead last argument is __builtin_va_arg_pack (). Defer folding
11170 even in that case, until arguments are finalized. */
11171 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11173 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11174 if (fndecl2
11175 && TREE_CODE (fndecl2) == FUNCTION_DECL
11176 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11177 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11178 return NULL_TREE;
11181 if (avoid_folding_inline_builtin (fndecl))
11182 return NULL_TREE;
11184 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11185 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11186 CALL_EXPR_ARGP (exp), ignore);
11187 else
11189 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11191 tree *args = CALL_EXPR_ARGP (exp);
11192 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11194 if (!ret)
11195 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11196 if (ret)
11197 return ret;
11200 return NULL_TREE;
11203 /* Conveniently construct a function call expression. FNDECL names the
11204 function to be called and N arguments are passed in the array
11205 ARGARRAY. */
11207 tree
11208 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11210 tree fntype = TREE_TYPE (fndecl);
11211 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11213 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11216 /* Conveniently construct a function call expression. FNDECL names the
11217 function to be called and the arguments are passed in the vector
11218 VEC. */
11220 tree
11221 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11223 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11224 VEC_address (tree, vec));
11228 /* Conveniently construct a function call expression. FNDECL names the
11229 function to be called, N is the number of arguments, and the "..."
11230 parameters are the argument expressions. */
11232 tree
11233 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11235 va_list ap;
11236 tree *argarray = XALLOCAVEC (tree, n);
11237 int i;
11239 va_start (ap, n);
11240 for (i = 0; i < n; i++)
11241 argarray[i] = va_arg (ap, tree);
11242 va_end (ap);
11243 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11246 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11247 varargs macros aren't supported by all bootstrap compilers. */
11249 tree
11250 build_call_expr (tree fndecl, int n, ...)
11252 va_list ap;
11253 tree *argarray = XALLOCAVEC (tree, n);
11254 int i;
11256 va_start (ap, n);
11257 for (i = 0; i < n; i++)
11258 argarray[i] = va_arg (ap, tree);
11259 va_end (ap);
11260 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11263 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11264 N arguments are passed in the array ARGARRAY. */
11266 tree
11267 fold_builtin_call_array (location_t loc, tree type,
11268 tree fn,
11269 int n,
11270 tree *argarray)
11272 tree ret = NULL_TREE;
11273 tree exp;
11275 if (TREE_CODE (fn) == ADDR_EXPR)
11277 tree fndecl = TREE_OPERAND (fn, 0);
11278 if (TREE_CODE (fndecl) == FUNCTION_DECL
11279 && DECL_BUILT_IN (fndecl))
11281 /* If last argument is __builtin_va_arg_pack (), arguments to this
11282 function are not finalized yet. Defer folding until they are. */
11283 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11285 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11286 if (fndecl2
11287 && TREE_CODE (fndecl2) == FUNCTION_DECL
11288 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11289 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11290 return build_call_array_loc (loc, type, fn, n, argarray);
11292 if (avoid_folding_inline_builtin (fndecl))
11293 return build_call_array_loc (loc, type, fn, n, argarray);
11294 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11296 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11297 if (ret)
11298 return ret;
11300 return build_call_array_loc (loc, type, fn, n, argarray);
11302 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11304 /* First try the transformations that don't require consing up
11305 an exp. */
11306 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11307 if (ret)
11308 return ret;
11311 /* If we got this far, we need to build an exp. */
11312 exp = build_call_array_loc (loc, type, fn, n, argarray);
11313 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11314 return ret ? ret : exp;
11318 return build_call_array_loc (loc, type, fn, n, argarray);
11321 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11322 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11323 of arguments in ARGS to be omitted. OLDNARGS is the number of
11324 elements in ARGS. */
11326 static tree
11327 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11328 int skip, tree fndecl, int n, va_list newargs)
11330 int nargs = oldnargs - skip + n;
11331 tree *buffer;
11333 if (n > 0)
11335 int i, j;
11337 buffer = XALLOCAVEC (tree, nargs);
11338 for (i = 0; i < n; i++)
11339 buffer[i] = va_arg (newargs, tree);
11340 for (j = skip; j < oldnargs; j++, i++)
11341 buffer[i] = args[j];
11343 else
11344 buffer = args + skip;
11346 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11349 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11350 list ARGS along with N new arguments specified as the "..."
11351 parameters. SKIP is the number of arguments in ARGS to be omitted.
11352 OLDNARGS is the number of elements in ARGS. */
11354 static tree
11355 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11356 int skip, tree fndecl, int n, ...)
11358 va_list ap;
11359 tree t;
11361 va_start (ap, n);
11362 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11363 va_end (ap);
11365 return t;
11368 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11369 along with N new arguments specified as the "..." parameters. SKIP
11370 is the number of arguments in EXP to be omitted. This function is used
11371 to do varargs-to-varargs transformations. */
11373 static tree
11374 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11376 va_list ap;
11377 tree t;
11379 va_start (ap, n);
11380 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11381 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11382 va_end (ap);
11384 return t;
11387 /* Validate a single argument ARG against a tree code CODE representing
11388 a type. */
11390 static bool
11391 validate_arg (const_tree arg, enum tree_code code)
11393 if (!arg)
11394 return false;
11395 else if (code == POINTER_TYPE)
11396 return POINTER_TYPE_P (TREE_TYPE (arg));
11397 else if (code == INTEGER_TYPE)
11398 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11399 return code == TREE_CODE (TREE_TYPE (arg));
11402 /* This function validates the types of a function call argument list
11403 against a specified list of tree_codes. If the last specifier is a 0,
11404 that represents an ellipses, otherwise the last specifier must be a
11405 VOID_TYPE.
11407 This is the GIMPLE version of validate_arglist. Eventually we want to
11408 completely convert builtins.c to work from GIMPLEs and the tree based
11409 validate_arglist will then be removed. */
11411 bool
11412 validate_gimple_arglist (const_gimple call, ...)
11414 enum tree_code code;
11415 bool res = 0;
11416 va_list ap;
11417 const_tree arg;
11418 size_t i;
11420 va_start (ap, call);
11421 i = 0;
11425 code = (enum tree_code) va_arg (ap, int);
11426 switch (code)
11428 case 0:
11429 /* This signifies an ellipses, any further arguments are all ok. */
11430 res = true;
11431 goto end;
11432 case VOID_TYPE:
11433 /* This signifies an endlink, if no arguments remain, return
11434 true, otherwise return false. */
11435 res = (i == gimple_call_num_args (call));
11436 goto end;
11437 default:
11438 /* If no parameters remain or the parameter's code does not
11439 match the specified code, return false. Otherwise continue
11440 checking any remaining arguments. */
11441 arg = gimple_call_arg (call, i++);
11442 if (!validate_arg (arg, code))
11443 goto end;
11444 break;
11447 while (1);
11449 /* We need gotos here since we can only have one VA_CLOSE in a
11450 function. */
11451 end: ;
11452 va_end (ap);
11454 return res;
11457 /* This function validates the types of a function call argument list
11458 against a specified list of tree_codes. If the last specifier is a 0,
11459 that represents an ellipses, otherwise the last specifier must be a
11460 VOID_TYPE. */
11462 bool
11463 validate_arglist (const_tree callexpr, ...)
11465 enum tree_code code;
11466 bool res = 0;
11467 va_list ap;
11468 const_call_expr_arg_iterator iter;
11469 const_tree arg;
11471 va_start (ap, callexpr);
11472 init_const_call_expr_arg_iterator (callexpr, &iter);
11476 code = (enum tree_code) va_arg (ap, int);
11477 switch (code)
11479 case 0:
11480 /* This signifies an ellipses, any further arguments are all ok. */
11481 res = true;
11482 goto end;
11483 case VOID_TYPE:
11484 /* This signifies an endlink, if no arguments remain, return
11485 true, otherwise return false. */
11486 res = !more_const_call_expr_args_p (&iter);
11487 goto end;
11488 default:
11489 /* If no parameters remain or the parameter's code does not
11490 match the specified code, return false. Otherwise continue
11491 checking any remaining arguments. */
11492 arg = next_const_call_expr_arg (&iter);
11493 if (!validate_arg (arg, code))
11494 goto end;
11495 break;
11498 while (1);
11500 /* We need gotos here since we can only have one VA_CLOSE in a
11501 function. */
11502 end: ;
11503 va_end (ap);
11505 return res;
11508 /* Default target-specific builtin expander that does nothing. */
11511 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11512 rtx target ATTRIBUTE_UNUSED,
11513 rtx subtarget ATTRIBUTE_UNUSED,
11514 enum machine_mode mode ATTRIBUTE_UNUSED,
11515 int ignore ATTRIBUTE_UNUSED)
11517 return NULL_RTX;
11520 /* Returns true is EXP represents data that would potentially reside
11521 in a readonly section. */
11523 static bool
11524 readonly_data_expr (tree exp)
11526 STRIP_NOPS (exp);
11528 if (TREE_CODE (exp) != ADDR_EXPR)
11529 return false;
11531 exp = get_base_address (TREE_OPERAND (exp, 0));
11532 if (!exp)
11533 return false;
11535 /* Make sure we call decl_readonly_section only for trees it
11536 can handle (since it returns true for everything it doesn't
11537 understand). */
11538 if (TREE_CODE (exp) == STRING_CST
11539 || TREE_CODE (exp) == CONSTRUCTOR
11540 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11541 return decl_readonly_section (exp, 0);
11542 else
11543 return false;
11546 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11547 to the call, and TYPE is its return type.
11549 Return NULL_TREE if no simplification was possible, otherwise return the
11550 simplified form of the call as a tree.
11552 The simplified form may be a constant or other expression which
11553 computes the same value, but in a more efficient manner (including
11554 calls to other builtin functions).
11556 The call may contain arguments which need to be evaluated, but
11557 which are not useful to determine the result of the call. In
11558 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11559 COMPOUND_EXPR will be an argument which must be evaluated.
11560 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11561 COMPOUND_EXPR in the chain will contain the tree for the simplified
11562 form of the builtin function call. */
11564 static tree
11565 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11567 if (!validate_arg (s1, POINTER_TYPE)
11568 || !validate_arg (s2, POINTER_TYPE))
11569 return NULL_TREE;
11570 else
11572 tree fn;
11573 const char *p1, *p2;
11575 p2 = c_getstr (s2);
11576 if (p2 == NULL)
11577 return NULL_TREE;
11579 p1 = c_getstr (s1);
11580 if (p1 != NULL)
11582 const char *r = strstr (p1, p2);
11583 tree tem;
11585 if (r == NULL)
11586 return build_int_cst (TREE_TYPE (s1), 0);
11588 /* Return an offset into the constant string argument. */
11589 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11590 return fold_convert_loc (loc, type, tem);
11593 /* The argument is const char *, and the result is char *, so we need
11594 a type conversion here to avoid a warning. */
11595 if (p2[0] == '\0')
11596 return fold_convert_loc (loc, type, s1);
11598 if (p2[1] != '\0')
11599 return NULL_TREE;
11601 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11602 if (!fn)
11603 return NULL_TREE;
11605 /* New argument list transforming strstr(s1, s2) to
11606 strchr(s1, s2[0]). */
11607 return build_call_expr_loc (loc, fn, 2, s1,
11608 build_int_cst (integer_type_node, p2[0]));
11612 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11613 the call, and TYPE is its return type.
11615 Return NULL_TREE if no simplification was possible, otherwise return the
11616 simplified form of the call as a tree.
11618 The simplified form may be a constant or other expression which
11619 computes the same value, but in a more efficient manner (including
11620 calls to other builtin functions).
11622 The call may contain arguments which need to be evaluated, but
11623 which are not useful to determine the result of the call. In
11624 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11625 COMPOUND_EXPR will be an argument which must be evaluated.
11626 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11627 COMPOUND_EXPR in the chain will contain the tree for the simplified
11628 form of the builtin function call. */
11630 static tree
11631 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11633 if (!validate_arg (s1, POINTER_TYPE)
11634 || !validate_arg (s2, INTEGER_TYPE))
11635 return NULL_TREE;
11636 else
11638 const char *p1;
11640 if (TREE_CODE (s2) != INTEGER_CST)
11641 return NULL_TREE;
11643 p1 = c_getstr (s1);
11644 if (p1 != NULL)
11646 char c;
11647 const char *r;
11648 tree tem;
11650 if (target_char_cast (s2, &c))
11651 return NULL_TREE;
11653 r = strchr (p1, c);
11655 if (r == NULL)
11656 return build_int_cst (TREE_TYPE (s1), 0);
11658 /* Return an offset into the constant string argument. */
11659 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11660 return fold_convert_loc (loc, type, tem);
11662 return NULL_TREE;
11666 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11667 the call, and TYPE is its return type.
11669 Return NULL_TREE if no simplification was possible, otherwise return the
11670 simplified form of the call as a tree.
11672 The simplified form may be a constant or other expression which
11673 computes the same value, but in a more efficient manner (including
11674 calls to other builtin functions).
11676 The call may contain arguments which need to be evaluated, but
11677 which are not useful to determine the result of the call. In
11678 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11679 COMPOUND_EXPR will be an argument which must be evaluated.
11680 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11681 COMPOUND_EXPR in the chain will contain the tree for the simplified
11682 form of the builtin function call. */
11684 static tree
11685 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11687 if (!validate_arg (s1, POINTER_TYPE)
11688 || !validate_arg (s2, INTEGER_TYPE))
11689 return NULL_TREE;
11690 else
11692 tree fn;
11693 const char *p1;
11695 if (TREE_CODE (s2) != INTEGER_CST)
11696 return NULL_TREE;
11698 p1 = c_getstr (s1);
11699 if (p1 != NULL)
11701 char c;
11702 const char *r;
11703 tree tem;
11705 if (target_char_cast (s2, &c))
11706 return NULL_TREE;
11708 r = strrchr (p1, c);
11710 if (r == NULL)
11711 return build_int_cst (TREE_TYPE (s1), 0);
11713 /* Return an offset into the constant string argument. */
11714 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11715 return fold_convert_loc (loc, type, tem);
11718 if (! integer_zerop (s2))
11719 return NULL_TREE;
11721 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11722 if (!fn)
11723 return NULL_TREE;
11725 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11726 return build_call_expr_loc (loc, fn, 2, s1, s2);
11730 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11731 to the call, and TYPE is its return type.
11733 Return NULL_TREE if no simplification was possible, otherwise return the
11734 simplified form of the call as a tree.
11736 The simplified form may be a constant or other expression which
11737 computes the same value, but in a more efficient manner (including
11738 calls to other builtin functions).
11740 The call may contain arguments which need to be evaluated, but
11741 which are not useful to determine the result of the call. In
11742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11743 COMPOUND_EXPR will be an argument which must be evaluated.
11744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11745 COMPOUND_EXPR in the chain will contain the tree for the simplified
11746 form of the builtin function call. */
11748 static tree
11749 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11751 if (!validate_arg (s1, POINTER_TYPE)
11752 || !validate_arg (s2, POINTER_TYPE))
11753 return NULL_TREE;
11754 else
11756 tree fn;
11757 const char *p1, *p2;
11759 p2 = c_getstr (s2);
11760 if (p2 == NULL)
11761 return NULL_TREE;
11763 p1 = c_getstr (s1);
11764 if (p1 != NULL)
11766 const char *r = strpbrk (p1, p2);
11767 tree tem;
11769 if (r == NULL)
11770 return build_int_cst (TREE_TYPE (s1), 0);
11772 /* Return an offset into the constant string argument. */
11773 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11774 return fold_convert_loc (loc, type, tem);
11777 if (p2[0] == '\0')
11778 /* strpbrk(x, "") == NULL.
11779 Evaluate and ignore s1 in case it had side-effects. */
11780 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11782 if (p2[1] != '\0')
11783 return NULL_TREE; /* Really call strpbrk. */
11785 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11786 if (!fn)
11787 return NULL_TREE;
11789 /* New argument list transforming strpbrk(s1, s2) to
11790 strchr(s1, s2[0]). */
11791 return build_call_expr_loc (loc, fn, 2, s1,
11792 build_int_cst (integer_type_node, p2[0]));
11796 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11797 to the call.
11799 Return NULL_TREE if no simplification was possible, otherwise return the
11800 simplified form of the call as a tree.
11802 The simplified form may be a constant or other expression which
11803 computes the same value, but in a more efficient manner (including
11804 calls to other builtin functions).
11806 The call may contain arguments which need to be evaluated, but
11807 which are not useful to determine the result of the call. In
11808 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11809 COMPOUND_EXPR will be an argument which must be evaluated.
11810 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11811 COMPOUND_EXPR in the chain will contain the tree for the simplified
11812 form of the builtin function call. */
11814 static tree
11815 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11817 if (!validate_arg (dst, POINTER_TYPE)
11818 || !validate_arg (src, POINTER_TYPE))
11819 return NULL_TREE;
11820 else
11822 const char *p = c_getstr (src);
11824 /* If the string length is zero, return the dst parameter. */
11825 if (p && *p == '\0')
11826 return dst;
11828 if (optimize_insn_for_speed_p ())
11830 /* See if we can store by pieces into (dst + strlen(dst)). */
11831 tree newdst, call;
11832 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11833 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11835 if (!strlen_fn || !strcpy_fn)
11836 return NULL_TREE;
11838 /* If we don't have a movstr we don't want to emit an strcpy
11839 call. We have to do that if the length of the source string
11840 isn't computable (in that case we can use memcpy probably
11841 later expanding to a sequence of mov instructions). If we
11842 have movstr instructions we can emit strcpy calls. */
11843 if (!HAVE_movstr)
11845 tree len = c_strlen (src, 1);
11846 if (! len || TREE_SIDE_EFFECTS (len))
11847 return NULL_TREE;
11850 /* Stabilize the argument list. */
11851 dst = builtin_save_expr (dst);
11853 /* Create strlen (dst). */
11854 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11855 /* Create (dst p+ strlen (dst)). */
11857 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11858 newdst = builtin_save_expr (newdst);
11860 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11861 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11863 return NULL_TREE;
11867 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11868 arguments to the call.
11870 Return NULL_TREE if no simplification was possible, otherwise return the
11871 simplified form of the call as a tree.
11873 The simplified form may be a constant or other expression which
11874 computes the same value, but in a more efficient manner (including
11875 calls to other builtin functions).
11877 The call may contain arguments which need to be evaluated, but
11878 which are not useful to determine the result of the call. In
11879 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11880 COMPOUND_EXPR will be an argument which must be evaluated.
11881 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11882 COMPOUND_EXPR in the chain will contain the tree for the simplified
11883 form of the builtin function call. */
11885 static tree
11886 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11888 if (!validate_arg (dst, POINTER_TYPE)
11889 || !validate_arg (src, POINTER_TYPE)
11890 || !validate_arg (len, INTEGER_TYPE))
11891 return NULL_TREE;
11892 else
11894 const char *p = c_getstr (src);
11896 /* If the requested length is zero, or the src parameter string
11897 length is zero, return the dst parameter. */
11898 if (integer_zerop (len) || (p && *p == '\0'))
11899 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11901 /* If the requested len is greater than or equal to the string
11902 length, call strcat. */
11903 if (TREE_CODE (len) == INTEGER_CST && p
11904 && compare_tree_int (len, strlen (p)) >= 0)
11906 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11908 /* If the replacement _DECL isn't initialized, don't do the
11909 transformation. */
11910 if (!fn)
11911 return NULL_TREE;
11913 return build_call_expr_loc (loc, fn, 2, dst, src);
11915 return NULL_TREE;
11919 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11920 to the call.
11922 Return NULL_TREE if no simplification was possible, otherwise return the
11923 simplified form of the call as a tree.
11925 The simplified form may be a constant or other expression which
11926 computes the same value, but in a more efficient manner (including
11927 calls to other builtin functions).
11929 The call may contain arguments which need to be evaluated, but
11930 which are not useful to determine the result of the call. In
11931 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11932 COMPOUND_EXPR will be an argument which must be evaluated.
11933 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11934 COMPOUND_EXPR in the chain will contain the tree for the simplified
11935 form of the builtin function call. */
11937 static tree
11938 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11940 if (!validate_arg (s1, POINTER_TYPE)
11941 || !validate_arg (s2, POINTER_TYPE))
11942 return NULL_TREE;
11943 else
11945 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11947 /* If both arguments are constants, evaluate at compile-time. */
11948 if (p1 && p2)
11950 const size_t r = strspn (p1, p2);
11951 return build_int_cst (size_type_node, r);
11954 /* If either argument is "", return NULL_TREE. */
11955 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11956 /* Evaluate and ignore both arguments in case either one has
11957 side-effects. */
11958 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11959 s1, s2);
11960 return NULL_TREE;
11964 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11965 to the call.
11967 Return NULL_TREE if no simplification was possible, otherwise return the
11968 simplified form of the call as a tree.
11970 The simplified form may be a constant or other expression which
11971 computes the same value, but in a more efficient manner (including
11972 calls to other builtin functions).
11974 The call may contain arguments which need to be evaluated, but
11975 which are not useful to determine the result of the call. In
11976 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11977 COMPOUND_EXPR will be an argument which must be evaluated.
11978 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11979 COMPOUND_EXPR in the chain will contain the tree for the simplified
11980 form of the builtin function call. */
11982 static tree
11983 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11985 if (!validate_arg (s1, POINTER_TYPE)
11986 || !validate_arg (s2, POINTER_TYPE))
11987 return NULL_TREE;
11988 else
11990 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11992 /* If both arguments are constants, evaluate at compile-time. */
11993 if (p1 && p2)
11995 const size_t r = strcspn (p1, p2);
11996 return build_int_cst (size_type_node, r);
11999 /* If the first argument is "", return NULL_TREE. */
12000 if (p1 && *p1 == '\0')
12002 /* Evaluate and ignore argument s2 in case it has
12003 side-effects. */
12004 return omit_one_operand_loc (loc, size_type_node,
12005 size_zero_node, s2);
12008 /* If the second argument is "", return __builtin_strlen(s1). */
12009 if (p2 && *p2 == '\0')
12011 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12013 /* If the replacement _DECL isn't initialized, don't do the
12014 transformation. */
12015 if (!fn)
12016 return NULL_TREE;
12018 return build_call_expr_loc (loc, fn, 1, s1);
12020 return NULL_TREE;
12024 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12025 to the call. IGNORE is true if the value returned
12026 by the builtin will be ignored. UNLOCKED is true is true if this
12027 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12028 the known length of the string. Return NULL_TREE if no simplification
12029 was possible. */
12031 tree
12032 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12033 bool ignore, bool unlocked, tree len)
12035 /* If we're using an unlocked function, assume the other unlocked
12036 functions exist explicitly. */
12037 tree const fn_fputc = (unlocked
12038 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12039 : builtin_decl_implicit (BUILT_IN_FPUTC));
12040 tree const fn_fwrite = (unlocked
12041 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12042 : builtin_decl_implicit (BUILT_IN_FWRITE));
12044 /* If the return value is used, don't do the transformation. */
12045 if (!ignore)
12046 return NULL_TREE;
12048 /* Verify the arguments in the original call. */
12049 if (!validate_arg (arg0, POINTER_TYPE)
12050 || !validate_arg (arg1, POINTER_TYPE))
12051 return NULL_TREE;
12053 if (! len)
12054 len = c_strlen (arg0, 0);
12056 /* Get the length of the string passed to fputs. If the length
12057 can't be determined, punt. */
12058 if (!len
12059 || TREE_CODE (len) != INTEGER_CST)
12060 return NULL_TREE;
12062 switch (compare_tree_int (len, 1))
12064 case -1: /* length is 0, delete the call entirely . */
12065 return omit_one_operand_loc (loc, integer_type_node,
12066 integer_zero_node, arg1);;
12068 case 0: /* length is 1, call fputc. */
12070 const char *p = c_getstr (arg0);
12072 if (p != NULL)
12074 if (fn_fputc)
12075 return build_call_expr_loc (loc, fn_fputc, 2,
12076 build_int_cst
12077 (integer_type_node, p[0]), arg1);
12078 else
12079 return NULL_TREE;
12082 /* FALLTHROUGH */
12083 case 1: /* length is greater than 1, call fwrite. */
12085 /* If optimizing for size keep fputs. */
12086 if (optimize_function_for_size_p (cfun))
12087 return NULL_TREE;
12088 /* New argument list transforming fputs(string, stream) to
12089 fwrite(string, 1, len, stream). */
12090 if (fn_fwrite)
12091 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12092 size_one_node, len, arg1);
12093 else
12094 return NULL_TREE;
12096 default:
12097 gcc_unreachable ();
12099 return NULL_TREE;
12102 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12103 produced. False otherwise. This is done so that we don't output the error
12104 or warning twice or three times. */
12106 bool
12107 fold_builtin_next_arg (tree exp, bool va_start_p)
12109 tree fntype = TREE_TYPE (current_function_decl);
12110 int nargs = call_expr_nargs (exp);
12111 tree arg;
12112 /* There is good chance the current input_location points inside the
12113 definition of the va_start macro (perhaps on the token for
12114 builtin) in a system header, so warnings will not be emitted.
12115 Use the location in real source code. */
12116 source_location current_location =
12117 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12118 NULL);
12120 if (!stdarg_p (fntype))
12122 error ("%<va_start%> used in function with fixed args");
12123 return true;
12126 if (va_start_p)
12128 if (va_start_p && (nargs != 2))
12130 error ("wrong number of arguments to function %<va_start%>");
12131 return true;
12133 arg = CALL_EXPR_ARG (exp, 1);
12135 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12136 when we checked the arguments and if needed issued a warning. */
12137 else
12139 if (nargs == 0)
12141 /* Evidently an out of date version of <stdarg.h>; can't validate
12142 va_start's second argument, but can still work as intended. */
12143 warning_at (current_location,
12144 OPT_Wvarargs,
12145 "%<__builtin_next_arg%> called without an argument");
12146 return true;
12148 else if (nargs > 1)
12150 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12151 return true;
12153 arg = CALL_EXPR_ARG (exp, 0);
12156 if (TREE_CODE (arg) == SSA_NAME)
12157 arg = SSA_NAME_VAR (arg);
12159 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12160 or __builtin_next_arg (0) the first time we see it, after checking
12161 the arguments and if needed issuing a warning. */
12162 if (!integer_zerop (arg))
12164 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12166 /* Strip off all nops for the sake of the comparison. This
12167 is not quite the same as STRIP_NOPS. It does more.
12168 We must also strip off INDIRECT_EXPR for C++ reference
12169 parameters. */
12170 while (CONVERT_EXPR_P (arg)
12171 || TREE_CODE (arg) == INDIRECT_REF)
12172 arg = TREE_OPERAND (arg, 0);
12173 if (arg != last_parm)
12175 /* FIXME: Sometimes with the tree optimizers we can get the
12176 not the last argument even though the user used the last
12177 argument. We just warn and set the arg to be the last
12178 argument so that we will get wrong-code because of
12179 it. */
12180 warning_at (current_location,
12181 OPT_Wvarargs,
12182 "second parameter of %<va_start%> not last named argument");
12185 /* Undefined by C99 7.15.1.4p4 (va_start):
12186 "If the parameter parmN is declared with the register storage
12187 class, with a function or array type, or with a type that is
12188 not compatible with the type that results after application of
12189 the default argument promotions, the behavior is undefined."
12191 else if (DECL_REGISTER (arg))
12193 warning_at (current_location,
12194 OPT_Wvarargs,
12195 "undefined behaviour when second parameter of "
12196 "%<va_start%> is declared with %<register%> storage");
12199 /* We want to verify the second parameter just once before the tree
12200 optimizers are run and then avoid keeping it in the tree,
12201 as otherwise we could warn even for correct code like:
12202 void foo (int i, ...)
12203 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12204 if (va_start_p)
12205 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12206 else
12207 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12209 return false;
12213 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12214 ORIG may be null if this is a 2-argument call. We don't attempt to
12215 simplify calls with more than 3 arguments.
12217 Return NULL_TREE if no simplification was possible, otherwise return the
12218 simplified form of the call as a tree. If IGNORED is true, it means that
12219 the caller does not use the returned value of the function. */
12221 static tree
12222 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12223 tree orig, int ignored)
12225 tree call, retval;
12226 const char *fmt_str = NULL;
12228 /* Verify the required arguments in the original call. We deal with two
12229 types of sprintf() calls: 'sprintf (str, fmt)' and
12230 'sprintf (dest, "%s", orig)'. */
12231 if (!validate_arg (dest, POINTER_TYPE)
12232 || !validate_arg (fmt, POINTER_TYPE))
12233 return NULL_TREE;
12234 if (orig && !validate_arg (orig, POINTER_TYPE))
12235 return NULL_TREE;
12237 /* Check whether the format is a literal string constant. */
12238 fmt_str = c_getstr (fmt);
12239 if (fmt_str == NULL)
12240 return NULL_TREE;
12242 call = NULL_TREE;
12243 retval = NULL_TREE;
12245 if (!init_target_chars ())
12246 return NULL_TREE;
12248 /* If the format doesn't contain % args or %%, use strcpy. */
12249 if (strchr (fmt_str, target_percent) == NULL)
12251 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12253 if (!fn)
12254 return NULL_TREE;
12256 /* Don't optimize sprintf (buf, "abc", ptr++). */
12257 if (orig)
12258 return NULL_TREE;
12260 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12261 'format' is known to contain no % formats. */
12262 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12263 if (!ignored)
12264 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12267 /* If the format is "%s", use strcpy if the result isn't used. */
12268 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12270 tree fn;
12271 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12273 if (!fn)
12274 return NULL_TREE;
12276 /* Don't crash on sprintf (str1, "%s"). */
12277 if (!orig)
12278 return NULL_TREE;
12280 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12281 if (!ignored)
12283 retval = c_strlen (orig, 1);
12284 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12285 return NULL_TREE;
12287 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12290 if (call && retval)
12292 retval = fold_convert_loc
12293 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12294 retval);
12295 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12297 else
12298 return call;
12301 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12302 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12303 attempt to simplify calls with more than 4 arguments.
12305 Return NULL_TREE if no simplification was possible, otherwise return the
12306 simplified form of the call as a tree. If IGNORED is true, it means that
12307 the caller does not use the returned value of the function. */
12309 static tree
12310 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12311 tree orig, int ignored)
12313 tree call, retval;
12314 const char *fmt_str = NULL;
12315 unsigned HOST_WIDE_INT destlen;
12317 /* Verify the required arguments in the original call. We deal with two
12318 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12319 'snprintf (dest, cst, "%s", orig)'. */
12320 if (!validate_arg (dest, POINTER_TYPE)
12321 || !validate_arg (destsize, INTEGER_TYPE)
12322 || !validate_arg (fmt, POINTER_TYPE))
12323 return NULL_TREE;
12324 if (orig && !validate_arg (orig, POINTER_TYPE))
12325 return NULL_TREE;
12327 if (!host_integerp (destsize, 1))
12328 return NULL_TREE;
12330 /* Check whether the format is a literal string constant. */
12331 fmt_str = c_getstr (fmt);
12332 if (fmt_str == NULL)
12333 return NULL_TREE;
12335 call = NULL_TREE;
12336 retval = NULL_TREE;
12338 if (!init_target_chars ())
12339 return NULL_TREE;
12341 destlen = tree_low_cst (destsize, 1);
12343 /* If the format doesn't contain % args or %%, use strcpy. */
12344 if (strchr (fmt_str, target_percent) == NULL)
12346 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12347 size_t len = strlen (fmt_str);
12349 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12350 if (orig)
12351 return NULL_TREE;
12353 /* We could expand this as
12354 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12355 or to
12356 memcpy (str, fmt_with_nul_at_cstm1, cst);
12357 but in the former case that might increase code size
12358 and in the latter case grow .rodata section too much.
12359 So punt for now. */
12360 if (len >= destlen)
12361 return NULL_TREE;
12363 if (!fn)
12364 return NULL_TREE;
12366 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12367 'format' is known to contain no % formats and
12368 strlen (fmt) < cst. */
12369 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12371 if (!ignored)
12372 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12375 /* If the format is "%s", use strcpy if the result isn't used. */
12376 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12378 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12379 unsigned HOST_WIDE_INT origlen;
12381 /* Don't crash on snprintf (str1, cst, "%s"). */
12382 if (!orig)
12383 return NULL_TREE;
12385 retval = c_strlen (orig, 1);
12386 if (!retval || !host_integerp (retval, 1))
12387 return NULL_TREE;
12389 origlen = tree_low_cst (retval, 1);
12390 /* We could expand this as
12391 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12392 or to
12393 memcpy (str1, str2_with_nul_at_cstm1, cst);
12394 but in the former case that might increase code size
12395 and in the latter case grow .rodata section too much.
12396 So punt for now. */
12397 if (origlen >= destlen)
12398 return NULL_TREE;
12400 /* Convert snprintf (str1, cst, "%s", str2) into
12401 strcpy (str1, str2) if strlen (str2) < cst. */
12402 if (!fn)
12403 return NULL_TREE;
12405 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12407 if (ignored)
12408 retval = NULL_TREE;
12411 if (call && retval)
12413 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12414 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12415 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12417 else
12418 return call;
12421 /* Expand a call EXP to __builtin_object_size. */
12424 expand_builtin_object_size (tree exp)
12426 tree ost;
12427 int object_size_type;
12428 tree fndecl = get_callee_fndecl (exp);
12430 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12432 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12433 exp, fndecl);
12434 expand_builtin_trap ();
12435 return const0_rtx;
12438 ost = CALL_EXPR_ARG (exp, 1);
12439 STRIP_NOPS (ost);
12441 if (TREE_CODE (ost) != INTEGER_CST
12442 || tree_int_cst_sgn (ost) < 0
12443 || compare_tree_int (ost, 3) > 0)
12445 error ("%Klast argument of %D is not integer constant between 0 and 3",
12446 exp, fndecl);
12447 expand_builtin_trap ();
12448 return const0_rtx;
12451 object_size_type = tree_low_cst (ost, 0);
12453 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12456 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12457 FCODE is the BUILT_IN_* to use.
12458 Return NULL_RTX if we failed; the caller should emit a normal call,
12459 otherwise try to get the result in TARGET, if convenient (and in
12460 mode MODE if that's convenient). */
12462 static rtx
12463 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12464 enum built_in_function fcode)
12466 tree dest, src, len, size;
12468 if (!validate_arglist (exp,
12469 POINTER_TYPE,
12470 fcode == BUILT_IN_MEMSET_CHK
12471 ? INTEGER_TYPE : POINTER_TYPE,
12472 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12473 return NULL_RTX;
12475 dest = CALL_EXPR_ARG (exp, 0);
12476 src = CALL_EXPR_ARG (exp, 1);
12477 len = CALL_EXPR_ARG (exp, 2);
12478 size = CALL_EXPR_ARG (exp, 3);
12480 if (! host_integerp (size, 1))
12481 return NULL_RTX;
12483 if (host_integerp (len, 1) || integer_all_onesp (size))
12485 tree fn;
12487 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12489 warning_at (tree_nonartificial_location (exp),
12490 0, "%Kcall to %D will always overflow destination buffer",
12491 exp, get_callee_fndecl (exp));
12492 return NULL_RTX;
12495 fn = NULL_TREE;
12496 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12497 mem{cpy,pcpy,move,set} is available. */
12498 switch (fcode)
12500 case BUILT_IN_MEMCPY_CHK:
12501 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12502 break;
12503 case BUILT_IN_MEMPCPY_CHK:
12504 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12505 break;
12506 case BUILT_IN_MEMMOVE_CHK:
12507 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12508 break;
12509 case BUILT_IN_MEMSET_CHK:
12510 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12511 break;
12512 default:
12513 break;
12516 if (! fn)
12517 return NULL_RTX;
12519 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12520 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12521 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12522 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12524 else if (fcode == BUILT_IN_MEMSET_CHK)
12525 return NULL_RTX;
12526 else
12528 unsigned int dest_align = get_pointer_alignment (dest);
12530 /* If DEST is not a pointer type, call the normal function. */
12531 if (dest_align == 0)
12532 return NULL_RTX;
12534 /* If SRC and DEST are the same (and not volatile), do nothing. */
12535 if (operand_equal_p (src, dest, 0))
12537 tree expr;
12539 if (fcode != BUILT_IN_MEMPCPY_CHK)
12541 /* Evaluate and ignore LEN in case it has side-effects. */
12542 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12543 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12546 expr = fold_build_pointer_plus (dest, len);
12547 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12550 /* __memmove_chk special case. */
12551 if (fcode == BUILT_IN_MEMMOVE_CHK)
12553 unsigned int src_align = get_pointer_alignment (src);
12555 if (src_align == 0)
12556 return NULL_RTX;
12558 /* If src is categorized for a readonly section we can use
12559 normal __memcpy_chk. */
12560 if (readonly_data_expr (src))
12562 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12563 if (!fn)
12564 return NULL_RTX;
12565 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12566 dest, src, len, size);
12567 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12568 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12569 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12572 return NULL_RTX;
12576 /* Emit warning if a buffer overflow is detected at compile time. */
12578 static void
12579 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12581 int is_strlen = 0;
12582 tree len, size;
12583 location_t loc = tree_nonartificial_location (exp);
12585 switch (fcode)
12587 case BUILT_IN_STRCPY_CHK:
12588 case BUILT_IN_STPCPY_CHK:
12589 /* For __strcat_chk the warning will be emitted only if overflowing
12590 by at least strlen (dest) + 1 bytes. */
12591 case BUILT_IN_STRCAT_CHK:
12592 len = CALL_EXPR_ARG (exp, 1);
12593 size = CALL_EXPR_ARG (exp, 2);
12594 is_strlen = 1;
12595 break;
12596 case BUILT_IN_STRNCAT_CHK:
12597 case BUILT_IN_STRNCPY_CHK:
12598 case BUILT_IN_STPNCPY_CHK:
12599 len = CALL_EXPR_ARG (exp, 2);
12600 size = CALL_EXPR_ARG (exp, 3);
12601 break;
12602 case BUILT_IN_SNPRINTF_CHK:
12603 case BUILT_IN_VSNPRINTF_CHK:
12604 len = CALL_EXPR_ARG (exp, 1);
12605 size = CALL_EXPR_ARG (exp, 3);
12606 break;
12607 default:
12608 gcc_unreachable ();
12611 if (!len || !size)
12612 return;
12614 if (! host_integerp (size, 1) || integer_all_onesp (size))
12615 return;
12617 if (is_strlen)
12619 len = c_strlen (len, 1);
12620 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12621 return;
12623 else if (fcode == BUILT_IN_STRNCAT_CHK)
12625 tree src = CALL_EXPR_ARG (exp, 1);
12626 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12627 return;
12628 src = c_strlen (src, 1);
12629 if (! src || ! host_integerp (src, 1))
12631 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12632 exp, get_callee_fndecl (exp));
12633 return;
12635 else if (tree_int_cst_lt (src, size))
12636 return;
12638 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12639 return;
12641 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12642 exp, get_callee_fndecl (exp));
12645 /* Emit warning if a buffer overflow is detected at compile time
12646 in __sprintf_chk/__vsprintf_chk calls. */
12648 static void
12649 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12651 tree size, len, fmt;
12652 const char *fmt_str;
12653 int nargs = call_expr_nargs (exp);
12655 /* Verify the required arguments in the original call. */
12657 if (nargs < 4)
12658 return;
12659 size = CALL_EXPR_ARG (exp, 2);
12660 fmt = CALL_EXPR_ARG (exp, 3);
12662 if (! host_integerp (size, 1) || integer_all_onesp (size))
12663 return;
12665 /* Check whether the format is a literal string constant. */
12666 fmt_str = c_getstr (fmt);
12667 if (fmt_str == NULL)
12668 return;
12670 if (!init_target_chars ())
12671 return;
12673 /* If the format doesn't contain % args or %%, we know its size. */
12674 if (strchr (fmt_str, target_percent) == 0)
12675 len = build_int_cstu (size_type_node, strlen (fmt_str));
12676 /* If the format is "%s" and first ... argument is a string literal,
12677 we know it too. */
12678 else if (fcode == BUILT_IN_SPRINTF_CHK
12679 && strcmp (fmt_str, target_percent_s) == 0)
12681 tree arg;
12683 if (nargs < 5)
12684 return;
12685 arg = CALL_EXPR_ARG (exp, 4);
12686 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12687 return;
12689 len = c_strlen (arg, 1);
12690 if (!len || ! host_integerp (len, 1))
12691 return;
12693 else
12694 return;
12696 if (! tree_int_cst_lt (len, size))
12697 warning_at (tree_nonartificial_location (exp),
12698 0, "%Kcall to %D will always overflow destination buffer",
12699 exp, get_callee_fndecl (exp));
12702 /* Emit warning if a free is called with address of a variable. */
12704 static void
12705 maybe_emit_free_warning (tree exp)
12707 tree arg = CALL_EXPR_ARG (exp, 0);
12709 STRIP_NOPS (arg);
12710 if (TREE_CODE (arg) != ADDR_EXPR)
12711 return;
12713 arg = get_base_address (TREE_OPERAND (arg, 0));
12714 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12715 return;
12717 if (SSA_VAR_P (arg))
12718 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12719 "%Kattempt to free a non-heap object %qD", exp, arg);
12720 else
12721 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12722 "%Kattempt to free a non-heap object", exp);
12725 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12726 if possible. */
12728 tree
12729 fold_builtin_object_size (tree ptr, tree ost)
12731 unsigned HOST_WIDE_INT bytes;
12732 int object_size_type;
12734 if (!validate_arg (ptr, POINTER_TYPE)
12735 || !validate_arg (ost, INTEGER_TYPE))
12736 return NULL_TREE;
12738 STRIP_NOPS (ost);
12740 if (TREE_CODE (ost) != INTEGER_CST
12741 || tree_int_cst_sgn (ost) < 0
12742 || compare_tree_int (ost, 3) > 0)
12743 return NULL_TREE;
12745 object_size_type = tree_low_cst (ost, 0);
12747 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12748 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12749 and (size_t) 0 for types 2 and 3. */
12750 if (TREE_SIDE_EFFECTS (ptr))
12751 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12753 if (TREE_CODE (ptr) == ADDR_EXPR)
12755 bytes = compute_builtin_object_size (ptr, object_size_type);
12756 if (double_int_fits_to_tree_p (size_type_node,
12757 double_int::from_uhwi (bytes)))
12758 return build_int_cstu (size_type_node, bytes);
12760 else if (TREE_CODE (ptr) == SSA_NAME)
12762 /* If object size is not known yet, delay folding until
12763 later. Maybe subsequent passes will help determining
12764 it. */
12765 bytes = compute_builtin_object_size (ptr, object_size_type);
12766 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12767 && double_int_fits_to_tree_p (size_type_node,
12768 double_int::from_uhwi (bytes)))
12769 return build_int_cstu (size_type_node, bytes);
12772 return NULL_TREE;
12775 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12776 DEST, SRC, LEN, and SIZE are the arguments to the call.
12777 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12778 code of the builtin. If MAXLEN is not NULL, it is maximum length
12779 passed as third argument. */
12781 tree
12782 fold_builtin_memory_chk (location_t loc, tree fndecl,
12783 tree dest, tree src, tree len, tree size,
12784 tree maxlen, bool ignore,
12785 enum built_in_function fcode)
12787 tree fn;
12789 if (!validate_arg (dest, POINTER_TYPE)
12790 || !validate_arg (src,
12791 (fcode == BUILT_IN_MEMSET_CHK
12792 ? INTEGER_TYPE : POINTER_TYPE))
12793 || !validate_arg (len, INTEGER_TYPE)
12794 || !validate_arg (size, INTEGER_TYPE))
12795 return NULL_TREE;
12797 /* If SRC and DEST are the same (and not volatile), return DEST
12798 (resp. DEST+LEN for __mempcpy_chk). */
12799 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12801 if (fcode != BUILT_IN_MEMPCPY_CHK)
12802 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12803 dest, len);
12804 else
12806 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12807 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12811 if (! host_integerp (size, 1))
12812 return NULL_TREE;
12814 if (! integer_all_onesp (size))
12816 if (! host_integerp (len, 1))
12818 /* If LEN is not constant, try MAXLEN too.
12819 For MAXLEN only allow optimizing into non-_ocs function
12820 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12821 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12823 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12825 /* (void) __mempcpy_chk () can be optimized into
12826 (void) __memcpy_chk (). */
12827 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12828 if (!fn)
12829 return NULL_TREE;
12831 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12833 return NULL_TREE;
12836 else
12837 maxlen = len;
12839 if (tree_int_cst_lt (size, maxlen))
12840 return NULL_TREE;
12843 fn = NULL_TREE;
12844 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12845 mem{cpy,pcpy,move,set} is available. */
12846 switch (fcode)
12848 case BUILT_IN_MEMCPY_CHK:
12849 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12850 break;
12851 case BUILT_IN_MEMPCPY_CHK:
12852 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12853 break;
12854 case BUILT_IN_MEMMOVE_CHK:
12855 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12856 break;
12857 case BUILT_IN_MEMSET_CHK:
12858 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12859 break;
12860 default:
12861 break;
12864 if (!fn)
12865 return NULL_TREE;
12867 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12870 /* Fold a call to the __st[rp]cpy_chk builtin.
12871 DEST, SRC, and SIZE are the arguments to the call.
12872 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12873 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12874 strings passed as second argument. */
12876 tree
12877 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12878 tree src, tree size,
12879 tree maxlen, bool ignore,
12880 enum built_in_function fcode)
12882 tree len, fn;
12884 if (!validate_arg (dest, POINTER_TYPE)
12885 || !validate_arg (src, POINTER_TYPE)
12886 || !validate_arg (size, INTEGER_TYPE))
12887 return NULL_TREE;
12889 /* If SRC and DEST are the same (and not volatile), return DEST. */
12890 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12891 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12893 if (! host_integerp (size, 1))
12894 return NULL_TREE;
12896 if (! integer_all_onesp (size))
12898 len = c_strlen (src, 1);
12899 if (! len || ! host_integerp (len, 1))
12901 /* If LEN is not constant, try MAXLEN too.
12902 For MAXLEN only allow optimizing into non-_ocs function
12903 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12904 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12906 if (fcode == BUILT_IN_STPCPY_CHK)
12908 if (! ignore)
12909 return NULL_TREE;
12911 /* If return value of __stpcpy_chk is ignored,
12912 optimize into __strcpy_chk. */
12913 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12914 if (!fn)
12915 return NULL_TREE;
12917 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12920 if (! len || TREE_SIDE_EFFECTS (len))
12921 return NULL_TREE;
12923 /* If c_strlen returned something, but not a constant,
12924 transform __strcpy_chk into __memcpy_chk. */
12925 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12926 if (!fn)
12927 return NULL_TREE;
12929 len = fold_convert_loc (loc, size_type_node, len);
12930 len = size_binop_loc (loc, PLUS_EXPR, len,
12931 build_int_cst (size_type_node, 1));
12932 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12933 build_call_expr_loc (loc, fn, 4,
12934 dest, src, len, size));
12937 else
12938 maxlen = len;
12940 if (! tree_int_cst_lt (maxlen, size))
12941 return NULL_TREE;
12944 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12945 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12946 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12947 if (!fn)
12948 return NULL_TREE;
12950 return build_call_expr_loc (loc, fn, 2, dest, src);
12953 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12954 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12955 length passed as third argument. IGNORE is true if return value can be
12956 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12958 tree
12959 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12960 tree len, tree size, tree maxlen, bool ignore,
12961 enum built_in_function fcode)
12963 tree fn;
12965 if (!validate_arg (dest, POINTER_TYPE)
12966 || !validate_arg (src, POINTER_TYPE)
12967 || !validate_arg (len, INTEGER_TYPE)
12968 || !validate_arg (size, INTEGER_TYPE))
12969 return NULL_TREE;
12971 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12973 /* If return value of __stpncpy_chk is ignored,
12974 optimize into __strncpy_chk. */
12975 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12976 if (fn)
12977 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12980 if (! host_integerp (size, 1))
12981 return NULL_TREE;
12983 if (! integer_all_onesp (size))
12985 if (! host_integerp (len, 1))
12987 /* If LEN is not constant, try MAXLEN too.
12988 For MAXLEN only allow optimizing into non-_ocs function
12989 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12990 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12991 return NULL_TREE;
12993 else
12994 maxlen = len;
12996 if (tree_int_cst_lt (size, maxlen))
12997 return NULL_TREE;
13000 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13001 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13002 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13003 if (!fn)
13004 return NULL_TREE;
13006 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13009 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13010 are the arguments to the call. */
13012 static tree
13013 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13014 tree src, tree size)
13016 tree fn;
13017 const char *p;
13019 if (!validate_arg (dest, POINTER_TYPE)
13020 || !validate_arg (src, POINTER_TYPE)
13021 || !validate_arg (size, INTEGER_TYPE))
13022 return NULL_TREE;
13024 p = c_getstr (src);
13025 /* If the SRC parameter is "", return DEST. */
13026 if (p && *p == '\0')
13027 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13029 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13030 return NULL_TREE;
13032 /* If __builtin_strcat_chk is used, assume strcat is available. */
13033 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13034 if (!fn)
13035 return NULL_TREE;
13037 return build_call_expr_loc (loc, fn, 2, dest, src);
13040 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13041 LEN, and SIZE. */
13043 static tree
13044 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13045 tree dest, tree src, tree len, tree size)
13047 tree fn;
13048 const char *p;
13050 if (!validate_arg (dest, POINTER_TYPE)
13051 || !validate_arg (src, POINTER_TYPE)
13052 || !validate_arg (size, INTEGER_TYPE)
13053 || !validate_arg (size, INTEGER_TYPE))
13054 return NULL_TREE;
13056 p = c_getstr (src);
13057 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13058 if (p && *p == '\0')
13059 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13060 else if (integer_zerop (len))
13061 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13063 if (! host_integerp (size, 1))
13064 return NULL_TREE;
13066 if (! integer_all_onesp (size))
13068 tree src_len = c_strlen (src, 1);
13069 if (src_len
13070 && host_integerp (src_len, 1)
13071 && host_integerp (len, 1)
13072 && ! tree_int_cst_lt (len, src_len))
13074 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13075 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13076 if (!fn)
13077 return NULL_TREE;
13079 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13081 return NULL_TREE;
13084 /* If __builtin_strncat_chk is used, assume strncat is available. */
13085 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13086 if (!fn)
13087 return NULL_TREE;
13089 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13092 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13093 Return NULL_TREE if a normal call should be emitted rather than
13094 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13095 or BUILT_IN_VSPRINTF_CHK. */
13097 static tree
13098 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13099 enum built_in_function fcode)
13101 tree dest, size, len, fn, fmt, flag;
13102 const char *fmt_str;
13104 /* Verify the required arguments in the original call. */
13105 if (nargs < 4)
13106 return NULL_TREE;
13107 dest = args[0];
13108 if (!validate_arg (dest, POINTER_TYPE))
13109 return NULL_TREE;
13110 flag = args[1];
13111 if (!validate_arg (flag, INTEGER_TYPE))
13112 return NULL_TREE;
13113 size = args[2];
13114 if (!validate_arg (size, INTEGER_TYPE))
13115 return NULL_TREE;
13116 fmt = args[3];
13117 if (!validate_arg (fmt, POINTER_TYPE))
13118 return NULL_TREE;
13120 if (! host_integerp (size, 1))
13121 return NULL_TREE;
13123 len = NULL_TREE;
13125 if (!init_target_chars ())
13126 return NULL_TREE;
13128 /* Check whether the format is a literal string constant. */
13129 fmt_str = c_getstr (fmt);
13130 if (fmt_str != NULL)
13132 /* If the format doesn't contain % args or %%, we know the size. */
13133 if (strchr (fmt_str, target_percent) == 0)
13135 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13136 len = build_int_cstu (size_type_node, strlen (fmt_str));
13138 /* If the format is "%s" and first ... argument is a string literal,
13139 we know the size too. */
13140 else if (fcode == BUILT_IN_SPRINTF_CHK
13141 && strcmp (fmt_str, target_percent_s) == 0)
13143 tree arg;
13145 if (nargs == 5)
13147 arg = args[4];
13148 if (validate_arg (arg, POINTER_TYPE))
13150 len = c_strlen (arg, 1);
13151 if (! len || ! host_integerp (len, 1))
13152 len = NULL_TREE;
13158 if (! integer_all_onesp (size))
13160 if (! len || ! tree_int_cst_lt (len, size))
13161 return NULL_TREE;
13164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13165 or if format doesn't contain % chars or is "%s". */
13166 if (! integer_zerop (flag))
13168 if (fmt_str == NULL)
13169 return NULL_TREE;
13170 if (strchr (fmt_str, target_percent) != NULL
13171 && strcmp (fmt_str, target_percent_s))
13172 return NULL_TREE;
13175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13176 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13177 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13178 if (!fn)
13179 return NULL_TREE;
13181 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13184 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13185 a normal call should be emitted rather than expanding the function
13186 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13188 static tree
13189 fold_builtin_sprintf_chk (location_t loc, tree exp,
13190 enum built_in_function fcode)
13192 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13193 CALL_EXPR_ARGP (exp), fcode);
13196 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13197 NULL_TREE if a normal call should be emitted rather than expanding
13198 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13199 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13200 passed as second argument. */
13202 static tree
13203 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13204 tree maxlen, enum built_in_function fcode)
13206 tree dest, size, len, fn, fmt, flag;
13207 const char *fmt_str;
13209 /* Verify the required arguments in the original call. */
13210 if (nargs < 5)
13211 return NULL_TREE;
13212 dest = args[0];
13213 if (!validate_arg (dest, POINTER_TYPE))
13214 return NULL_TREE;
13215 len = args[1];
13216 if (!validate_arg (len, INTEGER_TYPE))
13217 return NULL_TREE;
13218 flag = args[2];
13219 if (!validate_arg (flag, INTEGER_TYPE))
13220 return NULL_TREE;
13221 size = args[3];
13222 if (!validate_arg (size, INTEGER_TYPE))
13223 return NULL_TREE;
13224 fmt = args[4];
13225 if (!validate_arg (fmt, POINTER_TYPE))
13226 return NULL_TREE;
13228 if (! host_integerp (size, 1))
13229 return NULL_TREE;
13231 if (! integer_all_onesp (size))
13233 if (! host_integerp (len, 1))
13235 /* If LEN is not constant, try MAXLEN too.
13236 For MAXLEN only allow optimizing into non-_ocs function
13237 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13238 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13239 return NULL_TREE;
13241 else
13242 maxlen = len;
13244 if (tree_int_cst_lt (size, maxlen))
13245 return NULL_TREE;
13248 if (!init_target_chars ())
13249 return NULL_TREE;
13251 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13252 or if format doesn't contain % chars or is "%s". */
13253 if (! integer_zerop (flag))
13255 fmt_str = c_getstr (fmt);
13256 if (fmt_str == NULL)
13257 return NULL_TREE;
13258 if (strchr (fmt_str, target_percent) != NULL
13259 && strcmp (fmt_str, target_percent_s))
13260 return NULL_TREE;
13263 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13264 available. */
13265 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13266 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13267 if (!fn)
13268 return NULL_TREE;
13270 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13273 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13274 a normal call should be emitted rather than expanding the function
13275 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13276 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13277 passed as second argument. */
13279 tree
13280 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13281 enum built_in_function fcode)
13283 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13284 CALL_EXPR_ARGP (exp), maxlen, fcode);
13287 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13288 FMT and ARG are the arguments to the call; we don't fold cases with
13289 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13291 Return NULL_TREE if no simplification was possible, otherwise return the
13292 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13293 code of the function to be simplified. */
13295 static tree
13296 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13297 tree arg, bool ignore,
13298 enum built_in_function fcode)
13300 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13301 const char *fmt_str = NULL;
13303 /* If the return value is used, don't do the transformation. */
13304 if (! ignore)
13305 return NULL_TREE;
13307 /* Verify the required arguments in the original call. */
13308 if (!validate_arg (fmt, POINTER_TYPE))
13309 return NULL_TREE;
13311 /* Check whether the format is a literal string constant. */
13312 fmt_str = c_getstr (fmt);
13313 if (fmt_str == NULL)
13314 return NULL_TREE;
13316 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13318 /* If we're using an unlocked function, assume the other
13319 unlocked functions exist explicitly. */
13320 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13321 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13323 else
13325 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13326 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13329 if (!init_target_chars ())
13330 return NULL_TREE;
13332 if (strcmp (fmt_str, target_percent_s) == 0
13333 || strchr (fmt_str, target_percent) == NULL)
13335 const char *str;
13337 if (strcmp (fmt_str, target_percent_s) == 0)
13339 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13340 return NULL_TREE;
13342 if (!arg || !validate_arg (arg, POINTER_TYPE))
13343 return NULL_TREE;
13345 str = c_getstr (arg);
13346 if (str == NULL)
13347 return NULL_TREE;
13349 else
13351 /* The format specifier doesn't contain any '%' characters. */
13352 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13353 && arg)
13354 return NULL_TREE;
13355 str = fmt_str;
13358 /* If the string was "", printf does nothing. */
13359 if (str[0] == '\0')
13360 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13362 /* If the string has length of 1, call putchar. */
13363 if (str[1] == '\0')
13365 /* Given printf("c"), (where c is any one character,)
13366 convert "c"[0] to an int and pass that to the replacement
13367 function. */
13368 newarg = build_int_cst (integer_type_node, str[0]);
13369 if (fn_putchar)
13370 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13372 else
13374 /* If the string was "string\n", call puts("string"). */
13375 size_t len = strlen (str);
13376 if ((unsigned char)str[len - 1] == target_newline
13377 && (size_t) (int) len == len
13378 && (int) len > 0)
13380 char *newstr;
13381 tree offset_node, string_cst;
13383 /* Create a NUL-terminated string that's one char shorter
13384 than the original, stripping off the trailing '\n'. */
13385 newarg = build_string_literal (len, str);
13386 string_cst = string_constant (newarg, &offset_node);
13387 gcc_checking_assert (string_cst
13388 && (TREE_STRING_LENGTH (string_cst)
13389 == (int) len)
13390 && integer_zerop (offset_node)
13391 && (unsigned char)
13392 TREE_STRING_POINTER (string_cst)[len - 1]
13393 == target_newline);
13394 /* build_string_literal creates a new STRING_CST,
13395 modify it in place to avoid double copying. */
13396 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13397 newstr[len - 1] = '\0';
13398 if (fn_puts)
13399 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13401 else
13402 /* We'd like to arrange to call fputs(string,stdout) here,
13403 but we need stdout and don't have a way to get it yet. */
13404 return NULL_TREE;
13408 /* The other optimizations can be done only on the non-va_list variants. */
13409 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13410 return NULL_TREE;
13412 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13413 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13415 if (!arg || !validate_arg (arg, POINTER_TYPE))
13416 return NULL_TREE;
13417 if (fn_puts)
13418 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13421 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13422 else if (strcmp (fmt_str, target_percent_c) == 0)
13424 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13425 return NULL_TREE;
13426 if (fn_putchar)
13427 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13430 if (!call)
13431 return NULL_TREE;
13433 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13436 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13437 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13438 more than 3 arguments, and ARG may be null in the 2-argument case.
13440 Return NULL_TREE if no simplification was possible, otherwise return the
13441 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13442 code of the function to be simplified. */
13444 static tree
13445 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13446 tree fmt, tree arg, bool ignore,
13447 enum built_in_function fcode)
13449 tree fn_fputc, fn_fputs, call = NULL_TREE;
13450 const char *fmt_str = NULL;
13452 /* If the return value is used, don't do the transformation. */
13453 if (! ignore)
13454 return NULL_TREE;
13456 /* Verify the required arguments in the original call. */
13457 if (!validate_arg (fp, POINTER_TYPE))
13458 return NULL_TREE;
13459 if (!validate_arg (fmt, POINTER_TYPE))
13460 return NULL_TREE;
13462 /* Check whether the format is a literal string constant. */
13463 fmt_str = c_getstr (fmt);
13464 if (fmt_str == NULL)
13465 return NULL_TREE;
13467 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13469 /* If we're using an unlocked function, assume the other
13470 unlocked functions exist explicitly. */
13471 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13472 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13474 else
13476 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13477 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13480 if (!init_target_chars ())
13481 return NULL_TREE;
13483 /* If the format doesn't contain % args or %%, use strcpy. */
13484 if (strchr (fmt_str, target_percent) == NULL)
13486 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13487 && arg)
13488 return NULL_TREE;
13490 /* If the format specifier was "", fprintf does nothing. */
13491 if (fmt_str[0] == '\0')
13493 /* If FP has side-effects, just wait until gimplification is
13494 done. */
13495 if (TREE_SIDE_EFFECTS (fp))
13496 return NULL_TREE;
13498 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13501 /* When "string" doesn't contain %, replace all cases of
13502 fprintf (fp, string) with fputs (string, fp). The fputs
13503 builtin will take care of special cases like length == 1. */
13504 if (fn_fputs)
13505 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13508 /* The other optimizations can be done only on the non-va_list variants. */
13509 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13510 return NULL_TREE;
13512 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13513 else if (strcmp (fmt_str, target_percent_s) == 0)
13515 if (!arg || !validate_arg (arg, POINTER_TYPE))
13516 return NULL_TREE;
13517 if (fn_fputs)
13518 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13521 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13522 else if (strcmp (fmt_str, target_percent_c) == 0)
13524 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13525 return NULL_TREE;
13526 if (fn_fputc)
13527 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13530 if (!call)
13531 return NULL_TREE;
13532 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13535 /* Initialize format string characters in the target charset. */
13537 static bool
13538 init_target_chars (void)
13540 static bool init;
13541 if (!init)
13543 target_newline = lang_hooks.to_target_charset ('\n');
13544 target_percent = lang_hooks.to_target_charset ('%');
13545 target_c = lang_hooks.to_target_charset ('c');
13546 target_s = lang_hooks.to_target_charset ('s');
13547 if (target_newline == 0 || target_percent == 0 || target_c == 0
13548 || target_s == 0)
13549 return false;
13551 target_percent_c[0] = target_percent;
13552 target_percent_c[1] = target_c;
13553 target_percent_c[2] = '\0';
13555 target_percent_s[0] = target_percent;
13556 target_percent_s[1] = target_s;
13557 target_percent_s[2] = '\0';
13559 target_percent_s_newline[0] = target_percent;
13560 target_percent_s_newline[1] = target_s;
13561 target_percent_s_newline[2] = target_newline;
13562 target_percent_s_newline[3] = '\0';
13564 init = true;
13566 return true;
13569 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13570 and no overflow/underflow occurred. INEXACT is true if M was not
13571 exactly calculated. TYPE is the tree type for the result. This
13572 function assumes that you cleared the MPFR flags and then
13573 calculated M to see if anything subsequently set a flag prior to
13574 entering this function. Return NULL_TREE if any checks fail. */
13576 static tree
13577 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13579 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13580 overflow/underflow occurred. If -frounding-math, proceed iff the
13581 result of calling FUNC was exact. */
13582 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13583 && (!flag_rounding_math || !inexact))
13585 REAL_VALUE_TYPE rr;
13587 real_from_mpfr (&rr, m, type, GMP_RNDN);
13588 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13589 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13590 but the mpft_t is not, then we underflowed in the
13591 conversion. */
13592 if (real_isfinite (&rr)
13593 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13595 REAL_VALUE_TYPE rmode;
13597 real_convert (&rmode, TYPE_MODE (type), &rr);
13598 /* Proceed iff the specified mode can hold the value. */
13599 if (real_identical (&rmode, &rr))
13600 return build_real (type, rmode);
13603 return NULL_TREE;
13606 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13607 number and no overflow/underflow occurred. INEXACT is true if M
13608 was not exactly calculated. TYPE is the tree type for the result.
13609 This function assumes that you cleared the MPFR flags and then
13610 calculated M to see if anything subsequently set a flag prior to
13611 entering this function. Return NULL_TREE if any checks fail, if
13612 FORCE_CONVERT is true, then bypass the checks. */
13614 static tree
13615 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13617 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13618 overflow/underflow occurred. If -frounding-math, proceed iff the
13619 result of calling FUNC was exact. */
13620 if (force_convert
13621 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13622 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13623 && (!flag_rounding_math || !inexact)))
13625 REAL_VALUE_TYPE re, im;
13627 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13628 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13629 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13630 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13631 but the mpft_t is not, then we underflowed in the
13632 conversion. */
13633 if (force_convert
13634 || (real_isfinite (&re) && real_isfinite (&im)
13635 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13636 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13638 REAL_VALUE_TYPE re_mode, im_mode;
13640 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13641 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13642 /* Proceed iff the specified mode can hold the value. */
13643 if (force_convert
13644 || (real_identical (&re_mode, &re)
13645 && real_identical (&im_mode, &im)))
13646 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13647 build_real (TREE_TYPE (type), im_mode));
13650 return NULL_TREE;
13653 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13654 FUNC on it and return the resulting value as a tree with type TYPE.
13655 If MIN and/or MAX are not NULL, then the supplied ARG must be
13656 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13657 acceptable values, otherwise they are not. The mpfr precision is
13658 set to the precision of TYPE. We assume that function FUNC returns
13659 zero if the result could be calculated exactly within the requested
13660 precision. */
13662 static tree
13663 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13664 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13665 bool inclusive)
13667 tree result = NULL_TREE;
13669 STRIP_NOPS (arg);
13671 /* To proceed, MPFR must exactly represent the target floating point
13672 format, which only happens when the target base equals two. */
13673 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13674 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13676 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13678 if (real_isfinite (ra)
13679 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13680 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13682 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13683 const int prec = fmt->p;
13684 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13685 int inexact;
13686 mpfr_t m;
13688 mpfr_init2 (m, prec);
13689 mpfr_from_real (m, ra, GMP_RNDN);
13690 mpfr_clear_flags ();
13691 inexact = func (m, m, rnd);
13692 result = do_mpfr_ckconv (m, type, inexact);
13693 mpfr_clear (m);
13697 return result;
13700 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13701 FUNC on it and return the resulting value as a tree with type TYPE.
13702 The mpfr precision is set to the precision of TYPE. We assume that
13703 function FUNC returns zero if the result could be calculated
13704 exactly within the requested precision. */
13706 static tree
13707 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13708 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13710 tree result = NULL_TREE;
13712 STRIP_NOPS (arg1);
13713 STRIP_NOPS (arg2);
13715 /* To proceed, MPFR must exactly represent the target floating point
13716 format, which only happens when the target base equals two. */
13717 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13718 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13719 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13721 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13722 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13724 if (real_isfinite (ra1) && real_isfinite (ra2))
13726 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13727 const int prec = fmt->p;
13728 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13729 int inexact;
13730 mpfr_t m1, m2;
13732 mpfr_inits2 (prec, m1, m2, NULL);
13733 mpfr_from_real (m1, ra1, GMP_RNDN);
13734 mpfr_from_real (m2, ra2, GMP_RNDN);
13735 mpfr_clear_flags ();
13736 inexact = func (m1, m1, m2, rnd);
13737 result = do_mpfr_ckconv (m1, type, inexact);
13738 mpfr_clears (m1, m2, NULL);
13742 return result;
13745 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13746 FUNC on it and return the resulting value as a tree with type TYPE.
13747 The mpfr precision is set to the precision of TYPE. We assume that
13748 function FUNC returns zero if the result could be calculated
13749 exactly within the requested precision. */
13751 static tree
13752 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13753 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13755 tree result = NULL_TREE;
13757 STRIP_NOPS (arg1);
13758 STRIP_NOPS (arg2);
13759 STRIP_NOPS (arg3);
13761 /* To proceed, MPFR must exactly represent the target floating point
13762 format, which only happens when the target base equals two. */
13763 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13764 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13765 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13766 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13768 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13769 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13770 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13772 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13774 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13775 const int prec = fmt->p;
13776 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13777 int inexact;
13778 mpfr_t m1, m2, m3;
13780 mpfr_inits2 (prec, m1, m2, m3, NULL);
13781 mpfr_from_real (m1, ra1, GMP_RNDN);
13782 mpfr_from_real (m2, ra2, GMP_RNDN);
13783 mpfr_from_real (m3, ra3, GMP_RNDN);
13784 mpfr_clear_flags ();
13785 inexact = func (m1, m1, m2, m3, rnd);
13786 result = do_mpfr_ckconv (m1, type, inexact);
13787 mpfr_clears (m1, m2, m3, NULL);
13791 return result;
13794 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13795 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13796 If ARG_SINP and ARG_COSP are NULL then the result is returned
13797 as a complex value.
13798 The type is taken from the type of ARG and is used for setting the
13799 precision of the calculation and results. */
13801 static tree
13802 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13804 tree const type = TREE_TYPE (arg);
13805 tree result = NULL_TREE;
13807 STRIP_NOPS (arg);
13809 /* To proceed, MPFR must exactly represent the target floating point
13810 format, which only happens when the target base equals two. */
13811 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13812 && TREE_CODE (arg) == REAL_CST
13813 && !TREE_OVERFLOW (arg))
13815 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13817 if (real_isfinite (ra))
13819 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13820 const int prec = fmt->p;
13821 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13822 tree result_s, result_c;
13823 int inexact;
13824 mpfr_t m, ms, mc;
13826 mpfr_inits2 (prec, m, ms, mc, NULL);
13827 mpfr_from_real (m, ra, GMP_RNDN);
13828 mpfr_clear_flags ();
13829 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13830 result_s = do_mpfr_ckconv (ms, type, inexact);
13831 result_c = do_mpfr_ckconv (mc, type, inexact);
13832 mpfr_clears (m, ms, mc, NULL);
13833 if (result_s && result_c)
13835 /* If we are to return in a complex value do so. */
13836 if (!arg_sinp && !arg_cosp)
13837 return build_complex (build_complex_type (type),
13838 result_c, result_s);
13840 /* Dereference the sin/cos pointer arguments. */
13841 arg_sinp = build_fold_indirect_ref (arg_sinp);
13842 arg_cosp = build_fold_indirect_ref (arg_cosp);
13843 /* Proceed if valid pointer type were passed in. */
13844 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13845 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13847 /* Set the values. */
13848 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13849 result_s);
13850 TREE_SIDE_EFFECTS (result_s) = 1;
13851 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13852 result_c);
13853 TREE_SIDE_EFFECTS (result_c) = 1;
13854 /* Combine the assignments into a compound expr. */
13855 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13856 result_s, result_c));
13861 return result;
13864 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13865 two-argument mpfr order N Bessel function FUNC on them and return
13866 the resulting value as a tree with type TYPE. The mpfr precision
13867 is set to the precision of TYPE. We assume that function FUNC
13868 returns zero if the result could be calculated exactly within the
13869 requested precision. */
13870 static tree
13871 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13872 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13873 const REAL_VALUE_TYPE *min, bool inclusive)
13875 tree result = NULL_TREE;
13877 STRIP_NOPS (arg1);
13878 STRIP_NOPS (arg2);
13880 /* To proceed, MPFR must exactly represent the target floating point
13881 format, which only happens when the target base equals two. */
13882 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13883 && host_integerp (arg1, 0)
13884 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13886 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13887 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13889 if (n == (long)n
13890 && real_isfinite (ra)
13891 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13893 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13894 const int prec = fmt->p;
13895 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13896 int inexact;
13897 mpfr_t m;
13899 mpfr_init2 (m, prec);
13900 mpfr_from_real (m, ra, GMP_RNDN);
13901 mpfr_clear_flags ();
13902 inexact = func (m, n, m, rnd);
13903 result = do_mpfr_ckconv (m, type, inexact);
13904 mpfr_clear (m);
13908 return result;
13911 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13912 the pointer *(ARG_QUO) and return the result. The type is taken
13913 from the type of ARG0 and is used for setting the precision of the
13914 calculation and results. */
13916 static tree
13917 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13919 tree const type = TREE_TYPE (arg0);
13920 tree result = NULL_TREE;
13922 STRIP_NOPS (arg0);
13923 STRIP_NOPS (arg1);
13925 /* To proceed, MPFR must exactly represent the target floating point
13926 format, which only happens when the target base equals two. */
13927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13928 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13929 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13931 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13932 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13934 if (real_isfinite (ra0) && real_isfinite (ra1))
13936 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13937 const int prec = fmt->p;
13938 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13939 tree result_rem;
13940 long integer_quo;
13941 mpfr_t m0, m1;
13943 mpfr_inits2 (prec, m0, m1, NULL);
13944 mpfr_from_real (m0, ra0, GMP_RNDN);
13945 mpfr_from_real (m1, ra1, GMP_RNDN);
13946 mpfr_clear_flags ();
13947 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13948 /* Remquo is independent of the rounding mode, so pass
13949 inexact=0 to do_mpfr_ckconv(). */
13950 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13951 mpfr_clears (m0, m1, NULL);
13952 if (result_rem)
13954 /* MPFR calculates quo in the host's long so it may
13955 return more bits in quo than the target int can hold
13956 if sizeof(host long) > sizeof(target int). This can
13957 happen even for native compilers in LP64 mode. In
13958 these cases, modulo the quo value with the largest
13959 number that the target int can hold while leaving one
13960 bit for the sign. */
13961 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13962 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13964 /* Dereference the quo pointer argument. */
13965 arg_quo = build_fold_indirect_ref (arg_quo);
13966 /* Proceed iff a valid pointer type was passed in. */
13967 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13969 /* Set the value. */
13970 tree result_quo
13971 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13972 build_int_cst (TREE_TYPE (arg_quo),
13973 integer_quo));
13974 TREE_SIDE_EFFECTS (result_quo) = 1;
13975 /* Combine the quo assignment with the rem. */
13976 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13977 result_quo, result_rem));
13982 return result;
13985 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13986 resulting value as a tree with type TYPE. The mpfr precision is
13987 set to the precision of TYPE. We assume that this mpfr function
13988 returns zero if the result could be calculated exactly within the
13989 requested precision. In addition, the integer pointer represented
13990 by ARG_SG will be dereferenced and set to the appropriate signgam
13991 (-1,1) value. */
13993 static tree
13994 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13996 tree result = NULL_TREE;
13998 STRIP_NOPS (arg);
14000 /* To proceed, MPFR must exactly represent the target floating point
14001 format, which only happens when the target base equals two. Also
14002 verify ARG is a constant and that ARG_SG is an int pointer. */
14003 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14004 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14005 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14006 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14008 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14010 /* In addition to NaN and Inf, the argument cannot be zero or a
14011 negative integer. */
14012 if (real_isfinite (ra)
14013 && ra->cl != rvc_zero
14014 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
14016 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14017 const int prec = fmt->p;
14018 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14019 int inexact, sg;
14020 mpfr_t m;
14021 tree result_lg;
14023 mpfr_init2 (m, prec);
14024 mpfr_from_real (m, ra, GMP_RNDN);
14025 mpfr_clear_flags ();
14026 inexact = mpfr_lgamma (m, &sg, m, rnd);
14027 result_lg = do_mpfr_ckconv (m, type, inexact);
14028 mpfr_clear (m);
14029 if (result_lg)
14031 tree result_sg;
14033 /* Dereference the arg_sg pointer argument. */
14034 arg_sg = build_fold_indirect_ref (arg_sg);
14035 /* Assign the signgam value into *arg_sg. */
14036 result_sg = fold_build2 (MODIFY_EXPR,
14037 TREE_TYPE (arg_sg), arg_sg,
14038 build_int_cst (TREE_TYPE (arg_sg), sg));
14039 TREE_SIDE_EFFECTS (result_sg) = 1;
14040 /* Combine the signgam assignment with the lgamma result. */
14041 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14042 result_sg, result_lg));
14047 return result;
14050 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14051 function FUNC on it and return the resulting value as a tree with
14052 type TYPE. The mpfr precision is set to the precision of TYPE. We
14053 assume that function FUNC returns zero if the result could be
14054 calculated exactly within the requested precision. */
14056 static tree
14057 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14059 tree result = NULL_TREE;
14061 STRIP_NOPS (arg);
14063 /* To proceed, MPFR must exactly represent the target floating point
14064 format, which only happens when the target base equals two. */
14065 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14066 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14067 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14069 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14070 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14072 if (real_isfinite (re) && real_isfinite (im))
14074 const struct real_format *const fmt =
14075 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14076 const int prec = fmt->p;
14077 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14078 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14079 int inexact;
14080 mpc_t m;
14082 mpc_init2 (m, prec);
14083 mpfr_from_real (mpc_realref(m), re, rnd);
14084 mpfr_from_real (mpc_imagref(m), im, rnd);
14085 mpfr_clear_flags ();
14086 inexact = func (m, m, crnd);
14087 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14088 mpc_clear (m);
14092 return result;
14095 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14096 mpc function FUNC on it and return the resulting value as a tree
14097 with type TYPE. The mpfr precision is set to the precision of
14098 TYPE. We assume that function FUNC returns zero if the result
14099 could be calculated exactly within the requested precision. If
14100 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14101 in the arguments and/or results. */
14103 tree
14104 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14105 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14107 tree result = NULL_TREE;
14109 STRIP_NOPS (arg0);
14110 STRIP_NOPS (arg1);
14112 /* To proceed, MPFR must exactly represent the target floating point
14113 format, which only happens when the target base equals two. */
14114 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14115 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14116 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14117 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14118 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14120 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14121 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14122 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14123 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14125 if (do_nonfinite
14126 || (real_isfinite (re0) && real_isfinite (im0)
14127 && real_isfinite (re1) && real_isfinite (im1)))
14129 const struct real_format *const fmt =
14130 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14131 const int prec = fmt->p;
14132 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14133 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14134 int inexact;
14135 mpc_t m0, m1;
14137 mpc_init2 (m0, prec);
14138 mpc_init2 (m1, prec);
14139 mpfr_from_real (mpc_realref(m0), re0, rnd);
14140 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14141 mpfr_from_real (mpc_realref(m1), re1, rnd);
14142 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14143 mpfr_clear_flags ();
14144 inexact = func (m0, m0, m1, crnd);
14145 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14146 mpc_clear (m0);
14147 mpc_clear (m1);
14151 return result;
14154 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14155 a normal call should be emitted rather than expanding the function
14156 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14158 static tree
14159 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14161 int nargs = gimple_call_num_args (stmt);
14163 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14164 (nargs > 0
14165 ? gimple_call_arg_ptr (stmt, 0)
14166 : &error_mark_node), fcode);
14169 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14170 a normal call should be emitted rather than expanding the function
14171 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14172 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14173 passed as second argument. */
14175 tree
14176 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14177 enum built_in_function fcode)
14179 int nargs = gimple_call_num_args (stmt);
14181 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14182 (nargs > 0
14183 ? gimple_call_arg_ptr (stmt, 0)
14184 : &error_mark_node), maxlen, fcode);
14187 /* Builtins with folding operations that operate on "..." arguments
14188 need special handling; we need to store the arguments in a convenient
14189 data structure before attempting any folding. Fortunately there are
14190 only a few builtins that fall into this category. FNDECL is the
14191 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14192 result of the function call is ignored. */
14194 static tree
14195 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14196 bool ignore ATTRIBUTE_UNUSED)
14198 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14199 tree ret = NULL_TREE;
14201 switch (fcode)
14203 case BUILT_IN_SPRINTF_CHK:
14204 case BUILT_IN_VSPRINTF_CHK:
14205 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14206 break;
14208 case BUILT_IN_SNPRINTF_CHK:
14209 case BUILT_IN_VSNPRINTF_CHK:
14210 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14212 default:
14213 break;
14215 if (ret)
14217 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14218 TREE_NO_WARNING (ret) = 1;
14219 return ret;
14221 return NULL_TREE;
14224 /* A wrapper function for builtin folding that prevents warnings for
14225 "statement without effect" and the like, caused by removing the
14226 call node earlier than the warning is generated. */
14228 tree
14229 fold_call_stmt (gimple stmt, bool ignore)
14231 tree ret = NULL_TREE;
14232 tree fndecl = gimple_call_fndecl (stmt);
14233 location_t loc = gimple_location (stmt);
14234 if (fndecl
14235 && TREE_CODE (fndecl) == FUNCTION_DECL
14236 && DECL_BUILT_IN (fndecl)
14237 && !gimple_call_va_arg_pack_p (stmt))
14239 int nargs = gimple_call_num_args (stmt);
14240 tree *args = (nargs > 0
14241 ? gimple_call_arg_ptr (stmt, 0)
14242 : &error_mark_node);
14244 if (avoid_folding_inline_builtin (fndecl))
14245 return NULL_TREE;
14246 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14248 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14250 else
14252 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14253 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14254 if (!ret)
14255 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14256 if (ret)
14258 /* Propagate location information from original call to
14259 expansion of builtin. Otherwise things like
14260 maybe_emit_chk_warning, that operate on the expansion
14261 of a builtin, will use the wrong location information. */
14262 if (gimple_has_location (stmt))
14264 tree realret = ret;
14265 if (TREE_CODE (ret) == NOP_EXPR)
14266 realret = TREE_OPERAND (ret, 0);
14267 if (CAN_HAVE_LOCATION_P (realret)
14268 && !EXPR_HAS_LOCATION (realret))
14269 SET_EXPR_LOCATION (realret, loc);
14270 return realret;
14272 return ret;
14276 return NULL_TREE;
14279 /* Look up the function in builtin_decl that corresponds to DECL
14280 and set ASMSPEC as its user assembler name. DECL must be a
14281 function decl that declares a builtin. */
14283 void
14284 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14286 tree builtin;
14287 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14288 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14289 && asmspec != 0);
14291 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14292 set_user_assembler_name (builtin, asmspec);
14293 switch (DECL_FUNCTION_CODE (decl))
14295 case BUILT_IN_MEMCPY:
14296 init_block_move_fn (asmspec);
14297 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14298 break;
14299 case BUILT_IN_MEMSET:
14300 init_block_clear_fn (asmspec);
14301 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14302 break;
14303 case BUILT_IN_MEMMOVE:
14304 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14305 break;
14306 case BUILT_IN_MEMCMP:
14307 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14308 break;
14309 case BUILT_IN_ABORT:
14310 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14311 break;
14312 case BUILT_IN_FFS:
14313 if (INT_TYPE_SIZE < BITS_PER_WORD)
14315 set_user_assembler_libfunc ("ffs", asmspec);
14316 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14317 MODE_INT, 0), "ffs");
14319 break;
14320 default:
14321 break;
14325 /* Return true if DECL is a builtin that expands to a constant or similarly
14326 simple code. */
14327 bool
14328 is_simple_builtin (tree decl)
14330 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14331 switch (DECL_FUNCTION_CODE (decl))
14333 /* Builtins that expand to constants. */
14334 case BUILT_IN_CONSTANT_P:
14335 case BUILT_IN_EXPECT:
14336 case BUILT_IN_OBJECT_SIZE:
14337 case BUILT_IN_UNREACHABLE:
14338 /* Simple register moves or loads from stack. */
14339 case BUILT_IN_ASSUME_ALIGNED:
14340 case BUILT_IN_RETURN_ADDRESS:
14341 case BUILT_IN_EXTRACT_RETURN_ADDR:
14342 case BUILT_IN_FROB_RETURN_ADDR:
14343 case BUILT_IN_RETURN:
14344 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14345 case BUILT_IN_FRAME_ADDRESS:
14346 case BUILT_IN_VA_END:
14347 case BUILT_IN_STACK_SAVE:
14348 case BUILT_IN_STACK_RESTORE:
14349 /* Exception state returns or moves registers around. */
14350 case BUILT_IN_EH_FILTER:
14351 case BUILT_IN_EH_POINTER:
14352 case BUILT_IN_EH_COPY_VALUES:
14353 return true;
14355 default:
14356 return false;
14359 return false;
14362 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14363 most probably expanded inline into reasonably simple code. This is a
14364 superset of is_simple_builtin. */
14365 bool
14366 is_inexpensive_builtin (tree decl)
14368 if (!decl)
14369 return false;
14370 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14371 return true;
14372 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14373 switch (DECL_FUNCTION_CODE (decl))
14375 case BUILT_IN_ABS:
14376 case BUILT_IN_ALLOCA:
14377 case BUILT_IN_ALLOCA_WITH_ALIGN:
14378 case BUILT_IN_BSWAP16:
14379 case BUILT_IN_BSWAP32:
14380 case BUILT_IN_BSWAP64:
14381 case BUILT_IN_CLZ:
14382 case BUILT_IN_CLZIMAX:
14383 case BUILT_IN_CLZL:
14384 case BUILT_IN_CLZLL:
14385 case BUILT_IN_CTZ:
14386 case BUILT_IN_CTZIMAX:
14387 case BUILT_IN_CTZL:
14388 case BUILT_IN_CTZLL:
14389 case BUILT_IN_FFS:
14390 case BUILT_IN_FFSIMAX:
14391 case BUILT_IN_FFSL:
14392 case BUILT_IN_FFSLL:
14393 case BUILT_IN_IMAXABS:
14394 case BUILT_IN_FINITE:
14395 case BUILT_IN_FINITEF:
14396 case BUILT_IN_FINITEL:
14397 case BUILT_IN_FINITED32:
14398 case BUILT_IN_FINITED64:
14399 case BUILT_IN_FINITED128:
14400 case BUILT_IN_FPCLASSIFY:
14401 case BUILT_IN_ISFINITE:
14402 case BUILT_IN_ISINF_SIGN:
14403 case BUILT_IN_ISINF:
14404 case BUILT_IN_ISINFF:
14405 case BUILT_IN_ISINFL:
14406 case BUILT_IN_ISINFD32:
14407 case BUILT_IN_ISINFD64:
14408 case BUILT_IN_ISINFD128:
14409 case BUILT_IN_ISNAN:
14410 case BUILT_IN_ISNANF:
14411 case BUILT_IN_ISNANL:
14412 case BUILT_IN_ISNAND32:
14413 case BUILT_IN_ISNAND64:
14414 case BUILT_IN_ISNAND128:
14415 case BUILT_IN_ISNORMAL:
14416 case BUILT_IN_ISGREATER:
14417 case BUILT_IN_ISGREATEREQUAL:
14418 case BUILT_IN_ISLESS:
14419 case BUILT_IN_ISLESSEQUAL:
14420 case BUILT_IN_ISLESSGREATER:
14421 case BUILT_IN_ISUNORDERED:
14422 case BUILT_IN_VA_ARG_PACK:
14423 case BUILT_IN_VA_ARG_PACK_LEN:
14424 case BUILT_IN_VA_COPY:
14425 case BUILT_IN_TRAP:
14426 case BUILT_IN_SAVEREGS:
14427 case BUILT_IN_POPCOUNTL:
14428 case BUILT_IN_POPCOUNTLL:
14429 case BUILT_IN_POPCOUNTIMAX:
14430 case BUILT_IN_POPCOUNT:
14431 case BUILT_IN_PARITYL:
14432 case BUILT_IN_PARITYLL:
14433 case BUILT_IN_PARITYIMAX:
14434 case BUILT_IN_PARITY:
14435 case BUILT_IN_LABS:
14436 case BUILT_IN_LLABS:
14437 case BUILT_IN_PREFETCH:
14438 return true;
14440 default:
14441 return is_simple_builtin (decl);
14444 return false;