2013-10-11 Marc Glisse <marc.glisse@inria.fr>
[official-gcc.git] / gcc / builtins.c
blob5df5c548bd735425c6a8ebae5fb5cfb044a39179
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-ssa.h"
47 #include "value-prof.h"
48 #include "diagnostic-core.h"
49 #include "builtins.h"
50 #include "ubsan.h"
53 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
55 struct target_builtins default_target_builtins;
56 #if SWITCHABLE_TARGET
57 struct target_builtins *this_target_builtins = &default_target_builtins;
58 #endif
60 /* Define the names of the builtin function types and codes. */
61 const char *const built_in_class_names[BUILT_IN_LAST]
62 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
65 const char * built_in_names[(int) END_BUILTINS] =
67 #include "builtins.def"
69 #undef DEF_BUILTIN
71 /* Setup an array of _DECL trees, make sure each element is
72 initialized to NULL_TREE. */
73 builtin_info_type builtin_info;
75 /* Non-zero if __builtin_constant_p should be folded right away. */
76 bool force_folding_builtin_constant_p;
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
86 #endif
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
100 static rtx expand_builtin_interclass_mathfn (tree, rtx);
101 static rtx expand_builtin_sincos (tree);
102 static rtx expand_builtin_cexpi (tree, rtx);
103 static rtx expand_builtin_int_roundingfn (tree, rtx);
104 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_memcpy (tree, rtx);
114 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
116 enum machine_mode, int);
117 static rtx expand_builtin_strcpy (tree, rtx);
118 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
119 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncpy (tree, rtx);
121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
124 static rtx expand_builtin_bzero (tree);
125 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_alloca (tree, bool);
127 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
128 static rtx expand_builtin_frame_address (tree, tree);
129 static tree stabilize_va_list_loc (location_t, tree, int);
130 static rtx expand_builtin_expect (tree, rtx);
131 static tree fold_builtin_constant_p (tree);
132 static tree fold_builtin_expect (location_t, tree, tree);
133 static tree fold_builtin_classify_type (tree);
134 static tree fold_builtin_strlen (location_t, tree, tree);
135 static tree fold_builtin_inf (location_t, tree, int);
136 static tree fold_builtin_nan (tree, tree, int);
137 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
138 static bool validate_arg (const_tree, enum tree_code code);
139 static bool integer_valued_real_p (tree);
140 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
141 static bool readonly_data_expr (tree);
142 static rtx expand_builtin_fabs (tree, rtx, rtx);
143 static rtx expand_builtin_signbit (tree, rtx);
144 static tree fold_builtin_sqrt (location_t, tree, tree);
145 static tree fold_builtin_cbrt (location_t, tree, tree);
146 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
147 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
148 static tree fold_builtin_cos (location_t, tree, tree, tree);
149 static tree fold_builtin_cosh (location_t, tree, tree, tree);
150 static tree fold_builtin_tan (tree, tree);
151 static tree fold_builtin_trunc (location_t, tree, tree);
152 static tree fold_builtin_floor (location_t, tree, tree);
153 static tree fold_builtin_ceil (location_t, tree, tree);
154 static tree fold_builtin_round (location_t, tree, tree);
155 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
156 static tree fold_builtin_bitop (tree, tree);
157 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
158 static tree fold_builtin_strchr (location_t, tree, tree, tree);
159 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
160 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
161 static tree fold_builtin_strcmp (location_t, tree, tree);
162 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
163 static tree fold_builtin_signbit (location_t, tree, tree);
164 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
165 static tree fold_builtin_isascii (location_t, tree);
166 static tree fold_builtin_toascii (location_t, tree);
167 static tree fold_builtin_isdigit (location_t, tree);
168 static tree fold_builtin_fabs (location_t, tree, tree);
169 static tree fold_builtin_abs (location_t, tree, tree);
170 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
171 enum tree_code);
172 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
173 static tree fold_builtin_0 (location_t, tree, bool);
174 static tree fold_builtin_1 (location_t, tree, tree, bool);
175 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
176 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
177 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
178 static tree fold_builtin_varargs (location_t, tree, tree, bool);
180 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
181 static tree fold_builtin_strstr (location_t, tree, tree, tree);
182 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
183 static tree fold_builtin_strcat (location_t, tree, tree);
184 static tree fold_builtin_strncat (location_t, tree, tree, tree);
185 static tree fold_builtin_strspn (location_t, tree, tree);
186 static tree fold_builtin_strcspn (location_t, tree, tree);
187 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
188 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
190 static rtx expand_builtin_object_size (tree);
191 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
192 enum built_in_function);
193 static void maybe_emit_chk_warning (tree, enum built_in_function);
194 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
195 static void maybe_emit_free_warning (tree);
196 static tree fold_builtin_object_size (tree, tree);
197 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
198 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
199 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
200 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
201 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
202 enum built_in_function);
203 static bool init_target_chars (void);
205 static unsigned HOST_WIDE_INT target_newline;
206 static unsigned HOST_WIDE_INT target_percent;
207 static unsigned HOST_WIDE_INT target_c;
208 static unsigned HOST_WIDE_INT target_s;
209 static char target_percent_c[3];
210 static char target_percent_s[3];
211 static char target_percent_s_newline[4];
212 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
213 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
214 static tree do_mpfr_arg2 (tree, tree, tree,
215 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
216 static tree do_mpfr_arg3 (tree, tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_sincos (tree, tree, tree);
219 static tree do_mpfr_bessel_n (tree, tree, tree,
220 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_remquo (tree, tree, tree);
223 static tree do_mpfr_lgamma_r (tree, tree, tree);
224 static void expand_builtin_sync_synchronize (void);
226 /* Return true if NAME starts with __builtin_ or __sync_. */
228 static bool
229 is_builtin_name (const char *name)
231 if (strncmp (name, "__builtin_", 10) == 0)
232 return true;
233 if (strncmp (name, "__sync_", 7) == 0)
234 return true;
235 if (strncmp (name, "__atomic_", 9) == 0)
236 return true;
237 return false;
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
249 /* By default we assume that c99 functions are present at the runtime,
250 but sincos is not. */
251 bool
252 default_libc_has_function (enum function_class fn_class)
254 if (fn_class == function_c94
255 || fn_class == function_c99_misc
256 || fn_class == function_c99_math_complex)
257 return true;
259 return false;
262 bool
263 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
265 return true;
268 bool
269 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
271 return false;
274 /* Return true if NODE should be considered for inline expansion regardless
275 of the optimization level. This means whenever a function is invoked with
276 its "internal" name, which normally contains the prefix "__builtin". */
278 static bool
279 called_as_built_in (tree node)
281 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
282 we want the name used to call the function, not the name it
283 will have. */
284 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
285 return is_builtin_name (name);
288 /* Compute values M and N such that M divides (address of EXP - N) and such
289 that N < M. If these numbers can be determined, store M in alignp and N in
290 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
291 *alignp and any bit-offset to *bitposp.
293 Note that the address (and thus the alignment) computed here is based
294 on the address to which a symbol resolves, whereas DECL_ALIGN is based
295 on the address at which an object is actually located. These two
296 addresses are not always the same. For example, on ARM targets,
297 the address &foo of a Thumb function foo() has the lowest bit set,
298 whereas foo() itself starts on an even address.
300 If ADDR_P is true we are taking the address of the memory reference EXP
301 and thus cannot rely on the access taking place. */
303 static bool
304 get_object_alignment_2 (tree exp, unsigned int *alignp,
305 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
307 HOST_WIDE_INT bitsize, bitpos;
308 tree offset;
309 enum machine_mode mode;
310 int unsignedp, volatilep;
311 unsigned int inner, align = BITS_PER_UNIT;
312 bool known_alignment = false;
314 /* Get the innermost object and the constant (bitpos) and possibly
315 variable (offset) offset of the access. */
316 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
317 &mode, &unsignedp, &volatilep, true);
319 /* Extract alignment information from the innermost object and
320 possibly adjust bitpos and offset. */
321 if (TREE_CODE (exp) == FUNCTION_DECL)
323 /* Function addresses can encode extra information besides their
324 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
325 allows the low bit to be used as a virtual bit, we know
326 that the address itself must be at least 2-byte aligned. */
327 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
328 align = 2 * BITS_PER_UNIT;
330 else if (TREE_CODE (exp) == LABEL_DECL)
332 else if (TREE_CODE (exp) == CONST_DECL)
334 /* The alignment of a CONST_DECL is determined by its initializer. */
335 exp = DECL_INITIAL (exp);
336 align = TYPE_ALIGN (TREE_TYPE (exp));
337 #ifdef CONSTANT_ALIGNMENT
338 if (CONSTANT_CLASS_P (exp))
339 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
340 #endif
341 known_alignment = true;
343 else if (DECL_P (exp))
345 align = DECL_ALIGN (exp);
346 known_alignment = true;
348 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
350 align = TYPE_ALIGN (TREE_TYPE (exp));
352 else if (TREE_CODE (exp) == INDIRECT_REF
353 || TREE_CODE (exp) == MEM_REF
354 || TREE_CODE (exp) == TARGET_MEM_REF)
356 tree addr = TREE_OPERAND (exp, 0);
357 unsigned ptr_align;
358 unsigned HOST_WIDE_INT ptr_bitpos;
360 if (TREE_CODE (addr) == BIT_AND_EXPR
361 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
363 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
364 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
365 align *= BITS_PER_UNIT;
366 addr = TREE_OPERAND (addr, 0);
369 known_alignment
370 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
371 align = MAX (ptr_align, align);
373 /* The alignment of the pointer operand in a TARGET_MEM_REF
374 has to take the variable offset parts into account. */
375 if (TREE_CODE (exp) == TARGET_MEM_REF)
377 if (TMR_INDEX (exp))
379 unsigned HOST_WIDE_INT step = 1;
380 if (TMR_STEP (exp))
381 step = TREE_INT_CST_LOW (TMR_STEP (exp));
382 align = MIN (align, (step & -step) * BITS_PER_UNIT);
384 if (TMR_INDEX2 (exp))
385 align = BITS_PER_UNIT;
386 known_alignment = false;
389 /* When EXP is an actual memory reference then we can use
390 TYPE_ALIGN of a pointer indirection to derive alignment.
391 Do so only if get_pointer_alignment_1 did not reveal absolute
392 alignment knowledge and if using that alignment would
393 improve the situation. */
394 if (!addr_p && !known_alignment
395 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
396 align = TYPE_ALIGN (TREE_TYPE (exp));
397 else
399 /* Else adjust bitpos accordingly. */
400 bitpos += ptr_bitpos;
401 if (TREE_CODE (exp) == MEM_REF
402 || TREE_CODE (exp) == TARGET_MEM_REF)
403 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
406 else if (TREE_CODE (exp) == STRING_CST)
408 /* STRING_CST are the only constant objects we allow to be not
409 wrapped inside a CONST_DECL. */
410 align = TYPE_ALIGN (TREE_TYPE (exp));
411 #ifdef CONSTANT_ALIGNMENT
412 if (CONSTANT_CLASS_P (exp))
413 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
414 #endif
415 known_alignment = true;
418 /* If there is a non-constant offset part extract the maximum
419 alignment that can prevail. */
420 inner = ~0U;
421 while (offset)
423 tree next_offset;
425 if (TREE_CODE (offset) == PLUS_EXPR)
427 next_offset = TREE_OPERAND (offset, 0);
428 offset = TREE_OPERAND (offset, 1);
430 else
431 next_offset = NULL;
432 if (host_integerp (offset, 1))
434 /* Any overflow in calculating offset_bits won't change
435 the alignment. */
436 unsigned offset_bits
437 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
439 if (offset_bits)
440 inner = MIN (inner, (offset_bits & -offset_bits));
442 else if (TREE_CODE (offset) == MULT_EXPR
443 && host_integerp (TREE_OPERAND (offset, 1), 1))
445 /* Any overflow in calculating offset_factor won't change
446 the alignment. */
447 unsigned offset_factor
448 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
449 * BITS_PER_UNIT);
451 if (offset_factor)
452 inner = MIN (inner, (offset_factor & -offset_factor));
454 else
456 inner = MIN (inner, BITS_PER_UNIT);
457 break;
459 offset = next_offset;
461 /* Alignment is innermost object alignment adjusted by the constant
462 and non-constant offset parts. */
463 align = MIN (align, inner);
465 *alignp = align;
466 *bitposp = bitpos & (*alignp - 1);
467 return known_alignment;
470 /* For a memory reference expression EXP compute values M and N such that M
471 divides (&EXP - N) and such that N < M. If these numbers can be determined,
472 store M in alignp and N in *BITPOSP and return true. Otherwise return false
473 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
475 bool
476 get_object_alignment_1 (tree exp, unsigned int *alignp,
477 unsigned HOST_WIDE_INT *bitposp)
479 return get_object_alignment_2 (exp, alignp, bitposp, false);
482 /* Return the alignment in bits of EXP, an object. */
484 unsigned int
485 get_object_alignment (tree exp)
487 unsigned HOST_WIDE_INT bitpos = 0;
488 unsigned int align;
490 get_object_alignment_1 (exp, &align, &bitpos);
492 /* align and bitpos now specify known low bits of the pointer.
493 ptr & (align - 1) == bitpos. */
495 if (bitpos != 0)
496 align = (bitpos & -bitpos);
497 return align;
500 /* For a pointer valued expression EXP compute values M and N such that M
501 divides (EXP - N) and such that N < M. If these numbers can be determined,
502 store M in alignp and N in *BITPOSP and return true. Return false if
503 the results are just a conservative approximation.
505 If EXP is not a pointer, false is returned too. */
507 bool
508 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
509 unsigned HOST_WIDE_INT *bitposp)
511 STRIP_NOPS (exp);
513 if (TREE_CODE (exp) == ADDR_EXPR)
514 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
515 alignp, bitposp, true);
516 else if (TREE_CODE (exp) == SSA_NAME
517 && POINTER_TYPE_P (TREE_TYPE (exp)))
519 unsigned int ptr_align, ptr_misalign;
520 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
522 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
524 *bitposp = ptr_misalign * BITS_PER_UNIT;
525 *alignp = ptr_align * BITS_PER_UNIT;
526 /* We cannot really tell whether this result is an approximation. */
527 return true;
529 else
531 *bitposp = 0;
532 *alignp = BITS_PER_UNIT;
533 return false;
536 else if (TREE_CODE (exp) == INTEGER_CST)
538 *alignp = BIGGEST_ALIGNMENT;
539 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
540 & (BIGGEST_ALIGNMENT - 1));
541 return true;
544 *bitposp = 0;
545 *alignp = BITS_PER_UNIT;
546 return false;
549 /* Return the alignment in bits of EXP, a pointer valued expression.
550 The alignment returned is, by default, the alignment of the thing that
551 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
553 Otherwise, look at the expression to see if we can do better, i.e., if the
554 expression is actually pointing at an object whose alignment is tighter. */
556 unsigned int
557 get_pointer_alignment (tree exp)
559 unsigned HOST_WIDE_INT bitpos = 0;
560 unsigned int align;
562 get_pointer_alignment_1 (exp, &align, &bitpos);
564 /* align and bitpos now specify known low bits of the pointer.
565 ptr & (align - 1) == bitpos. */
567 if (bitpos != 0)
568 align = (bitpos & -bitpos);
570 return align;
573 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
574 way, because it could contain a zero byte in the middle.
575 TREE_STRING_LENGTH is the size of the character array, not the string.
577 ONLY_VALUE should be nonzero if the result is not going to be emitted
578 into the instruction stream and zero if it is going to be expanded.
579 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
580 is returned, otherwise NULL, since
581 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
582 evaluate the side-effects.
584 The value returned is of type `ssizetype'.
586 Unfortunately, string_constant can't access the values of const char
587 arrays with initializers, so neither can we do so here. */
589 tree
590 c_strlen (tree src, int only_value)
592 tree offset_node;
593 HOST_WIDE_INT offset;
594 int max;
595 const char *ptr;
596 location_t loc;
598 STRIP_NOPS (src);
599 if (TREE_CODE (src) == COND_EXPR
600 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
602 tree len1, len2;
604 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
605 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
606 if (tree_int_cst_equal (len1, len2))
607 return len1;
610 if (TREE_CODE (src) == COMPOUND_EXPR
611 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
612 return c_strlen (TREE_OPERAND (src, 1), only_value);
614 loc = EXPR_LOC_OR_HERE (src);
616 src = string_constant (src, &offset_node);
617 if (src == 0)
618 return NULL_TREE;
620 max = TREE_STRING_LENGTH (src) - 1;
621 ptr = TREE_STRING_POINTER (src);
623 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
625 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
626 compute the offset to the following null if we don't know where to
627 start searching for it. */
628 int i;
630 for (i = 0; i < max; i++)
631 if (ptr[i] == 0)
632 return NULL_TREE;
634 /* We don't know the starting offset, but we do know that the string
635 has no internal zero bytes. We can assume that the offset falls
636 within the bounds of the string; otherwise, the programmer deserves
637 what he gets. Subtract the offset from the length of the string,
638 and return that. This would perhaps not be valid if we were dealing
639 with named arrays in addition to literal string constants. */
641 return size_diffop_loc (loc, size_int (max), offset_node);
644 /* We have a known offset into the string. Start searching there for
645 a null character if we can represent it as a single HOST_WIDE_INT. */
646 if (offset_node == 0)
647 offset = 0;
648 else if (! host_integerp (offset_node, 0))
649 offset = -1;
650 else
651 offset = tree_low_cst (offset_node, 0);
653 /* If the offset is known to be out of bounds, warn, and call strlen at
654 runtime. */
655 if (offset < 0 || offset > max)
657 /* Suppress multiple warnings for propagated constant strings. */
658 if (! TREE_NO_WARNING (src))
660 warning_at (loc, 0, "offset outside bounds of constant string");
661 TREE_NO_WARNING (src) = 1;
663 return NULL_TREE;
666 /* Use strlen to search for the first zero byte. Since any strings
667 constructed with build_string will have nulls appended, we win even
668 if we get handed something like (char[4])"abcd".
670 Since OFFSET is our starting index into the string, no further
671 calculation is needed. */
672 return ssize_int (strlen (ptr + offset));
675 /* Return a char pointer for a C string if it is a string constant
676 or sum of string constant and integer constant. */
678 static const char *
679 c_getstr (tree src)
681 tree offset_node;
683 src = string_constant (src, &offset_node);
684 if (src == 0)
685 return 0;
687 if (offset_node == 0)
688 return TREE_STRING_POINTER (src);
689 else if (!host_integerp (offset_node, 1)
690 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
691 return 0;
693 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
696 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
697 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
699 static rtx
700 c_readstr (const char *str, enum machine_mode mode)
702 HOST_WIDE_INT c[2];
703 HOST_WIDE_INT ch;
704 unsigned int i, j;
706 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
708 c[0] = 0;
709 c[1] = 0;
710 ch = 1;
711 for (i = 0; i < GET_MODE_SIZE (mode); i++)
713 j = i;
714 if (WORDS_BIG_ENDIAN)
715 j = GET_MODE_SIZE (mode) - i - 1;
716 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
717 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
718 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
719 j *= BITS_PER_UNIT;
720 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
722 if (ch)
723 ch = (unsigned char) str[i];
724 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
726 return immed_double_const (c[0], c[1], mode);
729 /* Cast a target constant CST to target CHAR and if that value fits into
730 host char type, return zero and put that value into variable pointed to by
731 P. */
733 static int
734 target_char_cast (tree cst, char *p)
736 unsigned HOST_WIDE_INT val, hostval;
738 if (TREE_CODE (cst) != INTEGER_CST
739 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
740 return 1;
742 val = TREE_INT_CST_LOW (cst);
743 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
744 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
746 hostval = val;
747 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
748 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
750 if (val != hostval)
751 return 1;
753 *p = hostval;
754 return 0;
757 /* Similar to save_expr, but assumes that arbitrary code is not executed
758 in between the multiple evaluations. In particular, we assume that a
759 non-addressable local variable will not be modified. */
761 static tree
762 builtin_save_expr (tree exp)
764 if (TREE_CODE (exp) == SSA_NAME
765 || (TREE_ADDRESSABLE (exp) == 0
766 && (TREE_CODE (exp) == PARM_DECL
767 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
768 return exp;
770 return save_expr (exp);
773 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
774 times to get the address of either a higher stack frame, or a return
775 address located within it (depending on FNDECL_CODE). */
777 static rtx
778 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
780 int i;
782 #ifdef INITIAL_FRAME_ADDRESS_RTX
783 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
784 #else
785 rtx tem;
787 /* For a zero count with __builtin_return_address, we don't care what
788 frame address we return, because target-specific definitions will
789 override us. Therefore frame pointer elimination is OK, and using
790 the soft frame pointer is OK.
792 For a nonzero count, or a zero count with __builtin_frame_address,
793 we require a stable offset from the current frame pointer to the
794 previous one, so we must use the hard frame pointer, and
795 we must disable frame pointer elimination. */
796 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
797 tem = frame_pointer_rtx;
798 else
800 tem = hard_frame_pointer_rtx;
802 /* Tell reload not to eliminate the frame pointer. */
803 crtl->accesses_prior_frames = 1;
805 #endif
807 /* Some machines need special handling before we can access
808 arbitrary frames. For example, on the SPARC, we must first flush
809 all register windows to the stack. */
810 #ifdef SETUP_FRAME_ADDRESSES
811 if (count > 0)
812 SETUP_FRAME_ADDRESSES ();
813 #endif
815 /* On the SPARC, the return address is not in the frame, it is in a
816 register. There is no way to access it off of the current frame
817 pointer, but it can be accessed off the previous frame pointer by
818 reading the value from the register window save area. */
819 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
820 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
821 count--;
822 #endif
824 /* Scan back COUNT frames to the specified frame. */
825 for (i = 0; i < count; i++)
827 /* Assume the dynamic chain pointer is in the word that the
828 frame address points to, unless otherwise specified. */
829 #ifdef DYNAMIC_CHAIN_ADDRESS
830 tem = DYNAMIC_CHAIN_ADDRESS (tem);
831 #endif
832 tem = memory_address (Pmode, tem);
833 tem = gen_frame_mem (Pmode, tem);
834 tem = copy_to_reg (tem);
837 /* For __builtin_frame_address, return what we've got. But, on
838 the SPARC for example, we may have to add a bias. */
839 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
840 #ifdef FRAME_ADDR_RTX
841 return FRAME_ADDR_RTX (tem);
842 #else
843 return tem;
844 #endif
846 /* For __builtin_return_address, get the return address from that frame. */
847 #ifdef RETURN_ADDR_RTX
848 tem = RETURN_ADDR_RTX (count, tem);
849 #else
850 tem = memory_address (Pmode,
851 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
852 tem = gen_frame_mem (Pmode, tem);
853 #endif
854 return tem;
857 /* Alias set used for setjmp buffer. */
858 static alias_set_type setjmp_alias_set = -1;
860 /* Construct the leading half of a __builtin_setjmp call. Control will
861 return to RECEIVER_LABEL. This is also called directly by the SJLJ
862 exception handling code. */
864 void
865 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
867 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
868 rtx stack_save;
869 rtx mem;
871 if (setjmp_alias_set == -1)
872 setjmp_alias_set = new_alias_set ();
874 buf_addr = convert_memory_address (Pmode, buf_addr);
876 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
878 /* We store the frame pointer and the address of receiver_label in
879 the buffer and use the rest of it for the stack save area, which
880 is machine-dependent. */
882 mem = gen_rtx_MEM (Pmode, buf_addr);
883 set_mem_alias_set (mem, setjmp_alias_set);
884 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
886 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
887 GET_MODE_SIZE (Pmode))),
888 set_mem_alias_set (mem, setjmp_alias_set);
890 emit_move_insn (validize_mem (mem),
891 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
893 stack_save = gen_rtx_MEM (sa_mode,
894 plus_constant (Pmode, buf_addr,
895 2 * GET_MODE_SIZE (Pmode)));
896 set_mem_alias_set (stack_save, setjmp_alias_set);
897 emit_stack_save (SAVE_NONLOCAL, &stack_save);
899 /* If there is further processing to do, do it. */
900 #ifdef HAVE_builtin_setjmp_setup
901 if (HAVE_builtin_setjmp_setup)
902 emit_insn (gen_builtin_setjmp_setup (buf_addr));
903 #endif
905 /* We have a nonlocal label. */
906 cfun->has_nonlocal_label = 1;
909 /* Construct the trailing part of a __builtin_setjmp call. This is
910 also called directly by the SJLJ exception handling code.
911 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
913 void
914 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
916 rtx chain;
918 /* Mark the FP as used when we get here, so we have to make sure it's
919 marked as used by this function. */
920 emit_use (hard_frame_pointer_rtx);
922 /* Mark the static chain as clobbered here so life information
923 doesn't get messed up for it. */
924 chain = targetm.calls.static_chain (current_function_decl, true);
925 if (chain && REG_P (chain))
926 emit_clobber (chain);
928 /* Now put in the code to restore the frame pointer, and argument
929 pointer, if needed. */
930 #ifdef HAVE_nonlocal_goto
931 if (! HAVE_nonlocal_goto)
932 #endif
933 /* First adjust our frame pointer to its actual value. It was
934 previously set to the start of the virtual area corresponding to
935 the stacked variables when we branched here and now needs to be
936 adjusted to the actual hardware fp value.
938 Assignments to virtual registers are converted by
939 instantiate_virtual_regs into the corresponding assignment
940 to the underlying register (fp in this case) that makes
941 the original assignment true.
942 So the following insn will actually be decrementing fp by
943 STARTING_FRAME_OFFSET. */
944 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
946 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
947 if (fixed_regs[ARG_POINTER_REGNUM])
949 #ifdef ELIMINABLE_REGS
950 /* If the argument pointer can be eliminated in favor of the
951 frame pointer, we don't need to restore it. We assume here
952 that if such an elimination is present, it can always be used.
953 This is the case on all known machines; if we don't make this
954 assumption, we do unnecessary saving on many machines. */
955 size_t i;
956 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
958 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
959 if (elim_regs[i].from == ARG_POINTER_REGNUM
960 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
961 break;
963 if (i == ARRAY_SIZE (elim_regs))
964 #endif
966 /* Now restore our arg pointer from the address at which it
967 was saved in our stack frame. */
968 emit_move_insn (crtl->args.internal_arg_pointer,
969 copy_to_reg (get_arg_pointer_save_area ()));
972 #endif
974 #ifdef HAVE_builtin_setjmp_receiver
975 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
976 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
977 else
978 #endif
979 #ifdef HAVE_nonlocal_goto_receiver
980 if (HAVE_nonlocal_goto_receiver)
981 emit_insn (gen_nonlocal_goto_receiver ());
982 else
983 #endif
984 { /* Nothing */ }
986 /* We must not allow the code we just generated to be reordered by
987 scheduling. Specifically, the update of the frame pointer must
988 happen immediately, not later. Similarly, we must block
989 (frame-related) register values to be used across this code. */
990 emit_insn (gen_blockage ());
993 /* __builtin_longjmp is passed a pointer to an array of five words (not
994 all will be used on all machines). It operates similarly to the C
995 library function of the same name, but is more efficient. Much of
996 the code below is copied from the handling of non-local gotos. */
998 static void
999 expand_builtin_longjmp (rtx buf_addr, rtx value)
1001 rtx fp, lab, stack, insn, last;
1002 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1004 /* DRAP is needed for stack realign if longjmp is expanded to current
1005 function */
1006 if (SUPPORTS_STACK_ALIGNMENT)
1007 crtl->need_drap = true;
1009 if (setjmp_alias_set == -1)
1010 setjmp_alias_set = new_alias_set ();
1012 buf_addr = convert_memory_address (Pmode, buf_addr);
1014 buf_addr = force_reg (Pmode, buf_addr);
1016 /* We require that the user must pass a second argument of 1, because
1017 that is what builtin_setjmp will return. */
1018 gcc_assert (value == const1_rtx);
1020 last = get_last_insn ();
1021 #ifdef HAVE_builtin_longjmp
1022 if (HAVE_builtin_longjmp)
1023 emit_insn (gen_builtin_longjmp (buf_addr));
1024 else
1025 #endif
1027 fp = gen_rtx_MEM (Pmode, buf_addr);
1028 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1029 GET_MODE_SIZE (Pmode)));
1031 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1032 2 * GET_MODE_SIZE (Pmode)));
1033 set_mem_alias_set (fp, setjmp_alias_set);
1034 set_mem_alias_set (lab, setjmp_alias_set);
1035 set_mem_alias_set (stack, setjmp_alias_set);
1037 /* Pick up FP, label, and SP from the block and jump. This code is
1038 from expand_goto in stmt.c; see there for detailed comments. */
1039 #ifdef HAVE_nonlocal_goto
1040 if (HAVE_nonlocal_goto)
1041 /* We have to pass a value to the nonlocal_goto pattern that will
1042 get copied into the static_chain pointer, but it does not matter
1043 what that value is, because builtin_setjmp does not use it. */
1044 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1045 else
1046 #endif
1048 lab = copy_to_reg (lab);
1050 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1051 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1053 emit_move_insn (hard_frame_pointer_rtx, fp);
1054 emit_stack_restore (SAVE_NONLOCAL, stack);
1056 emit_use (hard_frame_pointer_rtx);
1057 emit_use (stack_pointer_rtx);
1058 emit_indirect_jump (lab);
1062 /* Search backwards and mark the jump insn as a non-local goto.
1063 Note that this precludes the use of __builtin_longjmp to a
1064 __builtin_setjmp target in the same function. However, we've
1065 already cautioned the user that these functions are for
1066 internal exception handling use only. */
1067 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1069 gcc_assert (insn != last);
1071 if (JUMP_P (insn))
1073 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1074 break;
1076 else if (CALL_P (insn))
1077 break;
1081 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1082 and the address of the save area. */
1084 static rtx
1085 expand_builtin_nonlocal_goto (tree exp)
1087 tree t_label, t_save_area;
1088 rtx r_label, r_save_area, r_fp, r_sp, insn;
1090 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1091 return NULL_RTX;
1093 t_label = CALL_EXPR_ARG (exp, 0);
1094 t_save_area = CALL_EXPR_ARG (exp, 1);
1096 r_label = expand_normal (t_label);
1097 r_label = convert_memory_address (Pmode, r_label);
1098 r_save_area = expand_normal (t_save_area);
1099 r_save_area = convert_memory_address (Pmode, r_save_area);
1100 /* Copy the address of the save location to a register just in case it was
1101 based on the frame pointer. */
1102 r_save_area = copy_to_reg (r_save_area);
1103 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1104 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1105 plus_constant (Pmode, r_save_area,
1106 GET_MODE_SIZE (Pmode)));
1108 crtl->has_nonlocal_goto = 1;
1110 #ifdef HAVE_nonlocal_goto
1111 /* ??? We no longer need to pass the static chain value, afaik. */
1112 if (HAVE_nonlocal_goto)
1113 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1114 else
1115 #endif
1117 r_label = copy_to_reg (r_label);
1119 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1120 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1122 /* Restore frame pointer for containing function. */
1123 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1124 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1126 /* USE of hard_frame_pointer_rtx added for consistency;
1127 not clear if really needed. */
1128 emit_use (hard_frame_pointer_rtx);
1129 emit_use (stack_pointer_rtx);
1131 /* If the architecture is using a GP register, we must
1132 conservatively assume that the target function makes use of it.
1133 The prologue of functions with nonlocal gotos must therefore
1134 initialize the GP register to the appropriate value, and we
1135 must then make sure that this value is live at the point
1136 of the jump. (Note that this doesn't necessarily apply
1137 to targets with a nonlocal_goto pattern; they are free
1138 to implement it in their own way. Note also that this is
1139 a no-op if the GP register is a global invariant.) */
1140 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1141 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1142 emit_use (pic_offset_table_rtx);
1144 emit_indirect_jump (r_label);
1147 /* Search backwards to the jump insn and mark it as a
1148 non-local goto. */
1149 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1151 if (JUMP_P (insn))
1153 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1154 break;
1156 else if (CALL_P (insn))
1157 break;
1160 return const0_rtx;
1163 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1164 (not all will be used on all machines) that was passed to __builtin_setjmp.
1165 It updates the stack pointer in that block to correspond to the current
1166 stack pointer. */
1168 static void
1169 expand_builtin_update_setjmp_buf (rtx buf_addr)
1171 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1172 rtx stack_save
1173 = gen_rtx_MEM (sa_mode,
1174 memory_address
1175 (sa_mode,
1176 plus_constant (Pmode, buf_addr,
1177 2 * GET_MODE_SIZE (Pmode))));
1179 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1182 /* Expand a call to __builtin_prefetch. For a target that does not support
1183 data prefetch, evaluate the memory address argument in case it has side
1184 effects. */
1186 static void
1187 expand_builtin_prefetch (tree exp)
1189 tree arg0, arg1, arg2;
1190 int nargs;
1191 rtx op0, op1, op2;
1193 if (!validate_arglist (exp, POINTER_TYPE, 0))
1194 return;
1196 arg0 = CALL_EXPR_ARG (exp, 0);
1198 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1199 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1200 locality). */
1201 nargs = call_expr_nargs (exp);
1202 if (nargs > 1)
1203 arg1 = CALL_EXPR_ARG (exp, 1);
1204 else
1205 arg1 = integer_zero_node;
1206 if (nargs > 2)
1207 arg2 = CALL_EXPR_ARG (exp, 2);
1208 else
1209 arg2 = integer_three_node;
1211 /* Argument 0 is an address. */
1212 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1214 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1215 if (TREE_CODE (arg1) != INTEGER_CST)
1217 error ("second argument to %<__builtin_prefetch%> must be a constant");
1218 arg1 = integer_zero_node;
1220 op1 = expand_normal (arg1);
1221 /* Argument 1 must be either zero or one. */
1222 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1224 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1225 " using zero");
1226 op1 = const0_rtx;
1229 /* Argument 2 (locality) must be a compile-time constant int. */
1230 if (TREE_CODE (arg2) != INTEGER_CST)
1232 error ("third argument to %<__builtin_prefetch%> must be a constant");
1233 arg2 = integer_zero_node;
1235 op2 = expand_normal (arg2);
1236 /* Argument 2 must be 0, 1, 2, or 3. */
1237 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1239 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1240 op2 = const0_rtx;
1243 #ifdef HAVE_prefetch
1244 if (HAVE_prefetch)
1246 struct expand_operand ops[3];
1248 create_address_operand (&ops[0], op0);
1249 create_integer_operand (&ops[1], INTVAL (op1));
1250 create_integer_operand (&ops[2], INTVAL (op2));
1251 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1252 return;
1254 #endif
1256 /* Don't do anything with direct references to volatile memory, but
1257 generate code to handle other side effects. */
1258 if (!MEM_P (op0) && side_effects_p (op0))
1259 emit_insn (op0);
1262 /* Get a MEM rtx for expression EXP which is the address of an operand
1263 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1264 the maximum length of the block of memory that might be accessed or
1265 NULL if unknown. */
1267 static rtx
1268 get_memory_rtx (tree exp, tree len)
1270 tree orig_exp = exp;
1271 rtx addr, mem;
1273 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1274 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1275 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1276 exp = TREE_OPERAND (exp, 0);
1278 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1279 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1281 /* Get an expression we can use to find the attributes to assign to MEM.
1282 First remove any nops. */
1283 while (CONVERT_EXPR_P (exp)
1284 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1285 exp = TREE_OPERAND (exp, 0);
1287 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1288 (as builtin stringops may alias with anything). */
1289 exp = fold_build2 (MEM_REF,
1290 build_array_type (char_type_node,
1291 build_range_type (sizetype,
1292 size_one_node, len)),
1293 exp, build_int_cst (ptr_type_node, 0));
1295 /* If the MEM_REF has no acceptable address, try to get the base object
1296 from the original address we got, and build an all-aliasing
1297 unknown-sized access to that one. */
1298 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1299 set_mem_attributes (mem, exp, 0);
1300 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1301 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1302 0))))
1304 exp = build_fold_addr_expr (exp);
1305 exp = fold_build2 (MEM_REF,
1306 build_array_type (char_type_node,
1307 build_range_type (sizetype,
1308 size_zero_node,
1309 NULL)),
1310 exp, build_int_cst (ptr_type_node, 0));
1311 set_mem_attributes (mem, exp, 0);
1313 set_mem_alias_set (mem, 0);
1314 return mem;
1317 /* Built-in functions to perform an untyped call and return. */
1319 #define apply_args_mode \
1320 (this_target_builtins->x_apply_args_mode)
1321 #define apply_result_mode \
1322 (this_target_builtins->x_apply_result_mode)
1324 /* Return the size required for the block returned by __builtin_apply_args,
1325 and initialize apply_args_mode. */
1327 static int
1328 apply_args_size (void)
1330 static int size = -1;
1331 int align;
1332 unsigned int regno;
1333 enum machine_mode mode;
1335 /* The values computed by this function never change. */
1336 if (size < 0)
1338 /* The first value is the incoming arg-pointer. */
1339 size = GET_MODE_SIZE (Pmode);
1341 /* The second value is the structure value address unless this is
1342 passed as an "invisible" first argument. */
1343 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1344 size += GET_MODE_SIZE (Pmode);
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if (FUNCTION_ARG_REGNO_P (regno))
1349 mode = targetm.calls.get_raw_arg_mode (regno);
1351 gcc_assert (mode != VOIDmode);
1353 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1354 if (size % align != 0)
1355 size = CEIL (size, align) * align;
1356 size += GET_MODE_SIZE (mode);
1357 apply_args_mode[regno] = mode;
1359 else
1361 apply_args_mode[regno] = VOIDmode;
1364 return size;
1367 /* Return the size required for the block returned by __builtin_apply,
1368 and initialize apply_result_mode. */
1370 static int
1371 apply_result_size (void)
1373 static int size = -1;
1374 int align, regno;
1375 enum machine_mode mode;
1377 /* The values computed by this function never change. */
1378 if (size < 0)
1380 size = 0;
1382 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1383 if (targetm.calls.function_value_regno_p (regno))
1385 mode = targetm.calls.get_raw_result_mode (regno);
1387 gcc_assert (mode != VOIDmode);
1389 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1390 if (size % align != 0)
1391 size = CEIL (size, align) * align;
1392 size += GET_MODE_SIZE (mode);
1393 apply_result_mode[regno] = mode;
1395 else
1396 apply_result_mode[regno] = VOIDmode;
1398 /* Allow targets that use untyped_call and untyped_return to override
1399 the size so that machine-specific information can be stored here. */
1400 #ifdef APPLY_RESULT_SIZE
1401 size = APPLY_RESULT_SIZE;
1402 #endif
1404 return size;
1407 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1408 /* Create a vector describing the result block RESULT. If SAVEP is true,
1409 the result block is used to save the values; otherwise it is used to
1410 restore the values. */
1412 static rtx
1413 result_vector (int savep, rtx result)
1415 int regno, size, align, nelts;
1416 enum machine_mode mode;
1417 rtx reg, mem;
1418 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1420 size = nelts = 0;
1421 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1422 if ((mode = apply_result_mode[regno]) != VOIDmode)
1424 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1425 if (size % align != 0)
1426 size = CEIL (size, align) * align;
1427 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1428 mem = adjust_address (result, mode, size);
1429 savevec[nelts++] = (savep
1430 ? gen_rtx_SET (VOIDmode, mem, reg)
1431 : gen_rtx_SET (VOIDmode, reg, mem));
1432 size += GET_MODE_SIZE (mode);
1434 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1436 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1438 /* Save the state required to perform an untyped call with the same
1439 arguments as were passed to the current function. */
1441 static rtx
1442 expand_builtin_apply_args_1 (void)
1444 rtx registers, tem;
1445 int size, align, regno;
1446 enum machine_mode mode;
1447 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1449 /* Create a block where the arg-pointer, structure value address,
1450 and argument registers can be saved. */
1451 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1453 /* Walk past the arg-pointer and structure value address. */
1454 size = GET_MODE_SIZE (Pmode);
1455 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1456 size += GET_MODE_SIZE (Pmode);
1458 /* Save each register used in calling a function to the block. */
1459 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1460 if ((mode = apply_args_mode[regno]) != VOIDmode)
1462 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1463 if (size % align != 0)
1464 size = CEIL (size, align) * align;
1466 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1468 emit_move_insn (adjust_address (registers, mode, size), tem);
1469 size += GET_MODE_SIZE (mode);
1472 /* Save the arg pointer to the block. */
1473 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1474 #ifdef STACK_GROWS_DOWNWARD
1475 /* We need the pointer as the caller actually passed them to us, not
1476 as we might have pretended they were passed. Make sure it's a valid
1477 operand, as emit_move_insn isn't expected to handle a PLUS. */
1479 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1480 NULL_RTX);
1481 #endif
1482 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1484 size = GET_MODE_SIZE (Pmode);
1486 /* Save the structure value address unless this is passed as an
1487 "invisible" first argument. */
1488 if (struct_incoming_value)
1490 emit_move_insn (adjust_address (registers, Pmode, size),
1491 copy_to_reg (struct_incoming_value));
1492 size += GET_MODE_SIZE (Pmode);
1495 /* Return the address of the block. */
1496 return copy_addr_to_reg (XEXP (registers, 0));
1499 /* __builtin_apply_args returns block of memory allocated on
1500 the stack into which is stored the arg pointer, structure
1501 value address, static chain, and all the registers that might
1502 possibly be used in performing a function call. The code is
1503 moved to the start of the function so the incoming values are
1504 saved. */
1506 static rtx
1507 expand_builtin_apply_args (void)
1509 /* Don't do __builtin_apply_args more than once in a function.
1510 Save the result of the first call and reuse it. */
1511 if (apply_args_value != 0)
1512 return apply_args_value;
1514 /* When this function is called, it means that registers must be
1515 saved on entry to this function. So we migrate the
1516 call to the first insn of this function. */
1517 rtx temp;
1518 rtx seq;
1520 start_sequence ();
1521 temp = expand_builtin_apply_args_1 ();
1522 seq = get_insns ();
1523 end_sequence ();
1525 apply_args_value = temp;
1527 /* Put the insns after the NOTE that starts the function.
1528 If this is inside a start_sequence, make the outer-level insn
1529 chain current, so the code is placed at the start of the
1530 function. If internal_arg_pointer is a non-virtual pseudo,
1531 it needs to be placed after the function that initializes
1532 that pseudo. */
1533 push_topmost_sequence ();
1534 if (REG_P (crtl->args.internal_arg_pointer)
1535 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1536 emit_insn_before (seq, parm_birth_insn);
1537 else
1538 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1539 pop_topmost_sequence ();
1540 return temp;
1544 /* Perform an untyped call and save the state required to perform an
1545 untyped return of whatever value was returned by the given function. */
1547 static rtx
1548 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1550 int size, align, regno;
1551 enum machine_mode mode;
1552 rtx incoming_args, result, reg, dest, src, call_insn;
1553 rtx old_stack_level = 0;
1554 rtx call_fusage = 0;
1555 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1557 arguments = convert_memory_address (Pmode, arguments);
1559 /* Create a block where the return registers can be saved. */
1560 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1562 /* Fetch the arg pointer from the ARGUMENTS block. */
1563 incoming_args = gen_reg_rtx (Pmode);
1564 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1565 #ifndef STACK_GROWS_DOWNWARD
1566 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1567 incoming_args, 0, OPTAB_LIB_WIDEN);
1568 #endif
1570 /* Push a new argument block and copy the arguments. Do not allow
1571 the (potential) memcpy call below to interfere with our stack
1572 manipulations. */
1573 do_pending_stack_adjust ();
1574 NO_DEFER_POP;
1576 /* Save the stack with nonlocal if available. */
1577 #ifdef HAVE_save_stack_nonlocal
1578 if (HAVE_save_stack_nonlocal)
1579 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1580 else
1581 #endif
1582 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1584 /* Allocate a block of memory onto the stack and copy the memory
1585 arguments to the outgoing arguments address. We can pass TRUE
1586 as the 4th argument because we just saved the stack pointer
1587 and will restore it right after the call. */
1588 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1590 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1591 may have already set current_function_calls_alloca to true.
1592 current_function_calls_alloca won't be set if argsize is zero,
1593 so we have to guarantee need_drap is true here. */
1594 if (SUPPORTS_STACK_ALIGNMENT)
1595 crtl->need_drap = true;
1597 dest = virtual_outgoing_args_rtx;
1598 #ifndef STACK_GROWS_DOWNWARD
1599 if (CONST_INT_P (argsize))
1600 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1601 else
1602 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1603 #endif
1604 dest = gen_rtx_MEM (BLKmode, dest);
1605 set_mem_align (dest, PARM_BOUNDARY);
1606 src = gen_rtx_MEM (BLKmode, incoming_args);
1607 set_mem_align (src, PARM_BOUNDARY);
1608 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1610 /* Refer to the argument block. */
1611 apply_args_size ();
1612 arguments = gen_rtx_MEM (BLKmode, arguments);
1613 set_mem_align (arguments, PARM_BOUNDARY);
1615 /* Walk past the arg-pointer and structure value address. */
1616 size = GET_MODE_SIZE (Pmode);
1617 if (struct_value)
1618 size += GET_MODE_SIZE (Pmode);
1620 /* Restore each of the registers previously saved. Make USE insns
1621 for each of these registers for use in making the call. */
1622 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1623 if ((mode = apply_args_mode[regno]) != VOIDmode)
1625 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1626 if (size % align != 0)
1627 size = CEIL (size, align) * align;
1628 reg = gen_rtx_REG (mode, regno);
1629 emit_move_insn (reg, adjust_address (arguments, mode, size));
1630 use_reg (&call_fusage, reg);
1631 size += GET_MODE_SIZE (mode);
1634 /* Restore the structure value address unless this is passed as an
1635 "invisible" first argument. */
1636 size = GET_MODE_SIZE (Pmode);
1637 if (struct_value)
1639 rtx value = gen_reg_rtx (Pmode);
1640 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1641 emit_move_insn (struct_value, value);
1642 if (REG_P (struct_value))
1643 use_reg (&call_fusage, struct_value);
1644 size += GET_MODE_SIZE (Pmode);
1647 /* All arguments and registers used for the call are set up by now! */
1648 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1650 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1651 and we don't want to load it into a register as an optimization,
1652 because prepare_call_address already did it if it should be done. */
1653 if (GET_CODE (function) != SYMBOL_REF)
1654 function = memory_address (FUNCTION_MODE, function);
1656 /* Generate the actual call instruction and save the return value. */
1657 #ifdef HAVE_untyped_call
1658 if (HAVE_untyped_call)
1659 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1660 result, result_vector (1, result)));
1661 else
1662 #endif
1663 #ifdef HAVE_call_value
1664 if (HAVE_call_value)
1666 rtx valreg = 0;
1668 /* Locate the unique return register. It is not possible to
1669 express a call that sets more than one return register using
1670 call_value; use untyped_call for that. In fact, untyped_call
1671 only needs to save the return registers in the given block. */
1672 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1673 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1677 valreg = gen_rtx_REG (mode, regno);
1680 emit_call_insn (GEN_CALL_VALUE (valreg,
1681 gen_rtx_MEM (FUNCTION_MODE, function),
1682 const0_rtx, NULL_RTX, const0_rtx));
1684 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1686 else
1687 #endif
1688 gcc_unreachable ();
1690 /* Find the CALL insn we just emitted, and attach the register usage
1691 information. */
1692 call_insn = last_call_insn ();
1693 add_function_usage_to (call_insn, call_fusage);
1695 /* Restore the stack. */
1696 #ifdef HAVE_save_stack_nonlocal
1697 if (HAVE_save_stack_nonlocal)
1698 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1699 else
1700 #endif
1701 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1702 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1704 OK_DEFER_POP;
1706 /* Return the address of the result block. */
1707 result = copy_addr_to_reg (XEXP (result, 0));
1708 return convert_memory_address (ptr_mode, result);
1711 /* Perform an untyped return. */
1713 static void
1714 expand_builtin_return (rtx result)
1716 int size, align, regno;
1717 enum machine_mode mode;
1718 rtx reg;
1719 rtx call_fusage = 0;
1721 result = convert_memory_address (Pmode, result);
1723 apply_result_size ();
1724 result = gen_rtx_MEM (BLKmode, result);
1726 #ifdef HAVE_untyped_return
1727 if (HAVE_untyped_return)
1729 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1730 emit_barrier ();
1731 return;
1733 #endif
1735 /* Restore the return value and note that each value is used. */
1736 size = 0;
1737 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1738 if ((mode = apply_result_mode[regno]) != VOIDmode)
1740 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1741 if (size % align != 0)
1742 size = CEIL (size, align) * align;
1743 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1744 emit_move_insn (reg, adjust_address (result, mode, size));
1746 push_to_sequence (call_fusage);
1747 emit_use (reg);
1748 call_fusage = get_insns ();
1749 end_sequence ();
1750 size += GET_MODE_SIZE (mode);
1753 /* Put the USE insns before the return. */
1754 emit_insn (call_fusage);
1756 /* Return whatever values was restored by jumping directly to the end
1757 of the function. */
1758 expand_naked_return ();
1761 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1763 static enum type_class
1764 type_to_class (tree type)
1766 switch (TREE_CODE (type))
1768 case VOID_TYPE: return void_type_class;
1769 case INTEGER_TYPE: return integer_type_class;
1770 case ENUMERAL_TYPE: return enumeral_type_class;
1771 case BOOLEAN_TYPE: return boolean_type_class;
1772 case POINTER_TYPE: return pointer_type_class;
1773 case REFERENCE_TYPE: return reference_type_class;
1774 case OFFSET_TYPE: return offset_type_class;
1775 case REAL_TYPE: return real_type_class;
1776 case COMPLEX_TYPE: return complex_type_class;
1777 case FUNCTION_TYPE: return function_type_class;
1778 case METHOD_TYPE: return method_type_class;
1779 case RECORD_TYPE: return record_type_class;
1780 case UNION_TYPE:
1781 case QUAL_UNION_TYPE: return union_type_class;
1782 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1783 ? string_type_class : array_type_class);
1784 case LANG_TYPE: return lang_type_class;
1785 default: return no_type_class;
1789 /* Expand a call EXP to __builtin_classify_type. */
1791 static rtx
1792 expand_builtin_classify_type (tree exp)
1794 if (call_expr_nargs (exp))
1795 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1796 return GEN_INT (no_type_class);
1799 /* This helper macro, meant to be used in mathfn_built_in below,
1800 determines which among a set of three builtin math functions is
1801 appropriate for a given type mode. The `F' and `L' cases are
1802 automatically generated from the `double' case. */
1803 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1804 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1805 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1806 fcodel = BUILT_IN_MATHFN##L ; break;
1807 /* Similar to above, but appends _R after any F/L suffix. */
1808 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1809 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1810 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1811 fcodel = BUILT_IN_MATHFN##L_R ; break;
1813 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1814 if available. If IMPLICIT is true use the implicit builtin declaration,
1815 otherwise use the explicit declaration. If we can't do the conversion,
1816 return zero. */
1818 static tree
1819 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1821 enum built_in_function fcode, fcodef, fcodel, fcode2;
1823 switch (fn)
1825 CASE_MATHFN (BUILT_IN_ACOS)
1826 CASE_MATHFN (BUILT_IN_ACOSH)
1827 CASE_MATHFN (BUILT_IN_ASIN)
1828 CASE_MATHFN (BUILT_IN_ASINH)
1829 CASE_MATHFN (BUILT_IN_ATAN)
1830 CASE_MATHFN (BUILT_IN_ATAN2)
1831 CASE_MATHFN (BUILT_IN_ATANH)
1832 CASE_MATHFN (BUILT_IN_CBRT)
1833 CASE_MATHFN (BUILT_IN_CEIL)
1834 CASE_MATHFN (BUILT_IN_CEXPI)
1835 CASE_MATHFN (BUILT_IN_COPYSIGN)
1836 CASE_MATHFN (BUILT_IN_COS)
1837 CASE_MATHFN (BUILT_IN_COSH)
1838 CASE_MATHFN (BUILT_IN_DREM)
1839 CASE_MATHFN (BUILT_IN_ERF)
1840 CASE_MATHFN (BUILT_IN_ERFC)
1841 CASE_MATHFN (BUILT_IN_EXP)
1842 CASE_MATHFN (BUILT_IN_EXP10)
1843 CASE_MATHFN (BUILT_IN_EXP2)
1844 CASE_MATHFN (BUILT_IN_EXPM1)
1845 CASE_MATHFN (BUILT_IN_FABS)
1846 CASE_MATHFN (BUILT_IN_FDIM)
1847 CASE_MATHFN (BUILT_IN_FLOOR)
1848 CASE_MATHFN (BUILT_IN_FMA)
1849 CASE_MATHFN (BUILT_IN_FMAX)
1850 CASE_MATHFN (BUILT_IN_FMIN)
1851 CASE_MATHFN (BUILT_IN_FMOD)
1852 CASE_MATHFN (BUILT_IN_FREXP)
1853 CASE_MATHFN (BUILT_IN_GAMMA)
1854 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1855 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1856 CASE_MATHFN (BUILT_IN_HYPOT)
1857 CASE_MATHFN (BUILT_IN_ILOGB)
1858 CASE_MATHFN (BUILT_IN_ICEIL)
1859 CASE_MATHFN (BUILT_IN_IFLOOR)
1860 CASE_MATHFN (BUILT_IN_INF)
1861 CASE_MATHFN (BUILT_IN_IRINT)
1862 CASE_MATHFN (BUILT_IN_IROUND)
1863 CASE_MATHFN (BUILT_IN_ISINF)
1864 CASE_MATHFN (BUILT_IN_J0)
1865 CASE_MATHFN (BUILT_IN_J1)
1866 CASE_MATHFN (BUILT_IN_JN)
1867 CASE_MATHFN (BUILT_IN_LCEIL)
1868 CASE_MATHFN (BUILT_IN_LDEXP)
1869 CASE_MATHFN (BUILT_IN_LFLOOR)
1870 CASE_MATHFN (BUILT_IN_LGAMMA)
1871 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1872 CASE_MATHFN (BUILT_IN_LLCEIL)
1873 CASE_MATHFN (BUILT_IN_LLFLOOR)
1874 CASE_MATHFN (BUILT_IN_LLRINT)
1875 CASE_MATHFN (BUILT_IN_LLROUND)
1876 CASE_MATHFN (BUILT_IN_LOG)
1877 CASE_MATHFN (BUILT_IN_LOG10)
1878 CASE_MATHFN (BUILT_IN_LOG1P)
1879 CASE_MATHFN (BUILT_IN_LOG2)
1880 CASE_MATHFN (BUILT_IN_LOGB)
1881 CASE_MATHFN (BUILT_IN_LRINT)
1882 CASE_MATHFN (BUILT_IN_LROUND)
1883 CASE_MATHFN (BUILT_IN_MODF)
1884 CASE_MATHFN (BUILT_IN_NAN)
1885 CASE_MATHFN (BUILT_IN_NANS)
1886 CASE_MATHFN (BUILT_IN_NEARBYINT)
1887 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1888 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1889 CASE_MATHFN (BUILT_IN_POW)
1890 CASE_MATHFN (BUILT_IN_POWI)
1891 CASE_MATHFN (BUILT_IN_POW10)
1892 CASE_MATHFN (BUILT_IN_REMAINDER)
1893 CASE_MATHFN (BUILT_IN_REMQUO)
1894 CASE_MATHFN (BUILT_IN_RINT)
1895 CASE_MATHFN (BUILT_IN_ROUND)
1896 CASE_MATHFN (BUILT_IN_SCALB)
1897 CASE_MATHFN (BUILT_IN_SCALBLN)
1898 CASE_MATHFN (BUILT_IN_SCALBN)
1899 CASE_MATHFN (BUILT_IN_SIGNBIT)
1900 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1901 CASE_MATHFN (BUILT_IN_SIN)
1902 CASE_MATHFN (BUILT_IN_SINCOS)
1903 CASE_MATHFN (BUILT_IN_SINH)
1904 CASE_MATHFN (BUILT_IN_SQRT)
1905 CASE_MATHFN (BUILT_IN_TAN)
1906 CASE_MATHFN (BUILT_IN_TANH)
1907 CASE_MATHFN (BUILT_IN_TGAMMA)
1908 CASE_MATHFN (BUILT_IN_TRUNC)
1909 CASE_MATHFN (BUILT_IN_Y0)
1910 CASE_MATHFN (BUILT_IN_Y1)
1911 CASE_MATHFN (BUILT_IN_YN)
1913 default:
1914 return NULL_TREE;
1917 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1918 fcode2 = fcode;
1919 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1920 fcode2 = fcodef;
1921 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1922 fcode2 = fcodel;
1923 else
1924 return NULL_TREE;
1926 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1927 return NULL_TREE;
1929 return builtin_decl_explicit (fcode2);
1932 /* Like mathfn_built_in_1(), but always use the implicit array. */
1934 tree
1935 mathfn_built_in (tree type, enum built_in_function fn)
1937 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1940 /* If errno must be maintained, expand the RTL to check if the result,
1941 TARGET, of a built-in function call, EXP, is NaN, and if so set
1942 errno to EDOM. */
1944 static void
1945 expand_errno_check (tree exp, rtx target)
1947 rtx lab = gen_label_rtx ();
1949 /* Test the result; if it is NaN, set errno=EDOM because
1950 the argument was not in the domain. */
1951 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1952 NULL_RTX, NULL_RTX, lab,
1953 /* The jump is very likely. */
1954 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1956 #ifdef TARGET_EDOM
1957 /* If this built-in doesn't throw an exception, set errno directly. */
1958 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1960 #ifdef GEN_ERRNO_RTX
1961 rtx errno_rtx = GEN_ERRNO_RTX;
1962 #else
1963 rtx errno_rtx
1964 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1965 #endif
1966 emit_move_insn (errno_rtx,
1967 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1968 emit_label (lab);
1969 return;
1971 #endif
1973 /* Make sure the library call isn't expanded as a tail call. */
1974 CALL_EXPR_TAILCALL (exp) = 0;
1976 /* We can't set errno=EDOM directly; let the library call do it.
1977 Pop the arguments right away in case the call gets deleted. */
1978 NO_DEFER_POP;
1979 expand_call (exp, target, 0);
1980 OK_DEFER_POP;
1981 emit_label (lab);
1984 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1985 Return NULL_RTX if a normal call should be emitted rather than expanding
1986 the function in-line. EXP is the expression that is a call to the builtin
1987 function; if convenient, the result should be placed in TARGET.
1988 SUBTARGET may be used as the target for computing one of EXP's operands. */
1990 static rtx
1991 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1993 optab builtin_optab;
1994 rtx op0, insns;
1995 tree fndecl = get_callee_fndecl (exp);
1996 enum machine_mode mode;
1997 bool errno_set = false;
1998 bool try_widening = false;
1999 tree arg;
2001 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2002 return NULL_RTX;
2004 arg = CALL_EXPR_ARG (exp, 0);
2006 switch (DECL_FUNCTION_CODE (fndecl))
2008 CASE_FLT_FN (BUILT_IN_SQRT):
2009 errno_set = ! tree_expr_nonnegative_p (arg);
2010 try_widening = true;
2011 builtin_optab = sqrt_optab;
2012 break;
2013 CASE_FLT_FN (BUILT_IN_EXP):
2014 errno_set = true; builtin_optab = exp_optab; break;
2015 CASE_FLT_FN (BUILT_IN_EXP10):
2016 CASE_FLT_FN (BUILT_IN_POW10):
2017 errno_set = true; builtin_optab = exp10_optab; break;
2018 CASE_FLT_FN (BUILT_IN_EXP2):
2019 errno_set = true; builtin_optab = exp2_optab; break;
2020 CASE_FLT_FN (BUILT_IN_EXPM1):
2021 errno_set = true; builtin_optab = expm1_optab; break;
2022 CASE_FLT_FN (BUILT_IN_LOGB):
2023 errno_set = true; builtin_optab = logb_optab; break;
2024 CASE_FLT_FN (BUILT_IN_LOG):
2025 errno_set = true; builtin_optab = log_optab; break;
2026 CASE_FLT_FN (BUILT_IN_LOG10):
2027 errno_set = true; builtin_optab = log10_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOG2):
2029 errno_set = true; builtin_optab = log2_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG1P):
2031 errno_set = true; builtin_optab = log1p_optab; break;
2032 CASE_FLT_FN (BUILT_IN_ASIN):
2033 builtin_optab = asin_optab; break;
2034 CASE_FLT_FN (BUILT_IN_ACOS):
2035 builtin_optab = acos_optab; break;
2036 CASE_FLT_FN (BUILT_IN_TAN):
2037 builtin_optab = tan_optab; break;
2038 CASE_FLT_FN (BUILT_IN_ATAN):
2039 builtin_optab = atan_optab; break;
2040 CASE_FLT_FN (BUILT_IN_FLOOR):
2041 builtin_optab = floor_optab; break;
2042 CASE_FLT_FN (BUILT_IN_CEIL):
2043 builtin_optab = ceil_optab; break;
2044 CASE_FLT_FN (BUILT_IN_TRUNC):
2045 builtin_optab = btrunc_optab; break;
2046 CASE_FLT_FN (BUILT_IN_ROUND):
2047 builtin_optab = round_optab; break;
2048 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2049 builtin_optab = nearbyint_optab;
2050 if (flag_trapping_math)
2051 break;
2052 /* Else fallthrough and expand as rint. */
2053 CASE_FLT_FN (BUILT_IN_RINT):
2054 builtin_optab = rint_optab; break;
2055 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2056 builtin_optab = significand_optab; break;
2057 default:
2058 gcc_unreachable ();
2061 /* Make a suitable register to place result in. */
2062 mode = TYPE_MODE (TREE_TYPE (exp));
2064 if (! flag_errno_math || ! HONOR_NANS (mode))
2065 errno_set = false;
2067 /* Before working hard, check whether the instruction is available, but try
2068 to widen the mode for specific operations. */
2069 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2070 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2071 && (!errno_set || !optimize_insn_for_size_p ()))
2073 rtx result = gen_reg_rtx (mode);
2075 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2076 need to expand the argument again. This way, we will not perform
2077 side-effects more the once. */
2078 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2080 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2082 start_sequence ();
2084 /* Compute into RESULT.
2085 Set RESULT to wherever the result comes back. */
2086 result = expand_unop (mode, builtin_optab, op0, result, 0);
2088 if (result != 0)
2090 if (errno_set)
2091 expand_errno_check (exp, result);
2093 /* Output the entire sequence. */
2094 insns = get_insns ();
2095 end_sequence ();
2096 emit_insn (insns);
2097 return result;
2100 /* If we were unable to expand via the builtin, stop the sequence
2101 (without outputting the insns) and call to the library function
2102 with the stabilized argument list. */
2103 end_sequence ();
2106 return expand_call (exp, target, target == const0_rtx);
2109 /* Expand a call to the builtin binary math functions (pow and atan2).
2110 Return NULL_RTX if a normal call should be emitted rather than expanding the
2111 function in-line. EXP is the expression that is a call to the builtin
2112 function; if convenient, the result should be placed in TARGET.
2113 SUBTARGET may be used as the target for computing one of EXP's
2114 operands. */
2116 static rtx
2117 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2119 optab builtin_optab;
2120 rtx op0, op1, insns, result;
2121 int op1_type = REAL_TYPE;
2122 tree fndecl = get_callee_fndecl (exp);
2123 tree arg0, arg1;
2124 enum machine_mode mode;
2125 bool errno_set = true;
2127 switch (DECL_FUNCTION_CODE (fndecl))
2129 CASE_FLT_FN (BUILT_IN_SCALBN):
2130 CASE_FLT_FN (BUILT_IN_SCALBLN):
2131 CASE_FLT_FN (BUILT_IN_LDEXP):
2132 op1_type = INTEGER_TYPE;
2133 default:
2134 break;
2137 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2138 return NULL_RTX;
2140 arg0 = CALL_EXPR_ARG (exp, 0);
2141 arg1 = CALL_EXPR_ARG (exp, 1);
2143 switch (DECL_FUNCTION_CODE (fndecl))
2145 CASE_FLT_FN (BUILT_IN_POW):
2146 builtin_optab = pow_optab; break;
2147 CASE_FLT_FN (BUILT_IN_ATAN2):
2148 builtin_optab = atan2_optab; break;
2149 CASE_FLT_FN (BUILT_IN_SCALB):
2150 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2151 return 0;
2152 builtin_optab = scalb_optab; break;
2153 CASE_FLT_FN (BUILT_IN_SCALBN):
2154 CASE_FLT_FN (BUILT_IN_SCALBLN):
2155 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2156 return 0;
2157 /* Fall through... */
2158 CASE_FLT_FN (BUILT_IN_LDEXP):
2159 builtin_optab = ldexp_optab; break;
2160 CASE_FLT_FN (BUILT_IN_FMOD):
2161 builtin_optab = fmod_optab; break;
2162 CASE_FLT_FN (BUILT_IN_REMAINDER):
2163 CASE_FLT_FN (BUILT_IN_DREM):
2164 builtin_optab = remainder_optab; break;
2165 default:
2166 gcc_unreachable ();
2169 /* Make a suitable register to place result in. */
2170 mode = TYPE_MODE (TREE_TYPE (exp));
2172 /* Before working hard, check whether the instruction is available. */
2173 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2174 return NULL_RTX;
2176 result = gen_reg_rtx (mode);
2178 if (! flag_errno_math || ! HONOR_NANS (mode))
2179 errno_set = false;
2181 if (errno_set && optimize_insn_for_size_p ())
2182 return 0;
2184 /* Always stabilize the argument list. */
2185 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2186 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2188 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2189 op1 = expand_normal (arg1);
2191 start_sequence ();
2193 /* Compute into RESULT.
2194 Set RESULT to wherever the result comes back. */
2195 result = expand_binop (mode, builtin_optab, op0, op1,
2196 result, 0, OPTAB_DIRECT);
2198 /* If we were unable to expand via the builtin, stop the sequence
2199 (without outputting the insns) and call to the library function
2200 with the stabilized argument list. */
2201 if (result == 0)
2203 end_sequence ();
2204 return expand_call (exp, target, target == const0_rtx);
2207 if (errno_set)
2208 expand_errno_check (exp, result);
2210 /* Output the entire sequence. */
2211 insns = get_insns ();
2212 end_sequence ();
2213 emit_insn (insns);
2215 return result;
2218 /* Expand a call to the builtin trinary math functions (fma).
2219 Return NULL_RTX if a normal call should be emitted rather than expanding the
2220 function in-line. EXP is the expression that is a call to the builtin
2221 function; if convenient, the result should be placed in TARGET.
2222 SUBTARGET may be used as the target for computing one of EXP's
2223 operands. */
2225 static rtx
2226 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2228 optab builtin_optab;
2229 rtx op0, op1, op2, insns, result;
2230 tree fndecl = get_callee_fndecl (exp);
2231 tree arg0, arg1, arg2;
2232 enum machine_mode mode;
2234 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2235 return NULL_RTX;
2237 arg0 = CALL_EXPR_ARG (exp, 0);
2238 arg1 = CALL_EXPR_ARG (exp, 1);
2239 arg2 = CALL_EXPR_ARG (exp, 2);
2241 switch (DECL_FUNCTION_CODE (fndecl))
2243 CASE_FLT_FN (BUILT_IN_FMA):
2244 builtin_optab = fma_optab; break;
2245 default:
2246 gcc_unreachable ();
2249 /* Make a suitable register to place result in. */
2250 mode = TYPE_MODE (TREE_TYPE (exp));
2252 /* Before working hard, check whether the instruction is available. */
2253 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2254 return NULL_RTX;
2256 result = gen_reg_rtx (mode);
2258 /* Always stabilize the argument list. */
2259 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2260 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2261 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2263 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2264 op1 = expand_normal (arg1);
2265 op2 = expand_normal (arg2);
2267 start_sequence ();
2269 /* Compute into RESULT.
2270 Set RESULT to wherever the result comes back. */
2271 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2272 result, 0);
2274 /* If we were unable to expand via the builtin, stop the sequence
2275 (without outputting the insns) and call to the library function
2276 with the stabilized argument list. */
2277 if (result == 0)
2279 end_sequence ();
2280 return expand_call (exp, target, target == const0_rtx);
2283 /* Output the entire sequence. */
2284 insns = get_insns ();
2285 end_sequence ();
2286 emit_insn (insns);
2288 return result;
2291 /* Expand a call to the builtin sin and cos math functions.
2292 Return NULL_RTX if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's
2296 operands. */
2298 static rtx
2299 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2301 optab builtin_optab;
2302 rtx op0, insns;
2303 tree fndecl = get_callee_fndecl (exp);
2304 enum machine_mode mode;
2305 tree arg;
2307 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2308 return NULL_RTX;
2310 arg = CALL_EXPR_ARG (exp, 0);
2312 switch (DECL_FUNCTION_CODE (fndecl))
2314 CASE_FLT_FN (BUILT_IN_SIN):
2315 CASE_FLT_FN (BUILT_IN_COS):
2316 builtin_optab = sincos_optab; break;
2317 default:
2318 gcc_unreachable ();
2321 /* Make a suitable register to place result in. */
2322 mode = TYPE_MODE (TREE_TYPE (exp));
2324 /* Check if sincos insn is available, otherwise fallback
2325 to sin or cos insn. */
2326 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2327 switch (DECL_FUNCTION_CODE (fndecl))
2329 CASE_FLT_FN (BUILT_IN_SIN):
2330 builtin_optab = sin_optab; break;
2331 CASE_FLT_FN (BUILT_IN_COS):
2332 builtin_optab = cos_optab; break;
2333 default:
2334 gcc_unreachable ();
2337 /* Before working hard, check whether the instruction is available. */
2338 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2340 rtx result = gen_reg_rtx (mode);
2342 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2343 need to expand the argument again. This way, we will not perform
2344 side-effects more the once. */
2345 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2347 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2349 start_sequence ();
2351 /* Compute into RESULT.
2352 Set RESULT to wherever the result comes back. */
2353 if (builtin_optab == sincos_optab)
2355 int ok;
2357 switch (DECL_FUNCTION_CODE (fndecl))
2359 CASE_FLT_FN (BUILT_IN_SIN):
2360 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2361 break;
2362 CASE_FLT_FN (BUILT_IN_COS):
2363 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2364 break;
2365 default:
2366 gcc_unreachable ();
2368 gcc_assert (ok);
2370 else
2371 result = expand_unop (mode, builtin_optab, op0, result, 0);
2373 if (result != 0)
2375 /* Output the entire sequence. */
2376 insns = get_insns ();
2377 end_sequence ();
2378 emit_insn (insns);
2379 return result;
2382 /* If we were unable to expand via the builtin, stop the sequence
2383 (without outputting the insns) and call to the library function
2384 with the stabilized argument list. */
2385 end_sequence ();
2388 return expand_call (exp, target, target == const0_rtx);
2391 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2392 return an RTL instruction code that implements the functionality.
2393 If that isn't possible or available return CODE_FOR_nothing. */
2395 static enum insn_code
2396 interclass_mathfn_icode (tree arg, tree fndecl)
2398 bool errno_set = false;
2399 optab builtin_optab = unknown_optab;
2400 enum machine_mode mode;
2402 switch (DECL_FUNCTION_CODE (fndecl))
2404 CASE_FLT_FN (BUILT_IN_ILOGB):
2405 errno_set = true; builtin_optab = ilogb_optab; break;
2406 CASE_FLT_FN (BUILT_IN_ISINF):
2407 builtin_optab = isinf_optab; break;
2408 case BUILT_IN_ISNORMAL:
2409 case BUILT_IN_ISFINITE:
2410 CASE_FLT_FN (BUILT_IN_FINITE):
2411 case BUILT_IN_FINITED32:
2412 case BUILT_IN_FINITED64:
2413 case BUILT_IN_FINITED128:
2414 case BUILT_IN_ISINFD32:
2415 case BUILT_IN_ISINFD64:
2416 case BUILT_IN_ISINFD128:
2417 /* These builtins have no optabs (yet). */
2418 break;
2419 default:
2420 gcc_unreachable ();
2423 /* There's no easy way to detect the case we need to set EDOM. */
2424 if (flag_errno_math && errno_set)
2425 return CODE_FOR_nothing;
2427 /* Optab mode depends on the mode of the input argument. */
2428 mode = TYPE_MODE (TREE_TYPE (arg));
2430 if (builtin_optab)
2431 return optab_handler (builtin_optab, mode);
2432 return CODE_FOR_nothing;
2435 /* Expand a call to one of the builtin math functions that operate on
2436 floating point argument and output an integer result (ilogb, isinf,
2437 isnan, etc).
2438 Return 0 if a normal call should be emitted rather than expanding the
2439 function in-line. EXP is the expression that is a call to the builtin
2440 function; if convenient, the result should be placed in TARGET. */
2442 static rtx
2443 expand_builtin_interclass_mathfn (tree exp, rtx target)
2445 enum insn_code icode = CODE_FOR_nothing;
2446 rtx op0;
2447 tree fndecl = get_callee_fndecl (exp);
2448 enum machine_mode mode;
2449 tree arg;
2451 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2452 return NULL_RTX;
2454 arg = CALL_EXPR_ARG (exp, 0);
2455 icode = interclass_mathfn_icode (arg, fndecl);
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2458 if (icode != CODE_FOR_nothing)
2460 struct expand_operand ops[1];
2461 rtx last = get_last_insn ();
2462 tree orig_arg = arg;
2464 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2465 need to expand the argument again. This way, we will not perform
2466 side-effects more the once. */
2467 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2469 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2471 if (mode != GET_MODE (op0))
2472 op0 = convert_to_mode (mode, op0, 0);
2474 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2475 if (maybe_legitimize_operands (icode, 0, 1, ops)
2476 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2477 return ops[0].value;
2479 delete_insns_since (last);
2480 CALL_EXPR_ARG (exp, 0) = orig_arg;
2483 return NULL_RTX;
2486 /* Expand a call to the builtin sincos math function.
2487 Return NULL_RTX if a normal call should be emitted rather than expanding the
2488 function in-line. EXP is the expression that is a call to the builtin
2489 function. */
2491 static rtx
2492 expand_builtin_sincos (tree exp)
2494 rtx op0, op1, op2, target1, target2;
2495 enum machine_mode mode;
2496 tree arg, sinp, cosp;
2497 int result;
2498 location_t loc = EXPR_LOCATION (exp);
2499 tree alias_type, alias_off;
2501 if (!validate_arglist (exp, REAL_TYPE,
2502 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2503 return NULL_RTX;
2505 arg = CALL_EXPR_ARG (exp, 0);
2506 sinp = CALL_EXPR_ARG (exp, 1);
2507 cosp = CALL_EXPR_ARG (exp, 2);
2509 /* Make a suitable register to place result in. */
2510 mode = TYPE_MODE (TREE_TYPE (arg));
2512 /* Check if sincos insn is available, otherwise emit the call. */
2513 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2514 return NULL_RTX;
2516 target1 = gen_reg_rtx (mode);
2517 target2 = gen_reg_rtx (mode);
2519 op0 = expand_normal (arg);
2520 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2521 alias_off = build_int_cst (alias_type, 0);
2522 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2523 sinp, alias_off));
2524 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2525 cosp, alias_off));
2527 /* Compute into target1 and target2.
2528 Set TARGET to wherever the result comes back. */
2529 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2530 gcc_assert (result);
2532 /* Move target1 and target2 to the memory locations indicated
2533 by op1 and op2. */
2534 emit_move_insn (op1, target1);
2535 emit_move_insn (op2, target2);
2537 return const0_rtx;
2540 /* Expand a call to the internal cexpi builtin to the sincos math function.
2541 EXP is the expression that is a call to the builtin function; if convenient,
2542 the result should be placed in TARGET. */
2544 static rtx
2545 expand_builtin_cexpi (tree exp, rtx target)
2547 tree fndecl = get_callee_fndecl (exp);
2548 tree arg, type;
2549 enum machine_mode mode;
2550 rtx op0, op1, op2;
2551 location_t loc = EXPR_LOCATION (exp);
2553 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2554 return NULL_RTX;
2556 arg = CALL_EXPR_ARG (exp, 0);
2557 type = TREE_TYPE (arg);
2558 mode = TYPE_MODE (TREE_TYPE (arg));
2560 /* Try expanding via a sincos optab, fall back to emitting a libcall
2561 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2562 is only generated from sincos, cexp or if we have either of them. */
2563 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2565 op1 = gen_reg_rtx (mode);
2566 op2 = gen_reg_rtx (mode);
2568 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2570 /* Compute into op1 and op2. */
2571 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2573 else if (targetm.libc_has_function (function_sincos))
2575 tree call, fn = NULL_TREE;
2576 tree top1, top2;
2577 rtx op1a, op2a;
2579 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2581 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2582 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2583 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2584 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2585 else
2586 gcc_unreachable ();
2588 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2589 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2590 op1a = copy_addr_to_reg (XEXP (op1, 0));
2591 op2a = copy_addr_to_reg (XEXP (op2, 0));
2592 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2593 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2595 /* Make sure not to fold the sincos call again. */
2596 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2597 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2598 call, 3, arg, top1, top2));
2600 else
2602 tree call, fn = NULL_TREE, narg;
2603 tree ctype = build_complex_type (type);
2605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2608 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2610 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2611 else
2612 gcc_unreachable ();
2614 /* If we don't have a decl for cexp create one. This is the
2615 friendliest fallback if the user calls __builtin_cexpi
2616 without full target C99 function support. */
2617 if (fn == NULL_TREE)
2619 tree fntype;
2620 const char *name = NULL;
2622 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2623 name = "cexpf";
2624 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2625 name = "cexp";
2626 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2627 name = "cexpl";
2629 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2630 fn = build_fn_decl (name, fntype);
2633 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2634 build_real (type, dconst0), arg);
2636 /* Make sure not to fold the cexp call again. */
2637 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2638 return expand_expr (build_call_nary (ctype, call, 1, narg),
2639 target, VOIDmode, EXPAND_NORMAL);
2642 /* Now build the proper return type. */
2643 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2644 make_tree (TREE_TYPE (arg), op2),
2645 make_tree (TREE_TYPE (arg), op1)),
2646 target, VOIDmode, EXPAND_NORMAL);
2649 /* Conveniently construct a function call expression. FNDECL names the
2650 function to be called, N is the number of arguments, and the "..."
2651 parameters are the argument expressions. Unlike build_call_exr
2652 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2654 static tree
2655 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2657 va_list ap;
2658 tree fntype = TREE_TYPE (fndecl);
2659 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2661 va_start (ap, n);
2662 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2663 va_end (ap);
2664 SET_EXPR_LOCATION (fn, loc);
2665 return fn;
2668 /* Expand a call to one of the builtin rounding functions gcc defines
2669 as an extension (lfloor and lceil). As these are gcc extensions we
2670 do not need to worry about setting errno to EDOM.
2671 If expanding via optab fails, lower expression to (int)(floor(x)).
2672 EXP is the expression that is a call to the builtin function;
2673 if convenient, the result should be placed in TARGET. */
2675 static rtx
2676 expand_builtin_int_roundingfn (tree exp, rtx target)
2678 convert_optab builtin_optab;
2679 rtx op0, insns, tmp;
2680 tree fndecl = get_callee_fndecl (exp);
2681 enum built_in_function fallback_fn;
2682 tree fallback_fndecl;
2683 enum machine_mode mode;
2684 tree arg;
2686 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2687 gcc_unreachable ();
2689 arg = CALL_EXPR_ARG (exp, 0);
2691 switch (DECL_FUNCTION_CODE (fndecl))
2693 CASE_FLT_FN (BUILT_IN_ICEIL):
2694 CASE_FLT_FN (BUILT_IN_LCEIL):
2695 CASE_FLT_FN (BUILT_IN_LLCEIL):
2696 builtin_optab = lceil_optab;
2697 fallback_fn = BUILT_IN_CEIL;
2698 break;
2700 CASE_FLT_FN (BUILT_IN_IFLOOR):
2701 CASE_FLT_FN (BUILT_IN_LFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2703 builtin_optab = lfloor_optab;
2704 fallback_fn = BUILT_IN_FLOOR;
2705 break;
2707 default:
2708 gcc_unreachable ();
2711 /* Make a suitable register to place result in. */
2712 mode = TYPE_MODE (TREE_TYPE (exp));
2714 target = gen_reg_rtx (mode);
2716 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2717 need to expand the argument again. This way, we will not perform
2718 side-effects more the once. */
2719 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723 start_sequence ();
2725 /* Compute into TARGET. */
2726 if (expand_sfix_optab (target, op0, builtin_optab))
2728 /* Output the entire sequence. */
2729 insns = get_insns ();
2730 end_sequence ();
2731 emit_insn (insns);
2732 return target;
2735 /* If we were unable to expand via the builtin, stop the sequence
2736 (without outputting the insns). */
2737 end_sequence ();
2739 /* Fall back to floating point rounding optab. */
2740 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742 /* For non-C99 targets we may end up without a fallback fndecl here
2743 if the user called __builtin_lfloor directly. In this case emit
2744 a call to the floor/ceil variants nevertheless. This should result
2745 in the best user experience for not full C99 targets. */
2746 if (fallback_fndecl == NULL_TREE)
2748 tree fntype;
2749 const char *name = NULL;
2751 switch (DECL_FUNCTION_CODE (fndecl))
2753 case BUILT_IN_ICEIL:
2754 case BUILT_IN_LCEIL:
2755 case BUILT_IN_LLCEIL:
2756 name = "ceil";
2757 break;
2758 case BUILT_IN_ICEILF:
2759 case BUILT_IN_LCEILF:
2760 case BUILT_IN_LLCEILF:
2761 name = "ceilf";
2762 break;
2763 case BUILT_IN_ICEILL:
2764 case BUILT_IN_LCEILL:
2765 case BUILT_IN_LLCEILL:
2766 name = "ceill";
2767 break;
2768 case BUILT_IN_IFLOOR:
2769 case BUILT_IN_LFLOOR:
2770 case BUILT_IN_LLFLOOR:
2771 name = "floor";
2772 break;
2773 case BUILT_IN_IFLOORF:
2774 case BUILT_IN_LFLOORF:
2775 case BUILT_IN_LLFLOORF:
2776 name = "floorf";
2777 break;
2778 case BUILT_IN_IFLOORL:
2779 case BUILT_IN_LFLOORL:
2780 case BUILT_IN_LLFLOORL:
2781 name = "floorl";
2782 break;
2783 default:
2784 gcc_unreachable ();
2787 fntype = build_function_type_list (TREE_TYPE (arg),
2788 TREE_TYPE (arg), NULL_TREE);
2789 fallback_fndecl = build_fn_decl (name, fntype);
2792 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794 tmp = expand_normal (exp);
2795 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797 /* Truncate the result of floating point optab to integer
2798 via expand_fix (). */
2799 target = gen_reg_rtx (mode);
2800 expand_fix (target, tmp, 0);
2802 return target;
2805 /* Expand a call to one of the builtin math functions doing integer
2806 conversion (lrint).
2807 Return 0 if a normal call should be emitted rather than expanding the
2808 function in-line. EXP is the expression that is a call to the builtin
2809 function; if convenient, the result should be placed in TARGET. */
2811 static rtx
2812 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 convert_optab builtin_optab;
2815 rtx op0, insns;
2816 tree fndecl = get_callee_fndecl (exp);
2817 tree arg;
2818 enum machine_mode mode;
2819 enum built_in_function fallback_fn = BUILT_IN_NONE;
2821 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2822 gcc_unreachable ();
2824 arg = CALL_EXPR_ARG (exp, 0);
2826 switch (DECL_FUNCTION_CODE (fndecl))
2828 CASE_FLT_FN (BUILT_IN_IRINT):
2829 fallback_fn = BUILT_IN_LRINT;
2830 /* FALLTHRU */
2831 CASE_FLT_FN (BUILT_IN_LRINT):
2832 CASE_FLT_FN (BUILT_IN_LLRINT):
2833 builtin_optab = lrint_optab;
2834 break;
2836 CASE_FLT_FN (BUILT_IN_IROUND):
2837 fallback_fn = BUILT_IN_LROUND;
2838 /* FALLTHRU */
2839 CASE_FLT_FN (BUILT_IN_LROUND):
2840 CASE_FLT_FN (BUILT_IN_LLROUND):
2841 builtin_optab = lround_optab;
2842 break;
2844 default:
2845 gcc_unreachable ();
2848 /* There's no easy way to detect the case we need to set EDOM. */
2849 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2850 return NULL_RTX;
2852 /* Make a suitable register to place result in. */
2853 mode = TYPE_MODE (TREE_TYPE (exp));
2855 /* There's no easy way to detect the case we need to set EDOM. */
2856 if (!flag_errno_math)
2858 rtx result = gen_reg_rtx (mode);
2860 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2861 need to expand the argument again. This way, we will not perform
2862 side-effects more the once. */
2863 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2865 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2867 start_sequence ();
2869 if (expand_sfix_optab (result, op0, builtin_optab))
2871 /* Output the entire sequence. */
2872 insns = get_insns ();
2873 end_sequence ();
2874 emit_insn (insns);
2875 return result;
2878 /* If we were unable to expand via the builtin, stop the sequence
2879 (without outputting the insns) and call to the library function
2880 with the stabilized argument list. */
2881 end_sequence ();
2884 if (fallback_fn != BUILT_IN_NONE)
2886 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2887 targets, (int) round (x) should never be transformed into
2888 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2889 a call to lround in the hope that the target provides at least some
2890 C99 functions. This should result in the best user experience for
2891 not full C99 targets. */
2892 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2893 fallback_fn, 0);
2895 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2896 fallback_fndecl, 1, arg);
2898 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2899 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2900 return convert_to_mode (mode, target, 0);
2903 return expand_call (exp, target, target == const0_rtx);
2906 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2907 a normal call should be emitted rather than expanding the function
2908 in-line. EXP is the expression that is a call to the builtin
2909 function; if convenient, the result should be placed in TARGET. */
2911 static rtx
2912 expand_builtin_powi (tree exp, rtx target)
2914 tree arg0, arg1;
2915 rtx op0, op1;
2916 enum machine_mode mode;
2917 enum machine_mode mode2;
2919 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2920 return NULL_RTX;
2922 arg0 = CALL_EXPR_ARG (exp, 0);
2923 arg1 = CALL_EXPR_ARG (exp, 1);
2924 mode = TYPE_MODE (TREE_TYPE (exp));
2926 /* Emit a libcall to libgcc. */
2928 /* Mode of the 2nd argument must match that of an int. */
2929 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2931 if (target == NULL_RTX)
2932 target = gen_reg_rtx (mode);
2934 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2935 if (GET_MODE (op0) != mode)
2936 op0 = convert_to_mode (mode, op0, 0);
2937 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2938 if (GET_MODE (op1) != mode2)
2939 op1 = convert_to_mode (mode2, op1, 0);
2941 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2942 target, LCT_CONST, mode, 2,
2943 op0, mode, op1, mode2);
2945 return target;
2948 /* Expand expression EXP which is a call to the strlen builtin. Return
2949 NULL_RTX if we failed the caller should emit a normal call, otherwise
2950 try to get the result in TARGET, if convenient. */
2952 static rtx
2953 expand_builtin_strlen (tree exp, rtx target,
2954 enum machine_mode target_mode)
2956 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2957 return NULL_RTX;
2958 else
2960 struct expand_operand ops[4];
2961 rtx pat;
2962 tree len;
2963 tree src = CALL_EXPR_ARG (exp, 0);
2964 rtx src_reg, before_strlen;
2965 enum machine_mode insn_mode = target_mode;
2966 enum insn_code icode = CODE_FOR_nothing;
2967 unsigned int align;
2969 /* If the length can be computed at compile-time, return it. */
2970 len = c_strlen (src, 0);
2971 if (len)
2972 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2974 /* If the length can be computed at compile-time and is constant
2975 integer, but there are side-effects in src, evaluate
2976 src for side-effects, then return len.
2977 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2978 can be optimized into: i++; x = 3; */
2979 len = c_strlen (src, 1);
2980 if (len && TREE_CODE (len) == INTEGER_CST)
2982 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2983 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2986 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2988 /* If SRC is not a pointer type, don't do this operation inline. */
2989 if (align == 0)
2990 return NULL_RTX;
2992 /* Bail out if we can't compute strlen in the right mode. */
2993 while (insn_mode != VOIDmode)
2995 icode = optab_handler (strlen_optab, insn_mode);
2996 if (icode != CODE_FOR_nothing)
2997 break;
2999 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3001 if (insn_mode == VOIDmode)
3002 return NULL_RTX;
3004 /* Make a place to hold the source address. We will not expand
3005 the actual source until we are sure that the expansion will
3006 not fail -- there are trees that cannot be expanded twice. */
3007 src_reg = gen_reg_rtx (Pmode);
3009 /* Mark the beginning of the strlen sequence so we can emit the
3010 source operand later. */
3011 before_strlen = get_last_insn ();
3013 create_output_operand (&ops[0], target, insn_mode);
3014 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3015 create_integer_operand (&ops[2], 0);
3016 create_integer_operand (&ops[3], align);
3017 if (!maybe_expand_insn (icode, 4, ops))
3018 return NULL_RTX;
3020 /* Now that we are assured of success, expand the source. */
3021 start_sequence ();
3022 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3023 if (pat != src_reg)
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat) != Pmode)
3027 pat = convert_to_mode (Pmode, pat,
3028 POINTERS_EXTEND_UNSIGNED);
3029 #endif
3030 emit_move_insn (src_reg, pat);
3032 pat = get_insns ();
3033 end_sequence ();
3035 if (before_strlen)
3036 emit_insn_after (pat, before_strlen);
3037 else
3038 emit_insn_before (pat, get_insns ());
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops[0].value) == target_mode)
3042 target = ops[0].value;
3043 else if (target != 0)
3044 convert_move (target, ops[0].value, 0);
3045 else
3046 target = convert_to_mode (target_mode, ops[0].value, 0);
3048 return target;
3052 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3053 bytes from constant string DATA + OFFSET and return it as target
3054 constant. */
3056 static rtx
3057 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3058 enum machine_mode mode)
3060 const char *str = (const char *) data;
3062 gcc_assert (offset >= 0
3063 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3064 <= strlen (str) + 1));
3066 return c_readstr (str + offset, mode);
3069 /* Expand a call EXP to the memcpy builtin.
3070 Return NULL_RTX if we failed, the caller should emit a normal call,
3071 otherwise try to get the result in TARGET, if convenient (and in
3072 mode MODE if that's convenient). */
3074 static rtx
3075 expand_builtin_memcpy (tree exp, rtx target)
3077 if (!validate_arglist (exp,
3078 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3079 return NULL_RTX;
3080 else
3082 tree dest = CALL_EXPR_ARG (exp, 0);
3083 tree src = CALL_EXPR_ARG (exp, 1);
3084 tree len = CALL_EXPR_ARG (exp, 2);
3085 const char *src_str;
3086 unsigned int src_align = get_pointer_alignment (src);
3087 unsigned int dest_align = get_pointer_alignment (dest);
3088 rtx dest_mem, src_mem, dest_addr, len_rtx;
3089 HOST_WIDE_INT expected_size = -1;
3090 unsigned int expected_align = 0;
3092 /* If DEST is not a pointer type, call the normal function. */
3093 if (dest_align == 0)
3094 return NULL_RTX;
3096 /* If either SRC is not a pointer type, don't do this
3097 operation in-line. */
3098 if (src_align == 0)
3099 return NULL_RTX;
3101 if (currently_expanding_gimple_stmt)
3102 stringop_block_profile (currently_expanding_gimple_stmt,
3103 &expected_align, &expected_size);
3105 if (expected_align < dest_align)
3106 expected_align = dest_align;
3107 dest_mem = get_memory_rtx (dest, len);
3108 set_mem_align (dest_mem, dest_align);
3109 len_rtx = expand_normal (len);
3110 src_str = c_getstr (src);
3112 /* If SRC is a string constant and block move would be done
3113 by pieces, we can avoid loading the string from memory
3114 and only stored the computed constants. */
3115 if (src_str
3116 && CONST_INT_P (len_rtx)
3117 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3118 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3119 CONST_CAST (char *, src_str),
3120 dest_align, false))
3122 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3123 builtin_memcpy_read_str,
3124 CONST_CAST (char *, src_str),
3125 dest_align, false, 0);
3126 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3127 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3128 return dest_mem;
3131 src_mem = get_memory_rtx (src, len);
3132 set_mem_align (src_mem, src_align);
3134 /* Copy word part most expediently. */
3135 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3136 CALL_EXPR_TAILCALL (exp)
3137 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3138 expected_align, expected_size);
3140 if (dest_addr == 0)
3142 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3143 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3145 return dest_addr;
3149 /* Expand a call EXP to the mempcpy builtin.
3150 Return NULL_RTX if we failed; the caller should emit a normal call,
3151 otherwise try to get the result in TARGET, if convenient (and in
3152 mode MODE if that's convenient). If ENDP is 0 return the
3153 destination pointer, if ENDP is 1 return the end pointer ala
3154 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3155 stpcpy. */
3157 static rtx
3158 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3160 if (!validate_arglist (exp,
3161 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3162 return NULL_RTX;
3163 else
3165 tree dest = CALL_EXPR_ARG (exp, 0);
3166 tree src = CALL_EXPR_ARG (exp, 1);
3167 tree len = CALL_EXPR_ARG (exp, 2);
3168 return expand_builtin_mempcpy_args (dest, src, len,
3169 target, mode, /*endp=*/ 1);
3173 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3174 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3175 so that this can also be called without constructing an actual CALL_EXPR.
3176 The other arguments and return value are the same as for
3177 expand_builtin_mempcpy. */
3179 static rtx
3180 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3181 rtx target, enum machine_mode mode, int endp)
3183 /* If return value is ignored, transform mempcpy into memcpy. */
3184 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3186 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3187 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3188 dest, src, len);
3189 return expand_expr (result, target, mode, EXPAND_NORMAL);
3191 else
3193 const char *src_str;
3194 unsigned int src_align = get_pointer_alignment (src);
3195 unsigned int dest_align = get_pointer_alignment (dest);
3196 rtx dest_mem, src_mem, len_rtx;
3198 /* If either SRC or DEST is not a pointer type, don't do this
3199 operation in-line. */
3200 if (dest_align == 0 || src_align == 0)
3201 return NULL_RTX;
3203 /* If LEN is not constant, call the normal function. */
3204 if (! host_integerp (len, 1))
3205 return NULL_RTX;
3207 len_rtx = expand_normal (len);
3208 src_str = c_getstr (src);
3210 /* If SRC is a string constant and block move would be done
3211 by pieces, we can avoid loading the string from memory
3212 and only stored the computed constants. */
3213 if (src_str
3214 && CONST_INT_P (len_rtx)
3215 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3216 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3217 CONST_CAST (char *, src_str),
3218 dest_align, false))
3220 dest_mem = get_memory_rtx (dest, len);
3221 set_mem_align (dest_mem, dest_align);
3222 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3223 builtin_memcpy_read_str,
3224 CONST_CAST (char *, src_str),
3225 dest_align, false, endp);
3226 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3227 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3228 return dest_mem;
3231 if (CONST_INT_P (len_rtx)
3232 && can_move_by_pieces (INTVAL (len_rtx),
3233 MIN (dest_align, src_align)))
3235 dest_mem = get_memory_rtx (dest, len);
3236 set_mem_align (dest_mem, dest_align);
3237 src_mem = get_memory_rtx (src, len);
3238 set_mem_align (src_mem, src_align);
3239 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3240 MIN (dest_align, src_align), endp);
3241 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3242 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3243 return dest_mem;
3246 return NULL_RTX;
3250 #ifndef HAVE_movstr
3251 # define HAVE_movstr 0
3252 # define CODE_FOR_movstr CODE_FOR_nothing
3253 #endif
3255 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3256 we failed, the caller should emit a normal call, otherwise try to
3257 get the result in TARGET, if convenient. If ENDP is 0 return the
3258 destination pointer, if ENDP is 1 return the end pointer ala
3259 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3260 stpcpy. */
3262 static rtx
3263 expand_movstr (tree dest, tree src, rtx target, int endp)
3265 struct expand_operand ops[3];
3266 rtx dest_mem;
3267 rtx src_mem;
3269 if (!HAVE_movstr)
3270 return NULL_RTX;
3272 dest_mem = get_memory_rtx (dest, NULL);
3273 src_mem = get_memory_rtx (src, NULL);
3274 if (!endp)
3276 target = force_reg (Pmode, XEXP (dest_mem, 0));
3277 dest_mem = replace_equiv_address (dest_mem, target);
3280 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3281 create_fixed_operand (&ops[1], dest_mem);
3282 create_fixed_operand (&ops[2], src_mem);
3283 expand_insn (CODE_FOR_movstr, 3, ops);
3285 if (endp && target != const0_rtx)
3287 target = ops[0].value;
3288 /* movstr is supposed to set end to the address of the NUL
3289 terminator. If the caller requested a mempcpy-like return value,
3290 adjust it. */
3291 if (endp == 1)
3293 rtx tem = plus_constant (GET_MODE (target),
3294 gen_lowpart (GET_MODE (target), target), 1);
3295 emit_move_insn (target, force_operand (tem, NULL_RTX));
3298 return target;
3301 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3302 NULL_RTX if we failed the caller should emit a normal call, otherwise
3303 try to get the result in TARGET, if convenient (and in mode MODE if that's
3304 convenient). */
3306 static rtx
3307 expand_builtin_strcpy (tree exp, rtx target)
3309 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3311 tree dest = CALL_EXPR_ARG (exp, 0);
3312 tree src = CALL_EXPR_ARG (exp, 1);
3313 return expand_builtin_strcpy_args (dest, src, target);
3315 return NULL_RTX;
3318 /* Helper function to do the actual work for expand_builtin_strcpy. The
3319 arguments to the builtin_strcpy call DEST and SRC are broken out
3320 so that this can also be called without constructing an actual CALL_EXPR.
3321 The other arguments and return value are the same as for
3322 expand_builtin_strcpy. */
3324 static rtx
3325 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3327 return expand_movstr (dest, src, target, /*endp=*/0);
3330 /* Expand a call EXP to the stpcpy builtin.
3331 Return NULL_RTX if we failed the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3335 static rtx
3336 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3338 tree dst, src;
3339 location_t loc = EXPR_LOCATION (exp);
3341 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3342 return NULL_RTX;
3344 dst = CALL_EXPR_ARG (exp, 0);
3345 src = CALL_EXPR_ARG (exp, 1);
3347 /* If return value is ignored, transform stpcpy into strcpy. */
3348 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3350 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3351 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3352 return expand_expr (result, target, mode, EXPAND_NORMAL);
3354 else
3356 tree len, lenp1;
3357 rtx ret;
3359 /* Ensure we get an actual string whose length can be evaluated at
3360 compile-time, not an expression containing a string. This is
3361 because the latter will potentially produce pessimized code
3362 when used to produce the return value. */
3363 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3364 return expand_movstr (dst, src, target, /*endp=*/2);
3366 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3367 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3368 target, mode, /*endp=*/2);
3370 if (ret)
3371 return ret;
3373 if (TREE_CODE (len) == INTEGER_CST)
3375 rtx len_rtx = expand_normal (len);
3377 if (CONST_INT_P (len_rtx))
3379 ret = expand_builtin_strcpy_args (dst, src, target);
3381 if (ret)
3383 if (! target)
3385 if (mode != VOIDmode)
3386 target = gen_reg_rtx (mode);
3387 else
3388 target = gen_reg_rtx (GET_MODE (ret));
3390 if (GET_MODE (target) != GET_MODE (ret))
3391 ret = gen_lowpart (GET_MODE (target), ret);
3393 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3394 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3395 gcc_assert (ret);
3397 return target;
3402 return expand_movstr (dst, src, target, /*endp=*/2);
3406 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3407 bytes from constant string DATA + OFFSET and return it as target
3408 constant. */
3411 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3412 enum machine_mode mode)
3414 const char *str = (const char *) data;
3416 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3417 return const0_rtx;
3419 return c_readstr (str + offset, mode);
3422 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3423 NULL_RTX if we failed the caller should emit a normal call. */
3425 static rtx
3426 expand_builtin_strncpy (tree exp, rtx target)
3428 location_t loc = EXPR_LOCATION (exp);
3430 if (validate_arglist (exp,
3431 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3433 tree dest = CALL_EXPR_ARG (exp, 0);
3434 tree src = CALL_EXPR_ARG (exp, 1);
3435 tree len = CALL_EXPR_ARG (exp, 2);
3436 tree slen = c_strlen (src, 1);
3438 /* We must be passed a constant len and src parameter. */
3439 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3440 return NULL_RTX;
3442 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3444 /* We're required to pad with trailing zeros if the requested
3445 len is greater than strlen(s2)+1. In that case try to
3446 use store_by_pieces, if it fails, punt. */
3447 if (tree_int_cst_lt (slen, len))
3449 unsigned int dest_align = get_pointer_alignment (dest);
3450 const char *p = c_getstr (src);
3451 rtx dest_mem;
3453 if (!p || dest_align == 0 || !host_integerp (len, 1)
3454 || !can_store_by_pieces (tree_low_cst (len, 1),
3455 builtin_strncpy_read_str,
3456 CONST_CAST (char *, p),
3457 dest_align, false))
3458 return NULL_RTX;
3460 dest_mem = get_memory_rtx (dest, len);
3461 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3462 builtin_strncpy_read_str,
3463 CONST_CAST (char *, p), dest_align, false, 0);
3464 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3465 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3466 return dest_mem;
3469 return NULL_RTX;
3472 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3473 bytes from constant string DATA + OFFSET and return it as target
3474 constant. */
3477 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3478 enum machine_mode mode)
3480 const char *c = (const char *) data;
3481 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3483 memset (p, *c, GET_MODE_SIZE (mode));
3485 return c_readstr (p, mode);
3488 /* Callback routine for store_by_pieces. Return the RTL of a register
3489 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3490 char value given in the RTL register data. For example, if mode is
3491 4 bytes wide, return the RTL for 0x01010101*data. */
3493 static rtx
3494 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3495 enum machine_mode mode)
3497 rtx target, coeff;
3498 size_t size;
3499 char *p;
3501 size = GET_MODE_SIZE (mode);
3502 if (size == 1)
3503 return (rtx) data;
3505 p = XALLOCAVEC (char, size);
3506 memset (p, 1, size);
3507 coeff = c_readstr (p, mode);
3509 target = convert_to_mode (mode, (rtx) data, 1);
3510 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3511 return force_reg (mode, target);
3514 /* Expand expression EXP, which is a call to the memset builtin. Return
3515 NULL_RTX if we failed the caller should emit a normal call, otherwise
3516 try to get the result in TARGET, if convenient (and in mode MODE if that's
3517 convenient). */
3519 static rtx
3520 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3522 if (!validate_arglist (exp,
3523 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3524 return NULL_RTX;
3525 else
3527 tree dest = CALL_EXPR_ARG (exp, 0);
3528 tree val = CALL_EXPR_ARG (exp, 1);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3530 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3534 /* Helper function to do the actual work for expand_builtin_memset. The
3535 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3536 so that this can also be called without constructing an actual CALL_EXPR.
3537 The other arguments and return value are the same as for
3538 expand_builtin_memset. */
3540 static rtx
3541 expand_builtin_memset_args (tree dest, tree val, tree len,
3542 rtx target, enum machine_mode mode, tree orig_exp)
3544 tree fndecl, fn;
3545 enum built_in_function fcode;
3546 enum machine_mode val_mode;
3547 char c;
3548 unsigned int dest_align;
3549 rtx dest_mem, dest_addr, len_rtx;
3550 HOST_WIDE_INT expected_size = -1;
3551 unsigned int expected_align = 0;
3553 dest_align = get_pointer_alignment (dest);
3555 /* If DEST is not a pointer type, don't do this operation in-line. */
3556 if (dest_align == 0)
3557 return NULL_RTX;
3559 if (currently_expanding_gimple_stmt)
3560 stringop_block_profile (currently_expanding_gimple_stmt,
3561 &expected_align, &expected_size);
3563 if (expected_align < dest_align)
3564 expected_align = dest_align;
3566 /* If the LEN parameter is zero, return DEST. */
3567 if (integer_zerop (len))
3569 /* Evaluate and ignore VAL in case it has side-effects. */
3570 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3571 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3574 /* Stabilize the arguments in case we fail. */
3575 dest = builtin_save_expr (dest);
3576 val = builtin_save_expr (val);
3577 len = builtin_save_expr (len);
3579 len_rtx = expand_normal (len);
3580 dest_mem = get_memory_rtx (dest, len);
3581 val_mode = TYPE_MODE (unsigned_char_type_node);
3583 if (TREE_CODE (val) != INTEGER_CST)
3585 rtx val_rtx;
3587 val_rtx = expand_normal (val);
3588 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3590 /* Assume that we can memset by pieces if we can store
3591 * the coefficients by pieces (in the required modes).
3592 * We can't pass builtin_memset_gen_str as that emits RTL. */
3593 c = 1;
3594 if (host_integerp (len, 1)
3595 && can_store_by_pieces (tree_low_cst (len, 1),
3596 builtin_memset_read_str, &c, dest_align,
3597 true))
3599 val_rtx = force_reg (val_mode, val_rtx);
3600 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3601 builtin_memset_gen_str, val_rtx, dest_align,
3602 true, 0);
3604 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3605 dest_align, expected_align,
3606 expected_size))
3607 goto do_libcall;
3609 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3610 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3611 return dest_mem;
3614 if (target_char_cast (val, &c))
3615 goto do_libcall;
3617 if (c)
3619 if (host_integerp (len, 1)
3620 && can_store_by_pieces (tree_low_cst (len, 1),
3621 builtin_memset_read_str, &c, dest_align,
3622 true))
3623 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3624 builtin_memset_read_str, &c, dest_align, true, 0);
3625 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3626 gen_int_mode (c, val_mode),
3627 dest_align, expected_align,
3628 expected_size))
3629 goto do_libcall;
3631 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3632 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3633 return dest_mem;
3636 set_mem_align (dest_mem, dest_align);
3637 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3638 CALL_EXPR_TAILCALL (orig_exp)
3639 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3640 expected_align, expected_size);
3642 if (dest_addr == 0)
3644 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3645 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3648 return dest_addr;
3650 do_libcall:
3651 fndecl = get_callee_fndecl (orig_exp);
3652 fcode = DECL_FUNCTION_CODE (fndecl);
3653 if (fcode == BUILT_IN_MEMSET)
3654 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3655 dest, val, len);
3656 else if (fcode == BUILT_IN_BZERO)
3657 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3658 dest, len);
3659 else
3660 gcc_unreachable ();
3661 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3662 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3663 return expand_call (fn, target, target == const0_rtx);
3666 /* Expand expression EXP, which is a call to the bzero builtin. Return
3667 NULL_RTX if we failed the caller should emit a normal call. */
3669 static rtx
3670 expand_builtin_bzero (tree exp)
3672 tree dest, size;
3673 location_t loc = EXPR_LOCATION (exp);
3675 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3676 return NULL_RTX;
3678 dest = CALL_EXPR_ARG (exp, 0);
3679 size = CALL_EXPR_ARG (exp, 1);
3681 /* New argument list transforming bzero(ptr x, int y) to
3682 memset(ptr x, int 0, size_t y). This is done this way
3683 so that if it isn't expanded inline, we fallback to
3684 calling bzero instead of memset. */
3686 return expand_builtin_memset_args (dest, integer_zero_node,
3687 fold_convert_loc (loc,
3688 size_type_node, size),
3689 const0_rtx, VOIDmode, exp);
3692 /* Expand expression EXP, which is a call to the memcmp built-in function.
3693 Return NULL_RTX if we failed and the caller should emit a normal call,
3694 otherwise try to get the result in TARGET, if convenient (and in mode
3695 MODE, if that's convenient). */
3697 static rtx
3698 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3699 ATTRIBUTE_UNUSED enum machine_mode mode)
3701 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3703 if (!validate_arglist (exp,
3704 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3705 return NULL_RTX;
3707 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3708 implementing memcmp because it will stop if it encounters two
3709 zero bytes. */
3710 #if defined HAVE_cmpmemsi
3712 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3713 rtx result;
3714 rtx insn;
3715 tree arg1 = CALL_EXPR_ARG (exp, 0);
3716 tree arg2 = CALL_EXPR_ARG (exp, 1);
3717 tree len = CALL_EXPR_ARG (exp, 2);
3719 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3720 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3721 enum machine_mode insn_mode;
3723 if (HAVE_cmpmemsi)
3724 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3725 else
3726 return NULL_RTX;
3728 /* If we don't have POINTER_TYPE, call the function. */
3729 if (arg1_align == 0 || arg2_align == 0)
3730 return NULL_RTX;
3732 /* Make a place to write the result of the instruction. */
3733 result = target;
3734 if (! (result != 0
3735 && REG_P (result) && GET_MODE (result) == insn_mode
3736 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3737 result = gen_reg_rtx (insn_mode);
3739 arg1_rtx = get_memory_rtx (arg1, len);
3740 arg2_rtx = get_memory_rtx (arg2, len);
3741 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743 /* Set MEM_SIZE as appropriate. */
3744 if (CONST_INT_P (arg3_rtx))
3746 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3747 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3750 if (HAVE_cmpmemsi)
3751 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3752 GEN_INT (MIN (arg1_align, arg2_align)));
3753 else
3754 gcc_unreachable ();
3756 if (insn)
3757 emit_insn (insn);
3758 else
3759 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3760 TYPE_MODE (integer_type_node), 3,
3761 XEXP (arg1_rtx, 0), Pmode,
3762 XEXP (arg2_rtx, 0), Pmode,
3763 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3764 TYPE_UNSIGNED (sizetype)),
3765 TYPE_MODE (sizetype));
3767 /* Return the value in the proper mode for this function. */
3768 mode = TYPE_MODE (TREE_TYPE (exp));
3769 if (GET_MODE (result) == mode)
3770 return result;
3771 else if (target != 0)
3773 convert_move (target, result, 0);
3774 return target;
3776 else
3777 return convert_to_mode (mode, result, 0);
3779 #endif /* HAVE_cmpmemsi. */
3781 return NULL_RTX;
3784 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3785 if we failed the caller should emit a normal call, otherwise try to get
3786 the result in TARGET, if convenient. */
3788 static rtx
3789 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3792 return NULL_RTX;
3794 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3795 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3796 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3798 rtx arg1_rtx, arg2_rtx;
3799 rtx result, insn = NULL_RTX;
3800 tree fndecl, fn;
3801 tree arg1 = CALL_EXPR_ARG (exp, 0);
3802 tree arg2 = CALL_EXPR_ARG (exp, 1);
3804 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3805 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3807 /* If we don't have POINTER_TYPE, call the function. */
3808 if (arg1_align == 0 || arg2_align == 0)
3809 return NULL_RTX;
3811 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3812 arg1 = builtin_save_expr (arg1);
3813 arg2 = builtin_save_expr (arg2);
3815 arg1_rtx = get_memory_rtx (arg1, NULL);
3816 arg2_rtx = get_memory_rtx (arg2, NULL);
3818 #ifdef HAVE_cmpstrsi
3819 /* Try to call cmpstrsi. */
3820 if (HAVE_cmpstrsi)
3822 enum machine_mode insn_mode
3823 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3825 /* Make a place to write the result of the instruction. */
3826 result = target;
3827 if (! (result != 0
3828 && REG_P (result) && GET_MODE (result) == insn_mode
3829 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3830 result = gen_reg_rtx (insn_mode);
3832 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3833 GEN_INT (MIN (arg1_align, arg2_align)));
3835 #endif
3836 #ifdef HAVE_cmpstrnsi
3837 /* Try to determine at least one length and call cmpstrnsi. */
3838 if (!insn && HAVE_cmpstrnsi)
3840 tree len;
3841 rtx arg3_rtx;
3843 enum machine_mode insn_mode
3844 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3845 tree len1 = c_strlen (arg1, 1);
3846 tree len2 = c_strlen (arg2, 1);
3848 if (len1)
3849 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3850 if (len2)
3851 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3853 /* If we don't have a constant length for the first, use the length
3854 of the second, if we know it. We don't require a constant for
3855 this case; some cost analysis could be done if both are available
3856 but neither is constant. For now, assume they're equally cheap,
3857 unless one has side effects. If both strings have constant lengths,
3858 use the smaller. */
3860 if (!len1)
3861 len = len2;
3862 else if (!len2)
3863 len = len1;
3864 else if (TREE_SIDE_EFFECTS (len1))
3865 len = len2;
3866 else if (TREE_SIDE_EFFECTS (len2))
3867 len = len1;
3868 else if (TREE_CODE (len1) != INTEGER_CST)
3869 len = len2;
3870 else if (TREE_CODE (len2) != INTEGER_CST)
3871 len = len1;
3872 else if (tree_int_cst_lt (len1, len2))
3873 len = len1;
3874 else
3875 len = len2;
3877 /* If both arguments have side effects, we cannot optimize. */
3878 if (!len || TREE_SIDE_EFFECTS (len))
3879 goto do_libcall;
3881 arg3_rtx = expand_normal (len);
3883 /* Make a place to write the result of the instruction. */
3884 result = target;
3885 if (! (result != 0
3886 && REG_P (result) && GET_MODE (result) == insn_mode
3887 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3888 result = gen_reg_rtx (insn_mode);
3890 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3891 GEN_INT (MIN (arg1_align, arg2_align)));
3893 #endif
3895 if (insn)
3897 enum machine_mode mode;
3898 emit_insn (insn);
3900 /* Return the value in the proper mode for this function. */
3901 mode = TYPE_MODE (TREE_TYPE (exp));
3902 if (GET_MODE (result) == mode)
3903 return result;
3904 if (target == 0)
3905 return convert_to_mode (mode, result, 0);
3906 convert_move (target, result, 0);
3907 return target;
3910 /* Expand the library call ourselves using a stabilized argument
3911 list to avoid re-evaluating the function's arguments twice. */
3912 #ifdef HAVE_cmpstrnsi
3913 do_libcall:
3914 #endif
3915 fndecl = get_callee_fndecl (exp);
3916 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3917 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3918 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3919 return expand_call (fn, target, target == const0_rtx);
3921 #endif
3922 return NULL_RTX;
3925 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3926 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3927 the result in TARGET, if convenient. */
3929 static rtx
3930 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3931 ATTRIBUTE_UNUSED enum machine_mode mode)
3933 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3935 if (!validate_arglist (exp,
3936 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3937 return NULL_RTX;
3939 /* If c_strlen can determine an expression for one of the string
3940 lengths, and it doesn't have side effects, then emit cmpstrnsi
3941 using length MIN(strlen(string)+1, arg3). */
3942 #ifdef HAVE_cmpstrnsi
3943 if (HAVE_cmpstrnsi)
3945 tree len, len1, len2;
3946 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3947 rtx result, insn;
3948 tree fndecl, fn;
3949 tree arg1 = CALL_EXPR_ARG (exp, 0);
3950 tree arg2 = CALL_EXPR_ARG (exp, 1);
3951 tree arg3 = CALL_EXPR_ARG (exp, 2);
3953 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3954 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3955 enum machine_mode insn_mode
3956 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3958 len1 = c_strlen (arg1, 1);
3959 len2 = c_strlen (arg2, 1);
3961 if (len1)
3962 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3963 if (len2)
3964 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3966 /* If we don't have a constant length for the first, use the length
3967 of the second, if we know it. We don't require a constant for
3968 this case; some cost analysis could be done if both are available
3969 but neither is constant. For now, assume they're equally cheap,
3970 unless one has side effects. If both strings have constant lengths,
3971 use the smaller. */
3973 if (!len1)
3974 len = len2;
3975 else if (!len2)
3976 len = len1;
3977 else if (TREE_SIDE_EFFECTS (len1))
3978 len = len2;
3979 else if (TREE_SIDE_EFFECTS (len2))
3980 len = len1;
3981 else if (TREE_CODE (len1) != INTEGER_CST)
3982 len = len2;
3983 else if (TREE_CODE (len2) != INTEGER_CST)
3984 len = len1;
3985 else if (tree_int_cst_lt (len1, len2))
3986 len = len1;
3987 else
3988 len = len2;
3990 /* If both arguments have side effects, we cannot optimize. */
3991 if (!len || TREE_SIDE_EFFECTS (len))
3992 return NULL_RTX;
3994 /* The actual new length parameter is MIN(len,arg3). */
3995 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3996 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3998 /* If we don't have POINTER_TYPE, call the function. */
3999 if (arg1_align == 0 || arg2_align == 0)
4000 return NULL_RTX;
4002 /* Make a place to write the result of the instruction. */
4003 result = target;
4004 if (! (result != 0
4005 && REG_P (result) && GET_MODE (result) == insn_mode
4006 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4007 result = gen_reg_rtx (insn_mode);
4009 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4010 arg1 = builtin_save_expr (arg1);
4011 arg2 = builtin_save_expr (arg2);
4012 len = builtin_save_expr (len);
4014 arg1_rtx = get_memory_rtx (arg1, len);
4015 arg2_rtx = get_memory_rtx (arg2, len);
4016 arg3_rtx = expand_normal (len);
4017 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4018 GEN_INT (MIN (arg1_align, arg2_align)));
4019 if (insn)
4021 emit_insn (insn);
4023 /* Return the value in the proper mode for this function. */
4024 mode = TYPE_MODE (TREE_TYPE (exp));
4025 if (GET_MODE (result) == mode)
4026 return result;
4027 if (target == 0)
4028 return convert_to_mode (mode, result, 0);
4029 convert_move (target, result, 0);
4030 return target;
4033 /* Expand the library call ourselves using a stabilized argument
4034 list to avoid re-evaluating the function's arguments twice. */
4035 fndecl = get_callee_fndecl (exp);
4036 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4037 arg1, arg2, len);
4038 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4039 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4040 return expand_call (fn, target, target == const0_rtx);
4042 #endif
4043 return NULL_RTX;
4046 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4047 if that's convenient. */
4050 expand_builtin_saveregs (void)
4052 rtx val, seq;
4054 /* Don't do __builtin_saveregs more than once in a function.
4055 Save the result of the first call and reuse it. */
4056 if (saveregs_value != 0)
4057 return saveregs_value;
4059 /* When this function is called, it means that registers must be
4060 saved on entry to this function. So we migrate the call to the
4061 first insn of this function. */
4063 start_sequence ();
4065 /* Do whatever the machine needs done in this case. */
4066 val = targetm.calls.expand_builtin_saveregs ();
4068 seq = get_insns ();
4069 end_sequence ();
4071 saveregs_value = val;
4073 /* Put the insns after the NOTE that starts the function. If this
4074 is inside a start_sequence, make the outer-level insn chain current, so
4075 the code is placed at the start of the function. */
4076 push_topmost_sequence ();
4077 emit_insn_after (seq, entry_of_function ());
4078 pop_topmost_sequence ();
4080 return val;
4083 /* Expand a call to __builtin_next_arg. */
4085 static rtx
4086 expand_builtin_next_arg (void)
4088 /* Checking arguments is already done in fold_builtin_next_arg
4089 that must be called before this function. */
4090 return expand_binop (ptr_mode, add_optab,
4091 crtl->args.internal_arg_pointer,
4092 crtl->args.arg_offset_rtx,
4093 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4096 /* Make it easier for the backends by protecting the valist argument
4097 from multiple evaluations. */
4099 static tree
4100 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4102 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4104 /* The current way of determining the type of valist is completely
4105 bogus. We should have the information on the va builtin instead. */
4106 if (!vatype)
4107 vatype = targetm.fn_abi_va_list (cfun->decl);
4109 if (TREE_CODE (vatype) == ARRAY_TYPE)
4111 if (TREE_SIDE_EFFECTS (valist))
4112 valist = save_expr (valist);
4114 /* For this case, the backends will be expecting a pointer to
4115 vatype, but it's possible we've actually been given an array
4116 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4117 So fix it. */
4118 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4120 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4121 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4124 else
4126 tree pt = build_pointer_type (vatype);
4128 if (! needs_lvalue)
4130 if (! TREE_SIDE_EFFECTS (valist))
4131 return valist;
4133 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4134 TREE_SIDE_EFFECTS (valist) = 1;
4137 if (TREE_SIDE_EFFECTS (valist))
4138 valist = save_expr (valist);
4139 valist = fold_build2_loc (loc, MEM_REF,
4140 vatype, valist, build_int_cst (pt, 0));
4143 return valist;
4146 /* The "standard" definition of va_list is void*. */
4148 tree
4149 std_build_builtin_va_list (void)
4151 return ptr_type_node;
4154 /* The "standard" abi va_list is va_list_type_node. */
4156 tree
4157 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4159 return va_list_type_node;
4162 /* The "standard" type of va_list is va_list_type_node. */
4164 tree
4165 std_canonical_va_list_type (tree type)
4167 tree wtype, htype;
4169 if (INDIRECT_REF_P (type))
4170 type = TREE_TYPE (type);
4171 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4172 type = TREE_TYPE (type);
4173 wtype = va_list_type_node;
4174 htype = type;
4175 /* Treat structure va_list types. */
4176 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4177 htype = TREE_TYPE (htype);
4178 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4180 /* If va_list is an array type, the argument may have decayed
4181 to a pointer type, e.g. by being passed to another function.
4182 In that case, unwrap both types so that we can compare the
4183 underlying records. */
4184 if (TREE_CODE (htype) == ARRAY_TYPE
4185 || POINTER_TYPE_P (htype))
4187 wtype = TREE_TYPE (wtype);
4188 htype = TREE_TYPE (htype);
4191 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4192 return va_list_type_node;
4194 return NULL_TREE;
4197 /* The "standard" implementation of va_start: just assign `nextarg' to
4198 the variable. */
4200 void
4201 std_expand_builtin_va_start (tree valist, rtx nextarg)
4203 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4204 convert_move (va_r, nextarg, 0);
4207 /* Expand EXP, a call to __builtin_va_start. */
4209 static rtx
4210 expand_builtin_va_start (tree exp)
4212 rtx nextarg;
4213 tree valist;
4214 location_t loc = EXPR_LOCATION (exp);
4216 if (call_expr_nargs (exp) < 2)
4218 error_at (loc, "too few arguments to function %<va_start%>");
4219 return const0_rtx;
4222 if (fold_builtin_next_arg (exp, true))
4223 return const0_rtx;
4225 nextarg = expand_builtin_next_arg ();
4226 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4228 if (targetm.expand_builtin_va_start)
4229 targetm.expand_builtin_va_start (valist, nextarg);
4230 else
4231 std_expand_builtin_va_start (valist, nextarg);
4233 return const0_rtx;
4237 /* Return a dummy expression of type TYPE in order to keep going after an
4238 error. */
4240 static tree
4241 dummy_object (tree type)
4243 tree t = build_int_cst (build_pointer_type (type), 0);
4244 return build2 (MEM_REF, type, t, t);
4247 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4248 builtin function, but a very special sort of operator. */
4250 enum gimplify_status
4251 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4253 tree promoted_type, have_va_type;
4254 tree valist = TREE_OPERAND (*expr_p, 0);
4255 tree type = TREE_TYPE (*expr_p);
4256 tree t;
4257 location_t loc = EXPR_LOCATION (*expr_p);
4259 /* Verify that valist is of the proper type. */
4260 have_va_type = TREE_TYPE (valist);
4261 if (have_va_type == error_mark_node)
4262 return GS_ERROR;
4263 have_va_type = targetm.canonical_va_list_type (have_va_type);
4265 if (have_va_type == NULL_TREE)
4267 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4268 return GS_ERROR;
4271 /* Generate a diagnostic for requesting data of a type that cannot
4272 be passed through `...' due to type promotion at the call site. */
4273 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4274 != type)
4276 static bool gave_help;
4277 bool warned;
4279 /* Unfortunately, this is merely undefined, rather than a constraint
4280 violation, so we cannot make this an error. If this call is never
4281 executed, the program is still strictly conforming. */
4282 warned = warning_at (loc, 0,
4283 "%qT is promoted to %qT when passed through %<...%>",
4284 type, promoted_type);
4285 if (!gave_help && warned)
4287 gave_help = true;
4288 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4289 promoted_type, type);
4292 /* We can, however, treat "undefined" any way we please.
4293 Call abort to encourage the user to fix the program. */
4294 if (warned)
4295 inform (loc, "if this code is reached, the program will abort");
4296 /* Before the abort, allow the evaluation of the va_list
4297 expression to exit or longjmp. */
4298 gimplify_and_add (valist, pre_p);
4299 t = build_call_expr_loc (loc,
4300 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4301 gimplify_and_add (t, pre_p);
4303 /* This is dead code, but go ahead and finish so that the
4304 mode of the result comes out right. */
4305 *expr_p = dummy_object (type);
4306 return GS_ALL_DONE;
4308 else
4310 /* Make it easier for the backends by protecting the valist argument
4311 from multiple evaluations. */
4312 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4314 /* For this case, the backends will be expecting a pointer to
4315 TREE_TYPE (abi), but it's possible we've
4316 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4317 So fix it. */
4318 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4320 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4321 valist = fold_convert_loc (loc, p1,
4322 build_fold_addr_expr_loc (loc, valist));
4325 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4327 else
4328 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4330 if (!targetm.gimplify_va_arg_expr)
4331 /* FIXME: Once most targets are converted we should merely
4332 assert this is non-null. */
4333 return GS_ALL_DONE;
4335 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4336 return GS_OK;
4340 /* Expand EXP, a call to __builtin_va_end. */
4342 static rtx
4343 expand_builtin_va_end (tree exp)
4345 tree valist = CALL_EXPR_ARG (exp, 0);
4347 /* Evaluate for side effects, if needed. I hate macros that don't
4348 do that. */
4349 if (TREE_SIDE_EFFECTS (valist))
4350 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4352 return const0_rtx;
4355 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4356 builtin rather than just as an assignment in stdarg.h because of the
4357 nastiness of array-type va_list types. */
4359 static rtx
4360 expand_builtin_va_copy (tree exp)
4362 tree dst, src, t;
4363 location_t loc = EXPR_LOCATION (exp);
4365 dst = CALL_EXPR_ARG (exp, 0);
4366 src = CALL_EXPR_ARG (exp, 1);
4368 dst = stabilize_va_list_loc (loc, dst, 1);
4369 src = stabilize_va_list_loc (loc, src, 0);
4371 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4373 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4375 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4376 TREE_SIDE_EFFECTS (t) = 1;
4377 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4379 else
4381 rtx dstb, srcb, size;
4383 /* Evaluate to pointers. */
4384 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4385 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4386 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4387 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4389 dstb = convert_memory_address (Pmode, dstb);
4390 srcb = convert_memory_address (Pmode, srcb);
4392 /* "Dereference" to BLKmode memories. */
4393 dstb = gen_rtx_MEM (BLKmode, dstb);
4394 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4395 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4396 srcb = gen_rtx_MEM (BLKmode, srcb);
4397 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4398 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4400 /* Copy. */
4401 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4404 return const0_rtx;
4407 /* Expand a call to one of the builtin functions __builtin_frame_address or
4408 __builtin_return_address. */
4410 static rtx
4411 expand_builtin_frame_address (tree fndecl, tree exp)
4413 /* The argument must be a nonnegative integer constant.
4414 It counts the number of frames to scan up the stack.
4415 The value is the return address saved in that frame. */
4416 if (call_expr_nargs (exp) == 0)
4417 /* Warning about missing arg was already issued. */
4418 return const0_rtx;
4419 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4421 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4422 error ("invalid argument to %<__builtin_frame_address%>");
4423 else
4424 error ("invalid argument to %<__builtin_return_address%>");
4425 return const0_rtx;
4427 else
4429 rtx tem
4430 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4431 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4433 /* Some ports cannot access arbitrary stack frames. */
4434 if (tem == NULL)
4436 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4437 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4438 else
4439 warning (0, "unsupported argument to %<__builtin_return_address%>");
4440 return const0_rtx;
4443 /* For __builtin_frame_address, return what we've got. */
4444 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4445 return tem;
4447 if (!REG_P (tem)
4448 && ! CONSTANT_P (tem))
4449 tem = copy_addr_to_reg (tem);
4450 return tem;
4454 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4455 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4456 is the same as for allocate_dynamic_stack_space. */
4458 static rtx
4459 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4461 rtx op0;
4462 rtx result;
4463 bool valid_arglist;
4464 unsigned int align;
4465 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4466 == BUILT_IN_ALLOCA_WITH_ALIGN);
4468 /* Emit normal call if we use mudflap. */
4469 if (flag_mudflap)
4470 return NULL_RTX;
4472 valid_arglist
4473 = (alloca_with_align
4474 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4475 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4477 if (!valid_arglist)
4478 return NULL_RTX;
4480 /* Compute the argument. */
4481 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4483 /* Compute the alignment. */
4484 align = (alloca_with_align
4485 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4486 : BIGGEST_ALIGNMENT);
4488 /* Allocate the desired space. */
4489 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4490 result = convert_memory_address (ptr_mode, result);
4492 return result;
4495 /* Expand a call to bswap builtin in EXP.
4496 Return NULL_RTX if a normal call should be emitted rather than expanding the
4497 function in-line. If convenient, the result should be placed in TARGET.
4498 SUBTARGET may be used as the target for computing one of EXP's operands. */
4500 static rtx
4501 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4502 rtx subtarget)
4504 tree arg;
4505 rtx op0;
4507 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4508 return NULL_RTX;
4510 arg = CALL_EXPR_ARG (exp, 0);
4511 op0 = expand_expr (arg,
4512 subtarget && GET_MODE (subtarget) == target_mode
4513 ? subtarget : NULL_RTX,
4514 target_mode, EXPAND_NORMAL);
4515 if (GET_MODE (op0) != target_mode)
4516 op0 = convert_to_mode (target_mode, op0, 1);
4518 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4520 gcc_assert (target);
4522 return convert_to_mode (target_mode, target, 1);
4525 /* Expand a call to a unary builtin in EXP.
4526 Return NULL_RTX if a normal call should be emitted rather than expanding the
4527 function in-line. If convenient, the result should be placed in TARGET.
4528 SUBTARGET may be used as the target for computing one of EXP's operands. */
4530 static rtx
4531 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4532 rtx subtarget, optab op_optab)
4534 rtx op0;
4536 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4537 return NULL_RTX;
4539 /* Compute the argument. */
4540 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4541 (subtarget
4542 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4543 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4544 VOIDmode, EXPAND_NORMAL);
4545 /* Compute op, into TARGET if possible.
4546 Set TARGET to wherever the result comes back. */
4547 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4548 op_optab, op0, target, op_optab != clrsb_optab);
4549 gcc_assert (target);
4551 return convert_to_mode (target_mode, target, 0);
4554 /* Expand a call to __builtin_expect. We just return our argument
4555 as the builtin_expect semantic should've been already executed by
4556 tree branch prediction pass. */
4558 static rtx
4559 expand_builtin_expect (tree exp, rtx target)
4561 tree arg;
4563 if (call_expr_nargs (exp) < 2)
4564 return const0_rtx;
4565 arg = CALL_EXPR_ARG (exp, 0);
4567 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4568 /* When guessing was done, the hints should be already stripped away. */
4569 gcc_assert (!flag_guess_branch_prob
4570 || optimize == 0 || seen_error ());
4571 return target;
4574 /* Expand a call to __builtin_assume_aligned. We just return our first
4575 argument as the builtin_assume_aligned semantic should've been already
4576 executed by CCP. */
4578 static rtx
4579 expand_builtin_assume_aligned (tree exp, rtx target)
4581 if (call_expr_nargs (exp) < 2)
4582 return const0_rtx;
4583 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4584 EXPAND_NORMAL);
4585 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4586 && (call_expr_nargs (exp) < 3
4587 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4588 return target;
4591 void
4592 expand_builtin_trap (void)
4594 #ifdef HAVE_trap
4595 if (HAVE_trap)
4597 rtx insn = emit_insn (gen_trap ());
4598 /* For trap insns when not accumulating outgoing args force
4599 REG_ARGS_SIZE note to prevent crossjumping of calls with
4600 different args sizes. */
4601 if (!ACCUMULATE_OUTGOING_ARGS)
4602 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4604 else
4605 #endif
4606 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4607 emit_barrier ();
4610 /* Expand a call to __builtin_unreachable. We do nothing except emit
4611 a barrier saying that control flow will not pass here.
4613 It is the responsibility of the program being compiled to ensure
4614 that control flow does never reach __builtin_unreachable. */
4615 static void
4616 expand_builtin_unreachable (void)
4618 emit_barrier ();
4621 /* Expand EXP, a call to fabs, fabsf or fabsl.
4622 Return NULL_RTX if a normal call should be emitted rather than expanding
4623 the function inline. If convenient, the result should be placed
4624 in TARGET. SUBTARGET may be used as the target for computing
4625 the operand. */
4627 static rtx
4628 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4630 enum machine_mode mode;
4631 tree arg;
4632 rtx op0;
4634 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4635 return NULL_RTX;
4637 arg = CALL_EXPR_ARG (exp, 0);
4638 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4639 mode = TYPE_MODE (TREE_TYPE (arg));
4640 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4641 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4644 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4645 Return NULL is a normal call should be emitted rather than expanding the
4646 function inline. If convenient, the result should be placed in TARGET.
4647 SUBTARGET may be used as the target for computing the operand. */
4649 static rtx
4650 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4652 rtx op0, op1;
4653 tree arg;
4655 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4656 return NULL_RTX;
4658 arg = CALL_EXPR_ARG (exp, 0);
4659 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4661 arg = CALL_EXPR_ARG (exp, 1);
4662 op1 = expand_normal (arg);
4664 return expand_copysign (op0, op1, target);
4667 /* Create a new constant string literal and return a char* pointer to it.
4668 The STRING_CST value is the LEN characters at STR. */
4669 tree
4670 build_string_literal (int len, const char *str)
4672 tree t, elem, index, type;
4674 t = build_string (len, str);
4675 elem = build_type_variant (char_type_node, 1, 0);
4676 index = build_index_type (size_int (len - 1));
4677 type = build_array_type (elem, index);
4678 TREE_TYPE (t) = type;
4679 TREE_CONSTANT (t) = 1;
4680 TREE_READONLY (t) = 1;
4681 TREE_STATIC (t) = 1;
4683 type = build_pointer_type (elem);
4684 t = build1 (ADDR_EXPR, type,
4685 build4 (ARRAY_REF, elem,
4686 t, integer_zero_node, NULL_TREE, NULL_TREE));
4687 return t;
4690 /* Expand a call to __builtin___clear_cache. */
4692 static rtx
4693 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4695 #ifndef HAVE_clear_cache
4696 #ifdef CLEAR_INSN_CACHE
4697 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4698 does something. Just do the default expansion to a call to
4699 __clear_cache(). */
4700 return NULL_RTX;
4701 #else
4702 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4703 does nothing. There is no need to call it. Do nothing. */
4704 return const0_rtx;
4705 #endif /* CLEAR_INSN_CACHE */
4706 #else
4707 /* We have a "clear_cache" insn, and it will handle everything. */
4708 tree begin, end;
4709 rtx begin_rtx, end_rtx;
4711 /* We must not expand to a library call. If we did, any
4712 fallback library function in libgcc that might contain a call to
4713 __builtin___clear_cache() would recurse infinitely. */
4714 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4716 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4717 return const0_rtx;
4720 if (HAVE_clear_cache)
4722 struct expand_operand ops[2];
4724 begin = CALL_EXPR_ARG (exp, 0);
4725 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4727 end = CALL_EXPR_ARG (exp, 1);
4728 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4730 create_address_operand (&ops[0], begin_rtx);
4731 create_address_operand (&ops[1], end_rtx);
4732 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4733 return const0_rtx;
4735 return const0_rtx;
4736 #endif /* HAVE_clear_cache */
4739 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4741 static rtx
4742 round_trampoline_addr (rtx tramp)
4744 rtx temp, addend, mask;
4746 /* If we don't need too much alignment, we'll have been guaranteed
4747 proper alignment by get_trampoline_type. */
4748 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4749 return tramp;
4751 /* Round address up to desired boundary. */
4752 temp = gen_reg_rtx (Pmode);
4753 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4754 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4756 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4757 temp, 0, OPTAB_LIB_WIDEN);
4758 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4759 temp, 0, OPTAB_LIB_WIDEN);
4761 return tramp;
4764 static rtx
4765 expand_builtin_init_trampoline (tree exp, bool onstack)
4767 tree t_tramp, t_func, t_chain;
4768 rtx m_tramp, r_tramp, r_chain, tmp;
4770 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4771 POINTER_TYPE, VOID_TYPE))
4772 return NULL_RTX;
4774 t_tramp = CALL_EXPR_ARG (exp, 0);
4775 t_func = CALL_EXPR_ARG (exp, 1);
4776 t_chain = CALL_EXPR_ARG (exp, 2);
4778 r_tramp = expand_normal (t_tramp);
4779 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4780 MEM_NOTRAP_P (m_tramp) = 1;
4782 /* If ONSTACK, the TRAMP argument should be the address of a field
4783 within the local function's FRAME decl. Either way, let's see if
4784 we can fill in the MEM_ATTRs for this memory. */
4785 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4786 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4788 /* Creator of a heap trampoline is responsible for making sure the
4789 address is aligned to at least STACK_BOUNDARY. Normally malloc
4790 will ensure this anyhow. */
4791 tmp = round_trampoline_addr (r_tramp);
4792 if (tmp != r_tramp)
4794 m_tramp = change_address (m_tramp, BLKmode, tmp);
4795 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4796 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4799 /* The FUNC argument should be the address of the nested function.
4800 Extract the actual function decl to pass to the hook. */
4801 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4802 t_func = TREE_OPERAND (t_func, 0);
4803 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4805 r_chain = expand_normal (t_chain);
4807 /* Generate insns to initialize the trampoline. */
4808 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4810 if (onstack)
4812 trampolines_created = 1;
4814 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4815 "trampoline generated for nested function %qD", t_func);
4818 return const0_rtx;
4821 static rtx
4822 expand_builtin_adjust_trampoline (tree exp)
4824 rtx tramp;
4826 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4827 return NULL_RTX;
4829 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4830 tramp = round_trampoline_addr (tramp);
4831 if (targetm.calls.trampoline_adjust_address)
4832 tramp = targetm.calls.trampoline_adjust_address (tramp);
4834 return tramp;
4837 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4838 function. The function first checks whether the back end provides
4839 an insn to implement signbit for the respective mode. If not, it
4840 checks whether the floating point format of the value is such that
4841 the sign bit can be extracted. If that is not the case, the
4842 function returns NULL_RTX to indicate that a normal call should be
4843 emitted rather than expanding the function in-line. EXP is the
4844 expression that is a call to the builtin function; if convenient,
4845 the result should be placed in TARGET. */
4846 static rtx
4847 expand_builtin_signbit (tree exp, rtx target)
4849 const struct real_format *fmt;
4850 enum machine_mode fmode, imode, rmode;
4851 tree arg;
4852 int word, bitpos;
4853 enum insn_code icode;
4854 rtx temp;
4855 location_t loc = EXPR_LOCATION (exp);
4857 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4858 return NULL_RTX;
4860 arg = CALL_EXPR_ARG (exp, 0);
4861 fmode = TYPE_MODE (TREE_TYPE (arg));
4862 rmode = TYPE_MODE (TREE_TYPE (exp));
4863 fmt = REAL_MODE_FORMAT (fmode);
4865 arg = builtin_save_expr (arg);
4867 /* Expand the argument yielding a RTX expression. */
4868 temp = expand_normal (arg);
4870 /* Check if the back end provides an insn that handles signbit for the
4871 argument's mode. */
4872 icode = optab_handler (signbit_optab, fmode);
4873 if (icode != CODE_FOR_nothing)
4875 rtx last = get_last_insn ();
4876 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4877 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4878 return target;
4879 delete_insns_since (last);
4882 /* For floating point formats without a sign bit, implement signbit
4883 as "ARG < 0.0". */
4884 bitpos = fmt->signbit_ro;
4885 if (bitpos < 0)
4887 /* But we can't do this if the format supports signed zero. */
4888 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4889 return NULL_RTX;
4891 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4892 build_real (TREE_TYPE (arg), dconst0));
4893 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4896 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4898 imode = int_mode_for_mode (fmode);
4899 if (imode == BLKmode)
4900 return NULL_RTX;
4901 temp = gen_lowpart (imode, temp);
4903 else
4905 imode = word_mode;
4906 /* Handle targets with different FP word orders. */
4907 if (FLOAT_WORDS_BIG_ENDIAN)
4908 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4909 else
4910 word = bitpos / BITS_PER_WORD;
4911 temp = operand_subword_force (temp, word, fmode);
4912 bitpos = bitpos % BITS_PER_WORD;
4915 /* Force the intermediate word_mode (or narrower) result into a
4916 register. This avoids attempting to create paradoxical SUBREGs
4917 of floating point modes below. */
4918 temp = force_reg (imode, temp);
4920 /* If the bitpos is within the "result mode" lowpart, the operation
4921 can be implement with a single bitwise AND. Otherwise, we need
4922 a right shift and an AND. */
4924 if (bitpos < GET_MODE_BITSIZE (rmode))
4926 double_int mask = double_int_zero.set_bit (bitpos);
4928 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4929 temp = gen_lowpart (rmode, temp);
4930 temp = expand_binop (rmode, and_optab, temp,
4931 immed_double_int_const (mask, rmode),
4932 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4934 else
4936 /* Perform a logical right shift to place the signbit in the least
4937 significant bit, then truncate the result to the desired mode
4938 and mask just this bit. */
4939 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4940 temp = gen_lowpart (rmode, temp);
4941 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4942 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4945 return temp;
4948 /* Expand fork or exec calls. TARGET is the desired target of the
4949 call. EXP is the call. FN is the
4950 identificator of the actual function. IGNORE is nonzero if the
4951 value is to be ignored. */
4953 static rtx
4954 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4956 tree id, decl;
4957 tree call;
4959 /* If we are not profiling, just call the function. */
4960 if (!profile_arc_flag)
4961 return NULL_RTX;
4963 /* Otherwise call the wrapper. This should be equivalent for the rest of
4964 compiler, so the code does not diverge, and the wrapper may run the
4965 code necessary for keeping the profiling sane. */
4967 switch (DECL_FUNCTION_CODE (fn))
4969 case BUILT_IN_FORK:
4970 id = get_identifier ("__gcov_fork");
4971 break;
4973 case BUILT_IN_EXECL:
4974 id = get_identifier ("__gcov_execl");
4975 break;
4977 case BUILT_IN_EXECV:
4978 id = get_identifier ("__gcov_execv");
4979 break;
4981 case BUILT_IN_EXECLP:
4982 id = get_identifier ("__gcov_execlp");
4983 break;
4985 case BUILT_IN_EXECLE:
4986 id = get_identifier ("__gcov_execle");
4987 break;
4989 case BUILT_IN_EXECVP:
4990 id = get_identifier ("__gcov_execvp");
4991 break;
4993 case BUILT_IN_EXECVE:
4994 id = get_identifier ("__gcov_execve");
4995 break;
4997 default:
4998 gcc_unreachable ();
5001 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5002 FUNCTION_DECL, id, TREE_TYPE (fn));
5003 DECL_EXTERNAL (decl) = 1;
5004 TREE_PUBLIC (decl) = 1;
5005 DECL_ARTIFICIAL (decl) = 1;
5006 TREE_NOTHROW (decl) = 1;
5007 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5008 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5009 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5010 return expand_call (call, target, ignore);
5015 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5016 the pointer in these functions is void*, the tree optimizers may remove
5017 casts. The mode computed in expand_builtin isn't reliable either, due
5018 to __sync_bool_compare_and_swap.
5020 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5021 group of builtins. This gives us log2 of the mode size. */
5023 static inline enum machine_mode
5024 get_builtin_sync_mode (int fcode_diff)
5026 /* The size is not negotiable, so ask not to get BLKmode in return
5027 if the target indicates that a smaller size would be better. */
5028 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5031 /* Expand the memory expression LOC and return the appropriate memory operand
5032 for the builtin_sync operations. */
5034 static rtx
5035 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5037 rtx addr, mem;
5039 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5040 addr = convert_memory_address (Pmode, addr);
5042 /* Note that we explicitly do not want any alias information for this
5043 memory, so that we kill all other live memories. Otherwise we don't
5044 satisfy the full barrier semantics of the intrinsic. */
5045 mem = validize_mem (gen_rtx_MEM (mode, addr));
5047 /* The alignment needs to be at least according to that of the mode. */
5048 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5049 get_pointer_alignment (loc)));
5050 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5051 MEM_VOLATILE_P (mem) = 1;
5053 return mem;
5056 /* Make sure an argument is in the right mode.
5057 EXP is the tree argument.
5058 MODE is the mode it should be in. */
5060 static rtx
5061 expand_expr_force_mode (tree exp, enum machine_mode mode)
5063 rtx val;
5064 enum machine_mode old_mode;
5066 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5067 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5068 of CONST_INTs, where we know the old_mode only from the call argument. */
5070 old_mode = GET_MODE (val);
5071 if (old_mode == VOIDmode)
5072 old_mode = TYPE_MODE (TREE_TYPE (exp));
5073 val = convert_modes (mode, old_mode, val, 1);
5074 return val;
5078 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5079 EXP is the CALL_EXPR. CODE is the rtx code
5080 that corresponds to the arithmetic or logical operation from the name;
5081 an exception here is that NOT actually means NAND. TARGET is an optional
5082 place for us to store the results; AFTER is true if this is the
5083 fetch_and_xxx form. */
5085 static rtx
5086 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5087 enum rtx_code code, bool after,
5088 rtx target)
5090 rtx val, mem;
5091 location_t loc = EXPR_LOCATION (exp);
5093 if (code == NOT && warn_sync_nand)
5095 tree fndecl = get_callee_fndecl (exp);
5096 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5098 static bool warned_f_a_n, warned_n_a_f;
5100 switch (fcode)
5102 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5103 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5104 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5105 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5106 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5107 if (warned_f_a_n)
5108 break;
5110 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5111 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5112 warned_f_a_n = true;
5113 break;
5115 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5116 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5117 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5118 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5119 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5120 if (warned_n_a_f)
5121 break;
5123 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5124 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5125 warned_n_a_f = true;
5126 break;
5128 default:
5129 gcc_unreachable ();
5133 /* Expand the operands. */
5134 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5135 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5137 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5138 after);
5141 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5142 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5143 true if this is the boolean form. TARGET is a place for us to store the
5144 results; this is NOT optional if IS_BOOL is true. */
5146 static rtx
5147 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5148 bool is_bool, rtx target)
5150 rtx old_val, new_val, mem;
5151 rtx *pbool, *poval;
5153 /* Expand the operands. */
5154 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5155 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5156 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5158 pbool = poval = NULL;
5159 if (target != const0_rtx)
5161 if (is_bool)
5162 pbool = &target;
5163 else
5164 poval = &target;
5166 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5167 false, MEMMODEL_SEQ_CST,
5168 MEMMODEL_SEQ_CST))
5169 return NULL_RTX;
5171 return target;
5174 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5175 general form is actually an atomic exchange, and some targets only
5176 support a reduced form with the second argument being a constant 1.
5177 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5178 the results. */
5180 static rtx
5181 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5182 rtx target)
5184 rtx val, mem;
5186 /* Expand the operands. */
5187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5188 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5190 return expand_sync_lock_test_and_set (target, mem, val);
5193 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5195 static void
5196 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5198 rtx mem;
5200 /* Expand the operands. */
5201 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5203 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5206 /* Given an integer representing an ``enum memmodel'', verify its
5207 correctness and return the memory model enum. */
5209 static enum memmodel
5210 get_memmodel (tree exp)
5212 rtx op;
5213 unsigned HOST_WIDE_INT val;
5215 /* If the parameter is not a constant, it's a run time value so we'll just
5216 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5217 if (TREE_CODE (exp) != INTEGER_CST)
5218 return MEMMODEL_SEQ_CST;
5220 op = expand_normal (exp);
5222 val = INTVAL (op);
5223 if (targetm.memmodel_check)
5224 val = targetm.memmodel_check (val);
5225 else if (val & ~MEMMODEL_MASK)
5227 warning (OPT_Winvalid_memory_model,
5228 "Unknown architecture specifier in memory model to builtin.");
5229 return MEMMODEL_SEQ_CST;
5232 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5234 warning (OPT_Winvalid_memory_model,
5235 "invalid memory model argument to builtin");
5236 return MEMMODEL_SEQ_CST;
5239 return (enum memmodel) val;
5242 /* Expand the __atomic_exchange intrinsic:
5243 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5244 EXP is the CALL_EXPR.
5245 TARGET is an optional place for us to store the results. */
5247 static rtx
5248 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5250 rtx val, mem;
5251 enum memmodel model;
5253 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5254 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5256 error ("invalid memory model for %<__atomic_exchange%>");
5257 return NULL_RTX;
5260 if (!flag_inline_atomics)
5261 return NULL_RTX;
5263 /* Expand the operands. */
5264 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5265 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267 return expand_atomic_exchange (target, mem, val, model);
5270 /* Expand the __atomic_compare_exchange intrinsic:
5271 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5272 TYPE desired, BOOL weak,
5273 enum memmodel success,
5274 enum memmodel failure)
5275 EXP is the CALL_EXPR.
5276 TARGET is an optional place for us to store the results. */
5278 static rtx
5279 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5280 rtx target)
5282 rtx expect, desired, mem, oldval;
5283 enum memmodel success, failure;
5284 tree weak;
5285 bool is_weak;
5287 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5288 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5290 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5291 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5293 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5294 return NULL_RTX;
5297 if (failure > success)
5299 error ("failure memory model cannot be stronger than success "
5300 "memory model for %<__atomic_compare_exchange%>");
5301 return NULL_RTX;
5304 if (!flag_inline_atomics)
5305 return NULL_RTX;
5307 /* Expand the operands. */
5308 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5310 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5311 expect = convert_memory_address (Pmode, expect);
5312 expect = gen_rtx_MEM (mode, expect);
5313 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5315 weak = CALL_EXPR_ARG (exp, 3);
5316 is_weak = false;
5317 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5318 is_weak = true;
5320 oldval = expect;
5321 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5322 &oldval, mem, oldval, desired,
5323 is_weak, success, failure))
5324 return NULL_RTX;
5326 if (oldval != expect)
5327 emit_move_insn (expect, oldval);
5329 return target;
5332 /* Expand the __atomic_load intrinsic:
5333 TYPE __atomic_load (TYPE *object, enum memmodel)
5334 EXP is the CALL_EXPR.
5335 TARGET is an optional place for us to store the results. */
5337 static rtx
5338 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5340 rtx mem;
5341 enum memmodel model;
5343 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5344 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5345 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5347 error ("invalid memory model for %<__atomic_load%>");
5348 return NULL_RTX;
5351 if (!flag_inline_atomics)
5352 return NULL_RTX;
5354 /* Expand the operand. */
5355 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357 return expand_atomic_load (target, mem, model);
5361 /* Expand the __atomic_store intrinsic:
5362 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5363 EXP is the CALL_EXPR.
5364 TARGET is an optional place for us to store the results. */
5366 static rtx
5367 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5369 rtx mem, val;
5370 enum memmodel model;
5372 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5373 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5374 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5375 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5377 error ("invalid memory model for %<__atomic_store%>");
5378 return NULL_RTX;
5381 if (!flag_inline_atomics)
5382 return NULL_RTX;
5384 /* Expand the operands. */
5385 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5386 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5388 return expand_atomic_store (mem, val, model, false);
5391 /* Expand the __atomic_fetch_XXX intrinsic:
5392 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5393 EXP is the CALL_EXPR.
5394 TARGET is an optional place for us to store the results.
5395 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5396 FETCH_AFTER is true if returning the result of the operation.
5397 FETCH_AFTER is false if returning the value before the operation.
5398 IGNORE is true if the result is not used.
5399 EXT_CALL is the correct builtin for an external call if this cannot be
5400 resolved to an instruction sequence. */
5402 static rtx
5403 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5404 enum rtx_code code, bool fetch_after,
5405 bool ignore, enum built_in_function ext_call)
5407 rtx val, mem, ret;
5408 enum memmodel model;
5409 tree fndecl;
5410 tree addr;
5412 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5414 /* Expand the operands. */
5415 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5416 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5418 /* Only try generating instructions if inlining is turned on. */
5419 if (flag_inline_atomics)
5421 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5422 if (ret)
5423 return ret;
5426 /* Return if a different routine isn't needed for the library call. */
5427 if (ext_call == BUILT_IN_NONE)
5428 return NULL_RTX;
5430 /* Change the call to the specified function. */
5431 fndecl = get_callee_fndecl (exp);
5432 addr = CALL_EXPR_FN (exp);
5433 STRIP_NOPS (addr);
5435 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5436 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5438 /* Expand the call here so we can emit trailing code. */
5439 ret = expand_call (exp, target, ignore);
5441 /* Replace the original function just in case it matters. */
5442 TREE_OPERAND (addr, 0) = fndecl;
5444 /* Then issue the arithmetic correction to return the right result. */
5445 if (!ignore)
5447 if (code == NOT)
5449 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5450 OPTAB_LIB_WIDEN);
5451 ret = expand_simple_unop (mode, NOT, ret, target, true);
5453 else
5454 ret = expand_simple_binop (mode, code, ret, val, target, true,
5455 OPTAB_LIB_WIDEN);
5457 return ret;
5461 #ifndef HAVE_atomic_clear
5462 # define HAVE_atomic_clear 0
5463 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5464 #endif
5466 /* Expand an atomic clear operation.
5467 void _atomic_clear (BOOL *obj, enum memmodel)
5468 EXP is the call expression. */
5470 static rtx
5471 expand_builtin_atomic_clear (tree exp)
5473 enum machine_mode mode;
5474 rtx mem, ret;
5475 enum memmodel model;
5477 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5478 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5479 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5481 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5482 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5484 error ("invalid memory model for %<__atomic_store%>");
5485 return const0_rtx;
5488 if (HAVE_atomic_clear)
5490 emit_insn (gen_atomic_clear (mem, model));
5491 return const0_rtx;
5494 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5495 Failing that, a store is issued by __atomic_store. The only way this can
5496 fail is if the bool type is larger than a word size. Unlikely, but
5497 handle it anyway for completeness. Assume a single threaded model since
5498 there is no atomic support in this case, and no barriers are required. */
5499 ret = expand_atomic_store (mem, const0_rtx, model, true);
5500 if (!ret)
5501 emit_move_insn (mem, const0_rtx);
5502 return const0_rtx;
5505 /* Expand an atomic test_and_set operation.
5506 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5507 EXP is the call expression. */
5509 static rtx
5510 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5512 rtx mem;
5513 enum memmodel model;
5514 enum machine_mode mode;
5516 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5517 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5518 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5520 return expand_atomic_test_and_set (target, mem, model);
5524 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5525 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5527 static tree
5528 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5530 int size;
5531 enum machine_mode mode;
5532 unsigned int mode_align, type_align;
5534 if (TREE_CODE (arg0) != INTEGER_CST)
5535 return NULL_TREE;
5537 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5538 mode = mode_for_size (size, MODE_INT, 0);
5539 mode_align = GET_MODE_ALIGNMENT (mode);
5541 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5542 type_align = mode_align;
5543 else
5545 tree ttype = TREE_TYPE (arg1);
5547 /* This function is usually invoked and folded immediately by the front
5548 end before anything else has a chance to look at it. The pointer
5549 parameter at this point is usually cast to a void *, so check for that
5550 and look past the cast. */
5551 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5552 && VOID_TYPE_P (TREE_TYPE (ttype)))
5553 arg1 = TREE_OPERAND (arg1, 0);
5555 ttype = TREE_TYPE (arg1);
5556 gcc_assert (POINTER_TYPE_P (ttype));
5558 /* Get the underlying type of the object. */
5559 ttype = TREE_TYPE (ttype);
5560 type_align = TYPE_ALIGN (ttype);
5563 /* If the object has smaller alignment, the the lock free routines cannot
5564 be used. */
5565 if (type_align < mode_align)
5566 return boolean_false_node;
5568 /* Check if a compare_and_swap pattern exists for the mode which represents
5569 the required size. The pattern is not allowed to fail, so the existence
5570 of the pattern indicates support is present. */
5571 if (can_compare_and_swap_p (mode, true))
5572 return boolean_true_node;
5573 else
5574 return boolean_false_node;
5577 /* Return true if the parameters to call EXP represent an object which will
5578 always generate lock free instructions. The first argument represents the
5579 size of the object, and the second parameter is a pointer to the object
5580 itself. If NULL is passed for the object, then the result is based on
5581 typical alignment for an object of the specified size. Otherwise return
5582 false. */
5584 static rtx
5585 expand_builtin_atomic_always_lock_free (tree exp)
5587 tree size;
5588 tree arg0 = CALL_EXPR_ARG (exp, 0);
5589 tree arg1 = CALL_EXPR_ARG (exp, 1);
5591 if (TREE_CODE (arg0) != INTEGER_CST)
5593 error ("non-constant argument 1 to __atomic_always_lock_free");
5594 return const0_rtx;
5597 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5598 if (size == boolean_true_node)
5599 return const1_rtx;
5600 return const0_rtx;
5603 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5604 is lock free on this architecture. */
5606 static tree
5607 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5609 if (!flag_inline_atomics)
5610 return NULL_TREE;
5612 /* If it isn't always lock free, don't generate a result. */
5613 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5614 return boolean_true_node;
5616 return NULL_TREE;
5619 /* Return true if the parameters to call EXP represent an object which will
5620 always generate lock free instructions. The first argument represents the
5621 size of the object, and the second parameter is a pointer to the object
5622 itself. If NULL is passed for the object, then the result is based on
5623 typical alignment for an object of the specified size. Otherwise return
5624 NULL*/
5626 static rtx
5627 expand_builtin_atomic_is_lock_free (tree exp)
5629 tree size;
5630 tree arg0 = CALL_EXPR_ARG (exp, 0);
5631 tree arg1 = CALL_EXPR_ARG (exp, 1);
5633 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5635 error ("non-integer argument 1 to __atomic_is_lock_free");
5636 return NULL_RTX;
5639 if (!flag_inline_atomics)
5640 return NULL_RTX;
5642 /* If the value is known at compile time, return the RTX for it. */
5643 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5644 if (size == boolean_true_node)
5645 return const1_rtx;
5647 return NULL_RTX;
5650 /* Expand the __atomic_thread_fence intrinsic:
5651 void __atomic_thread_fence (enum memmodel)
5652 EXP is the CALL_EXPR. */
5654 static void
5655 expand_builtin_atomic_thread_fence (tree exp)
5657 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5658 expand_mem_thread_fence (model);
5661 /* Expand the __atomic_signal_fence intrinsic:
5662 void __atomic_signal_fence (enum memmodel)
5663 EXP is the CALL_EXPR. */
5665 static void
5666 expand_builtin_atomic_signal_fence (tree exp)
5668 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5669 expand_mem_signal_fence (model);
5672 /* Expand the __sync_synchronize intrinsic. */
5674 static void
5675 expand_builtin_sync_synchronize (void)
5677 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5680 static rtx
5681 expand_builtin_thread_pointer (tree exp, rtx target)
5683 enum insn_code icode;
5684 if (!validate_arglist (exp, VOID_TYPE))
5685 return const0_rtx;
5686 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5687 if (icode != CODE_FOR_nothing)
5689 struct expand_operand op;
5690 if (!REG_P (target) || GET_MODE (target) != Pmode)
5691 target = gen_reg_rtx (Pmode);
5692 create_output_operand (&op, target, Pmode);
5693 expand_insn (icode, 1, &op);
5694 return target;
5696 error ("__builtin_thread_pointer is not supported on this target");
5697 return const0_rtx;
5700 static void
5701 expand_builtin_set_thread_pointer (tree exp)
5703 enum insn_code icode;
5704 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5705 return;
5706 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5707 if (icode != CODE_FOR_nothing)
5709 struct expand_operand op;
5710 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5711 Pmode, EXPAND_NORMAL);
5712 create_input_operand (&op, val, Pmode);
5713 expand_insn (icode, 1, &op);
5714 return;
5716 error ("__builtin_set_thread_pointer is not supported on this target");
5720 /* Expand an expression EXP that calls a built-in function,
5721 with result going to TARGET if that's convenient
5722 (and in mode MODE if that's convenient).
5723 SUBTARGET may be used as the target for computing one of EXP's operands.
5724 IGNORE is nonzero if the value is to be ignored. */
5727 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5728 int ignore)
5730 tree fndecl = get_callee_fndecl (exp);
5731 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5732 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5733 int flags;
5735 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5736 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5738 /* When not optimizing, generate calls to library functions for a certain
5739 set of builtins. */
5740 if (!optimize
5741 && !called_as_built_in (fndecl)
5742 && fcode != BUILT_IN_FORK
5743 && fcode != BUILT_IN_EXECL
5744 && fcode != BUILT_IN_EXECV
5745 && fcode != BUILT_IN_EXECLP
5746 && fcode != BUILT_IN_EXECLE
5747 && fcode != BUILT_IN_EXECVP
5748 && fcode != BUILT_IN_EXECVE
5749 && fcode != BUILT_IN_ALLOCA
5750 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5751 && fcode != BUILT_IN_FREE)
5752 return expand_call (exp, target, ignore);
5754 /* The built-in function expanders test for target == const0_rtx
5755 to determine whether the function's result will be ignored. */
5756 if (ignore)
5757 target = const0_rtx;
5759 /* If the result of a pure or const built-in function is ignored, and
5760 none of its arguments are volatile, we can avoid expanding the
5761 built-in call and just evaluate the arguments for side-effects. */
5762 if (target == const0_rtx
5763 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5764 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5766 bool volatilep = false;
5767 tree arg;
5768 call_expr_arg_iterator iter;
5770 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5771 if (TREE_THIS_VOLATILE (arg))
5773 volatilep = true;
5774 break;
5777 if (! volatilep)
5779 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5780 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5781 return const0_rtx;
5785 switch (fcode)
5787 CASE_FLT_FN (BUILT_IN_FABS):
5788 case BUILT_IN_FABSD32:
5789 case BUILT_IN_FABSD64:
5790 case BUILT_IN_FABSD128:
5791 target = expand_builtin_fabs (exp, target, subtarget);
5792 if (target)
5793 return target;
5794 break;
5796 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5797 target = expand_builtin_copysign (exp, target, subtarget);
5798 if (target)
5799 return target;
5800 break;
5802 /* Just do a normal library call if we were unable to fold
5803 the values. */
5804 CASE_FLT_FN (BUILT_IN_CABS):
5805 break;
5807 CASE_FLT_FN (BUILT_IN_EXP):
5808 CASE_FLT_FN (BUILT_IN_EXP10):
5809 CASE_FLT_FN (BUILT_IN_POW10):
5810 CASE_FLT_FN (BUILT_IN_EXP2):
5811 CASE_FLT_FN (BUILT_IN_EXPM1):
5812 CASE_FLT_FN (BUILT_IN_LOGB):
5813 CASE_FLT_FN (BUILT_IN_LOG):
5814 CASE_FLT_FN (BUILT_IN_LOG10):
5815 CASE_FLT_FN (BUILT_IN_LOG2):
5816 CASE_FLT_FN (BUILT_IN_LOG1P):
5817 CASE_FLT_FN (BUILT_IN_TAN):
5818 CASE_FLT_FN (BUILT_IN_ASIN):
5819 CASE_FLT_FN (BUILT_IN_ACOS):
5820 CASE_FLT_FN (BUILT_IN_ATAN):
5821 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5822 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5823 because of possible accuracy problems. */
5824 if (! flag_unsafe_math_optimizations)
5825 break;
5826 CASE_FLT_FN (BUILT_IN_SQRT):
5827 CASE_FLT_FN (BUILT_IN_FLOOR):
5828 CASE_FLT_FN (BUILT_IN_CEIL):
5829 CASE_FLT_FN (BUILT_IN_TRUNC):
5830 CASE_FLT_FN (BUILT_IN_ROUND):
5831 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5832 CASE_FLT_FN (BUILT_IN_RINT):
5833 target = expand_builtin_mathfn (exp, target, subtarget);
5834 if (target)
5835 return target;
5836 break;
5838 CASE_FLT_FN (BUILT_IN_FMA):
5839 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5840 if (target)
5841 return target;
5842 break;
5844 CASE_FLT_FN (BUILT_IN_ILOGB):
5845 if (! flag_unsafe_math_optimizations)
5846 break;
5847 CASE_FLT_FN (BUILT_IN_ISINF):
5848 CASE_FLT_FN (BUILT_IN_FINITE):
5849 case BUILT_IN_ISFINITE:
5850 case BUILT_IN_ISNORMAL:
5851 target = expand_builtin_interclass_mathfn (exp, target);
5852 if (target)
5853 return target;
5854 break;
5856 CASE_FLT_FN (BUILT_IN_ICEIL):
5857 CASE_FLT_FN (BUILT_IN_LCEIL):
5858 CASE_FLT_FN (BUILT_IN_LLCEIL):
5859 CASE_FLT_FN (BUILT_IN_LFLOOR):
5860 CASE_FLT_FN (BUILT_IN_IFLOOR):
5861 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5862 target = expand_builtin_int_roundingfn (exp, target);
5863 if (target)
5864 return target;
5865 break;
5867 CASE_FLT_FN (BUILT_IN_IRINT):
5868 CASE_FLT_FN (BUILT_IN_LRINT):
5869 CASE_FLT_FN (BUILT_IN_LLRINT):
5870 CASE_FLT_FN (BUILT_IN_IROUND):
5871 CASE_FLT_FN (BUILT_IN_LROUND):
5872 CASE_FLT_FN (BUILT_IN_LLROUND):
5873 target = expand_builtin_int_roundingfn_2 (exp, target);
5874 if (target)
5875 return target;
5876 break;
5878 CASE_FLT_FN (BUILT_IN_POWI):
5879 target = expand_builtin_powi (exp, target);
5880 if (target)
5881 return target;
5882 break;
5884 CASE_FLT_FN (BUILT_IN_ATAN2):
5885 CASE_FLT_FN (BUILT_IN_LDEXP):
5886 CASE_FLT_FN (BUILT_IN_SCALB):
5887 CASE_FLT_FN (BUILT_IN_SCALBN):
5888 CASE_FLT_FN (BUILT_IN_SCALBLN):
5889 if (! flag_unsafe_math_optimizations)
5890 break;
5892 CASE_FLT_FN (BUILT_IN_FMOD):
5893 CASE_FLT_FN (BUILT_IN_REMAINDER):
5894 CASE_FLT_FN (BUILT_IN_DREM):
5895 CASE_FLT_FN (BUILT_IN_POW):
5896 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5897 if (target)
5898 return target;
5899 break;
5901 CASE_FLT_FN (BUILT_IN_CEXPI):
5902 target = expand_builtin_cexpi (exp, target);
5903 gcc_assert (target);
5904 return target;
5906 CASE_FLT_FN (BUILT_IN_SIN):
5907 CASE_FLT_FN (BUILT_IN_COS):
5908 if (! flag_unsafe_math_optimizations)
5909 break;
5910 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_SINCOS):
5916 if (! flag_unsafe_math_optimizations)
5917 break;
5918 target = expand_builtin_sincos (exp);
5919 if (target)
5920 return target;
5921 break;
5923 case BUILT_IN_APPLY_ARGS:
5924 return expand_builtin_apply_args ();
5926 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5927 FUNCTION with a copy of the parameters described by
5928 ARGUMENTS, and ARGSIZE. It returns a block of memory
5929 allocated on the stack into which is stored all the registers
5930 that might possibly be used for returning the result of a
5931 function. ARGUMENTS is the value returned by
5932 __builtin_apply_args. ARGSIZE is the number of bytes of
5933 arguments that must be copied. ??? How should this value be
5934 computed? We'll also need a safe worst case value for varargs
5935 functions. */
5936 case BUILT_IN_APPLY:
5937 if (!validate_arglist (exp, POINTER_TYPE,
5938 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5939 && !validate_arglist (exp, REFERENCE_TYPE,
5940 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5941 return const0_rtx;
5942 else
5944 rtx ops[3];
5946 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5947 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5948 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5950 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5953 /* __builtin_return (RESULT) causes the function to return the
5954 value described by RESULT. RESULT is address of the block of
5955 memory returned by __builtin_apply. */
5956 case BUILT_IN_RETURN:
5957 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5958 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5959 return const0_rtx;
5961 case BUILT_IN_SAVEREGS:
5962 return expand_builtin_saveregs ();
5964 case BUILT_IN_VA_ARG_PACK:
5965 /* All valid uses of __builtin_va_arg_pack () are removed during
5966 inlining. */
5967 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5968 return const0_rtx;
5970 case BUILT_IN_VA_ARG_PACK_LEN:
5971 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5972 inlining. */
5973 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5974 return const0_rtx;
5976 /* Return the address of the first anonymous stack arg. */
5977 case BUILT_IN_NEXT_ARG:
5978 if (fold_builtin_next_arg (exp, false))
5979 return const0_rtx;
5980 return expand_builtin_next_arg ();
5982 case BUILT_IN_CLEAR_CACHE:
5983 target = expand_builtin___clear_cache (exp);
5984 if (target)
5985 return target;
5986 break;
5988 case BUILT_IN_CLASSIFY_TYPE:
5989 return expand_builtin_classify_type (exp);
5991 case BUILT_IN_CONSTANT_P:
5992 return const0_rtx;
5994 case BUILT_IN_FRAME_ADDRESS:
5995 case BUILT_IN_RETURN_ADDRESS:
5996 return expand_builtin_frame_address (fndecl, exp);
5998 /* Returns the address of the area where the structure is returned.
5999 0 otherwise. */
6000 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6001 if (call_expr_nargs (exp) != 0
6002 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6003 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6004 return const0_rtx;
6005 else
6006 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6008 case BUILT_IN_ALLOCA:
6009 case BUILT_IN_ALLOCA_WITH_ALIGN:
6010 /* If the allocation stems from the declaration of a variable-sized
6011 object, it cannot accumulate. */
6012 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6013 if (target)
6014 return target;
6015 break;
6017 case BUILT_IN_STACK_SAVE:
6018 return expand_stack_save ();
6020 case BUILT_IN_STACK_RESTORE:
6021 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6022 return const0_rtx;
6024 case BUILT_IN_BSWAP16:
6025 case BUILT_IN_BSWAP32:
6026 case BUILT_IN_BSWAP64:
6027 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6028 if (target)
6029 return target;
6030 break;
6032 CASE_INT_FN (BUILT_IN_FFS):
6033 target = expand_builtin_unop (target_mode, exp, target,
6034 subtarget, ffs_optab);
6035 if (target)
6036 return target;
6037 break;
6039 CASE_INT_FN (BUILT_IN_CLZ):
6040 target = expand_builtin_unop (target_mode, exp, target,
6041 subtarget, clz_optab);
6042 if (target)
6043 return target;
6044 break;
6046 CASE_INT_FN (BUILT_IN_CTZ):
6047 target = expand_builtin_unop (target_mode, exp, target,
6048 subtarget, ctz_optab);
6049 if (target)
6050 return target;
6051 break;
6053 CASE_INT_FN (BUILT_IN_CLRSB):
6054 target = expand_builtin_unop (target_mode, exp, target,
6055 subtarget, clrsb_optab);
6056 if (target)
6057 return target;
6058 break;
6060 CASE_INT_FN (BUILT_IN_POPCOUNT):
6061 target = expand_builtin_unop (target_mode, exp, target,
6062 subtarget, popcount_optab);
6063 if (target)
6064 return target;
6065 break;
6067 CASE_INT_FN (BUILT_IN_PARITY):
6068 target = expand_builtin_unop (target_mode, exp, target,
6069 subtarget, parity_optab);
6070 if (target)
6071 return target;
6072 break;
6074 case BUILT_IN_STRLEN:
6075 target = expand_builtin_strlen (exp, target, target_mode);
6076 if (target)
6077 return target;
6078 break;
6080 case BUILT_IN_STRCPY:
6081 target = expand_builtin_strcpy (exp, target);
6082 if (target)
6083 return target;
6084 break;
6086 case BUILT_IN_STRNCPY:
6087 target = expand_builtin_strncpy (exp, target);
6088 if (target)
6089 return target;
6090 break;
6092 case BUILT_IN_STPCPY:
6093 target = expand_builtin_stpcpy (exp, target, mode);
6094 if (target)
6095 return target;
6096 break;
6098 case BUILT_IN_MEMCPY:
6099 target = expand_builtin_memcpy (exp, target);
6100 if (target)
6101 return target;
6102 break;
6104 case BUILT_IN_MEMPCPY:
6105 target = expand_builtin_mempcpy (exp, target, mode);
6106 if (target)
6107 return target;
6108 break;
6110 case BUILT_IN_MEMSET:
6111 target = expand_builtin_memset (exp, target, mode);
6112 if (target)
6113 return target;
6114 break;
6116 case BUILT_IN_BZERO:
6117 target = expand_builtin_bzero (exp);
6118 if (target)
6119 return target;
6120 break;
6122 case BUILT_IN_STRCMP:
6123 target = expand_builtin_strcmp (exp, target);
6124 if (target)
6125 return target;
6126 break;
6128 case BUILT_IN_STRNCMP:
6129 target = expand_builtin_strncmp (exp, target, mode);
6130 if (target)
6131 return target;
6132 break;
6134 case BUILT_IN_BCMP:
6135 case BUILT_IN_MEMCMP:
6136 target = expand_builtin_memcmp (exp, target, mode);
6137 if (target)
6138 return target;
6139 break;
6141 case BUILT_IN_SETJMP:
6142 /* This should have been lowered to the builtins below. */
6143 gcc_unreachable ();
6145 case BUILT_IN_SETJMP_SETUP:
6146 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6147 and the receiver label. */
6148 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6150 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6151 VOIDmode, EXPAND_NORMAL);
6152 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6153 rtx label_r = label_rtx (label);
6155 /* This is copied from the handling of non-local gotos. */
6156 expand_builtin_setjmp_setup (buf_addr, label_r);
6157 nonlocal_goto_handler_labels
6158 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6159 nonlocal_goto_handler_labels);
6160 /* ??? Do not let expand_label treat us as such since we would
6161 not want to be both on the list of non-local labels and on
6162 the list of forced labels. */
6163 FORCED_LABEL (label) = 0;
6164 return const0_rtx;
6166 break;
6168 case BUILT_IN_SETJMP_DISPATCHER:
6169 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6170 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6172 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6173 rtx label_r = label_rtx (label);
6175 /* Remove the dispatcher label from the list of non-local labels
6176 since the receiver labels have been added to it above. */
6177 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6178 return const0_rtx;
6180 break;
6182 case BUILT_IN_SETJMP_RECEIVER:
6183 /* __builtin_setjmp_receiver is passed the receiver label. */
6184 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6186 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6187 rtx label_r = label_rtx (label);
6189 expand_builtin_setjmp_receiver (label_r);
6190 return const0_rtx;
6192 break;
6194 /* __builtin_longjmp is passed a pointer to an array of five words.
6195 It's similar to the C library longjmp function but works with
6196 __builtin_setjmp above. */
6197 case BUILT_IN_LONGJMP:
6198 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6200 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6201 VOIDmode, EXPAND_NORMAL);
6202 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6204 if (value != const1_rtx)
6206 error ("%<__builtin_longjmp%> second argument must be 1");
6207 return const0_rtx;
6210 expand_builtin_longjmp (buf_addr, value);
6211 return const0_rtx;
6213 break;
6215 case BUILT_IN_NONLOCAL_GOTO:
6216 target = expand_builtin_nonlocal_goto (exp);
6217 if (target)
6218 return target;
6219 break;
6221 /* This updates the setjmp buffer that is its argument with the value
6222 of the current stack pointer. */
6223 case BUILT_IN_UPDATE_SETJMP_BUF:
6224 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6226 rtx buf_addr
6227 = expand_normal (CALL_EXPR_ARG (exp, 0));
6229 expand_builtin_update_setjmp_buf (buf_addr);
6230 return const0_rtx;
6232 break;
6234 case BUILT_IN_TRAP:
6235 expand_builtin_trap ();
6236 return const0_rtx;
6238 case BUILT_IN_UNREACHABLE:
6239 expand_builtin_unreachable ();
6240 return const0_rtx;
6242 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6243 case BUILT_IN_SIGNBITD32:
6244 case BUILT_IN_SIGNBITD64:
6245 case BUILT_IN_SIGNBITD128:
6246 target = expand_builtin_signbit (exp, target);
6247 if (target)
6248 return target;
6249 break;
6251 /* Various hooks for the DWARF 2 __throw routine. */
6252 case BUILT_IN_UNWIND_INIT:
6253 expand_builtin_unwind_init ();
6254 return const0_rtx;
6255 case BUILT_IN_DWARF_CFA:
6256 return virtual_cfa_rtx;
6257 #ifdef DWARF2_UNWIND_INFO
6258 case BUILT_IN_DWARF_SP_COLUMN:
6259 return expand_builtin_dwarf_sp_column ();
6260 case BUILT_IN_INIT_DWARF_REG_SIZES:
6261 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6262 return const0_rtx;
6263 #endif
6264 case BUILT_IN_FROB_RETURN_ADDR:
6265 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6266 case BUILT_IN_EXTRACT_RETURN_ADDR:
6267 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6268 case BUILT_IN_EH_RETURN:
6269 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6270 CALL_EXPR_ARG (exp, 1));
6271 return const0_rtx;
6272 #ifdef EH_RETURN_DATA_REGNO
6273 case BUILT_IN_EH_RETURN_DATA_REGNO:
6274 return expand_builtin_eh_return_data_regno (exp);
6275 #endif
6276 case BUILT_IN_EXTEND_POINTER:
6277 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6278 case BUILT_IN_EH_POINTER:
6279 return expand_builtin_eh_pointer (exp);
6280 case BUILT_IN_EH_FILTER:
6281 return expand_builtin_eh_filter (exp);
6282 case BUILT_IN_EH_COPY_VALUES:
6283 return expand_builtin_eh_copy_values (exp);
6285 case BUILT_IN_VA_START:
6286 return expand_builtin_va_start (exp);
6287 case BUILT_IN_VA_END:
6288 return expand_builtin_va_end (exp);
6289 case BUILT_IN_VA_COPY:
6290 return expand_builtin_va_copy (exp);
6291 case BUILT_IN_EXPECT:
6292 return expand_builtin_expect (exp, target);
6293 case BUILT_IN_ASSUME_ALIGNED:
6294 return expand_builtin_assume_aligned (exp, target);
6295 case BUILT_IN_PREFETCH:
6296 expand_builtin_prefetch (exp);
6297 return const0_rtx;
6299 case BUILT_IN_INIT_TRAMPOLINE:
6300 return expand_builtin_init_trampoline (exp, true);
6301 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6302 return expand_builtin_init_trampoline (exp, false);
6303 case BUILT_IN_ADJUST_TRAMPOLINE:
6304 return expand_builtin_adjust_trampoline (exp);
6306 case BUILT_IN_FORK:
6307 case BUILT_IN_EXECL:
6308 case BUILT_IN_EXECV:
6309 case BUILT_IN_EXECLP:
6310 case BUILT_IN_EXECLE:
6311 case BUILT_IN_EXECVP:
6312 case BUILT_IN_EXECVE:
6313 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6314 if (target)
6315 return target;
6316 break;
6318 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6319 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6320 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6321 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6322 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6323 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6324 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6325 if (target)
6326 return target;
6327 break;
6329 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6330 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6331 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6332 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6333 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6335 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6336 if (target)
6337 return target;
6338 break;
6340 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6341 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6342 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6343 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6344 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6346 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6347 if (target)
6348 return target;
6349 break;
6351 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6352 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6353 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6354 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6355 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6357 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6358 if (target)
6359 return target;
6360 break;
6362 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6363 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6364 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6365 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6366 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6367 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6368 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6369 if (target)
6370 return target;
6371 break;
6373 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6374 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6375 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6376 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6377 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6378 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6379 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6380 if (target)
6381 return target;
6382 break;
6384 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6385 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6386 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6387 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6388 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6389 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6390 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6391 if (target)
6392 return target;
6393 break;
6395 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6396 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6397 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6398 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6399 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6400 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6401 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6402 if (target)
6403 return target;
6404 break;
6406 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6407 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6408 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6409 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6410 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6411 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6412 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6413 if (target)
6414 return target;
6415 break;
6417 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6418 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6419 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6420 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6421 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6422 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6423 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6424 if (target)
6425 return target;
6426 break;
6428 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6429 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6430 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6431 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6432 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6433 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6434 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6435 if (target)
6436 return target;
6437 break;
6439 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6440 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6441 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6442 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6443 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6445 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6446 if (target)
6447 return target;
6448 break;
6450 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6451 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6452 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6453 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6454 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6455 if (mode == VOIDmode)
6456 mode = TYPE_MODE (boolean_type_node);
6457 if (!target || !register_operand (target, mode))
6458 target = gen_reg_rtx (mode);
6460 mode = get_builtin_sync_mode
6461 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6462 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6463 if (target)
6464 return target;
6465 break;
6467 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6468 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6469 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6470 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6471 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6472 mode = get_builtin_sync_mode
6473 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6474 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6475 if (target)
6476 return target;
6477 break;
6479 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6480 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6481 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6482 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6483 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6484 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6485 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6486 if (target)
6487 return target;
6488 break;
6490 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6491 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6492 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6493 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6494 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6495 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6496 expand_builtin_sync_lock_release (mode, exp);
6497 return const0_rtx;
6499 case BUILT_IN_SYNC_SYNCHRONIZE:
6500 expand_builtin_sync_synchronize ();
6501 return const0_rtx;
6503 case BUILT_IN_ATOMIC_EXCHANGE_1:
6504 case BUILT_IN_ATOMIC_EXCHANGE_2:
6505 case BUILT_IN_ATOMIC_EXCHANGE_4:
6506 case BUILT_IN_ATOMIC_EXCHANGE_8:
6507 case BUILT_IN_ATOMIC_EXCHANGE_16:
6508 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6509 target = expand_builtin_atomic_exchange (mode, exp, target);
6510 if (target)
6511 return target;
6512 break;
6514 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6515 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6516 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6517 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6518 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6520 unsigned int nargs, z;
6521 vec<tree, va_gc> *vec;
6523 mode =
6524 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6525 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6526 if (target)
6527 return target;
6529 /* If this is turned into an external library call, the weak parameter
6530 must be dropped to match the expected parameter list. */
6531 nargs = call_expr_nargs (exp);
6532 vec_alloc (vec, nargs - 1);
6533 for (z = 0; z < 3; z++)
6534 vec->quick_push (CALL_EXPR_ARG (exp, z));
6535 /* Skip the boolean weak parameter. */
6536 for (z = 4; z < 6; z++)
6537 vec->quick_push (CALL_EXPR_ARG (exp, z));
6538 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6539 break;
6542 case BUILT_IN_ATOMIC_LOAD_1:
6543 case BUILT_IN_ATOMIC_LOAD_2:
6544 case BUILT_IN_ATOMIC_LOAD_4:
6545 case BUILT_IN_ATOMIC_LOAD_8:
6546 case BUILT_IN_ATOMIC_LOAD_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6548 target = expand_builtin_atomic_load (mode, exp, target);
6549 if (target)
6550 return target;
6551 break;
6553 case BUILT_IN_ATOMIC_STORE_1:
6554 case BUILT_IN_ATOMIC_STORE_2:
6555 case BUILT_IN_ATOMIC_STORE_4:
6556 case BUILT_IN_ATOMIC_STORE_8:
6557 case BUILT_IN_ATOMIC_STORE_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6559 target = expand_builtin_atomic_store (mode, exp);
6560 if (target)
6561 return const0_rtx;
6562 break;
6564 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6565 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6566 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6567 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6568 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6570 enum built_in_function lib;
6571 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6572 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6573 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6574 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6575 ignore, lib);
6576 if (target)
6577 return target;
6578 break;
6580 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6581 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6582 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6583 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6584 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6586 enum built_in_function lib;
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6588 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6589 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6590 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6591 ignore, lib);
6592 if (target)
6593 return target;
6594 break;
6596 case BUILT_IN_ATOMIC_AND_FETCH_1:
6597 case BUILT_IN_ATOMIC_AND_FETCH_2:
6598 case BUILT_IN_ATOMIC_AND_FETCH_4:
6599 case BUILT_IN_ATOMIC_AND_FETCH_8:
6600 case BUILT_IN_ATOMIC_AND_FETCH_16:
6602 enum built_in_function lib;
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6604 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6605 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6606 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6607 ignore, lib);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6613 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6614 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6615 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6616 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6618 enum built_in_function lib;
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6621 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6622 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6623 ignore, lib);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6629 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6630 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6631 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6632 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6634 enum built_in_function lib;
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6637 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6638 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6639 ignore, lib);
6640 if (target)
6641 return target;
6642 break;
6644 case BUILT_IN_ATOMIC_OR_FETCH_1:
6645 case BUILT_IN_ATOMIC_OR_FETCH_2:
6646 case BUILT_IN_ATOMIC_OR_FETCH_4:
6647 case BUILT_IN_ATOMIC_OR_FETCH_8:
6648 case BUILT_IN_ATOMIC_OR_FETCH_16:
6650 enum built_in_function lib;
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6653 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6654 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6655 ignore, lib);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6661 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6662 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6663 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6664 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6665 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6666 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6667 ignore, BUILT_IN_NONE);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6673 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6674 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6675 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6676 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6677 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6678 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6679 ignore, BUILT_IN_NONE);
6680 if (target)
6681 return target;
6682 break;
6684 case BUILT_IN_ATOMIC_FETCH_AND_1:
6685 case BUILT_IN_ATOMIC_FETCH_AND_2:
6686 case BUILT_IN_ATOMIC_FETCH_AND_4:
6687 case BUILT_IN_ATOMIC_FETCH_AND_8:
6688 case BUILT_IN_ATOMIC_FETCH_AND_16:
6689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6690 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6691 ignore, BUILT_IN_NONE);
6692 if (target)
6693 return target;
6694 break;
6696 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6697 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6698 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6699 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6700 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6702 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6703 ignore, BUILT_IN_NONE);
6704 if (target)
6705 return target;
6706 break;
6708 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6709 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6710 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6711 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6712 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6713 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6714 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6715 ignore, BUILT_IN_NONE);
6716 if (target)
6717 return target;
6718 break;
6720 case BUILT_IN_ATOMIC_FETCH_OR_1:
6721 case BUILT_IN_ATOMIC_FETCH_OR_2:
6722 case BUILT_IN_ATOMIC_FETCH_OR_4:
6723 case BUILT_IN_ATOMIC_FETCH_OR_8:
6724 case BUILT_IN_ATOMIC_FETCH_OR_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6726 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6727 ignore, BUILT_IN_NONE);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_ATOMIC_TEST_AND_SET:
6733 return expand_builtin_atomic_test_and_set (exp, target);
6735 case BUILT_IN_ATOMIC_CLEAR:
6736 return expand_builtin_atomic_clear (exp);
6738 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6739 return expand_builtin_atomic_always_lock_free (exp);
6741 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6742 target = expand_builtin_atomic_is_lock_free (exp);
6743 if (target)
6744 return target;
6745 break;
6747 case BUILT_IN_ATOMIC_THREAD_FENCE:
6748 expand_builtin_atomic_thread_fence (exp);
6749 return const0_rtx;
6751 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6752 expand_builtin_atomic_signal_fence (exp);
6753 return const0_rtx;
6755 case BUILT_IN_OBJECT_SIZE:
6756 return expand_builtin_object_size (exp);
6758 case BUILT_IN_MEMCPY_CHK:
6759 case BUILT_IN_MEMPCPY_CHK:
6760 case BUILT_IN_MEMMOVE_CHK:
6761 case BUILT_IN_MEMSET_CHK:
6762 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_STRCPY_CHK:
6768 case BUILT_IN_STPCPY_CHK:
6769 case BUILT_IN_STRNCPY_CHK:
6770 case BUILT_IN_STPNCPY_CHK:
6771 case BUILT_IN_STRCAT_CHK:
6772 case BUILT_IN_STRNCAT_CHK:
6773 case BUILT_IN_SNPRINTF_CHK:
6774 case BUILT_IN_VSNPRINTF_CHK:
6775 maybe_emit_chk_warning (exp, fcode);
6776 break;
6778 case BUILT_IN_SPRINTF_CHK:
6779 case BUILT_IN_VSPRINTF_CHK:
6780 maybe_emit_sprintf_chk_warning (exp, fcode);
6781 break;
6783 case BUILT_IN_FREE:
6784 if (warn_free_nonheap_object)
6785 maybe_emit_free_warning (exp);
6786 break;
6788 case BUILT_IN_THREAD_POINTER:
6789 return expand_builtin_thread_pointer (exp, target);
6791 case BUILT_IN_SET_THREAD_POINTER:
6792 expand_builtin_set_thread_pointer (exp);
6793 return const0_rtx;
6795 default: /* just do library call, if unknown builtin */
6796 break;
6799 /* The switch statement above can drop through to cause the function
6800 to be called normally. */
6801 return expand_call (exp, target, ignore);
6804 /* Determine whether a tree node represents a call to a built-in
6805 function. If the tree T is a call to a built-in function with
6806 the right number of arguments of the appropriate types, return
6807 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6808 Otherwise the return value is END_BUILTINS. */
6810 enum built_in_function
6811 builtin_mathfn_code (const_tree t)
6813 const_tree fndecl, arg, parmlist;
6814 const_tree argtype, parmtype;
6815 const_call_expr_arg_iterator iter;
6817 if (TREE_CODE (t) != CALL_EXPR
6818 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6819 return END_BUILTINS;
6821 fndecl = get_callee_fndecl (t);
6822 if (fndecl == NULL_TREE
6823 || TREE_CODE (fndecl) != FUNCTION_DECL
6824 || ! DECL_BUILT_IN (fndecl)
6825 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6826 return END_BUILTINS;
6828 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6829 init_const_call_expr_arg_iterator (t, &iter);
6830 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6832 /* If a function doesn't take a variable number of arguments,
6833 the last element in the list will have type `void'. */
6834 parmtype = TREE_VALUE (parmlist);
6835 if (VOID_TYPE_P (parmtype))
6837 if (more_const_call_expr_args_p (&iter))
6838 return END_BUILTINS;
6839 return DECL_FUNCTION_CODE (fndecl);
6842 if (! more_const_call_expr_args_p (&iter))
6843 return END_BUILTINS;
6845 arg = next_const_call_expr_arg (&iter);
6846 argtype = TREE_TYPE (arg);
6848 if (SCALAR_FLOAT_TYPE_P (parmtype))
6850 if (! SCALAR_FLOAT_TYPE_P (argtype))
6851 return END_BUILTINS;
6853 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6855 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6856 return END_BUILTINS;
6858 else if (POINTER_TYPE_P (parmtype))
6860 if (! POINTER_TYPE_P (argtype))
6861 return END_BUILTINS;
6863 else if (INTEGRAL_TYPE_P (parmtype))
6865 if (! INTEGRAL_TYPE_P (argtype))
6866 return END_BUILTINS;
6868 else
6869 return END_BUILTINS;
6872 /* Variable-length argument list. */
6873 return DECL_FUNCTION_CODE (fndecl);
6876 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6877 evaluate to a constant. */
6879 static tree
6880 fold_builtin_constant_p (tree arg)
6882 /* We return 1 for a numeric type that's known to be a constant
6883 value at compile-time or for an aggregate type that's a
6884 literal constant. */
6885 STRIP_NOPS (arg);
6887 /* If we know this is a constant, emit the constant of one. */
6888 if (CONSTANT_CLASS_P (arg)
6889 || (TREE_CODE (arg) == CONSTRUCTOR
6890 && TREE_CONSTANT (arg)))
6891 return integer_one_node;
6892 if (TREE_CODE (arg) == ADDR_EXPR)
6894 tree op = TREE_OPERAND (arg, 0);
6895 if (TREE_CODE (op) == STRING_CST
6896 || (TREE_CODE (op) == ARRAY_REF
6897 && integer_zerop (TREE_OPERAND (op, 1))
6898 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6899 return integer_one_node;
6902 /* If this expression has side effects, show we don't know it to be a
6903 constant. Likewise if it's a pointer or aggregate type since in
6904 those case we only want literals, since those are only optimized
6905 when generating RTL, not later.
6906 And finally, if we are compiling an initializer, not code, we
6907 need to return a definite result now; there's not going to be any
6908 more optimization done. */
6909 if (TREE_SIDE_EFFECTS (arg)
6910 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6911 || POINTER_TYPE_P (TREE_TYPE (arg))
6912 || cfun == 0
6913 || folding_initializer
6914 || force_folding_builtin_constant_p)
6915 return integer_zero_node;
6917 return NULL_TREE;
6920 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6921 return it as a truthvalue. */
6923 static tree
6924 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6926 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6928 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6929 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6930 ret_type = TREE_TYPE (TREE_TYPE (fn));
6931 pred_type = TREE_VALUE (arg_types);
6932 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6934 pred = fold_convert_loc (loc, pred_type, pred);
6935 expected = fold_convert_loc (loc, expected_type, expected);
6936 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6938 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6939 build_int_cst (ret_type, 0));
6942 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6943 NULL_TREE if no simplification is possible. */
6945 static tree
6946 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6948 tree inner, fndecl, inner_arg0;
6949 enum tree_code code;
6951 /* Distribute the expected value over short-circuiting operators.
6952 See through the cast from truthvalue_type_node to long. */
6953 inner_arg0 = arg0;
6954 while (TREE_CODE (inner_arg0) == NOP_EXPR
6955 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6956 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6957 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6959 /* If this is a builtin_expect within a builtin_expect keep the
6960 inner one. See through a comparison against a constant. It
6961 might have been added to create a thruthvalue. */
6962 inner = inner_arg0;
6964 if (COMPARISON_CLASS_P (inner)
6965 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6966 inner = TREE_OPERAND (inner, 0);
6968 if (TREE_CODE (inner) == CALL_EXPR
6969 && (fndecl = get_callee_fndecl (inner))
6970 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6971 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6972 return arg0;
6974 inner = inner_arg0;
6975 code = TREE_CODE (inner);
6976 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6978 tree op0 = TREE_OPERAND (inner, 0);
6979 tree op1 = TREE_OPERAND (inner, 1);
6981 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6982 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6983 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6985 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6988 /* If the argument isn't invariant then there's nothing else we can do. */
6989 if (!TREE_CONSTANT (inner_arg0))
6990 return NULL_TREE;
6992 /* If we expect that a comparison against the argument will fold to
6993 a constant return the constant. In practice, this means a true
6994 constant or the address of a non-weak symbol. */
6995 inner = inner_arg0;
6996 STRIP_NOPS (inner);
6997 if (TREE_CODE (inner) == ADDR_EXPR)
7001 inner = TREE_OPERAND (inner, 0);
7003 while (TREE_CODE (inner) == COMPONENT_REF
7004 || TREE_CODE (inner) == ARRAY_REF);
7005 if ((TREE_CODE (inner) == VAR_DECL
7006 || TREE_CODE (inner) == FUNCTION_DECL)
7007 && DECL_WEAK (inner))
7008 return NULL_TREE;
7011 /* Otherwise, ARG0 already has the proper type for the return value. */
7012 return arg0;
7015 /* Fold a call to __builtin_classify_type with argument ARG. */
7017 static tree
7018 fold_builtin_classify_type (tree arg)
7020 if (arg == 0)
7021 return build_int_cst (integer_type_node, no_type_class);
7023 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7026 /* Fold a call to __builtin_strlen with argument ARG. */
7028 static tree
7029 fold_builtin_strlen (location_t loc, tree type, tree arg)
7031 if (!validate_arg (arg, POINTER_TYPE))
7032 return NULL_TREE;
7033 else
7035 tree len = c_strlen (arg, 0);
7037 if (len)
7038 return fold_convert_loc (loc, type, len);
7040 return NULL_TREE;
7044 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7046 static tree
7047 fold_builtin_inf (location_t loc, tree type, int warn)
7049 REAL_VALUE_TYPE real;
7051 /* __builtin_inff is intended to be usable to define INFINITY on all
7052 targets. If an infinity is not available, INFINITY expands "to a
7053 positive constant of type float that overflows at translation
7054 time", footnote "In this case, using INFINITY will violate the
7055 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7056 Thus we pedwarn to ensure this constraint violation is
7057 diagnosed. */
7058 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7059 pedwarn (loc, 0, "target format does not support infinity");
7061 real_inf (&real);
7062 return build_real (type, real);
7065 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7067 static tree
7068 fold_builtin_nan (tree arg, tree type, int quiet)
7070 REAL_VALUE_TYPE real;
7071 const char *str;
7073 if (!validate_arg (arg, POINTER_TYPE))
7074 return NULL_TREE;
7075 str = c_getstr (arg);
7076 if (!str)
7077 return NULL_TREE;
7079 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7080 return NULL_TREE;
7082 return build_real (type, real);
7085 /* Return true if the floating point expression T has an integer value.
7086 We also allow +Inf, -Inf and NaN to be considered integer values. */
7088 static bool
7089 integer_valued_real_p (tree t)
7091 switch (TREE_CODE (t))
7093 case FLOAT_EXPR:
7094 return true;
7096 case ABS_EXPR:
7097 case SAVE_EXPR:
7098 return integer_valued_real_p (TREE_OPERAND (t, 0));
7100 case COMPOUND_EXPR:
7101 case MODIFY_EXPR:
7102 case BIND_EXPR:
7103 return integer_valued_real_p (TREE_OPERAND (t, 1));
7105 case PLUS_EXPR:
7106 case MINUS_EXPR:
7107 case MULT_EXPR:
7108 case MIN_EXPR:
7109 case MAX_EXPR:
7110 return integer_valued_real_p (TREE_OPERAND (t, 0))
7111 && integer_valued_real_p (TREE_OPERAND (t, 1));
7113 case COND_EXPR:
7114 return integer_valued_real_p (TREE_OPERAND (t, 1))
7115 && integer_valued_real_p (TREE_OPERAND (t, 2));
7117 case REAL_CST:
7118 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7120 case NOP_EXPR:
7122 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7123 if (TREE_CODE (type) == INTEGER_TYPE)
7124 return true;
7125 if (TREE_CODE (type) == REAL_TYPE)
7126 return integer_valued_real_p (TREE_OPERAND (t, 0));
7127 break;
7130 case CALL_EXPR:
7131 switch (builtin_mathfn_code (t))
7133 CASE_FLT_FN (BUILT_IN_CEIL):
7134 CASE_FLT_FN (BUILT_IN_FLOOR):
7135 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7136 CASE_FLT_FN (BUILT_IN_RINT):
7137 CASE_FLT_FN (BUILT_IN_ROUND):
7138 CASE_FLT_FN (BUILT_IN_TRUNC):
7139 return true;
7141 CASE_FLT_FN (BUILT_IN_FMIN):
7142 CASE_FLT_FN (BUILT_IN_FMAX):
7143 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7144 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7146 default:
7147 break;
7149 break;
7151 default:
7152 break;
7154 return false;
7157 /* FNDECL is assumed to be a builtin where truncation can be propagated
7158 across (for instance floor((double)f) == (double)floorf (f).
7159 Do the transformation for a call with argument ARG. */
7161 static tree
7162 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7164 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7166 if (!validate_arg (arg, REAL_TYPE))
7167 return NULL_TREE;
7169 /* Integer rounding functions are idempotent. */
7170 if (fcode == builtin_mathfn_code (arg))
7171 return arg;
7173 /* If argument is already integer valued, and we don't need to worry
7174 about setting errno, there's no need to perform rounding. */
7175 if (! flag_errno_math && integer_valued_real_p (arg))
7176 return arg;
7178 if (optimize)
7180 tree arg0 = strip_float_extensions (arg);
7181 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7182 tree newtype = TREE_TYPE (arg0);
7183 tree decl;
7185 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7186 && (decl = mathfn_built_in (newtype, fcode)))
7187 return fold_convert_loc (loc, ftype,
7188 build_call_expr_loc (loc, decl, 1,
7189 fold_convert_loc (loc,
7190 newtype,
7191 arg0)));
7193 return NULL_TREE;
7196 /* FNDECL is assumed to be builtin which can narrow the FP type of
7197 the argument, for instance lround((double)f) -> lroundf (f).
7198 Do the transformation for a call with argument ARG. */
7200 static tree
7201 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7203 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7205 if (!validate_arg (arg, REAL_TYPE))
7206 return NULL_TREE;
7208 /* If argument is already integer valued, and we don't need to worry
7209 about setting errno, there's no need to perform rounding. */
7210 if (! flag_errno_math && integer_valued_real_p (arg))
7211 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7212 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7214 if (optimize)
7216 tree ftype = TREE_TYPE (arg);
7217 tree arg0 = strip_float_extensions (arg);
7218 tree newtype = TREE_TYPE (arg0);
7219 tree decl;
7221 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7222 && (decl = mathfn_built_in (newtype, fcode)))
7223 return build_call_expr_loc (loc, decl, 1,
7224 fold_convert_loc (loc, newtype, arg0));
7227 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7228 sizeof (int) == sizeof (long). */
7229 if (TYPE_PRECISION (integer_type_node)
7230 == TYPE_PRECISION (long_integer_type_node))
7232 tree newfn = NULL_TREE;
7233 switch (fcode)
7235 CASE_FLT_FN (BUILT_IN_ICEIL):
7236 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7237 break;
7239 CASE_FLT_FN (BUILT_IN_IFLOOR):
7240 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7241 break;
7243 CASE_FLT_FN (BUILT_IN_IROUND):
7244 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7245 break;
7247 CASE_FLT_FN (BUILT_IN_IRINT):
7248 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7249 break;
7251 default:
7252 break;
7255 if (newfn)
7257 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7258 return fold_convert_loc (loc,
7259 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7263 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7264 sizeof (long long) == sizeof (long). */
7265 if (TYPE_PRECISION (long_long_integer_type_node)
7266 == TYPE_PRECISION (long_integer_type_node))
7268 tree newfn = NULL_TREE;
7269 switch (fcode)
7271 CASE_FLT_FN (BUILT_IN_LLCEIL):
7272 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7273 break;
7275 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7276 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7277 break;
7279 CASE_FLT_FN (BUILT_IN_LLROUND):
7280 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7281 break;
7283 CASE_FLT_FN (BUILT_IN_LLRINT):
7284 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7285 break;
7287 default:
7288 break;
7291 if (newfn)
7293 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7294 return fold_convert_loc (loc,
7295 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7299 return NULL_TREE;
7302 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7303 return type. Return NULL_TREE if no simplification can be made. */
7305 static tree
7306 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7308 tree res;
7310 if (!validate_arg (arg, COMPLEX_TYPE)
7311 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7312 return NULL_TREE;
7314 /* Calculate the result when the argument is a constant. */
7315 if (TREE_CODE (arg) == COMPLEX_CST
7316 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7317 type, mpfr_hypot)))
7318 return res;
7320 if (TREE_CODE (arg) == COMPLEX_EXPR)
7322 tree real = TREE_OPERAND (arg, 0);
7323 tree imag = TREE_OPERAND (arg, 1);
7325 /* If either part is zero, cabs is fabs of the other. */
7326 if (real_zerop (real))
7327 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7328 if (real_zerop (imag))
7329 return fold_build1_loc (loc, ABS_EXPR, type, real);
7331 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7332 if (flag_unsafe_math_optimizations
7333 && operand_equal_p (real, imag, OEP_PURE_SAME))
7335 const REAL_VALUE_TYPE sqrt2_trunc
7336 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7337 STRIP_NOPS (real);
7338 return fold_build2_loc (loc, MULT_EXPR, type,
7339 fold_build1_loc (loc, ABS_EXPR, type, real),
7340 build_real (type, sqrt2_trunc));
7344 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7345 if (TREE_CODE (arg) == NEGATE_EXPR
7346 || TREE_CODE (arg) == CONJ_EXPR)
7347 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7349 /* Don't do this when optimizing for size. */
7350 if (flag_unsafe_math_optimizations
7351 && optimize && optimize_function_for_speed_p (cfun))
7353 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7355 if (sqrtfn != NULL_TREE)
7357 tree rpart, ipart, result;
7359 arg = builtin_save_expr (arg);
7361 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7362 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7364 rpart = builtin_save_expr (rpart);
7365 ipart = builtin_save_expr (ipart);
7367 result = fold_build2_loc (loc, PLUS_EXPR, type,
7368 fold_build2_loc (loc, MULT_EXPR, type,
7369 rpart, rpart),
7370 fold_build2_loc (loc, MULT_EXPR, type,
7371 ipart, ipart));
7373 return build_call_expr_loc (loc, sqrtfn, 1, result);
7377 return NULL_TREE;
7380 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7381 complex tree type of the result. If NEG is true, the imaginary
7382 zero is negative. */
7384 static tree
7385 build_complex_cproj (tree type, bool neg)
7387 REAL_VALUE_TYPE rinf, rzero = dconst0;
7389 real_inf (&rinf);
7390 rzero.sign = neg;
7391 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7392 build_real (TREE_TYPE (type), rzero));
7395 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7396 return type. Return NULL_TREE if no simplification can be made. */
7398 static tree
7399 fold_builtin_cproj (location_t loc, tree arg, tree type)
7401 if (!validate_arg (arg, COMPLEX_TYPE)
7402 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7403 return NULL_TREE;
7405 /* If there are no infinities, return arg. */
7406 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7407 return non_lvalue_loc (loc, arg);
7409 /* Calculate the result when the argument is a constant. */
7410 if (TREE_CODE (arg) == COMPLEX_CST)
7412 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7413 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7415 if (real_isinf (real) || real_isinf (imag))
7416 return build_complex_cproj (type, imag->sign);
7417 else
7418 return arg;
7420 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7422 tree real = TREE_OPERAND (arg, 0);
7423 tree imag = TREE_OPERAND (arg, 1);
7425 STRIP_NOPS (real);
7426 STRIP_NOPS (imag);
7428 /* If the real part is inf and the imag part is known to be
7429 nonnegative, return (inf + 0i). Remember side-effects are
7430 possible in the imag part. */
7431 if (TREE_CODE (real) == REAL_CST
7432 && real_isinf (TREE_REAL_CST_PTR (real))
7433 && tree_expr_nonnegative_p (imag))
7434 return omit_one_operand_loc (loc, type,
7435 build_complex_cproj (type, false),
7436 arg);
7438 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7439 Remember side-effects are possible in the real part. */
7440 if (TREE_CODE (imag) == REAL_CST
7441 && real_isinf (TREE_REAL_CST_PTR (imag)))
7442 return
7443 omit_one_operand_loc (loc, type,
7444 build_complex_cproj (type, TREE_REAL_CST_PTR
7445 (imag)->sign), arg);
7448 return NULL_TREE;
7451 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7452 Return NULL_TREE if no simplification can be made. */
7454 static tree
7455 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7458 enum built_in_function fcode;
7459 tree res;
7461 if (!validate_arg (arg, REAL_TYPE))
7462 return NULL_TREE;
7464 /* Calculate the result when the argument is a constant. */
7465 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7466 return res;
7468 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7469 fcode = builtin_mathfn_code (arg);
7470 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7472 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7473 arg = fold_build2_loc (loc, MULT_EXPR, type,
7474 CALL_EXPR_ARG (arg, 0),
7475 build_real (type, dconsthalf));
7476 return build_call_expr_loc (loc, expfn, 1, arg);
7479 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7480 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7482 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7484 if (powfn)
7486 tree arg0 = CALL_EXPR_ARG (arg, 0);
7487 tree tree_root;
7488 /* The inner root was either sqrt or cbrt. */
7489 /* This was a conditional expression but it triggered a bug
7490 in Sun C 5.5. */
7491 REAL_VALUE_TYPE dconstroot;
7492 if (BUILTIN_SQRT_P (fcode))
7493 dconstroot = dconsthalf;
7494 else
7495 dconstroot = dconst_third ();
7497 /* Adjust for the outer root. */
7498 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7499 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7500 tree_root = build_real (type, dconstroot);
7501 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7505 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7506 if (flag_unsafe_math_optimizations
7507 && (fcode == BUILT_IN_POW
7508 || fcode == BUILT_IN_POWF
7509 || fcode == BUILT_IN_POWL))
7511 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7512 tree arg0 = CALL_EXPR_ARG (arg, 0);
7513 tree arg1 = CALL_EXPR_ARG (arg, 1);
7514 tree narg1;
7515 if (!tree_expr_nonnegative_p (arg0))
7516 arg0 = build1 (ABS_EXPR, type, arg0);
7517 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7518 build_real (type, dconsthalf));
7519 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7522 return NULL_TREE;
7525 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7526 Return NULL_TREE if no simplification can be made. */
7528 static tree
7529 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7531 const enum built_in_function fcode = builtin_mathfn_code (arg);
7532 tree res;
7534 if (!validate_arg (arg, REAL_TYPE))
7535 return NULL_TREE;
7537 /* Calculate the result when the argument is a constant. */
7538 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7539 return res;
7541 if (flag_unsafe_math_optimizations)
7543 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7544 if (BUILTIN_EXPONENT_P (fcode))
7546 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7547 const REAL_VALUE_TYPE third_trunc =
7548 real_value_truncate (TYPE_MODE (type), dconst_third ());
7549 arg = fold_build2_loc (loc, MULT_EXPR, type,
7550 CALL_EXPR_ARG (arg, 0),
7551 build_real (type, third_trunc));
7552 return build_call_expr_loc (loc, expfn, 1, arg);
7555 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7556 if (BUILTIN_SQRT_P (fcode))
7558 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7560 if (powfn)
7562 tree arg0 = CALL_EXPR_ARG (arg, 0);
7563 tree tree_root;
7564 REAL_VALUE_TYPE dconstroot = dconst_third ();
7566 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7567 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7568 tree_root = build_real (type, dconstroot);
7569 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7573 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7574 if (BUILTIN_CBRT_P (fcode))
7576 tree arg0 = CALL_EXPR_ARG (arg, 0);
7577 if (tree_expr_nonnegative_p (arg0))
7579 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7581 if (powfn)
7583 tree tree_root;
7584 REAL_VALUE_TYPE dconstroot;
7586 real_arithmetic (&dconstroot, MULT_EXPR,
7587 dconst_third_ptr (), dconst_third_ptr ());
7588 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7589 tree_root = build_real (type, dconstroot);
7590 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7595 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7596 if (fcode == BUILT_IN_POW
7597 || fcode == BUILT_IN_POWF
7598 || fcode == BUILT_IN_POWL)
7600 tree arg00 = CALL_EXPR_ARG (arg, 0);
7601 tree arg01 = CALL_EXPR_ARG (arg, 1);
7602 if (tree_expr_nonnegative_p (arg00))
7604 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7605 const REAL_VALUE_TYPE dconstroot
7606 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7607 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7608 build_real (type, dconstroot));
7609 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7613 return NULL_TREE;
7616 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7617 TYPE is the type of the return value. Return NULL_TREE if no
7618 simplification can be made. */
7620 static tree
7621 fold_builtin_cos (location_t loc,
7622 tree arg, tree type, tree fndecl)
7624 tree res, narg;
7626 if (!validate_arg (arg, REAL_TYPE))
7627 return NULL_TREE;
7629 /* Calculate the result when the argument is a constant. */
7630 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7631 return res;
7633 /* Optimize cos(-x) into cos (x). */
7634 if ((narg = fold_strip_sign_ops (arg)))
7635 return build_call_expr_loc (loc, fndecl, 1, narg);
7637 return NULL_TREE;
7640 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7641 Return NULL_TREE if no simplification can be made. */
7643 static tree
7644 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7646 if (validate_arg (arg, REAL_TYPE))
7648 tree res, narg;
7650 /* Calculate the result when the argument is a constant. */
7651 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7652 return res;
7654 /* Optimize cosh(-x) into cosh (x). */
7655 if ((narg = fold_strip_sign_ops (arg)))
7656 return build_call_expr_loc (loc, fndecl, 1, narg);
7659 return NULL_TREE;
7662 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7663 argument ARG. TYPE is the type of the return value. Return
7664 NULL_TREE if no simplification can be made. */
7666 static tree
7667 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7668 bool hyper)
7670 if (validate_arg (arg, COMPLEX_TYPE)
7671 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7673 tree tmp;
7675 /* Calculate the result when the argument is a constant. */
7676 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7677 return tmp;
7679 /* Optimize fn(-x) into fn(x). */
7680 if ((tmp = fold_strip_sign_ops (arg)))
7681 return build_call_expr_loc (loc, fndecl, 1, tmp);
7684 return NULL_TREE;
7687 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7688 Return NULL_TREE if no simplification can be made. */
7690 static tree
7691 fold_builtin_tan (tree arg, tree type)
7693 enum built_in_function fcode;
7694 tree res;
7696 if (!validate_arg (arg, REAL_TYPE))
7697 return NULL_TREE;
7699 /* Calculate the result when the argument is a constant. */
7700 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7701 return res;
7703 /* Optimize tan(atan(x)) = x. */
7704 fcode = builtin_mathfn_code (arg);
7705 if (flag_unsafe_math_optimizations
7706 && (fcode == BUILT_IN_ATAN
7707 || fcode == BUILT_IN_ATANF
7708 || fcode == BUILT_IN_ATANL))
7709 return CALL_EXPR_ARG (arg, 0);
7711 return NULL_TREE;
7714 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7715 NULL_TREE if no simplification can be made. */
7717 static tree
7718 fold_builtin_sincos (location_t loc,
7719 tree arg0, tree arg1, tree arg2)
7721 tree type;
7722 tree res, fn, call;
7724 if (!validate_arg (arg0, REAL_TYPE)
7725 || !validate_arg (arg1, POINTER_TYPE)
7726 || !validate_arg (arg2, POINTER_TYPE))
7727 return NULL_TREE;
7729 type = TREE_TYPE (arg0);
7731 /* Calculate the result when the argument is a constant. */
7732 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7733 return res;
7735 /* Canonicalize sincos to cexpi. */
7736 if (!targetm.libc_has_function (function_c99_math_complex))
7737 return NULL_TREE;
7738 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7739 if (!fn)
7740 return NULL_TREE;
7742 call = build_call_expr_loc (loc, fn, 1, arg0);
7743 call = builtin_save_expr (call);
7745 return build2 (COMPOUND_EXPR, void_type_node,
7746 build2 (MODIFY_EXPR, void_type_node,
7747 build_fold_indirect_ref_loc (loc, arg1),
7748 build1 (IMAGPART_EXPR, type, call)),
7749 build2 (MODIFY_EXPR, void_type_node,
7750 build_fold_indirect_ref_loc (loc, arg2),
7751 build1 (REALPART_EXPR, type, call)));
7754 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7755 NULL_TREE if no simplification can be made. */
7757 static tree
7758 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7760 tree rtype;
7761 tree realp, imagp, ifn;
7762 tree res;
7764 if (!validate_arg (arg0, COMPLEX_TYPE)
7765 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7766 return NULL_TREE;
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7770 return res;
7772 rtype = TREE_TYPE (TREE_TYPE (arg0));
7774 /* In case we can figure out the real part of arg0 and it is constant zero
7775 fold to cexpi. */
7776 if (!targetm.libc_has_function (function_c99_math_complex))
7777 return NULL_TREE;
7778 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7779 if (!ifn)
7780 return NULL_TREE;
7782 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7783 && real_zerop (realp))
7785 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7786 return build_call_expr_loc (loc, ifn, 1, narg);
7789 /* In case we can easily decompose real and imaginary parts split cexp
7790 to exp (r) * cexpi (i). */
7791 if (flag_unsafe_math_optimizations
7792 && realp)
7794 tree rfn, rcall, icall;
7796 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7797 if (!rfn)
7798 return NULL_TREE;
7800 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7801 if (!imagp)
7802 return NULL_TREE;
7804 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7805 icall = builtin_save_expr (icall);
7806 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7807 rcall = builtin_save_expr (rcall);
7808 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7809 fold_build2_loc (loc, MULT_EXPR, rtype,
7810 rcall,
7811 fold_build1_loc (loc, REALPART_EXPR,
7812 rtype, icall)),
7813 fold_build2_loc (loc, MULT_EXPR, rtype,
7814 rcall,
7815 fold_build1_loc (loc, IMAGPART_EXPR,
7816 rtype, icall)));
7819 return NULL_TREE;
7822 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7823 Return NULL_TREE if no simplification can be made. */
7825 static tree
7826 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7828 if (!validate_arg (arg, REAL_TYPE))
7829 return NULL_TREE;
7831 /* Optimize trunc of constant value. */
7832 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7834 REAL_VALUE_TYPE r, x;
7835 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7837 x = TREE_REAL_CST (arg);
7838 real_trunc (&r, TYPE_MODE (type), &x);
7839 return build_real (type, r);
7842 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7845 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7846 Return NULL_TREE if no simplification can be made. */
7848 static tree
7849 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7851 if (!validate_arg (arg, REAL_TYPE))
7852 return NULL_TREE;
7854 /* Optimize floor of constant value. */
7855 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7857 REAL_VALUE_TYPE x;
7859 x = TREE_REAL_CST (arg);
7860 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7862 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7863 REAL_VALUE_TYPE r;
7865 real_floor (&r, TYPE_MODE (type), &x);
7866 return build_real (type, r);
7870 /* Fold floor (x) where x is nonnegative to trunc (x). */
7871 if (tree_expr_nonnegative_p (arg))
7873 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7874 if (truncfn)
7875 return build_call_expr_loc (loc, truncfn, 1, arg);
7878 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7881 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7882 Return NULL_TREE if no simplification can be made. */
7884 static tree
7885 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7887 if (!validate_arg (arg, REAL_TYPE))
7888 return NULL_TREE;
7890 /* Optimize ceil of constant value. */
7891 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7893 REAL_VALUE_TYPE x;
7895 x = TREE_REAL_CST (arg);
7896 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7898 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7899 REAL_VALUE_TYPE r;
7901 real_ceil (&r, TYPE_MODE (type), &x);
7902 return build_real (type, r);
7906 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7909 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7910 Return NULL_TREE if no simplification can be made. */
7912 static tree
7913 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7915 if (!validate_arg (arg, REAL_TYPE))
7916 return NULL_TREE;
7918 /* Optimize round of constant value. */
7919 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7921 REAL_VALUE_TYPE x;
7923 x = TREE_REAL_CST (arg);
7924 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7926 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7927 REAL_VALUE_TYPE r;
7929 real_round (&r, TYPE_MODE (type), &x);
7930 return build_real (type, r);
7934 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7937 /* Fold function call to builtin lround, lroundf or lroundl (or the
7938 corresponding long long versions) and other rounding functions. ARG
7939 is the argument to the call. Return NULL_TREE if no simplification
7940 can be made. */
7942 static tree
7943 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7945 if (!validate_arg (arg, REAL_TYPE))
7946 return NULL_TREE;
7948 /* Optimize lround of constant value. */
7949 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7951 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7953 if (real_isfinite (&x))
7955 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7956 tree ftype = TREE_TYPE (arg);
7957 double_int val;
7958 REAL_VALUE_TYPE r;
7960 switch (DECL_FUNCTION_CODE (fndecl))
7962 CASE_FLT_FN (BUILT_IN_IFLOOR):
7963 CASE_FLT_FN (BUILT_IN_LFLOOR):
7964 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7965 real_floor (&r, TYPE_MODE (ftype), &x);
7966 break;
7968 CASE_FLT_FN (BUILT_IN_ICEIL):
7969 CASE_FLT_FN (BUILT_IN_LCEIL):
7970 CASE_FLT_FN (BUILT_IN_LLCEIL):
7971 real_ceil (&r, TYPE_MODE (ftype), &x);
7972 break;
7974 CASE_FLT_FN (BUILT_IN_IROUND):
7975 CASE_FLT_FN (BUILT_IN_LROUND):
7976 CASE_FLT_FN (BUILT_IN_LLROUND):
7977 real_round (&r, TYPE_MODE (ftype), &x);
7978 break;
7980 default:
7981 gcc_unreachable ();
7984 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7985 if (double_int_fits_to_tree_p (itype, val))
7986 return double_int_to_tree (itype, val);
7990 switch (DECL_FUNCTION_CODE (fndecl))
7992 CASE_FLT_FN (BUILT_IN_LFLOOR):
7993 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7994 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7995 if (tree_expr_nonnegative_p (arg))
7996 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7997 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7998 break;
7999 default:;
8002 return fold_fixed_mathfn (loc, fndecl, arg);
8005 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8006 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8007 the argument to the call. Return NULL_TREE if no simplification can
8008 be made. */
8010 static tree
8011 fold_builtin_bitop (tree fndecl, tree arg)
8013 if (!validate_arg (arg, INTEGER_TYPE))
8014 return NULL_TREE;
8016 /* Optimize for constant argument. */
8017 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8019 HOST_WIDE_INT hi, width, result;
8020 unsigned HOST_WIDE_INT lo;
8021 tree type;
8023 type = TREE_TYPE (arg);
8024 width = TYPE_PRECISION (type);
8025 lo = TREE_INT_CST_LOW (arg);
8027 /* Clear all the bits that are beyond the type's precision. */
8028 if (width > HOST_BITS_PER_WIDE_INT)
8030 hi = TREE_INT_CST_HIGH (arg);
8031 if (width < HOST_BITS_PER_DOUBLE_INT)
8032 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8034 else
8036 hi = 0;
8037 if (width < HOST_BITS_PER_WIDE_INT)
8038 lo &= ~(HOST_WIDE_INT_M1U << width);
8041 switch (DECL_FUNCTION_CODE (fndecl))
8043 CASE_INT_FN (BUILT_IN_FFS):
8044 if (lo != 0)
8045 result = ffs_hwi (lo);
8046 else if (hi != 0)
8047 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8048 else
8049 result = 0;
8050 break;
8052 CASE_INT_FN (BUILT_IN_CLZ):
8053 if (hi != 0)
8054 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8055 else if (lo != 0)
8056 result = width - floor_log2 (lo) - 1;
8057 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8058 result = width;
8059 break;
8061 CASE_INT_FN (BUILT_IN_CTZ):
8062 if (lo != 0)
8063 result = ctz_hwi (lo);
8064 else if (hi != 0)
8065 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8066 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8067 result = width;
8068 break;
8070 CASE_INT_FN (BUILT_IN_CLRSB):
8071 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8072 return NULL_TREE;
8073 if (width > HOST_BITS_PER_WIDE_INT
8074 && (hi & ((unsigned HOST_WIDE_INT) 1
8075 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8077 hi = ~hi & ~(HOST_WIDE_INT_M1U
8078 << (width - HOST_BITS_PER_WIDE_INT - 1));
8079 lo = ~lo;
8081 else if (width <= HOST_BITS_PER_WIDE_INT
8082 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8083 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8084 if (hi != 0)
8085 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8086 else if (lo != 0)
8087 result = width - floor_log2 (lo) - 2;
8088 else
8089 result = width - 1;
8090 break;
8092 CASE_INT_FN (BUILT_IN_POPCOUNT):
8093 result = 0;
8094 while (lo)
8095 result++, lo &= lo - 1;
8096 while (hi)
8097 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8098 break;
8100 CASE_INT_FN (BUILT_IN_PARITY):
8101 result = 0;
8102 while (lo)
8103 result++, lo &= lo - 1;
8104 while (hi)
8105 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8106 result &= 1;
8107 break;
8109 default:
8110 gcc_unreachable ();
8113 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8116 return NULL_TREE;
8119 /* Fold function call to builtin_bswap and the short, long and long long
8120 variants. Return NULL_TREE if no simplification can be made. */
8121 static tree
8122 fold_builtin_bswap (tree fndecl, tree arg)
8124 if (! validate_arg (arg, INTEGER_TYPE))
8125 return NULL_TREE;
8127 /* Optimize constant value. */
8128 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8130 HOST_WIDE_INT hi, width, r_hi = 0;
8131 unsigned HOST_WIDE_INT lo, r_lo = 0;
8132 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8134 width = TYPE_PRECISION (type);
8135 lo = TREE_INT_CST_LOW (arg);
8136 hi = TREE_INT_CST_HIGH (arg);
8138 switch (DECL_FUNCTION_CODE (fndecl))
8140 case BUILT_IN_BSWAP16:
8141 case BUILT_IN_BSWAP32:
8142 case BUILT_IN_BSWAP64:
8144 int s;
8146 for (s = 0; s < width; s += 8)
8148 int d = width - s - 8;
8149 unsigned HOST_WIDE_INT byte;
8151 if (s < HOST_BITS_PER_WIDE_INT)
8152 byte = (lo >> s) & 0xff;
8153 else
8154 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8156 if (d < HOST_BITS_PER_WIDE_INT)
8157 r_lo |= byte << d;
8158 else
8159 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8163 break;
8165 default:
8166 gcc_unreachable ();
8169 if (width < HOST_BITS_PER_WIDE_INT)
8170 return build_int_cst (type, r_lo);
8171 else
8172 return build_int_cst_wide (type, r_lo, r_hi);
8175 return NULL_TREE;
8178 /* A subroutine of fold_builtin to fold the various logarithmic
8179 functions. Return NULL_TREE if no simplification can me made.
8180 FUNC is the corresponding MPFR logarithm function. */
8182 static tree
8183 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8184 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8186 if (validate_arg (arg, REAL_TYPE))
8188 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8189 tree res;
8190 const enum built_in_function fcode = builtin_mathfn_code (arg);
8192 /* Calculate the result when the argument is a constant. */
8193 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8194 return res;
8196 /* Special case, optimize logN(expN(x)) = x. */
8197 if (flag_unsafe_math_optimizations
8198 && ((func == mpfr_log
8199 && (fcode == BUILT_IN_EXP
8200 || fcode == BUILT_IN_EXPF
8201 || fcode == BUILT_IN_EXPL))
8202 || (func == mpfr_log2
8203 && (fcode == BUILT_IN_EXP2
8204 || fcode == BUILT_IN_EXP2F
8205 || fcode == BUILT_IN_EXP2L))
8206 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8207 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8209 /* Optimize logN(func()) for various exponential functions. We
8210 want to determine the value "x" and the power "exponent" in
8211 order to transform logN(x**exponent) into exponent*logN(x). */
8212 if (flag_unsafe_math_optimizations)
8214 tree exponent = 0, x = 0;
8216 switch (fcode)
8218 CASE_FLT_FN (BUILT_IN_EXP):
8219 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8220 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8221 dconst_e ()));
8222 exponent = CALL_EXPR_ARG (arg, 0);
8223 break;
8224 CASE_FLT_FN (BUILT_IN_EXP2):
8225 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8226 x = build_real (type, dconst2);
8227 exponent = CALL_EXPR_ARG (arg, 0);
8228 break;
8229 CASE_FLT_FN (BUILT_IN_EXP10):
8230 CASE_FLT_FN (BUILT_IN_POW10):
8231 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8233 REAL_VALUE_TYPE dconst10;
8234 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8235 x = build_real (type, dconst10);
8237 exponent = CALL_EXPR_ARG (arg, 0);
8238 break;
8239 CASE_FLT_FN (BUILT_IN_SQRT):
8240 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8241 x = CALL_EXPR_ARG (arg, 0);
8242 exponent = build_real (type, dconsthalf);
8243 break;
8244 CASE_FLT_FN (BUILT_IN_CBRT):
8245 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8246 x = CALL_EXPR_ARG (arg, 0);
8247 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8248 dconst_third ()));
8249 break;
8250 CASE_FLT_FN (BUILT_IN_POW):
8251 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8252 x = CALL_EXPR_ARG (arg, 0);
8253 exponent = CALL_EXPR_ARG (arg, 1);
8254 break;
8255 default:
8256 break;
8259 /* Now perform the optimization. */
8260 if (x && exponent)
8262 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8263 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8268 return NULL_TREE;
8271 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8272 NULL_TREE if no simplification can be made. */
8274 static tree
8275 fold_builtin_hypot (location_t loc, tree fndecl,
8276 tree arg0, tree arg1, tree type)
8278 tree res, narg0, narg1;
8280 if (!validate_arg (arg0, REAL_TYPE)
8281 || !validate_arg (arg1, REAL_TYPE))
8282 return NULL_TREE;
8284 /* Calculate the result when the argument is a constant. */
8285 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8286 return res;
8288 /* If either argument to hypot has a negate or abs, strip that off.
8289 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8290 narg0 = fold_strip_sign_ops (arg0);
8291 narg1 = fold_strip_sign_ops (arg1);
8292 if (narg0 || narg1)
8294 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8295 narg1 ? narg1 : arg1);
8298 /* If either argument is zero, hypot is fabs of the other. */
8299 if (real_zerop (arg0))
8300 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8301 else if (real_zerop (arg1))
8302 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8304 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8305 if (flag_unsafe_math_optimizations
8306 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8308 const REAL_VALUE_TYPE sqrt2_trunc
8309 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8310 return fold_build2_loc (loc, MULT_EXPR, type,
8311 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8312 build_real (type, sqrt2_trunc));
8315 return NULL_TREE;
8319 /* Fold a builtin function call to pow, powf, or powl. Return
8320 NULL_TREE if no simplification can be made. */
8321 static tree
8322 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8324 tree res;
8326 if (!validate_arg (arg0, REAL_TYPE)
8327 || !validate_arg (arg1, REAL_TYPE))
8328 return NULL_TREE;
8330 /* Calculate the result when the argument is a constant. */
8331 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8332 return res;
8334 /* Optimize pow(1.0,y) = 1.0. */
8335 if (real_onep (arg0))
8336 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8338 if (TREE_CODE (arg1) == REAL_CST
8339 && !TREE_OVERFLOW (arg1))
8341 REAL_VALUE_TYPE cint;
8342 REAL_VALUE_TYPE c;
8343 HOST_WIDE_INT n;
8345 c = TREE_REAL_CST (arg1);
8347 /* Optimize pow(x,0.0) = 1.0. */
8348 if (REAL_VALUES_EQUAL (c, dconst0))
8349 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8350 arg0);
8352 /* Optimize pow(x,1.0) = x. */
8353 if (REAL_VALUES_EQUAL (c, dconst1))
8354 return arg0;
8356 /* Optimize pow(x,-1.0) = 1.0/x. */
8357 if (REAL_VALUES_EQUAL (c, dconstm1))
8358 return fold_build2_loc (loc, RDIV_EXPR, type,
8359 build_real (type, dconst1), arg0);
8361 /* Optimize pow(x,0.5) = sqrt(x). */
8362 if (flag_unsafe_math_optimizations
8363 && REAL_VALUES_EQUAL (c, dconsthalf))
8365 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8367 if (sqrtfn != NULL_TREE)
8368 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8371 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8372 if (flag_unsafe_math_optimizations)
8374 const REAL_VALUE_TYPE dconstroot
8375 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8377 if (REAL_VALUES_EQUAL (c, dconstroot))
8379 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8380 if (cbrtfn != NULL_TREE)
8381 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8385 /* Check for an integer exponent. */
8386 n = real_to_integer (&c);
8387 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8388 if (real_identical (&c, &cint))
8390 /* Attempt to evaluate pow at compile-time, unless this should
8391 raise an exception. */
8392 if (TREE_CODE (arg0) == REAL_CST
8393 && !TREE_OVERFLOW (arg0)
8394 && (n > 0
8395 || (!flag_trapping_math && !flag_errno_math)
8396 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8398 REAL_VALUE_TYPE x;
8399 bool inexact;
8401 x = TREE_REAL_CST (arg0);
8402 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8403 if (flag_unsafe_math_optimizations || !inexact)
8404 return build_real (type, x);
8407 /* Strip sign ops from even integer powers. */
8408 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8410 tree narg0 = fold_strip_sign_ops (arg0);
8411 if (narg0)
8412 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8417 if (flag_unsafe_math_optimizations)
8419 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8421 /* Optimize pow(expN(x),y) = expN(x*y). */
8422 if (BUILTIN_EXPONENT_P (fcode))
8424 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8425 tree arg = CALL_EXPR_ARG (arg0, 0);
8426 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8427 return build_call_expr_loc (loc, expfn, 1, arg);
8430 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8431 if (BUILTIN_SQRT_P (fcode))
8433 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8434 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8435 build_real (type, dconsthalf));
8436 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8439 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8440 if (BUILTIN_CBRT_P (fcode))
8442 tree arg = CALL_EXPR_ARG (arg0, 0);
8443 if (tree_expr_nonnegative_p (arg))
8445 const REAL_VALUE_TYPE dconstroot
8446 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8447 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8448 build_real (type, dconstroot));
8449 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8453 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8454 if (fcode == BUILT_IN_POW
8455 || fcode == BUILT_IN_POWF
8456 || fcode == BUILT_IN_POWL)
8458 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8459 if (tree_expr_nonnegative_p (arg00))
8461 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8462 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8463 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8468 return NULL_TREE;
8471 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8472 Return NULL_TREE if no simplification can be made. */
8473 static tree
8474 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8475 tree arg0, tree arg1, tree type)
8477 if (!validate_arg (arg0, REAL_TYPE)
8478 || !validate_arg (arg1, INTEGER_TYPE))
8479 return NULL_TREE;
8481 /* Optimize pow(1.0,y) = 1.0. */
8482 if (real_onep (arg0))
8483 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8485 if (host_integerp (arg1, 0))
8487 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8489 /* Evaluate powi at compile-time. */
8490 if (TREE_CODE (arg0) == REAL_CST
8491 && !TREE_OVERFLOW (arg0))
8493 REAL_VALUE_TYPE x;
8494 x = TREE_REAL_CST (arg0);
8495 real_powi (&x, TYPE_MODE (type), &x, c);
8496 return build_real (type, x);
8499 /* Optimize pow(x,0) = 1.0. */
8500 if (c == 0)
8501 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8502 arg0);
8504 /* Optimize pow(x,1) = x. */
8505 if (c == 1)
8506 return arg0;
8508 /* Optimize pow(x,-1) = 1.0/x. */
8509 if (c == -1)
8510 return fold_build2_loc (loc, RDIV_EXPR, type,
8511 build_real (type, dconst1), arg0);
8514 return NULL_TREE;
8517 /* A subroutine of fold_builtin to fold the various exponent
8518 functions. Return NULL_TREE if no simplification can be made.
8519 FUNC is the corresponding MPFR exponent function. */
8521 static tree
8522 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8523 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8525 if (validate_arg (arg, REAL_TYPE))
8527 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8528 tree res;
8530 /* Calculate the result when the argument is a constant. */
8531 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8532 return res;
8534 /* Optimize expN(logN(x)) = x. */
8535 if (flag_unsafe_math_optimizations)
8537 const enum built_in_function fcode = builtin_mathfn_code (arg);
8539 if ((func == mpfr_exp
8540 && (fcode == BUILT_IN_LOG
8541 || fcode == BUILT_IN_LOGF
8542 || fcode == BUILT_IN_LOGL))
8543 || (func == mpfr_exp2
8544 && (fcode == BUILT_IN_LOG2
8545 || fcode == BUILT_IN_LOG2F
8546 || fcode == BUILT_IN_LOG2L))
8547 || (func == mpfr_exp10
8548 && (fcode == BUILT_IN_LOG10
8549 || fcode == BUILT_IN_LOG10F
8550 || fcode == BUILT_IN_LOG10L)))
8551 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8555 return NULL_TREE;
8558 /* Return true if VAR is a VAR_DECL or a component thereof. */
8560 static bool
8561 var_decl_component_p (tree var)
8563 tree inner = var;
8564 while (handled_component_p (inner))
8565 inner = TREE_OPERAND (inner, 0);
8566 return SSA_VAR_P (inner);
8569 /* Fold function call to builtin memset. Return
8570 NULL_TREE if no simplification can be made. */
8572 static tree
8573 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8574 tree type, bool ignore)
8576 tree var, ret, etype;
8577 unsigned HOST_WIDE_INT length, cval;
8579 if (! validate_arg (dest, POINTER_TYPE)
8580 || ! validate_arg (c, INTEGER_TYPE)
8581 || ! validate_arg (len, INTEGER_TYPE))
8582 return NULL_TREE;
8584 if (! host_integerp (len, 1))
8585 return NULL_TREE;
8587 /* If the LEN parameter is zero, return DEST. */
8588 if (integer_zerop (len))
8589 return omit_one_operand_loc (loc, type, dest, c);
8591 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8592 return NULL_TREE;
8594 var = dest;
8595 STRIP_NOPS (var);
8596 if (TREE_CODE (var) != ADDR_EXPR)
8597 return NULL_TREE;
8599 var = TREE_OPERAND (var, 0);
8600 if (TREE_THIS_VOLATILE (var))
8601 return NULL_TREE;
8603 etype = TREE_TYPE (var);
8604 if (TREE_CODE (etype) == ARRAY_TYPE)
8605 etype = TREE_TYPE (etype);
8607 if (!INTEGRAL_TYPE_P (etype)
8608 && !POINTER_TYPE_P (etype))
8609 return NULL_TREE;
8611 if (! var_decl_component_p (var))
8612 return NULL_TREE;
8614 length = tree_low_cst (len, 1);
8615 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8616 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8617 return NULL_TREE;
8619 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8620 return NULL_TREE;
8622 if (integer_zerop (c))
8623 cval = 0;
8624 else
8626 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8627 return NULL_TREE;
8629 cval = TREE_INT_CST_LOW (c);
8630 cval &= 0xff;
8631 cval |= cval << 8;
8632 cval |= cval << 16;
8633 cval |= (cval << 31) << 1;
8636 ret = build_int_cst_type (etype, cval);
8637 var = build_fold_indirect_ref_loc (loc,
8638 fold_convert_loc (loc,
8639 build_pointer_type (etype),
8640 dest));
8641 ret = build2 (MODIFY_EXPR, etype, var, ret);
8642 if (ignore)
8643 return ret;
8645 return omit_one_operand_loc (loc, type, dest, ret);
8648 /* Fold function call to builtin memset. Return
8649 NULL_TREE if no simplification can be made. */
8651 static tree
8652 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8654 if (! validate_arg (dest, POINTER_TYPE)
8655 || ! validate_arg (size, INTEGER_TYPE))
8656 return NULL_TREE;
8658 if (!ignore)
8659 return NULL_TREE;
8661 /* New argument list transforming bzero(ptr x, int y) to
8662 memset(ptr x, int 0, size_t y). This is done this way
8663 so that if it isn't expanded inline, we fallback to
8664 calling bzero instead of memset. */
8666 return fold_builtin_memset (loc, dest, integer_zero_node,
8667 fold_convert_loc (loc, size_type_node, size),
8668 void_type_node, ignore);
8671 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8672 NULL_TREE if no simplification can be made.
8673 If ENDP is 0, return DEST (like memcpy).
8674 If ENDP is 1, return DEST+LEN (like mempcpy).
8675 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8676 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8677 (memmove). */
8679 static tree
8680 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8681 tree len, tree type, bool ignore, int endp)
8683 tree destvar, srcvar, expr;
8685 if (! validate_arg (dest, POINTER_TYPE)
8686 || ! validate_arg (src, POINTER_TYPE)
8687 || ! validate_arg (len, INTEGER_TYPE))
8688 return NULL_TREE;
8690 /* If the LEN parameter is zero, return DEST. */
8691 if (integer_zerop (len))
8692 return omit_one_operand_loc (loc, type, dest, src);
8694 /* If SRC and DEST are the same (and not volatile), return
8695 DEST{,+LEN,+LEN-1}. */
8696 if (operand_equal_p (src, dest, 0))
8697 expr = len;
8698 else
8700 tree srctype, desttype;
8701 unsigned int src_align, dest_align;
8702 tree off0;
8704 if (endp == 3)
8706 src_align = get_pointer_alignment (src);
8707 dest_align = get_pointer_alignment (dest);
8709 /* Both DEST and SRC must be pointer types.
8710 ??? This is what old code did. Is the testing for pointer types
8711 really mandatory?
8713 If either SRC is readonly or length is 1, we can use memcpy. */
8714 if (!dest_align || !src_align)
8715 return NULL_TREE;
8716 if (readonly_data_expr (src)
8717 || (host_integerp (len, 1)
8718 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8719 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8721 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8722 if (!fn)
8723 return NULL_TREE;
8724 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8727 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8728 if (TREE_CODE (src) == ADDR_EXPR
8729 && TREE_CODE (dest) == ADDR_EXPR)
8731 tree src_base, dest_base, fn;
8732 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8733 HOST_WIDE_INT size = -1;
8734 HOST_WIDE_INT maxsize = -1;
8736 srcvar = TREE_OPERAND (src, 0);
8737 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8738 &size, &maxsize);
8739 destvar = TREE_OPERAND (dest, 0);
8740 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8741 &size, &maxsize);
8742 if (host_integerp (len, 1))
8743 maxsize = tree_low_cst (len, 1);
8744 else
8745 maxsize = -1;
8746 src_offset /= BITS_PER_UNIT;
8747 dest_offset /= BITS_PER_UNIT;
8748 if (SSA_VAR_P (src_base)
8749 && SSA_VAR_P (dest_base))
8751 if (operand_equal_p (src_base, dest_base, 0)
8752 && ranges_overlap_p (src_offset, maxsize,
8753 dest_offset, maxsize))
8754 return NULL_TREE;
8756 else if (TREE_CODE (src_base) == MEM_REF
8757 && TREE_CODE (dest_base) == MEM_REF)
8759 double_int off;
8760 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8761 TREE_OPERAND (dest_base, 0), 0))
8762 return NULL_TREE;
8763 off = mem_ref_offset (src_base) +
8764 double_int::from_shwi (src_offset);
8765 if (!off.fits_shwi ())
8766 return NULL_TREE;
8767 src_offset = off.low;
8768 off = mem_ref_offset (dest_base) +
8769 double_int::from_shwi (dest_offset);
8770 if (!off.fits_shwi ())
8771 return NULL_TREE;
8772 dest_offset = off.low;
8773 if (ranges_overlap_p (src_offset, maxsize,
8774 dest_offset, maxsize))
8775 return NULL_TREE;
8777 else
8778 return NULL_TREE;
8780 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8781 if (!fn)
8782 return NULL_TREE;
8783 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8786 /* If the destination and source do not alias optimize into
8787 memcpy as well. */
8788 if ((is_gimple_min_invariant (dest)
8789 || TREE_CODE (dest) == SSA_NAME)
8790 && (is_gimple_min_invariant (src)
8791 || TREE_CODE (src) == SSA_NAME))
8793 ao_ref destr, srcr;
8794 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8795 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8796 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8798 tree fn;
8799 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8800 if (!fn)
8801 return NULL_TREE;
8802 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8806 return NULL_TREE;
8809 if (!host_integerp (len, 0))
8810 return NULL_TREE;
8811 /* FIXME:
8812 This logic lose for arguments like (type *)malloc (sizeof (type)),
8813 since we strip the casts of up to VOID return value from malloc.
8814 Perhaps we ought to inherit type from non-VOID argument here? */
8815 STRIP_NOPS (src);
8816 STRIP_NOPS (dest);
8817 if (!POINTER_TYPE_P (TREE_TYPE (src))
8818 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8819 return NULL_TREE;
8820 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8821 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8823 tree tem = TREE_OPERAND (src, 0);
8824 STRIP_NOPS (tem);
8825 if (tem != TREE_OPERAND (src, 0))
8826 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8828 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8830 tree tem = TREE_OPERAND (dest, 0);
8831 STRIP_NOPS (tem);
8832 if (tem != TREE_OPERAND (dest, 0))
8833 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8835 srctype = TREE_TYPE (TREE_TYPE (src));
8836 if (TREE_CODE (srctype) == ARRAY_TYPE
8837 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8839 srctype = TREE_TYPE (srctype);
8840 STRIP_NOPS (src);
8841 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8843 desttype = TREE_TYPE (TREE_TYPE (dest));
8844 if (TREE_CODE (desttype) == ARRAY_TYPE
8845 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8847 desttype = TREE_TYPE (desttype);
8848 STRIP_NOPS (dest);
8849 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8851 if (TREE_ADDRESSABLE (srctype)
8852 || TREE_ADDRESSABLE (desttype))
8853 return NULL_TREE;
8855 src_align = get_pointer_alignment (src);
8856 dest_align = get_pointer_alignment (dest);
8857 if (dest_align < TYPE_ALIGN (desttype)
8858 || src_align < TYPE_ALIGN (srctype))
8859 return NULL_TREE;
8861 if (!ignore)
8862 dest = builtin_save_expr (dest);
8864 /* Build accesses at offset zero with a ref-all character type. */
8865 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8866 ptr_mode, true), 0);
8868 destvar = dest;
8869 STRIP_NOPS (destvar);
8870 if (TREE_CODE (destvar) == ADDR_EXPR
8871 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8872 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8873 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8874 else
8875 destvar = NULL_TREE;
8877 srcvar = src;
8878 STRIP_NOPS (srcvar);
8879 if (TREE_CODE (srcvar) == ADDR_EXPR
8880 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8881 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8883 if (!destvar
8884 || src_align >= TYPE_ALIGN (desttype))
8885 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8886 srcvar, off0);
8887 else if (!STRICT_ALIGNMENT)
8889 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8890 src_align);
8891 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8893 else
8894 srcvar = NULL_TREE;
8896 else
8897 srcvar = NULL_TREE;
8899 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8900 return NULL_TREE;
8902 if (srcvar == NULL_TREE)
8904 STRIP_NOPS (src);
8905 if (src_align >= TYPE_ALIGN (desttype))
8906 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8907 else
8909 if (STRICT_ALIGNMENT)
8910 return NULL_TREE;
8911 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8912 src_align);
8913 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8916 else if (destvar == NULL_TREE)
8918 STRIP_NOPS (dest);
8919 if (dest_align >= TYPE_ALIGN (srctype))
8920 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8921 else
8923 if (STRICT_ALIGNMENT)
8924 return NULL_TREE;
8925 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8926 dest_align);
8927 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8931 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8934 if (ignore)
8935 return expr;
8937 if (endp == 0 || endp == 3)
8938 return omit_one_operand_loc (loc, type, dest, expr);
8940 if (expr == len)
8941 expr = NULL_TREE;
8943 if (endp == 2)
8944 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8945 ssize_int (1));
8947 dest = fold_build_pointer_plus_loc (loc, dest, len);
8948 dest = fold_convert_loc (loc, type, dest);
8949 if (expr)
8950 dest = omit_one_operand_loc (loc, type, dest, expr);
8951 return dest;
8954 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8955 If LEN is not NULL, it represents the length of the string to be
8956 copied. Return NULL_TREE if no simplification can be made. */
8958 tree
8959 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8961 tree fn;
8963 if (!validate_arg (dest, POINTER_TYPE)
8964 || !validate_arg (src, POINTER_TYPE))
8965 return NULL_TREE;
8967 /* If SRC and DEST are the same (and not volatile), return DEST. */
8968 if (operand_equal_p (src, dest, 0))
8969 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8971 if (optimize_function_for_size_p (cfun))
8972 return NULL_TREE;
8974 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8975 if (!fn)
8976 return NULL_TREE;
8978 if (!len)
8980 len = c_strlen (src, 1);
8981 if (! len || TREE_SIDE_EFFECTS (len))
8982 return NULL_TREE;
8985 len = fold_convert_loc (loc, size_type_node, len);
8986 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8987 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8988 build_call_expr_loc (loc, fn, 3, dest, src, len));
8991 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8992 Return NULL_TREE if no simplification can be made. */
8994 static tree
8995 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8997 tree fn, len, lenp1, call, type;
8999 if (!validate_arg (dest, POINTER_TYPE)
9000 || !validate_arg (src, POINTER_TYPE))
9001 return NULL_TREE;
9003 len = c_strlen (src, 1);
9004 if (!len
9005 || TREE_CODE (len) != INTEGER_CST)
9006 return NULL_TREE;
9008 if (optimize_function_for_size_p (cfun)
9009 /* If length is zero it's small enough. */
9010 && !integer_zerop (len))
9011 return NULL_TREE;
9013 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9014 if (!fn)
9015 return NULL_TREE;
9017 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9018 fold_convert_loc (loc, size_type_node, len),
9019 build_int_cst (size_type_node, 1));
9020 /* We use dest twice in building our expression. Save it from
9021 multiple expansions. */
9022 dest = builtin_save_expr (dest);
9023 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9025 type = TREE_TYPE (TREE_TYPE (fndecl));
9026 dest = fold_build_pointer_plus_loc (loc, dest, len);
9027 dest = fold_convert_loc (loc, type, dest);
9028 dest = omit_one_operand_loc (loc, type, dest, call);
9029 return dest;
9032 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9033 If SLEN is not NULL, it represents the length of the source string.
9034 Return NULL_TREE if no simplification can be made. */
9036 tree
9037 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9038 tree src, tree len, tree slen)
9040 tree fn;
9042 if (!validate_arg (dest, POINTER_TYPE)
9043 || !validate_arg (src, POINTER_TYPE)
9044 || !validate_arg (len, INTEGER_TYPE))
9045 return NULL_TREE;
9047 /* If the LEN parameter is zero, return DEST. */
9048 if (integer_zerop (len))
9049 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9051 /* We can't compare slen with len as constants below if len is not a
9052 constant. */
9053 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9054 return NULL_TREE;
9056 if (!slen)
9057 slen = c_strlen (src, 1);
9059 /* Now, we must be passed a constant src ptr parameter. */
9060 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9061 return NULL_TREE;
9063 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9065 /* We do not support simplification of this case, though we do
9066 support it when expanding trees into RTL. */
9067 /* FIXME: generate a call to __builtin_memset. */
9068 if (tree_int_cst_lt (slen, len))
9069 return NULL_TREE;
9071 /* OK transform into builtin memcpy. */
9072 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9073 if (!fn)
9074 return NULL_TREE;
9076 len = fold_convert_loc (loc, size_type_node, len);
9077 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9078 build_call_expr_loc (loc, fn, 3, dest, src, len));
9081 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9082 arguments to the call, and TYPE is its return type.
9083 Return NULL_TREE if no simplification can be made. */
9085 static tree
9086 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9088 if (!validate_arg (arg1, POINTER_TYPE)
9089 || !validate_arg (arg2, INTEGER_TYPE)
9090 || !validate_arg (len, INTEGER_TYPE))
9091 return NULL_TREE;
9092 else
9094 const char *p1;
9096 if (TREE_CODE (arg2) != INTEGER_CST
9097 || !host_integerp (len, 1))
9098 return NULL_TREE;
9100 p1 = c_getstr (arg1);
9101 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9103 char c;
9104 const char *r;
9105 tree tem;
9107 if (target_char_cast (arg2, &c))
9108 return NULL_TREE;
9110 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9112 if (r == NULL)
9113 return build_int_cst (TREE_TYPE (arg1), 0);
9115 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9116 return fold_convert_loc (loc, type, tem);
9118 return NULL_TREE;
9122 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9123 Return NULL_TREE if no simplification can be made. */
9125 static tree
9126 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9128 const char *p1, *p2;
9130 if (!validate_arg (arg1, POINTER_TYPE)
9131 || !validate_arg (arg2, POINTER_TYPE)
9132 || !validate_arg (len, INTEGER_TYPE))
9133 return NULL_TREE;
9135 /* If the LEN parameter is zero, return zero. */
9136 if (integer_zerop (len))
9137 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9138 arg1, arg2);
9140 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9141 if (operand_equal_p (arg1, arg2, 0))
9142 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9144 p1 = c_getstr (arg1);
9145 p2 = c_getstr (arg2);
9147 /* If all arguments are constant, and the value of len is not greater
9148 than the lengths of arg1 and arg2, evaluate at compile-time. */
9149 if (host_integerp (len, 1) && p1 && p2
9150 && compare_tree_int (len, strlen (p1) + 1) <= 0
9151 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9153 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9155 if (r > 0)
9156 return integer_one_node;
9157 else if (r < 0)
9158 return integer_minus_one_node;
9159 else
9160 return integer_zero_node;
9163 /* If len parameter is one, return an expression corresponding to
9164 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9165 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9167 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9168 tree cst_uchar_ptr_node
9169 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9171 tree ind1
9172 = fold_convert_loc (loc, integer_type_node,
9173 build1 (INDIRECT_REF, cst_uchar_node,
9174 fold_convert_loc (loc,
9175 cst_uchar_ptr_node,
9176 arg1)));
9177 tree ind2
9178 = fold_convert_loc (loc, integer_type_node,
9179 build1 (INDIRECT_REF, cst_uchar_node,
9180 fold_convert_loc (loc,
9181 cst_uchar_ptr_node,
9182 arg2)));
9183 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9186 return NULL_TREE;
9189 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9190 Return NULL_TREE if no simplification can be made. */
9192 static tree
9193 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9195 const char *p1, *p2;
9197 if (!validate_arg (arg1, POINTER_TYPE)
9198 || !validate_arg (arg2, POINTER_TYPE))
9199 return NULL_TREE;
9201 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9202 if (operand_equal_p (arg1, arg2, 0))
9203 return integer_zero_node;
9205 p1 = c_getstr (arg1);
9206 p2 = c_getstr (arg2);
9208 if (p1 && p2)
9210 const int i = strcmp (p1, p2);
9211 if (i < 0)
9212 return integer_minus_one_node;
9213 else if (i > 0)
9214 return integer_one_node;
9215 else
9216 return integer_zero_node;
9219 /* If the second arg is "", return *(const unsigned char*)arg1. */
9220 if (p2 && *p2 == '\0')
9222 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9223 tree cst_uchar_ptr_node
9224 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9226 return fold_convert_loc (loc, integer_type_node,
9227 build1 (INDIRECT_REF, cst_uchar_node,
9228 fold_convert_loc (loc,
9229 cst_uchar_ptr_node,
9230 arg1)));
9233 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9234 if (p1 && *p1 == '\0')
9236 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9237 tree cst_uchar_ptr_node
9238 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9240 tree temp
9241 = fold_convert_loc (loc, integer_type_node,
9242 build1 (INDIRECT_REF, cst_uchar_node,
9243 fold_convert_loc (loc,
9244 cst_uchar_ptr_node,
9245 arg2)));
9246 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9249 return NULL_TREE;
9252 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9253 Return NULL_TREE if no simplification can be made. */
9255 static tree
9256 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9258 const char *p1, *p2;
9260 if (!validate_arg (arg1, POINTER_TYPE)
9261 || !validate_arg (arg2, POINTER_TYPE)
9262 || !validate_arg (len, INTEGER_TYPE))
9263 return NULL_TREE;
9265 /* If the LEN parameter is zero, return zero. */
9266 if (integer_zerop (len))
9267 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9268 arg1, arg2);
9270 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9271 if (operand_equal_p (arg1, arg2, 0))
9272 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9274 p1 = c_getstr (arg1);
9275 p2 = c_getstr (arg2);
9277 if (host_integerp (len, 1) && p1 && p2)
9279 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9280 if (i > 0)
9281 return integer_one_node;
9282 else if (i < 0)
9283 return integer_minus_one_node;
9284 else
9285 return integer_zero_node;
9288 /* If the second arg is "", and the length is greater than zero,
9289 return *(const unsigned char*)arg1. */
9290 if (p2 && *p2 == '\0'
9291 && TREE_CODE (len) == INTEGER_CST
9292 && tree_int_cst_sgn (len) == 1)
9294 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9295 tree cst_uchar_ptr_node
9296 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9298 return fold_convert_loc (loc, integer_type_node,
9299 build1 (INDIRECT_REF, cst_uchar_node,
9300 fold_convert_loc (loc,
9301 cst_uchar_ptr_node,
9302 arg1)));
9305 /* If the first arg is "", and the length is greater than zero,
9306 return -*(const unsigned char*)arg2. */
9307 if (p1 && *p1 == '\0'
9308 && TREE_CODE (len) == INTEGER_CST
9309 && tree_int_cst_sgn (len) == 1)
9311 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9312 tree cst_uchar_ptr_node
9313 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9315 tree temp = fold_convert_loc (loc, integer_type_node,
9316 build1 (INDIRECT_REF, cst_uchar_node,
9317 fold_convert_loc (loc,
9318 cst_uchar_ptr_node,
9319 arg2)));
9320 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9323 /* If len parameter is one, return an expression corresponding to
9324 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9325 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9327 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9328 tree cst_uchar_ptr_node
9329 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9331 tree ind1 = fold_convert_loc (loc, integer_type_node,
9332 build1 (INDIRECT_REF, cst_uchar_node,
9333 fold_convert_loc (loc,
9334 cst_uchar_ptr_node,
9335 arg1)));
9336 tree ind2 = fold_convert_loc (loc, integer_type_node,
9337 build1 (INDIRECT_REF, cst_uchar_node,
9338 fold_convert_loc (loc,
9339 cst_uchar_ptr_node,
9340 arg2)));
9341 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9344 return NULL_TREE;
9347 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9348 ARG. Return NULL_TREE if no simplification can be made. */
9350 static tree
9351 fold_builtin_signbit (location_t loc, tree arg, tree type)
9353 if (!validate_arg (arg, REAL_TYPE))
9354 return NULL_TREE;
9356 /* If ARG is a compile-time constant, determine the result. */
9357 if (TREE_CODE (arg) == REAL_CST
9358 && !TREE_OVERFLOW (arg))
9360 REAL_VALUE_TYPE c;
9362 c = TREE_REAL_CST (arg);
9363 return (REAL_VALUE_NEGATIVE (c)
9364 ? build_one_cst (type)
9365 : build_zero_cst (type));
9368 /* If ARG is non-negative, the result is always zero. */
9369 if (tree_expr_nonnegative_p (arg))
9370 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9372 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9373 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9374 return fold_convert (type,
9375 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9376 build_real (TREE_TYPE (arg), dconst0)));
9378 return NULL_TREE;
9381 /* Fold function call to builtin copysign, copysignf or copysignl with
9382 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9383 be made. */
9385 static tree
9386 fold_builtin_copysign (location_t loc, tree fndecl,
9387 tree arg1, tree arg2, tree type)
9389 tree tem;
9391 if (!validate_arg (arg1, REAL_TYPE)
9392 || !validate_arg (arg2, REAL_TYPE))
9393 return NULL_TREE;
9395 /* copysign(X,X) is X. */
9396 if (operand_equal_p (arg1, arg2, 0))
9397 return fold_convert_loc (loc, type, arg1);
9399 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9400 if (TREE_CODE (arg1) == REAL_CST
9401 && TREE_CODE (arg2) == REAL_CST
9402 && !TREE_OVERFLOW (arg1)
9403 && !TREE_OVERFLOW (arg2))
9405 REAL_VALUE_TYPE c1, c2;
9407 c1 = TREE_REAL_CST (arg1);
9408 c2 = TREE_REAL_CST (arg2);
9409 /* c1.sign := c2.sign. */
9410 real_copysign (&c1, &c2);
9411 return build_real (type, c1);
9414 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9415 Remember to evaluate Y for side-effects. */
9416 if (tree_expr_nonnegative_p (arg2))
9417 return omit_one_operand_loc (loc, type,
9418 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9419 arg2);
9421 /* Strip sign changing operations for the first argument. */
9422 tem = fold_strip_sign_ops (arg1);
9423 if (tem)
9424 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9426 return NULL_TREE;
9429 /* Fold a call to builtin isascii with argument ARG. */
9431 static tree
9432 fold_builtin_isascii (location_t loc, tree arg)
9434 if (!validate_arg (arg, INTEGER_TYPE))
9435 return NULL_TREE;
9436 else
9438 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9439 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9440 build_int_cst (integer_type_node,
9441 ~ (unsigned HOST_WIDE_INT) 0x7f));
9442 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9443 arg, integer_zero_node);
9447 /* Fold a call to builtin toascii with argument ARG. */
9449 static tree
9450 fold_builtin_toascii (location_t loc, tree arg)
9452 if (!validate_arg (arg, INTEGER_TYPE))
9453 return NULL_TREE;
9455 /* Transform toascii(c) -> (c & 0x7f). */
9456 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9457 build_int_cst (integer_type_node, 0x7f));
9460 /* Fold a call to builtin isdigit with argument ARG. */
9462 static tree
9463 fold_builtin_isdigit (location_t loc, tree arg)
9465 if (!validate_arg (arg, INTEGER_TYPE))
9466 return NULL_TREE;
9467 else
9469 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9470 /* According to the C standard, isdigit is unaffected by locale.
9471 However, it definitely is affected by the target character set. */
9472 unsigned HOST_WIDE_INT target_digit0
9473 = lang_hooks.to_target_charset ('0');
9475 if (target_digit0 == 0)
9476 return NULL_TREE;
9478 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9479 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9480 build_int_cst (unsigned_type_node, target_digit0));
9481 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9482 build_int_cst (unsigned_type_node, 9));
9486 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9488 static tree
9489 fold_builtin_fabs (location_t loc, tree arg, tree type)
9491 if (!validate_arg (arg, REAL_TYPE))
9492 return NULL_TREE;
9494 arg = fold_convert_loc (loc, type, arg);
9495 if (TREE_CODE (arg) == REAL_CST)
9496 return fold_abs_const (arg, type);
9497 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9500 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9502 static tree
9503 fold_builtin_abs (location_t loc, tree arg, tree type)
9505 if (!validate_arg (arg, INTEGER_TYPE))
9506 return NULL_TREE;
9508 arg = fold_convert_loc (loc, type, arg);
9509 if (TREE_CODE (arg) == INTEGER_CST)
9510 return fold_abs_const (arg, type);
9511 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9514 /* Fold a fma operation with arguments ARG[012]. */
9516 tree
9517 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9518 tree type, tree arg0, tree arg1, tree arg2)
9520 if (TREE_CODE (arg0) == REAL_CST
9521 && TREE_CODE (arg1) == REAL_CST
9522 && TREE_CODE (arg2) == REAL_CST)
9523 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9525 return NULL_TREE;
9528 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9530 static tree
9531 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9533 if (validate_arg (arg0, REAL_TYPE)
9534 && validate_arg (arg1, REAL_TYPE)
9535 && validate_arg (arg2, REAL_TYPE))
9537 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9538 if (tem)
9539 return tem;
9541 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9542 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9543 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9545 return NULL_TREE;
9548 /* Fold a call to builtin fmin or fmax. */
9550 static tree
9551 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9552 tree type, bool max)
9554 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9556 /* Calculate the result when the argument is a constant. */
9557 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9559 if (res)
9560 return res;
9562 /* If either argument is NaN, return the other one. Avoid the
9563 transformation if we get (and honor) a signalling NaN. Using
9564 omit_one_operand() ensures we create a non-lvalue. */
9565 if (TREE_CODE (arg0) == REAL_CST
9566 && real_isnan (&TREE_REAL_CST (arg0))
9567 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9568 || ! TREE_REAL_CST (arg0).signalling))
9569 return omit_one_operand_loc (loc, type, arg1, arg0);
9570 if (TREE_CODE (arg1) == REAL_CST
9571 && real_isnan (&TREE_REAL_CST (arg1))
9572 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9573 || ! TREE_REAL_CST (arg1).signalling))
9574 return omit_one_operand_loc (loc, type, arg0, arg1);
9576 /* Transform fmin/fmax(x,x) -> x. */
9577 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9578 return omit_one_operand_loc (loc, type, arg0, arg1);
9580 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9581 functions to return the numeric arg if the other one is NaN.
9582 These tree codes don't honor that, so only transform if
9583 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9584 handled, so we don't have to worry about it either. */
9585 if (flag_finite_math_only)
9586 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9587 fold_convert_loc (loc, type, arg0),
9588 fold_convert_loc (loc, type, arg1));
9590 return NULL_TREE;
9593 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9595 static tree
9596 fold_builtin_carg (location_t loc, tree arg, tree type)
9598 if (validate_arg (arg, COMPLEX_TYPE)
9599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9601 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9603 if (atan2_fn)
9605 tree new_arg = builtin_save_expr (arg);
9606 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9607 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9608 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9612 return NULL_TREE;
9615 /* Fold a call to builtin logb/ilogb. */
9617 static tree
9618 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9620 if (! validate_arg (arg, REAL_TYPE))
9621 return NULL_TREE;
9623 STRIP_NOPS (arg);
9625 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9627 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9629 switch (value->cl)
9631 case rvc_nan:
9632 case rvc_inf:
9633 /* If arg is Inf or NaN and we're logb, return it. */
9634 if (TREE_CODE (rettype) == REAL_TYPE)
9636 /* For logb(-Inf) we have to return +Inf. */
9637 if (real_isinf (value) && real_isneg (value))
9639 REAL_VALUE_TYPE tem;
9640 real_inf (&tem);
9641 return build_real (rettype, tem);
9643 return fold_convert_loc (loc, rettype, arg);
9645 /* Fall through... */
9646 case rvc_zero:
9647 /* Zero may set errno and/or raise an exception for logb, also
9648 for ilogb we don't know FP_ILOGB0. */
9649 return NULL_TREE;
9650 case rvc_normal:
9651 /* For normal numbers, proceed iff radix == 2. In GCC,
9652 normalized significands are in the range [0.5, 1.0). We
9653 want the exponent as if they were [1.0, 2.0) so get the
9654 exponent and subtract 1. */
9655 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9656 return fold_convert_loc (loc, rettype,
9657 build_int_cst (integer_type_node,
9658 REAL_EXP (value)-1));
9659 break;
9663 return NULL_TREE;
9666 /* Fold a call to builtin significand, if radix == 2. */
9668 static tree
9669 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9671 if (! validate_arg (arg, REAL_TYPE))
9672 return NULL_TREE;
9674 STRIP_NOPS (arg);
9676 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9678 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9680 switch (value->cl)
9682 case rvc_zero:
9683 case rvc_nan:
9684 case rvc_inf:
9685 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9686 return fold_convert_loc (loc, rettype, arg);
9687 case rvc_normal:
9688 /* For normal numbers, proceed iff radix == 2. */
9689 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9691 REAL_VALUE_TYPE result = *value;
9692 /* In GCC, normalized significands are in the range [0.5,
9693 1.0). We want them to be [1.0, 2.0) so set the
9694 exponent to 1. */
9695 SET_REAL_EXP (&result, 1);
9696 return build_real (rettype, result);
9698 break;
9702 return NULL_TREE;
9705 /* Fold a call to builtin frexp, we can assume the base is 2. */
9707 static tree
9708 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9710 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9711 return NULL_TREE;
9713 STRIP_NOPS (arg0);
9715 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9716 return NULL_TREE;
9718 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9720 /* Proceed if a valid pointer type was passed in. */
9721 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9723 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9724 tree frac, exp;
9726 switch (value->cl)
9728 case rvc_zero:
9729 /* For +-0, return (*exp = 0, +-0). */
9730 exp = integer_zero_node;
9731 frac = arg0;
9732 break;
9733 case rvc_nan:
9734 case rvc_inf:
9735 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9736 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9737 case rvc_normal:
9739 /* Since the frexp function always expects base 2, and in
9740 GCC normalized significands are already in the range
9741 [0.5, 1.0), we have exactly what frexp wants. */
9742 REAL_VALUE_TYPE frac_rvt = *value;
9743 SET_REAL_EXP (&frac_rvt, 0);
9744 frac = build_real (rettype, frac_rvt);
9745 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9747 break;
9748 default:
9749 gcc_unreachable ();
9752 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9753 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9754 TREE_SIDE_EFFECTS (arg1) = 1;
9755 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9758 return NULL_TREE;
9761 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9762 then we can assume the base is two. If it's false, then we have to
9763 check the mode of the TYPE parameter in certain cases. */
9765 static tree
9766 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9767 tree type, bool ldexp)
9769 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9771 STRIP_NOPS (arg0);
9772 STRIP_NOPS (arg1);
9774 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9775 if (real_zerop (arg0) || integer_zerop (arg1)
9776 || (TREE_CODE (arg0) == REAL_CST
9777 && !real_isfinite (&TREE_REAL_CST (arg0))))
9778 return omit_one_operand_loc (loc, type, arg0, arg1);
9780 /* If both arguments are constant, then try to evaluate it. */
9781 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9782 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9783 && host_integerp (arg1, 0))
9785 /* Bound the maximum adjustment to twice the range of the
9786 mode's valid exponents. Use abs to ensure the range is
9787 positive as a sanity check. */
9788 const long max_exp_adj = 2 *
9789 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9790 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9792 /* Get the user-requested adjustment. */
9793 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9795 /* The requested adjustment must be inside this range. This
9796 is a preliminary cap to avoid things like overflow, we
9797 may still fail to compute the result for other reasons. */
9798 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9800 REAL_VALUE_TYPE initial_result;
9802 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9804 /* Ensure we didn't overflow. */
9805 if (! real_isinf (&initial_result))
9807 const REAL_VALUE_TYPE trunc_result
9808 = real_value_truncate (TYPE_MODE (type), initial_result);
9810 /* Only proceed if the target mode can hold the
9811 resulting value. */
9812 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9813 return build_real (type, trunc_result);
9819 return NULL_TREE;
9822 /* Fold a call to builtin modf. */
9824 static tree
9825 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9827 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9828 return NULL_TREE;
9830 STRIP_NOPS (arg0);
9832 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9833 return NULL_TREE;
9835 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9837 /* Proceed if a valid pointer type was passed in. */
9838 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9840 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9841 REAL_VALUE_TYPE trunc, frac;
9843 switch (value->cl)
9845 case rvc_nan:
9846 case rvc_zero:
9847 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9848 trunc = frac = *value;
9849 break;
9850 case rvc_inf:
9851 /* For +-Inf, return (*arg1 = arg0, +-0). */
9852 frac = dconst0;
9853 frac.sign = value->sign;
9854 trunc = *value;
9855 break;
9856 case rvc_normal:
9857 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9858 real_trunc (&trunc, VOIDmode, value);
9859 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9860 /* If the original number was negative and already
9861 integral, then the fractional part is -0.0. */
9862 if (value->sign && frac.cl == rvc_zero)
9863 frac.sign = value->sign;
9864 break;
9867 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9868 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9869 build_real (rettype, trunc));
9870 TREE_SIDE_EFFECTS (arg1) = 1;
9871 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9872 build_real (rettype, frac));
9875 return NULL_TREE;
9878 /* Given a location LOC, an interclass builtin function decl FNDECL
9879 and its single argument ARG, return an folded expression computing
9880 the same, or NULL_TREE if we either couldn't or didn't want to fold
9881 (the latter happen if there's an RTL instruction available). */
9883 static tree
9884 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9886 enum machine_mode mode;
9888 if (!validate_arg (arg, REAL_TYPE))
9889 return NULL_TREE;
9891 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9892 return NULL_TREE;
9894 mode = TYPE_MODE (TREE_TYPE (arg));
9896 /* If there is no optab, try generic code. */
9897 switch (DECL_FUNCTION_CODE (fndecl))
9899 tree result;
9901 CASE_FLT_FN (BUILT_IN_ISINF):
9903 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9904 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9905 tree const type = TREE_TYPE (arg);
9906 REAL_VALUE_TYPE r;
9907 char buf[128];
9909 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9910 real_from_string (&r, buf);
9911 result = build_call_expr (isgr_fn, 2,
9912 fold_build1_loc (loc, ABS_EXPR, type, arg),
9913 build_real (type, r));
9914 return result;
9916 CASE_FLT_FN (BUILT_IN_FINITE):
9917 case BUILT_IN_ISFINITE:
9919 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9920 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9921 tree const type = TREE_TYPE (arg);
9922 REAL_VALUE_TYPE r;
9923 char buf[128];
9925 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9926 real_from_string (&r, buf);
9927 result = build_call_expr (isle_fn, 2,
9928 fold_build1_loc (loc, ABS_EXPR, type, arg),
9929 build_real (type, r));
9930 /*result = fold_build2_loc (loc, UNGT_EXPR,
9931 TREE_TYPE (TREE_TYPE (fndecl)),
9932 fold_build1_loc (loc, ABS_EXPR, type, arg),
9933 build_real (type, r));
9934 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9935 TREE_TYPE (TREE_TYPE (fndecl)),
9936 result);*/
9937 return result;
9939 case BUILT_IN_ISNORMAL:
9941 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9942 islessequal(fabs(x),DBL_MAX). */
9943 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9944 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9945 tree const type = TREE_TYPE (arg);
9946 REAL_VALUE_TYPE rmax, rmin;
9947 char buf[128];
9949 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9950 real_from_string (&rmax, buf);
9951 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9952 real_from_string (&rmin, buf);
9953 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9954 result = build_call_expr (isle_fn, 2, arg,
9955 build_real (type, rmax));
9956 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9957 build_call_expr (isge_fn, 2, arg,
9958 build_real (type, rmin)));
9959 return result;
9961 default:
9962 break;
9965 return NULL_TREE;
9968 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9969 ARG is the argument for the call. */
9971 static tree
9972 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9974 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9975 REAL_VALUE_TYPE r;
9977 if (!validate_arg (arg, REAL_TYPE))
9978 return NULL_TREE;
9980 switch (builtin_index)
9982 case BUILT_IN_ISINF:
9983 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9984 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9986 if (TREE_CODE (arg) == REAL_CST)
9988 r = TREE_REAL_CST (arg);
9989 if (real_isinf (&r))
9990 return real_compare (GT_EXPR, &r, &dconst0)
9991 ? integer_one_node : integer_minus_one_node;
9992 else
9993 return integer_zero_node;
9996 return NULL_TREE;
9998 case BUILT_IN_ISINF_SIGN:
10000 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10001 /* In a boolean context, GCC will fold the inner COND_EXPR to
10002 1. So e.g. "if (isinf_sign(x))" would be folded to just
10003 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10004 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10005 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10006 tree tmp = NULL_TREE;
10008 arg = builtin_save_expr (arg);
10010 if (signbit_fn && isinf_fn)
10012 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10013 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10015 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10016 signbit_call, integer_zero_node);
10017 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10018 isinf_call, integer_zero_node);
10020 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10021 integer_minus_one_node, integer_one_node);
10022 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10023 isinf_call, tmp,
10024 integer_zero_node);
10027 return tmp;
10030 case BUILT_IN_ISFINITE:
10031 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10032 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10033 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10035 if (TREE_CODE (arg) == REAL_CST)
10037 r = TREE_REAL_CST (arg);
10038 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10041 return NULL_TREE;
10043 case BUILT_IN_ISNAN:
10044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10045 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10047 if (TREE_CODE (arg) == REAL_CST)
10049 r = TREE_REAL_CST (arg);
10050 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10053 arg = builtin_save_expr (arg);
10054 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10056 default:
10057 gcc_unreachable ();
10061 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10062 This builtin will generate code to return the appropriate floating
10063 point classification depending on the value of the floating point
10064 number passed in. The possible return values must be supplied as
10065 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10066 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10067 one floating point argument which is "type generic". */
10069 static tree
10070 fold_builtin_fpclassify (location_t loc, tree exp)
10072 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10073 arg, type, res, tmp;
10074 enum machine_mode mode;
10075 REAL_VALUE_TYPE r;
10076 char buf[128];
10078 /* Verify the required arguments in the original call. */
10079 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10080 INTEGER_TYPE, INTEGER_TYPE,
10081 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10082 return NULL_TREE;
10084 fp_nan = CALL_EXPR_ARG (exp, 0);
10085 fp_infinite = CALL_EXPR_ARG (exp, 1);
10086 fp_normal = CALL_EXPR_ARG (exp, 2);
10087 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10088 fp_zero = CALL_EXPR_ARG (exp, 4);
10089 arg = CALL_EXPR_ARG (exp, 5);
10090 type = TREE_TYPE (arg);
10091 mode = TYPE_MODE (type);
10092 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10094 /* fpclassify(x) ->
10095 isnan(x) ? FP_NAN :
10096 (fabs(x) == Inf ? FP_INFINITE :
10097 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10098 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10100 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10101 build_real (type, dconst0));
10102 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10103 tmp, fp_zero, fp_subnormal);
10105 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10106 real_from_string (&r, buf);
10107 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10108 arg, build_real (type, r));
10109 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10111 if (HONOR_INFINITIES (mode))
10113 real_inf (&r);
10114 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10115 build_real (type, r));
10116 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10117 fp_infinite, res);
10120 if (HONOR_NANS (mode))
10122 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10123 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10126 return res;
10129 /* Fold a call to an unordered comparison function such as
10130 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10131 being called and ARG0 and ARG1 are the arguments for the call.
10132 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10133 the opposite of the desired result. UNORDERED_CODE is used
10134 for modes that can hold NaNs and ORDERED_CODE is used for
10135 the rest. */
10137 static tree
10138 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10139 enum tree_code unordered_code,
10140 enum tree_code ordered_code)
10142 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10143 enum tree_code code;
10144 tree type0, type1;
10145 enum tree_code code0, code1;
10146 tree cmp_type = NULL_TREE;
10148 type0 = TREE_TYPE (arg0);
10149 type1 = TREE_TYPE (arg1);
10151 code0 = TREE_CODE (type0);
10152 code1 = TREE_CODE (type1);
10154 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10155 /* Choose the wider of two real types. */
10156 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10157 ? type0 : type1;
10158 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10159 cmp_type = type0;
10160 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10161 cmp_type = type1;
10163 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10164 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10166 if (unordered_code == UNORDERED_EXPR)
10168 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10169 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10170 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10173 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10174 : ordered_code;
10175 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10176 fold_build2_loc (loc, code, type, arg0, arg1));
10179 /* Fold a call to built-in function FNDECL with 0 arguments.
10180 IGNORE is true if the result of the function call is ignored. This
10181 function returns NULL_TREE if no simplification was possible. */
10183 static tree
10184 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10186 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10187 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10188 switch (fcode)
10190 CASE_FLT_FN (BUILT_IN_INF):
10191 case BUILT_IN_INFD32:
10192 case BUILT_IN_INFD64:
10193 case BUILT_IN_INFD128:
10194 return fold_builtin_inf (loc, type, true);
10196 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10197 return fold_builtin_inf (loc, type, false);
10199 case BUILT_IN_CLASSIFY_TYPE:
10200 return fold_builtin_classify_type (NULL_TREE);
10202 case BUILT_IN_UNREACHABLE:
10203 if (flag_sanitize & SANITIZE_UNREACHABLE
10204 && (current_function_decl == NULL
10205 || !lookup_attribute ("no_sanitize_undefined",
10206 DECL_ATTRIBUTES (current_function_decl))))
10207 return ubsan_instrument_unreachable (loc);
10208 break;
10210 default:
10211 break;
10213 return NULL_TREE;
10216 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10217 IGNORE is true if the result of the function call is ignored. This
10218 function returns NULL_TREE if no simplification was possible. */
10220 static tree
10221 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10223 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10224 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10225 switch (fcode)
10227 case BUILT_IN_CONSTANT_P:
10229 tree val = fold_builtin_constant_p (arg0);
10231 /* Gimplification will pull the CALL_EXPR for the builtin out of
10232 an if condition. When not optimizing, we'll not CSE it back.
10233 To avoid link error types of regressions, return false now. */
10234 if (!val && !optimize)
10235 val = integer_zero_node;
10237 return val;
10240 case BUILT_IN_CLASSIFY_TYPE:
10241 return fold_builtin_classify_type (arg0);
10243 case BUILT_IN_STRLEN:
10244 return fold_builtin_strlen (loc, type, arg0);
10246 CASE_FLT_FN (BUILT_IN_FABS):
10247 case BUILT_IN_FABSD32:
10248 case BUILT_IN_FABSD64:
10249 case BUILT_IN_FABSD128:
10250 return fold_builtin_fabs (loc, arg0, type);
10252 case BUILT_IN_ABS:
10253 case BUILT_IN_LABS:
10254 case BUILT_IN_LLABS:
10255 case BUILT_IN_IMAXABS:
10256 return fold_builtin_abs (loc, arg0, type);
10258 CASE_FLT_FN (BUILT_IN_CONJ):
10259 if (validate_arg (arg0, COMPLEX_TYPE)
10260 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10261 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10262 break;
10264 CASE_FLT_FN (BUILT_IN_CREAL):
10265 if (validate_arg (arg0, COMPLEX_TYPE)
10266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10267 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10268 break;
10270 CASE_FLT_FN (BUILT_IN_CIMAG):
10271 if (validate_arg (arg0, COMPLEX_TYPE)
10272 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10273 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10274 break;
10276 CASE_FLT_FN (BUILT_IN_CCOS):
10277 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10279 CASE_FLT_FN (BUILT_IN_CCOSH):
10280 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10282 CASE_FLT_FN (BUILT_IN_CPROJ):
10283 return fold_builtin_cproj (loc, arg0, type);
10285 CASE_FLT_FN (BUILT_IN_CSIN):
10286 if (validate_arg (arg0, COMPLEX_TYPE)
10287 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10288 return do_mpc_arg1 (arg0, type, mpc_sin);
10289 break;
10291 CASE_FLT_FN (BUILT_IN_CSINH):
10292 if (validate_arg (arg0, COMPLEX_TYPE)
10293 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10294 return do_mpc_arg1 (arg0, type, mpc_sinh);
10295 break;
10297 CASE_FLT_FN (BUILT_IN_CTAN):
10298 if (validate_arg (arg0, COMPLEX_TYPE)
10299 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10300 return do_mpc_arg1 (arg0, type, mpc_tan);
10301 break;
10303 CASE_FLT_FN (BUILT_IN_CTANH):
10304 if (validate_arg (arg0, COMPLEX_TYPE)
10305 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10306 return do_mpc_arg1 (arg0, type, mpc_tanh);
10307 break;
10309 CASE_FLT_FN (BUILT_IN_CLOG):
10310 if (validate_arg (arg0, COMPLEX_TYPE)
10311 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10312 return do_mpc_arg1 (arg0, type, mpc_log);
10313 break;
10315 CASE_FLT_FN (BUILT_IN_CSQRT):
10316 if (validate_arg (arg0, COMPLEX_TYPE)
10317 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10318 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10319 break;
10321 CASE_FLT_FN (BUILT_IN_CASIN):
10322 if (validate_arg (arg0, COMPLEX_TYPE)
10323 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10324 return do_mpc_arg1 (arg0, type, mpc_asin);
10325 break;
10327 CASE_FLT_FN (BUILT_IN_CACOS):
10328 if (validate_arg (arg0, COMPLEX_TYPE)
10329 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10330 return do_mpc_arg1 (arg0, type, mpc_acos);
10331 break;
10333 CASE_FLT_FN (BUILT_IN_CATAN):
10334 if (validate_arg (arg0, COMPLEX_TYPE)
10335 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10336 return do_mpc_arg1 (arg0, type, mpc_atan);
10337 break;
10339 CASE_FLT_FN (BUILT_IN_CASINH):
10340 if (validate_arg (arg0, COMPLEX_TYPE)
10341 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10342 return do_mpc_arg1 (arg0, type, mpc_asinh);
10343 break;
10345 CASE_FLT_FN (BUILT_IN_CACOSH):
10346 if (validate_arg (arg0, COMPLEX_TYPE)
10347 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10348 return do_mpc_arg1 (arg0, type, mpc_acosh);
10349 break;
10351 CASE_FLT_FN (BUILT_IN_CATANH):
10352 if (validate_arg (arg0, COMPLEX_TYPE)
10353 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10354 return do_mpc_arg1 (arg0, type, mpc_atanh);
10355 break;
10357 CASE_FLT_FN (BUILT_IN_CABS):
10358 return fold_builtin_cabs (loc, arg0, type, fndecl);
10360 CASE_FLT_FN (BUILT_IN_CARG):
10361 return fold_builtin_carg (loc, arg0, type);
10363 CASE_FLT_FN (BUILT_IN_SQRT):
10364 return fold_builtin_sqrt (loc, arg0, type);
10366 CASE_FLT_FN (BUILT_IN_CBRT):
10367 return fold_builtin_cbrt (loc, arg0, type);
10369 CASE_FLT_FN (BUILT_IN_ASIN):
10370 if (validate_arg (arg0, REAL_TYPE))
10371 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10372 &dconstm1, &dconst1, true);
10373 break;
10375 CASE_FLT_FN (BUILT_IN_ACOS):
10376 if (validate_arg (arg0, REAL_TYPE))
10377 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10378 &dconstm1, &dconst1, true);
10379 break;
10381 CASE_FLT_FN (BUILT_IN_ATAN):
10382 if (validate_arg (arg0, REAL_TYPE))
10383 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10384 break;
10386 CASE_FLT_FN (BUILT_IN_ASINH):
10387 if (validate_arg (arg0, REAL_TYPE))
10388 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10389 break;
10391 CASE_FLT_FN (BUILT_IN_ACOSH):
10392 if (validate_arg (arg0, REAL_TYPE))
10393 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10394 &dconst1, NULL, true);
10395 break;
10397 CASE_FLT_FN (BUILT_IN_ATANH):
10398 if (validate_arg (arg0, REAL_TYPE))
10399 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10400 &dconstm1, &dconst1, false);
10401 break;
10403 CASE_FLT_FN (BUILT_IN_SIN):
10404 if (validate_arg (arg0, REAL_TYPE))
10405 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10406 break;
10408 CASE_FLT_FN (BUILT_IN_COS):
10409 return fold_builtin_cos (loc, arg0, type, fndecl);
10411 CASE_FLT_FN (BUILT_IN_TAN):
10412 return fold_builtin_tan (arg0, type);
10414 CASE_FLT_FN (BUILT_IN_CEXP):
10415 return fold_builtin_cexp (loc, arg0, type);
10417 CASE_FLT_FN (BUILT_IN_CEXPI):
10418 if (validate_arg (arg0, REAL_TYPE))
10419 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10420 break;
10422 CASE_FLT_FN (BUILT_IN_SINH):
10423 if (validate_arg (arg0, REAL_TYPE))
10424 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10425 break;
10427 CASE_FLT_FN (BUILT_IN_COSH):
10428 return fold_builtin_cosh (loc, arg0, type, fndecl);
10430 CASE_FLT_FN (BUILT_IN_TANH):
10431 if (validate_arg (arg0, REAL_TYPE))
10432 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10433 break;
10435 CASE_FLT_FN (BUILT_IN_ERF):
10436 if (validate_arg (arg0, REAL_TYPE))
10437 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10438 break;
10440 CASE_FLT_FN (BUILT_IN_ERFC):
10441 if (validate_arg (arg0, REAL_TYPE))
10442 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10443 break;
10445 CASE_FLT_FN (BUILT_IN_TGAMMA):
10446 if (validate_arg (arg0, REAL_TYPE))
10447 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10448 break;
10450 CASE_FLT_FN (BUILT_IN_EXP):
10451 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10453 CASE_FLT_FN (BUILT_IN_EXP2):
10454 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10456 CASE_FLT_FN (BUILT_IN_EXP10):
10457 CASE_FLT_FN (BUILT_IN_POW10):
10458 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10460 CASE_FLT_FN (BUILT_IN_EXPM1):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10463 break;
10465 CASE_FLT_FN (BUILT_IN_LOG):
10466 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10468 CASE_FLT_FN (BUILT_IN_LOG2):
10469 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10471 CASE_FLT_FN (BUILT_IN_LOG10):
10472 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10474 CASE_FLT_FN (BUILT_IN_LOG1P):
10475 if (validate_arg (arg0, REAL_TYPE))
10476 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10477 &dconstm1, NULL, false);
10478 break;
10480 CASE_FLT_FN (BUILT_IN_J0):
10481 if (validate_arg (arg0, REAL_TYPE))
10482 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10483 NULL, NULL, 0);
10484 break;
10486 CASE_FLT_FN (BUILT_IN_J1):
10487 if (validate_arg (arg0, REAL_TYPE))
10488 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10489 NULL, NULL, 0);
10490 break;
10492 CASE_FLT_FN (BUILT_IN_Y0):
10493 if (validate_arg (arg0, REAL_TYPE))
10494 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10495 &dconst0, NULL, false);
10496 break;
10498 CASE_FLT_FN (BUILT_IN_Y1):
10499 if (validate_arg (arg0, REAL_TYPE))
10500 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10501 &dconst0, NULL, false);
10502 break;
10504 CASE_FLT_FN (BUILT_IN_NAN):
10505 case BUILT_IN_NAND32:
10506 case BUILT_IN_NAND64:
10507 case BUILT_IN_NAND128:
10508 return fold_builtin_nan (arg0, type, true);
10510 CASE_FLT_FN (BUILT_IN_NANS):
10511 return fold_builtin_nan (arg0, type, false);
10513 CASE_FLT_FN (BUILT_IN_FLOOR):
10514 return fold_builtin_floor (loc, fndecl, arg0);
10516 CASE_FLT_FN (BUILT_IN_CEIL):
10517 return fold_builtin_ceil (loc, fndecl, arg0);
10519 CASE_FLT_FN (BUILT_IN_TRUNC):
10520 return fold_builtin_trunc (loc, fndecl, arg0);
10522 CASE_FLT_FN (BUILT_IN_ROUND):
10523 return fold_builtin_round (loc, fndecl, arg0);
10525 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10526 CASE_FLT_FN (BUILT_IN_RINT):
10527 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10529 CASE_FLT_FN (BUILT_IN_ICEIL):
10530 CASE_FLT_FN (BUILT_IN_LCEIL):
10531 CASE_FLT_FN (BUILT_IN_LLCEIL):
10532 CASE_FLT_FN (BUILT_IN_LFLOOR):
10533 CASE_FLT_FN (BUILT_IN_IFLOOR):
10534 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10535 CASE_FLT_FN (BUILT_IN_IROUND):
10536 CASE_FLT_FN (BUILT_IN_LROUND):
10537 CASE_FLT_FN (BUILT_IN_LLROUND):
10538 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10540 CASE_FLT_FN (BUILT_IN_IRINT):
10541 CASE_FLT_FN (BUILT_IN_LRINT):
10542 CASE_FLT_FN (BUILT_IN_LLRINT):
10543 return fold_fixed_mathfn (loc, fndecl, arg0);
10545 case BUILT_IN_BSWAP16:
10546 case BUILT_IN_BSWAP32:
10547 case BUILT_IN_BSWAP64:
10548 return fold_builtin_bswap (fndecl, arg0);
10550 CASE_INT_FN (BUILT_IN_FFS):
10551 CASE_INT_FN (BUILT_IN_CLZ):
10552 CASE_INT_FN (BUILT_IN_CTZ):
10553 CASE_INT_FN (BUILT_IN_CLRSB):
10554 CASE_INT_FN (BUILT_IN_POPCOUNT):
10555 CASE_INT_FN (BUILT_IN_PARITY):
10556 return fold_builtin_bitop (fndecl, arg0);
10558 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10559 return fold_builtin_signbit (loc, arg0, type);
10561 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10562 return fold_builtin_significand (loc, arg0, type);
10564 CASE_FLT_FN (BUILT_IN_ILOGB):
10565 CASE_FLT_FN (BUILT_IN_LOGB):
10566 return fold_builtin_logb (loc, arg0, type);
10568 case BUILT_IN_ISASCII:
10569 return fold_builtin_isascii (loc, arg0);
10571 case BUILT_IN_TOASCII:
10572 return fold_builtin_toascii (loc, arg0);
10574 case BUILT_IN_ISDIGIT:
10575 return fold_builtin_isdigit (loc, arg0);
10577 CASE_FLT_FN (BUILT_IN_FINITE):
10578 case BUILT_IN_FINITED32:
10579 case BUILT_IN_FINITED64:
10580 case BUILT_IN_FINITED128:
10581 case BUILT_IN_ISFINITE:
10583 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10584 if (ret)
10585 return ret;
10586 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10589 CASE_FLT_FN (BUILT_IN_ISINF):
10590 case BUILT_IN_ISINFD32:
10591 case BUILT_IN_ISINFD64:
10592 case BUILT_IN_ISINFD128:
10594 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10595 if (ret)
10596 return ret;
10597 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10600 case BUILT_IN_ISNORMAL:
10601 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10603 case BUILT_IN_ISINF_SIGN:
10604 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10606 CASE_FLT_FN (BUILT_IN_ISNAN):
10607 case BUILT_IN_ISNAND32:
10608 case BUILT_IN_ISNAND64:
10609 case BUILT_IN_ISNAND128:
10610 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10612 case BUILT_IN_PRINTF:
10613 case BUILT_IN_PRINTF_UNLOCKED:
10614 case BUILT_IN_VPRINTF:
10615 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10617 case BUILT_IN_FREE:
10618 if (integer_zerop (arg0))
10619 return build_empty_stmt (loc);
10620 break;
10622 default:
10623 break;
10626 return NULL_TREE;
10630 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10631 IGNORE is true if the result of the function call is ignored. This
10632 function returns NULL_TREE if no simplification was possible. */
10634 static tree
10635 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10637 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10640 switch (fcode)
10642 CASE_FLT_FN (BUILT_IN_JN):
10643 if (validate_arg (arg0, INTEGER_TYPE)
10644 && validate_arg (arg1, REAL_TYPE))
10645 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10646 break;
10648 CASE_FLT_FN (BUILT_IN_YN):
10649 if (validate_arg (arg0, INTEGER_TYPE)
10650 && validate_arg (arg1, REAL_TYPE))
10651 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10652 &dconst0, false);
10653 break;
10655 CASE_FLT_FN (BUILT_IN_DREM):
10656 CASE_FLT_FN (BUILT_IN_REMAINDER):
10657 if (validate_arg (arg0, REAL_TYPE)
10658 && validate_arg (arg1, REAL_TYPE))
10659 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10660 break;
10662 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10663 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10664 if (validate_arg (arg0, REAL_TYPE)
10665 && validate_arg (arg1, POINTER_TYPE))
10666 return do_mpfr_lgamma_r (arg0, arg1, type);
10667 break;
10669 CASE_FLT_FN (BUILT_IN_ATAN2):
10670 if (validate_arg (arg0, REAL_TYPE)
10671 && validate_arg (arg1, REAL_TYPE))
10672 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10673 break;
10675 CASE_FLT_FN (BUILT_IN_FDIM):
10676 if (validate_arg (arg0, REAL_TYPE)
10677 && validate_arg (arg1, REAL_TYPE))
10678 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10679 break;
10681 CASE_FLT_FN (BUILT_IN_HYPOT):
10682 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10684 CASE_FLT_FN (BUILT_IN_CPOW):
10685 if (validate_arg (arg0, COMPLEX_TYPE)
10686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10687 && validate_arg (arg1, COMPLEX_TYPE)
10688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10689 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10690 break;
10692 CASE_FLT_FN (BUILT_IN_LDEXP):
10693 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10694 CASE_FLT_FN (BUILT_IN_SCALBN):
10695 CASE_FLT_FN (BUILT_IN_SCALBLN):
10696 return fold_builtin_load_exponent (loc, arg0, arg1,
10697 type, /*ldexp=*/false);
10699 CASE_FLT_FN (BUILT_IN_FREXP):
10700 return fold_builtin_frexp (loc, arg0, arg1, type);
10702 CASE_FLT_FN (BUILT_IN_MODF):
10703 return fold_builtin_modf (loc, arg0, arg1, type);
10705 case BUILT_IN_BZERO:
10706 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10708 case BUILT_IN_FPUTS:
10709 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10711 case BUILT_IN_FPUTS_UNLOCKED:
10712 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10714 case BUILT_IN_STRSTR:
10715 return fold_builtin_strstr (loc, arg0, arg1, type);
10717 case BUILT_IN_STRCAT:
10718 return fold_builtin_strcat (loc, arg0, arg1);
10720 case BUILT_IN_STRSPN:
10721 return fold_builtin_strspn (loc, arg0, arg1);
10723 case BUILT_IN_STRCSPN:
10724 return fold_builtin_strcspn (loc, arg0, arg1);
10726 case BUILT_IN_STRCHR:
10727 case BUILT_IN_INDEX:
10728 return fold_builtin_strchr (loc, arg0, arg1, type);
10730 case BUILT_IN_STRRCHR:
10731 case BUILT_IN_RINDEX:
10732 return fold_builtin_strrchr (loc, arg0, arg1, type);
10734 case BUILT_IN_STRCPY:
10735 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10737 case BUILT_IN_STPCPY:
10738 if (ignore)
10740 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10741 if (!fn)
10742 break;
10744 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10746 else
10747 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10748 break;
10750 case BUILT_IN_STRCMP:
10751 return fold_builtin_strcmp (loc, arg0, arg1);
10753 case BUILT_IN_STRPBRK:
10754 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10756 case BUILT_IN_EXPECT:
10757 return fold_builtin_expect (loc, arg0, arg1);
10759 CASE_FLT_FN (BUILT_IN_POW):
10760 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10762 CASE_FLT_FN (BUILT_IN_POWI):
10763 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10765 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10766 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10768 CASE_FLT_FN (BUILT_IN_FMIN):
10769 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10771 CASE_FLT_FN (BUILT_IN_FMAX):
10772 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10774 case BUILT_IN_ISGREATER:
10775 return fold_builtin_unordered_cmp (loc, fndecl,
10776 arg0, arg1, UNLE_EXPR, LE_EXPR);
10777 case BUILT_IN_ISGREATEREQUAL:
10778 return fold_builtin_unordered_cmp (loc, fndecl,
10779 arg0, arg1, UNLT_EXPR, LT_EXPR);
10780 case BUILT_IN_ISLESS:
10781 return fold_builtin_unordered_cmp (loc, fndecl,
10782 arg0, arg1, UNGE_EXPR, GE_EXPR);
10783 case BUILT_IN_ISLESSEQUAL:
10784 return fold_builtin_unordered_cmp (loc, fndecl,
10785 arg0, arg1, UNGT_EXPR, GT_EXPR);
10786 case BUILT_IN_ISLESSGREATER:
10787 return fold_builtin_unordered_cmp (loc, fndecl,
10788 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10789 case BUILT_IN_ISUNORDERED:
10790 return fold_builtin_unordered_cmp (loc, fndecl,
10791 arg0, arg1, UNORDERED_EXPR,
10792 NOP_EXPR);
10794 /* We do the folding for va_start in the expander. */
10795 case BUILT_IN_VA_START:
10796 break;
10798 case BUILT_IN_SPRINTF:
10799 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10801 case BUILT_IN_OBJECT_SIZE:
10802 return fold_builtin_object_size (arg0, arg1);
10804 case BUILT_IN_PRINTF:
10805 case BUILT_IN_PRINTF_UNLOCKED:
10806 case BUILT_IN_VPRINTF:
10807 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10809 case BUILT_IN_PRINTF_CHK:
10810 case BUILT_IN_VPRINTF_CHK:
10811 if (!validate_arg (arg0, INTEGER_TYPE)
10812 || TREE_SIDE_EFFECTS (arg0))
10813 return NULL_TREE;
10814 else
10815 return fold_builtin_printf (loc, fndecl,
10816 arg1, NULL_TREE, ignore, fcode);
10817 break;
10819 case BUILT_IN_FPRINTF:
10820 case BUILT_IN_FPRINTF_UNLOCKED:
10821 case BUILT_IN_VFPRINTF:
10822 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10823 ignore, fcode);
10825 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10826 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10828 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10829 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10831 default:
10832 break;
10834 return NULL_TREE;
10837 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10838 and ARG2. IGNORE is true if the result of the function call is ignored.
10839 This function returns NULL_TREE if no simplification was possible. */
10841 static tree
10842 fold_builtin_3 (location_t loc, tree fndecl,
10843 tree arg0, tree arg1, tree arg2, bool ignore)
10845 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10846 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10847 switch (fcode)
10850 CASE_FLT_FN (BUILT_IN_SINCOS):
10851 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10853 CASE_FLT_FN (BUILT_IN_FMA):
10854 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10855 break;
10857 CASE_FLT_FN (BUILT_IN_REMQUO):
10858 if (validate_arg (arg0, REAL_TYPE)
10859 && validate_arg (arg1, REAL_TYPE)
10860 && validate_arg (arg2, POINTER_TYPE))
10861 return do_mpfr_remquo (arg0, arg1, arg2);
10862 break;
10864 case BUILT_IN_MEMSET:
10865 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10867 case BUILT_IN_BCOPY:
10868 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10869 void_type_node, true, /*endp=*/3);
10871 case BUILT_IN_MEMCPY:
10872 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10873 type, ignore, /*endp=*/0);
10875 case BUILT_IN_MEMPCPY:
10876 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10877 type, ignore, /*endp=*/1);
10879 case BUILT_IN_MEMMOVE:
10880 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10881 type, ignore, /*endp=*/3);
10883 case BUILT_IN_STRNCAT:
10884 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10886 case BUILT_IN_STRNCPY:
10887 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10889 case BUILT_IN_STRNCMP:
10890 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10892 case BUILT_IN_MEMCHR:
10893 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10895 case BUILT_IN_BCMP:
10896 case BUILT_IN_MEMCMP:
10897 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10899 case BUILT_IN_SPRINTF:
10900 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10902 case BUILT_IN_SNPRINTF:
10903 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10905 case BUILT_IN_STRCPY_CHK:
10906 case BUILT_IN_STPCPY_CHK:
10907 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10908 ignore, fcode);
10910 case BUILT_IN_STRCAT_CHK:
10911 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10913 case BUILT_IN_PRINTF_CHK:
10914 case BUILT_IN_VPRINTF_CHK:
10915 if (!validate_arg (arg0, INTEGER_TYPE)
10916 || TREE_SIDE_EFFECTS (arg0))
10917 return NULL_TREE;
10918 else
10919 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10920 break;
10922 case BUILT_IN_FPRINTF:
10923 case BUILT_IN_FPRINTF_UNLOCKED:
10924 case BUILT_IN_VFPRINTF:
10925 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10926 ignore, fcode);
10928 case BUILT_IN_FPRINTF_CHK:
10929 case BUILT_IN_VFPRINTF_CHK:
10930 if (!validate_arg (arg1, INTEGER_TYPE)
10931 || TREE_SIDE_EFFECTS (arg1))
10932 return NULL_TREE;
10933 else
10934 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10935 ignore, fcode);
10937 default:
10938 break;
10940 return NULL_TREE;
10943 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10944 ARG2, and ARG3. IGNORE is true if the result of the function call is
10945 ignored. This function returns NULL_TREE if no simplification was
10946 possible. */
10948 static tree
10949 fold_builtin_4 (location_t loc, tree fndecl,
10950 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10952 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10954 switch (fcode)
10956 case BUILT_IN_MEMCPY_CHK:
10957 case BUILT_IN_MEMPCPY_CHK:
10958 case BUILT_IN_MEMMOVE_CHK:
10959 case BUILT_IN_MEMSET_CHK:
10960 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10961 NULL_TREE, ignore,
10962 DECL_FUNCTION_CODE (fndecl));
10964 case BUILT_IN_STRNCPY_CHK:
10965 case BUILT_IN_STPNCPY_CHK:
10966 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10967 ignore, fcode);
10969 case BUILT_IN_STRNCAT_CHK:
10970 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10972 case BUILT_IN_SNPRINTF:
10973 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10975 case BUILT_IN_FPRINTF_CHK:
10976 case BUILT_IN_VFPRINTF_CHK:
10977 if (!validate_arg (arg1, INTEGER_TYPE)
10978 || TREE_SIDE_EFFECTS (arg1))
10979 return NULL_TREE;
10980 else
10981 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10982 ignore, fcode);
10983 break;
10985 default:
10986 break;
10988 return NULL_TREE;
10991 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10992 arguments, where NARGS <= 4. IGNORE is true if the result of the
10993 function call is ignored. This function returns NULL_TREE if no
10994 simplification was possible. Note that this only folds builtins with
10995 fixed argument patterns. Foldings that do varargs-to-varargs
10996 transformations, or that match calls with more than 4 arguments,
10997 need to be handled with fold_builtin_varargs instead. */
10999 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11001 static tree
11002 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11004 tree ret = NULL_TREE;
11006 switch (nargs)
11008 case 0:
11009 ret = fold_builtin_0 (loc, fndecl, ignore);
11010 break;
11011 case 1:
11012 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11013 break;
11014 case 2:
11015 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11016 break;
11017 case 3:
11018 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11019 break;
11020 case 4:
11021 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11022 ignore);
11023 break;
11024 default:
11025 break;
11027 if (ret)
11029 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11030 SET_EXPR_LOCATION (ret, loc);
11031 TREE_NO_WARNING (ret) = 1;
11032 return ret;
11034 return NULL_TREE;
11037 /* Builtins with folding operations that operate on "..." arguments
11038 need special handling; we need to store the arguments in a convenient
11039 data structure before attempting any folding. Fortunately there are
11040 only a few builtins that fall into this category. FNDECL is the
11041 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11042 result of the function call is ignored. */
11044 static tree
11045 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11046 bool ignore ATTRIBUTE_UNUSED)
11048 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11049 tree ret = NULL_TREE;
11051 switch (fcode)
11053 case BUILT_IN_SPRINTF_CHK:
11054 case BUILT_IN_VSPRINTF_CHK:
11055 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11056 break;
11058 case BUILT_IN_SNPRINTF_CHK:
11059 case BUILT_IN_VSNPRINTF_CHK:
11060 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11061 break;
11063 case BUILT_IN_FPCLASSIFY:
11064 ret = fold_builtin_fpclassify (loc, exp);
11065 break;
11067 default:
11068 break;
11070 if (ret)
11072 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11073 SET_EXPR_LOCATION (ret, loc);
11074 TREE_NO_WARNING (ret) = 1;
11075 return ret;
11077 return NULL_TREE;
11080 /* Return true if FNDECL shouldn't be folded right now.
11081 If a built-in function has an inline attribute always_inline
11082 wrapper, defer folding it after always_inline functions have
11083 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11084 might not be performed. */
11086 bool
11087 avoid_folding_inline_builtin (tree fndecl)
11089 return (DECL_DECLARED_INLINE_P (fndecl)
11090 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11091 && cfun
11092 && !cfun->always_inline_functions_inlined
11093 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11096 /* A wrapper function for builtin folding that prevents warnings for
11097 "statement without effect" and the like, caused by removing the
11098 call node earlier than the warning is generated. */
11100 tree
11101 fold_call_expr (location_t loc, tree exp, bool ignore)
11103 tree ret = NULL_TREE;
11104 tree fndecl = get_callee_fndecl (exp);
11105 if (fndecl
11106 && TREE_CODE (fndecl) == FUNCTION_DECL
11107 && DECL_BUILT_IN (fndecl)
11108 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11109 yet. Defer folding until we see all the arguments
11110 (after inlining). */
11111 && !CALL_EXPR_VA_ARG_PACK (exp))
11113 int nargs = call_expr_nargs (exp);
11115 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11116 instead last argument is __builtin_va_arg_pack (). Defer folding
11117 even in that case, until arguments are finalized. */
11118 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11120 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11121 if (fndecl2
11122 && TREE_CODE (fndecl2) == FUNCTION_DECL
11123 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11124 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11125 return NULL_TREE;
11128 if (avoid_folding_inline_builtin (fndecl))
11129 return NULL_TREE;
11131 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11132 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11133 CALL_EXPR_ARGP (exp), ignore);
11134 else
11136 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11138 tree *args = CALL_EXPR_ARGP (exp);
11139 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11141 if (!ret)
11142 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11143 if (ret)
11144 return ret;
11147 return NULL_TREE;
11150 /* Conveniently construct a function call expression. FNDECL names the
11151 function to be called and N arguments are passed in the array
11152 ARGARRAY. */
11154 tree
11155 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11157 tree fntype = TREE_TYPE (fndecl);
11158 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11160 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11163 /* Conveniently construct a function call expression. FNDECL names the
11164 function to be called and the arguments are passed in the vector
11165 VEC. */
11167 tree
11168 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11170 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11171 vec_safe_address (vec));
11175 /* Conveniently construct a function call expression. FNDECL names the
11176 function to be called, N is the number of arguments, and the "..."
11177 parameters are the argument expressions. */
11179 tree
11180 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11182 va_list ap;
11183 tree *argarray = XALLOCAVEC (tree, n);
11184 int i;
11186 va_start (ap, n);
11187 for (i = 0; i < n; i++)
11188 argarray[i] = va_arg (ap, tree);
11189 va_end (ap);
11190 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11193 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11194 varargs macros aren't supported by all bootstrap compilers. */
11196 tree
11197 build_call_expr (tree fndecl, int n, ...)
11199 va_list ap;
11200 tree *argarray = XALLOCAVEC (tree, n);
11201 int i;
11203 va_start (ap, n);
11204 for (i = 0; i < n; i++)
11205 argarray[i] = va_arg (ap, tree);
11206 va_end (ap);
11207 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11210 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11211 N arguments are passed in the array ARGARRAY. */
11213 tree
11214 fold_builtin_call_array (location_t loc, tree type,
11215 tree fn,
11216 int n,
11217 tree *argarray)
11219 tree ret = NULL_TREE;
11220 tree exp;
11222 if (TREE_CODE (fn) == ADDR_EXPR)
11224 tree fndecl = TREE_OPERAND (fn, 0);
11225 if (TREE_CODE (fndecl) == FUNCTION_DECL
11226 && DECL_BUILT_IN (fndecl))
11228 /* If last argument is __builtin_va_arg_pack (), arguments to this
11229 function are not finalized yet. Defer folding until they are. */
11230 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11232 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11233 if (fndecl2
11234 && TREE_CODE (fndecl2) == FUNCTION_DECL
11235 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11236 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11237 return build_call_array_loc (loc, type, fn, n, argarray);
11239 if (avoid_folding_inline_builtin (fndecl))
11240 return build_call_array_loc (loc, type, fn, n, argarray);
11241 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11243 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11244 if (ret)
11245 return ret;
11247 return build_call_array_loc (loc, type, fn, n, argarray);
11249 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11251 /* First try the transformations that don't require consing up
11252 an exp. */
11253 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11254 if (ret)
11255 return ret;
11258 /* If we got this far, we need to build an exp. */
11259 exp = build_call_array_loc (loc, type, fn, n, argarray);
11260 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11261 return ret ? ret : exp;
11265 return build_call_array_loc (loc, type, fn, n, argarray);
11268 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11269 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11270 of arguments in ARGS to be omitted. OLDNARGS is the number of
11271 elements in ARGS. */
11273 static tree
11274 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11275 int skip, tree fndecl, int n, va_list newargs)
11277 int nargs = oldnargs - skip + n;
11278 tree *buffer;
11280 if (n > 0)
11282 int i, j;
11284 buffer = XALLOCAVEC (tree, nargs);
11285 for (i = 0; i < n; i++)
11286 buffer[i] = va_arg (newargs, tree);
11287 for (j = skip; j < oldnargs; j++, i++)
11288 buffer[i] = args[j];
11290 else
11291 buffer = args + skip;
11293 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11296 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11297 list ARGS along with N new arguments specified as the "..."
11298 parameters. SKIP is the number of arguments in ARGS to be omitted.
11299 OLDNARGS is the number of elements in ARGS. */
11301 static tree
11302 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11303 int skip, tree fndecl, int n, ...)
11305 va_list ap;
11306 tree t;
11308 va_start (ap, n);
11309 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11310 va_end (ap);
11312 return t;
11315 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11316 along with N new arguments specified as the "..." parameters. SKIP
11317 is the number of arguments in EXP to be omitted. This function is used
11318 to do varargs-to-varargs transformations. */
11320 static tree
11321 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11323 va_list ap;
11324 tree t;
11326 va_start (ap, n);
11327 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11328 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11329 va_end (ap);
11331 return t;
11334 /* Validate a single argument ARG against a tree code CODE representing
11335 a type. */
11337 static bool
11338 validate_arg (const_tree arg, enum tree_code code)
11340 if (!arg)
11341 return false;
11342 else if (code == POINTER_TYPE)
11343 return POINTER_TYPE_P (TREE_TYPE (arg));
11344 else if (code == INTEGER_TYPE)
11345 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11346 return code == TREE_CODE (TREE_TYPE (arg));
11349 /* This function validates the types of a function call argument list
11350 against a specified list of tree_codes. If the last specifier is a 0,
11351 that represents an ellipses, otherwise the last specifier must be a
11352 VOID_TYPE.
11354 This is the GIMPLE version of validate_arglist. Eventually we want to
11355 completely convert builtins.c to work from GIMPLEs and the tree based
11356 validate_arglist will then be removed. */
11358 bool
11359 validate_gimple_arglist (const_gimple call, ...)
11361 enum tree_code code;
11362 bool res = 0;
11363 va_list ap;
11364 const_tree arg;
11365 size_t i;
11367 va_start (ap, call);
11368 i = 0;
11372 code = (enum tree_code) va_arg (ap, int);
11373 switch (code)
11375 case 0:
11376 /* This signifies an ellipses, any further arguments are all ok. */
11377 res = true;
11378 goto end;
11379 case VOID_TYPE:
11380 /* This signifies an endlink, if no arguments remain, return
11381 true, otherwise return false. */
11382 res = (i == gimple_call_num_args (call));
11383 goto end;
11384 default:
11385 /* If no parameters remain or the parameter's code does not
11386 match the specified code, return false. Otherwise continue
11387 checking any remaining arguments. */
11388 arg = gimple_call_arg (call, i++);
11389 if (!validate_arg (arg, code))
11390 goto end;
11391 break;
11394 while (1);
11396 /* We need gotos here since we can only have one VA_CLOSE in a
11397 function. */
11398 end: ;
11399 va_end (ap);
11401 return res;
11404 /* This function validates the types of a function call argument list
11405 against a specified list of tree_codes. If the last specifier is a 0,
11406 that represents an ellipses, otherwise the last specifier must be a
11407 VOID_TYPE. */
11409 bool
11410 validate_arglist (const_tree callexpr, ...)
11412 enum tree_code code;
11413 bool res = 0;
11414 va_list ap;
11415 const_call_expr_arg_iterator iter;
11416 const_tree arg;
11418 va_start (ap, callexpr);
11419 init_const_call_expr_arg_iterator (callexpr, &iter);
11423 code = (enum tree_code) va_arg (ap, int);
11424 switch (code)
11426 case 0:
11427 /* This signifies an ellipses, any further arguments are all ok. */
11428 res = true;
11429 goto end;
11430 case VOID_TYPE:
11431 /* This signifies an endlink, if no arguments remain, return
11432 true, otherwise return false. */
11433 res = !more_const_call_expr_args_p (&iter);
11434 goto end;
11435 default:
11436 /* If no parameters remain or the parameter's code does not
11437 match the specified code, return false. Otherwise continue
11438 checking any remaining arguments. */
11439 arg = next_const_call_expr_arg (&iter);
11440 if (!validate_arg (arg, code))
11441 goto end;
11442 break;
11445 while (1);
11447 /* We need gotos here since we can only have one VA_CLOSE in a
11448 function. */
11449 end: ;
11450 va_end (ap);
11452 return res;
11455 /* Default target-specific builtin expander that does nothing. */
11458 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11459 rtx target ATTRIBUTE_UNUSED,
11460 rtx subtarget ATTRIBUTE_UNUSED,
11461 enum machine_mode mode ATTRIBUTE_UNUSED,
11462 int ignore ATTRIBUTE_UNUSED)
11464 return NULL_RTX;
11467 /* Returns true is EXP represents data that would potentially reside
11468 in a readonly section. */
11470 static bool
11471 readonly_data_expr (tree exp)
11473 STRIP_NOPS (exp);
11475 if (TREE_CODE (exp) != ADDR_EXPR)
11476 return false;
11478 exp = get_base_address (TREE_OPERAND (exp, 0));
11479 if (!exp)
11480 return false;
11482 /* Make sure we call decl_readonly_section only for trees it
11483 can handle (since it returns true for everything it doesn't
11484 understand). */
11485 if (TREE_CODE (exp) == STRING_CST
11486 || TREE_CODE (exp) == CONSTRUCTOR
11487 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11488 return decl_readonly_section (exp, 0);
11489 else
11490 return false;
11493 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11494 to the call, and TYPE is its return type.
11496 Return NULL_TREE if no simplification was possible, otherwise return the
11497 simplified form of the call as a tree.
11499 The simplified form may be a constant or other expression which
11500 computes the same value, but in a more efficient manner (including
11501 calls to other builtin functions).
11503 The call may contain arguments which need to be evaluated, but
11504 which are not useful to determine the result of the call. In
11505 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11506 COMPOUND_EXPR will be an argument which must be evaluated.
11507 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11508 COMPOUND_EXPR in the chain will contain the tree for the simplified
11509 form of the builtin function call. */
11511 static tree
11512 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11514 if (!validate_arg (s1, POINTER_TYPE)
11515 || !validate_arg (s2, POINTER_TYPE))
11516 return NULL_TREE;
11517 else
11519 tree fn;
11520 const char *p1, *p2;
11522 p2 = c_getstr (s2);
11523 if (p2 == NULL)
11524 return NULL_TREE;
11526 p1 = c_getstr (s1);
11527 if (p1 != NULL)
11529 const char *r = strstr (p1, p2);
11530 tree tem;
11532 if (r == NULL)
11533 return build_int_cst (TREE_TYPE (s1), 0);
11535 /* Return an offset into the constant string argument. */
11536 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11537 return fold_convert_loc (loc, type, tem);
11540 /* The argument is const char *, and the result is char *, so we need
11541 a type conversion here to avoid a warning. */
11542 if (p2[0] == '\0')
11543 return fold_convert_loc (loc, type, s1);
11545 if (p2[1] != '\0')
11546 return NULL_TREE;
11548 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11549 if (!fn)
11550 return NULL_TREE;
11552 /* New argument list transforming strstr(s1, s2) to
11553 strchr(s1, s2[0]). */
11554 return build_call_expr_loc (loc, fn, 2, s1,
11555 build_int_cst (integer_type_node, p2[0]));
11559 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11560 the call, and TYPE is its return type.
11562 Return NULL_TREE if no simplification was possible, otherwise return the
11563 simplified form of the call as a tree.
11565 The simplified form may be a constant or other expression which
11566 computes the same value, but in a more efficient manner (including
11567 calls to other builtin functions).
11569 The call may contain arguments which need to be evaluated, but
11570 which are not useful to determine the result of the call. In
11571 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11572 COMPOUND_EXPR will be an argument which must be evaluated.
11573 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11574 COMPOUND_EXPR in the chain will contain the tree for the simplified
11575 form of the builtin function call. */
11577 static tree
11578 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11580 if (!validate_arg (s1, POINTER_TYPE)
11581 || !validate_arg (s2, INTEGER_TYPE))
11582 return NULL_TREE;
11583 else
11585 const char *p1;
11587 if (TREE_CODE (s2) != INTEGER_CST)
11588 return NULL_TREE;
11590 p1 = c_getstr (s1);
11591 if (p1 != NULL)
11593 char c;
11594 const char *r;
11595 tree tem;
11597 if (target_char_cast (s2, &c))
11598 return NULL_TREE;
11600 r = strchr (p1, c);
11602 if (r == NULL)
11603 return build_int_cst (TREE_TYPE (s1), 0);
11605 /* Return an offset into the constant string argument. */
11606 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11607 return fold_convert_loc (loc, type, tem);
11609 return NULL_TREE;
11613 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11614 the call, and TYPE is its return type.
11616 Return NULL_TREE if no simplification was possible, otherwise return the
11617 simplified form of the call as a tree.
11619 The simplified form may be a constant or other expression which
11620 computes the same value, but in a more efficient manner (including
11621 calls to other builtin functions).
11623 The call may contain arguments which need to be evaluated, but
11624 which are not useful to determine the result of the call. In
11625 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11626 COMPOUND_EXPR will be an argument which must be evaluated.
11627 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11628 COMPOUND_EXPR in the chain will contain the tree for the simplified
11629 form of the builtin function call. */
11631 static tree
11632 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11634 if (!validate_arg (s1, POINTER_TYPE)
11635 || !validate_arg (s2, INTEGER_TYPE))
11636 return NULL_TREE;
11637 else
11639 tree fn;
11640 const char *p1;
11642 if (TREE_CODE (s2) != INTEGER_CST)
11643 return NULL_TREE;
11645 p1 = c_getstr (s1);
11646 if (p1 != NULL)
11648 char c;
11649 const char *r;
11650 tree tem;
11652 if (target_char_cast (s2, &c))
11653 return NULL_TREE;
11655 r = strrchr (p1, c);
11657 if (r == NULL)
11658 return build_int_cst (TREE_TYPE (s1), 0);
11660 /* Return an offset into the constant string argument. */
11661 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11662 return fold_convert_loc (loc, type, tem);
11665 if (! integer_zerop (s2))
11666 return NULL_TREE;
11668 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11669 if (!fn)
11670 return NULL_TREE;
11672 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11673 return build_call_expr_loc (loc, fn, 2, s1, s2);
11677 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11678 to the call, and TYPE is its return type.
11680 Return NULL_TREE if no simplification was possible, otherwise return the
11681 simplified form of the call as a tree.
11683 The simplified form may be a constant or other expression which
11684 computes the same value, but in a more efficient manner (including
11685 calls to other builtin functions).
11687 The call may contain arguments which need to be evaluated, but
11688 which are not useful to determine the result of the call. In
11689 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11690 COMPOUND_EXPR will be an argument which must be evaluated.
11691 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11692 COMPOUND_EXPR in the chain will contain the tree for the simplified
11693 form of the builtin function call. */
11695 static tree
11696 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11698 if (!validate_arg (s1, POINTER_TYPE)
11699 || !validate_arg (s2, POINTER_TYPE))
11700 return NULL_TREE;
11701 else
11703 tree fn;
11704 const char *p1, *p2;
11706 p2 = c_getstr (s2);
11707 if (p2 == NULL)
11708 return NULL_TREE;
11710 p1 = c_getstr (s1);
11711 if (p1 != NULL)
11713 const char *r = strpbrk (p1, p2);
11714 tree tem;
11716 if (r == NULL)
11717 return build_int_cst (TREE_TYPE (s1), 0);
11719 /* Return an offset into the constant string argument. */
11720 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11721 return fold_convert_loc (loc, type, tem);
11724 if (p2[0] == '\0')
11725 /* strpbrk(x, "") == NULL.
11726 Evaluate and ignore s1 in case it had side-effects. */
11727 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11729 if (p2[1] != '\0')
11730 return NULL_TREE; /* Really call strpbrk. */
11732 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11733 if (!fn)
11734 return NULL_TREE;
11736 /* New argument list transforming strpbrk(s1, s2) to
11737 strchr(s1, s2[0]). */
11738 return build_call_expr_loc (loc, fn, 2, s1,
11739 build_int_cst (integer_type_node, p2[0]));
11743 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11744 to the call.
11746 Return NULL_TREE if no simplification was possible, otherwise return the
11747 simplified form of the call as a tree.
11749 The simplified form may be a constant or other expression which
11750 computes the same value, but in a more efficient manner (including
11751 calls to other builtin functions).
11753 The call may contain arguments which need to be evaluated, but
11754 which are not useful to determine the result of the call. In
11755 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11756 COMPOUND_EXPR will be an argument which must be evaluated.
11757 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11758 COMPOUND_EXPR in the chain will contain the tree for the simplified
11759 form of the builtin function call. */
11761 static tree
11762 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11764 if (!validate_arg (dst, POINTER_TYPE)
11765 || !validate_arg (src, POINTER_TYPE))
11766 return NULL_TREE;
11767 else
11769 const char *p = c_getstr (src);
11771 /* If the string length is zero, return the dst parameter. */
11772 if (p && *p == '\0')
11773 return dst;
11775 if (optimize_insn_for_speed_p ())
11777 /* See if we can store by pieces into (dst + strlen(dst)). */
11778 tree newdst, call;
11779 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11780 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11782 if (!strlen_fn || !strcpy_fn)
11783 return NULL_TREE;
11785 /* If we don't have a movstr we don't want to emit an strcpy
11786 call. We have to do that if the length of the source string
11787 isn't computable (in that case we can use memcpy probably
11788 later expanding to a sequence of mov instructions). If we
11789 have movstr instructions we can emit strcpy calls. */
11790 if (!HAVE_movstr)
11792 tree len = c_strlen (src, 1);
11793 if (! len || TREE_SIDE_EFFECTS (len))
11794 return NULL_TREE;
11797 /* Stabilize the argument list. */
11798 dst = builtin_save_expr (dst);
11800 /* Create strlen (dst). */
11801 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11802 /* Create (dst p+ strlen (dst)). */
11804 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11805 newdst = builtin_save_expr (newdst);
11807 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11808 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11810 return NULL_TREE;
11814 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11815 arguments to the call.
11817 Return NULL_TREE if no simplification was possible, otherwise return the
11818 simplified form of the call as a tree.
11820 The simplified form may be a constant or other expression which
11821 computes the same value, but in a more efficient manner (including
11822 calls to other builtin functions).
11824 The call may contain arguments which need to be evaluated, but
11825 which are not useful to determine the result of the call. In
11826 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11827 COMPOUND_EXPR will be an argument which must be evaluated.
11828 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11829 COMPOUND_EXPR in the chain will contain the tree for the simplified
11830 form of the builtin function call. */
11832 static tree
11833 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11835 if (!validate_arg (dst, POINTER_TYPE)
11836 || !validate_arg (src, POINTER_TYPE)
11837 || !validate_arg (len, INTEGER_TYPE))
11838 return NULL_TREE;
11839 else
11841 const char *p = c_getstr (src);
11843 /* If the requested length is zero, or the src parameter string
11844 length is zero, return the dst parameter. */
11845 if (integer_zerop (len) || (p && *p == '\0'))
11846 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11848 /* If the requested len is greater than or equal to the string
11849 length, call strcat. */
11850 if (TREE_CODE (len) == INTEGER_CST && p
11851 && compare_tree_int (len, strlen (p)) >= 0)
11853 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11855 /* If the replacement _DECL isn't initialized, don't do the
11856 transformation. */
11857 if (!fn)
11858 return NULL_TREE;
11860 return build_call_expr_loc (loc, fn, 2, dst, src);
11862 return NULL_TREE;
11866 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11867 to the call.
11869 Return NULL_TREE if no simplification was possible, otherwise return the
11870 simplified form of the call as a tree.
11872 The simplified form may be a constant or other expression which
11873 computes the same value, but in a more efficient manner (including
11874 calls to other builtin functions).
11876 The call may contain arguments which need to be evaluated, but
11877 which are not useful to determine the result of the call. In
11878 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11879 COMPOUND_EXPR will be an argument which must be evaluated.
11880 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11881 COMPOUND_EXPR in the chain will contain the tree for the simplified
11882 form of the builtin function call. */
11884 static tree
11885 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11887 if (!validate_arg (s1, POINTER_TYPE)
11888 || !validate_arg (s2, POINTER_TYPE))
11889 return NULL_TREE;
11890 else
11892 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11894 /* If both arguments are constants, evaluate at compile-time. */
11895 if (p1 && p2)
11897 const size_t r = strspn (p1, p2);
11898 return build_int_cst (size_type_node, r);
11901 /* If either argument is "", return NULL_TREE. */
11902 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11903 /* Evaluate and ignore both arguments in case either one has
11904 side-effects. */
11905 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11906 s1, s2);
11907 return NULL_TREE;
11911 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11912 to the call.
11914 Return NULL_TREE if no simplification was possible, otherwise return the
11915 simplified form of the call as a tree.
11917 The simplified form may be a constant or other expression which
11918 computes the same value, but in a more efficient manner (including
11919 calls to other builtin functions).
11921 The call may contain arguments which need to be evaluated, but
11922 which are not useful to determine the result of the call. In
11923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11924 COMPOUND_EXPR will be an argument which must be evaluated.
11925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11926 COMPOUND_EXPR in the chain will contain the tree for the simplified
11927 form of the builtin function call. */
11929 static tree
11930 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11932 if (!validate_arg (s1, POINTER_TYPE)
11933 || !validate_arg (s2, POINTER_TYPE))
11934 return NULL_TREE;
11935 else
11937 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11939 /* If both arguments are constants, evaluate at compile-time. */
11940 if (p1 && p2)
11942 const size_t r = strcspn (p1, p2);
11943 return build_int_cst (size_type_node, r);
11946 /* If the first argument is "", return NULL_TREE. */
11947 if (p1 && *p1 == '\0')
11949 /* Evaluate and ignore argument s2 in case it has
11950 side-effects. */
11951 return omit_one_operand_loc (loc, size_type_node,
11952 size_zero_node, s2);
11955 /* If the second argument is "", return __builtin_strlen(s1). */
11956 if (p2 && *p2 == '\0')
11958 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11960 /* If the replacement _DECL isn't initialized, don't do the
11961 transformation. */
11962 if (!fn)
11963 return NULL_TREE;
11965 return build_call_expr_loc (loc, fn, 1, s1);
11967 return NULL_TREE;
11971 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11972 to the call. IGNORE is true if the value returned
11973 by the builtin will be ignored. UNLOCKED is true is true if this
11974 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11975 the known length of the string. Return NULL_TREE if no simplification
11976 was possible. */
11978 tree
11979 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11980 bool ignore, bool unlocked, tree len)
11982 /* If we're using an unlocked function, assume the other unlocked
11983 functions exist explicitly. */
11984 tree const fn_fputc = (unlocked
11985 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11986 : builtin_decl_implicit (BUILT_IN_FPUTC));
11987 tree const fn_fwrite = (unlocked
11988 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11989 : builtin_decl_implicit (BUILT_IN_FWRITE));
11991 /* If the return value is used, don't do the transformation. */
11992 if (!ignore)
11993 return NULL_TREE;
11995 /* Verify the arguments in the original call. */
11996 if (!validate_arg (arg0, POINTER_TYPE)
11997 || !validate_arg (arg1, POINTER_TYPE))
11998 return NULL_TREE;
12000 if (! len)
12001 len = c_strlen (arg0, 0);
12003 /* Get the length of the string passed to fputs. If the length
12004 can't be determined, punt. */
12005 if (!len
12006 || TREE_CODE (len) != INTEGER_CST)
12007 return NULL_TREE;
12009 switch (compare_tree_int (len, 1))
12011 case -1: /* length is 0, delete the call entirely . */
12012 return omit_one_operand_loc (loc, integer_type_node,
12013 integer_zero_node, arg1);;
12015 case 0: /* length is 1, call fputc. */
12017 const char *p = c_getstr (arg0);
12019 if (p != NULL)
12021 if (fn_fputc)
12022 return build_call_expr_loc (loc, fn_fputc, 2,
12023 build_int_cst
12024 (integer_type_node, p[0]), arg1);
12025 else
12026 return NULL_TREE;
12029 /* FALLTHROUGH */
12030 case 1: /* length is greater than 1, call fwrite. */
12032 /* If optimizing for size keep fputs. */
12033 if (optimize_function_for_size_p (cfun))
12034 return NULL_TREE;
12035 /* New argument list transforming fputs(string, stream) to
12036 fwrite(string, 1, len, stream). */
12037 if (fn_fwrite)
12038 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12039 size_one_node, len, arg1);
12040 else
12041 return NULL_TREE;
12043 default:
12044 gcc_unreachable ();
12046 return NULL_TREE;
12049 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12050 produced. False otherwise. This is done so that we don't output the error
12051 or warning twice or three times. */
12053 bool
12054 fold_builtin_next_arg (tree exp, bool va_start_p)
12056 tree fntype = TREE_TYPE (current_function_decl);
12057 int nargs = call_expr_nargs (exp);
12058 tree arg;
12059 /* There is good chance the current input_location points inside the
12060 definition of the va_start macro (perhaps on the token for
12061 builtin) in a system header, so warnings will not be emitted.
12062 Use the location in real source code. */
12063 source_location current_location =
12064 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12065 NULL);
12067 if (!stdarg_p (fntype))
12069 error ("%<va_start%> used in function with fixed args");
12070 return true;
12073 if (va_start_p)
12075 if (va_start_p && (nargs != 2))
12077 error ("wrong number of arguments to function %<va_start%>");
12078 return true;
12080 arg = CALL_EXPR_ARG (exp, 1);
12082 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12083 when we checked the arguments and if needed issued a warning. */
12084 else
12086 if (nargs == 0)
12088 /* Evidently an out of date version of <stdarg.h>; can't validate
12089 va_start's second argument, but can still work as intended. */
12090 warning_at (current_location,
12091 OPT_Wvarargs,
12092 "%<__builtin_next_arg%> called without an argument");
12093 return true;
12095 else if (nargs > 1)
12097 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12098 return true;
12100 arg = CALL_EXPR_ARG (exp, 0);
12103 if (TREE_CODE (arg) == SSA_NAME)
12104 arg = SSA_NAME_VAR (arg);
12106 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12107 or __builtin_next_arg (0) the first time we see it, after checking
12108 the arguments and if needed issuing a warning. */
12109 if (!integer_zerop (arg))
12111 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12113 /* Strip off all nops for the sake of the comparison. This
12114 is not quite the same as STRIP_NOPS. It does more.
12115 We must also strip off INDIRECT_EXPR for C++ reference
12116 parameters. */
12117 while (CONVERT_EXPR_P (arg)
12118 || TREE_CODE (arg) == INDIRECT_REF)
12119 arg = TREE_OPERAND (arg, 0);
12120 if (arg != last_parm)
12122 /* FIXME: Sometimes with the tree optimizers we can get the
12123 not the last argument even though the user used the last
12124 argument. We just warn and set the arg to be the last
12125 argument so that we will get wrong-code because of
12126 it. */
12127 warning_at (current_location,
12128 OPT_Wvarargs,
12129 "second parameter of %<va_start%> not last named argument");
12132 /* Undefined by C99 7.15.1.4p4 (va_start):
12133 "If the parameter parmN is declared with the register storage
12134 class, with a function or array type, or with a type that is
12135 not compatible with the type that results after application of
12136 the default argument promotions, the behavior is undefined."
12138 else if (DECL_REGISTER (arg))
12140 warning_at (current_location,
12141 OPT_Wvarargs,
12142 "undefined behaviour when second parameter of "
12143 "%<va_start%> is declared with %<register%> storage");
12146 /* We want to verify the second parameter just once before the tree
12147 optimizers are run and then avoid keeping it in the tree,
12148 as otherwise we could warn even for correct code like:
12149 void foo (int i, ...)
12150 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12151 if (va_start_p)
12152 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12153 else
12154 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12156 return false;
12160 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12161 ORIG may be null if this is a 2-argument call. We don't attempt to
12162 simplify calls with more than 3 arguments.
12164 Return NULL_TREE if no simplification was possible, otherwise return the
12165 simplified form of the call as a tree. If IGNORED is true, it means that
12166 the caller does not use the returned value of the function. */
12168 static tree
12169 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12170 tree orig, int ignored)
12172 tree call, retval;
12173 const char *fmt_str = NULL;
12175 /* Verify the required arguments in the original call. We deal with two
12176 types of sprintf() calls: 'sprintf (str, fmt)' and
12177 'sprintf (dest, "%s", orig)'. */
12178 if (!validate_arg (dest, POINTER_TYPE)
12179 || !validate_arg (fmt, POINTER_TYPE))
12180 return NULL_TREE;
12181 if (orig && !validate_arg (orig, POINTER_TYPE))
12182 return NULL_TREE;
12184 /* Check whether the format is a literal string constant. */
12185 fmt_str = c_getstr (fmt);
12186 if (fmt_str == NULL)
12187 return NULL_TREE;
12189 call = NULL_TREE;
12190 retval = NULL_TREE;
12192 if (!init_target_chars ())
12193 return NULL_TREE;
12195 /* If the format doesn't contain % args or %%, use strcpy. */
12196 if (strchr (fmt_str, target_percent) == NULL)
12198 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12200 if (!fn)
12201 return NULL_TREE;
12203 /* Don't optimize sprintf (buf, "abc", ptr++). */
12204 if (orig)
12205 return NULL_TREE;
12207 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12208 'format' is known to contain no % formats. */
12209 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12210 if (!ignored)
12211 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12214 /* If the format is "%s", use strcpy if the result isn't used. */
12215 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12217 tree fn;
12218 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12220 if (!fn)
12221 return NULL_TREE;
12223 /* Don't crash on sprintf (str1, "%s"). */
12224 if (!orig)
12225 return NULL_TREE;
12227 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12228 if (!ignored)
12230 retval = c_strlen (orig, 1);
12231 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12232 return NULL_TREE;
12234 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12237 if (call && retval)
12239 retval = fold_convert_loc
12240 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12241 retval);
12242 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12244 else
12245 return call;
12248 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12249 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12250 attempt to simplify calls with more than 4 arguments.
12252 Return NULL_TREE if no simplification was possible, otherwise return the
12253 simplified form of the call as a tree. If IGNORED is true, it means that
12254 the caller does not use the returned value of the function. */
12256 static tree
12257 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12258 tree orig, int ignored)
12260 tree call, retval;
12261 const char *fmt_str = NULL;
12262 unsigned HOST_WIDE_INT destlen;
12264 /* Verify the required arguments in the original call. We deal with two
12265 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12266 'snprintf (dest, cst, "%s", orig)'. */
12267 if (!validate_arg (dest, POINTER_TYPE)
12268 || !validate_arg (destsize, INTEGER_TYPE)
12269 || !validate_arg (fmt, POINTER_TYPE))
12270 return NULL_TREE;
12271 if (orig && !validate_arg (orig, POINTER_TYPE))
12272 return NULL_TREE;
12274 if (!host_integerp (destsize, 1))
12275 return NULL_TREE;
12277 /* Check whether the format is a literal string constant. */
12278 fmt_str = c_getstr (fmt);
12279 if (fmt_str == NULL)
12280 return NULL_TREE;
12282 call = NULL_TREE;
12283 retval = NULL_TREE;
12285 if (!init_target_chars ())
12286 return NULL_TREE;
12288 destlen = tree_low_cst (destsize, 1);
12290 /* If the format doesn't contain % args or %%, use strcpy. */
12291 if (strchr (fmt_str, target_percent) == NULL)
12293 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12294 size_t len = strlen (fmt_str);
12296 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12297 if (orig)
12298 return NULL_TREE;
12300 /* We could expand this as
12301 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12302 or to
12303 memcpy (str, fmt_with_nul_at_cstm1, cst);
12304 but in the former case that might increase code size
12305 and in the latter case grow .rodata section too much.
12306 So punt for now. */
12307 if (len >= destlen)
12308 return NULL_TREE;
12310 if (!fn)
12311 return NULL_TREE;
12313 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12314 'format' is known to contain no % formats and
12315 strlen (fmt) < cst. */
12316 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12318 if (!ignored)
12319 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12322 /* If the format is "%s", use strcpy if the result isn't used. */
12323 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12325 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12326 unsigned HOST_WIDE_INT origlen;
12328 /* Don't crash on snprintf (str1, cst, "%s"). */
12329 if (!orig)
12330 return NULL_TREE;
12332 retval = c_strlen (orig, 1);
12333 if (!retval || !host_integerp (retval, 1))
12334 return NULL_TREE;
12336 origlen = tree_low_cst (retval, 1);
12337 /* We could expand this as
12338 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12339 or to
12340 memcpy (str1, str2_with_nul_at_cstm1, cst);
12341 but in the former case that might increase code size
12342 and in the latter case grow .rodata section too much.
12343 So punt for now. */
12344 if (origlen >= destlen)
12345 return NULL_TREE;
12347 /* Convert snprintf (str1, cst, "%s", str2) into
12348 strcpy (str1, str2) if strlen (str2) < cst. */
12349 if (!fn)
12350 return NULL_TREE;
12352 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12354 if (ignored)
12355 retval = NULL_TREE;
12358 if (call && retval)
12360 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12361 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12362 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12364 else
12365 return call;
12368 /* Expand a call EXP to __builtin_object_size. */
12371 expand_builtin_object_size (tree exp)
12373 tree ost;
12374 int object_size_type;
12375 tree fndecl = get_callee_fndecl (exp);
12377 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12379 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12380 exp, fndecl);
12381 expand_builtin_trap ();
12382 return const0_rtx;
12385 ost = CALL_EXPR_ARG (exp, 1);
12386 STRIP_NOPS (ost);
12388 if (TREE_CODE (ost) != INTEGER_CST
12389 || tree_int_cst_sgn (ost) < 0
12390 || compare_tree_int (ost, 3) > 0)
12392 error ("%Klast argument of %D is not integer constant between 0 and 3",
12393 exp, fndecl);
12394 expand_builtin_trap ();
12395 return const0_rtx;
12398 object_size_type = tree_low_cst (ost, 0);
12400 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12403 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12404 FCODE is the BUILT_IN_* to use.
12405 Return NULL_RTX if we failed; the caller should emit a normal call,
12406 otherwise try to get the result in TARGET, if convenient (and in
12407 mode MODE if that's convenient). */
12409 static rtx
12410 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12411 enum built_in_function fcode)
12413 tree dest, src, len, size;
12415 if (!validate_arglist (exp,
12416 POINTER_TYPE,
12417 fcode == BUILT_IN_MEMSET_CHK
12418 ? INTEGER_TYPE : POINTER_TYPE,
12419 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12420 return NULL_RTX;
12422 dest = CALL_EXPR_ARG (exp, 0);
12423 src = CALL_EXPR_ARG (exp, 1);
12424 len = CALL_EXPR_ARG (exp, 2);
12425 size = CALL_EXPR_ARG (exp, 3);
12427 if (! host_integerp (size, 1))
12428 return NULL_RTX;
12430 if (host_integerp (len, 1) || integer_all_onesp (size))
12432 tree fn;
12434 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12436 warning_at (tree_nonartificial_location (exp),
12437 0, "%Kcall to %D will always overflow destination buffer",
12438 exp, get_callee_fndecl (exp));
12439 return NULL_RTX;
12442 fn = NULL_TREE;
12443 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12444 mem{cpy,pcpy,move,set} is available. */
12445 switch (fcode)
12447 case BUILT_IN_MEMCPY_CHK:
12448 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12449 break;
12450 case BUILT_IN_MEMPCPY_CHK:
12451 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12452 break;
12453 case BUILT_IN_MEMMOVE_CHK:
12454 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12455 break;
12456 case BUILT_IN_MEMSET_CHK:
12457 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12458 break;
12459 default:
12460 break;
12463 if (! fn)
12464 return NULL_RTX;
12466 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12467 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12468 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12469 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12471 else if (fcode == BUILT_IN_MEMSET_CHK)
12472 return NULL_RTX;
12473 else
12475 unsigned int dest_align = get_pointer_alignment (dest);
12477 /* If DEST is not a pointer type, call the normal function. */
12478 if (dest_align == 0)
12479 return NULL_RTX;
12481 /* If SRC and DEST are the same (and not volatile), do nothing. */
12482 if (operand_equal_p (src, dest, 0))
12484 tree expr;
12486 if (fcode != BUILT_IN_MEMPCPY_CHK)
12488 /* Evaluate and ignore LEN in case it has side-effects. */
12489 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12490 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12493 expr = fold_build_pointer_plus (dest, len);
12494 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12497 /* __memmove_chk special case. */
12498 if (fcode == BUILT_IN_MEMMOVE_CHK)
12500 unsigned int src_align = get_pointer_alignment (src);
12502 if (src_align == 0)
12503 return NULL_RTX;
12505 /* If src is categorized for a readonly section we can use
12506 normal __memcpy_chk. */
12507 if (readonly_data_expr (src))
12509 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12510 if (!fn)
12511 return NULL_RTX;
12512 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12513 dest, src, len, size);
12514 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12515 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12516 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12519 return NULL_RTX;
12523 /* Emit warning if a buffer overflow is detected at compile time. */
12525 static void
12526 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12528 int is_strlen = 0;
12529 tree len, size;
12530 location_t loc = tree_nonartificial_location (exp);
12532 switch (fcode)
12534 case BUILT_IN_STRCPY_CHK:
12535 case BUILT_IN_STPCPY_CHK:
12536 /* For __strcat_chk the warning will be emitted only if overflowing
12537 by at least strlen (dest) + 1 bytes. */
12538 case BUILT_IN_STRCAT_CHK:
12539 len = CALL_EXPR_ARG (exp, 1);
12540 size = CALL_EXPR_ARG (exp, 2);
12541 is_strlen = 1;
12542 break;
12543 case BUILT_IN_STRNCAT_CHK:
12544 case BUILT_IN_STRNCPY_CHK:
12545 case BUILT_IN_STPNCPY_CHK:
12546 len = CALL_EXPR_ARG (exp, 2);
12547 size = CALL_EXPR_ARG (exp, 3);
12548 break;
12549 case BUILT_IN_SNPRINTF_CHK:
12550 case BUILT_IN_VSNPRINTF_CHK:
12551 len = CALL_EXPR_ARG (exp, 1);
12552 size = CALL_EXPR_ARG (exp, 3);
12553 break;
12554 default:
12555 gcc_unreachable ();
12558 if (!len || !size)
12559 return;
12561 if (! host_integerp (size, 1) || integer_all_onesp (size))
12562 return;
12564 if (is_strlen)
12566 len = c_strlen (len, 1);
12567 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12568 return;
12570 else if (fcode == BUILT_IN_STRNCAT_CHK)
12572 tree src = CALL_EXPR_ARG (exp, 1);
12573 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12574 return;
12575 src = c_strlen (src, 1);
12576 if (! src || ! host_integerp (src, 1))
12578 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12579 exp, get_callee_fndecl (exp));
12580 return;
12582 else if (tree_int_cst_lt (src, size))
12583 return;
12585 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12586 return;
12588 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12589 exp, get_callee_fndecl (exp));
12592 /* Emit warning if a buffer overflow is detected at compile time
12593 in __sprintf_chk/__vsprintf_chk calls. */
12595 static void
12596 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12598 tree size, len, fmt;
12599 const char *fmt_str;
12600 int nargs = call_expr_nargs (exp);
12602 /* Verify the required arguments in the original call. */
12604 if (nargs < 4)
12605 return;
12606 size = CALL_EXPR_ARG (exp, 2);
12607 fmt = CALL_EXPR_ARG (exp, 3);
12609 if (! host_integerp (size, 1) || integer_all_onesp (size))
12610 return;
12612 /* Check whether the format is a literal string constant. */
12613 fmt_str = c_getstr (fmt);
12614 if (fmt_str == NULL)
12615 return;
12617 if (!init_target_chars ())
12618 return;
12620 /* If the format doesn't contain % args or %%, we know its size. */
12621 if (strchr (fmt_str, target_percent) == 0)
12622 len = build_int_cstu (size_type_node, strlen (fmt_str));
12623 /* If the format is "%s" and first ... argument is a string literal,
12624 we know it too. */
12625 else if (fcode == BUILT_IN_SPRINTF_CHK
12626 && strcmp (fmt_str, target_percent_s) == 0)
12628 tree arg;
12630 if (nargs < 5)
12631 return;
12632 arg = CALL_EXPR_ARG (exp, 4);
12633 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12634 return;
12636 len = c_strlen (arg, 1);
12637 if (!len || ! host_integerp (len, 1))
12638 return;
12640 else
12641 return;
12643 if (! tree_int_cst_lt (len, size))
12644 warning_at (tree_nonartificial_location (exp),
12645 0, "%Kcall to %D will always overflow destination buffer",
12646 exp, get_callee_fndecl (exp));
12649 /* Emit warning if a free is called with address of a variable. */
12651 static void
12652 maybe_emit_free_warning (tree exp)
12654 tree arg = CALL_EXPR_ARG (exp, 0);
12656 STRIP_NOPS (arg);
12657 if (TREE_CODE (arg) != ADDR_EXPR)
12658 return;
12660 arg = get_base_address (TREE_OPERAND (arg, 0));
12661 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12662 return;
12664 if (SSA_VAR_P (arg))
12665 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12666 "%Kattempt to free a non-heap object %qD", exp, arg);
12667 else
12668 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12669 "%Kattempt to free a non-heap object", exp);
12672 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12673 if possible. */
12675 tree
12676 fold_builtin_object_size (tree ptr, tree ost)
12678 unsigned HOST_WIDE_INT bytes;
12679 int object_size_type;
12681 if (!validate_arg (ptr, POINTER_TYPE)
12682 || !validate_arg (ost, INTEGER_TYPE))
12683 return NULL_TREE;
12685 STRIP_NOPS (ost);
12687 if (TREE_CODE (ost) != INTEGER_CST
12688 || tree_int_cst_sgn (ost) < 0
12689 || compare_tree_int (ost, 3) > 0)
12690 return NULL_TREE;
12692 object_size_type = tree_low_cst (ost, 0);
12694 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12695 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12696 and (size_t) 0 for types 2 and 3. */
12697 if (TREE_SIDE_EFFECTS (ptr))
12698 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12700 if (TREE_CODE (ptr) == ADDR_EXPR)
12702 bytes = compute_builtin_object_size (ptr, object_size_type);
12703 if (double_int_fits_to_tree_p (size_type_node,
12704 double_int::from_uhwi (bytes)))
12705 return build_int_cstu (size_type_node, bytes);
12707 else if (TREE_CODE (ptr) == SSA_NAME)
12709 /* If object size is not known yet, delay folding until
12710 later. Maybe subsequent passes will help determining
12711 it. */
12712 bytes = compute_builtin_object_size (ptr, object_size_type);
12713 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12714 && double_int_fits_to_tree_p (size_type_node,
12715 double_int::from_uhwi (bytes)))
12716 return build_int_cstu (size_type_node, bytes);
12719 return NULL_TREE;
12722 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12723 DEST, SRC, LEN, and SIZE are the arguments to the call.
12724 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12725 code of the builtin. If MAXLEN is not NULL, it is maximum length
12726 passed as third argument. */
12728 tree
12729 fold_builtin_memory_chk (location_t loc, tree fndecl,
12730 tree dest, tree src, tree len, tree size,
12731 tree maxlen, bool ignore,
12732 enum built_in_function fcode)
12734 tree fn;
12736 if (!validate_arg (dest, POINTER_TYPE)
12737 || !validate_arg (src,
12738 (fcode == BUILT_IN_MEMSET_CHK
12739 ? INTEGER_TYPE : POINTER_TYPE))
12740 || !validate_arg (len, INTEGER_TYPE)
12741 || !validate_arg (size, INTEGER_TYPE))
12742 return NULL_TREE;
12744 /* If SRC and DEST are the same (and not volatile), return DEST
12745 (resp. DEST+LEN for __mempcpy_chk). */
12746 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12748 if (fcode != BUILT_IN_MEMPCPY_CHK)
12749 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12750 dest, len);
12751 else
12753 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12754 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12758 if (! host_integerp (size, 1))
12759 return NULL_TREE;
12761 if (! integer_all_onesp (size))
12763 if (! host_integerp (len, 1))
12765 /* If LEN is not constant, try MAXLEN too.
12766 For MAXLEN only allow optimizing into non-_ocs function
12767 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12768 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12770 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12772 /* (void) __mempcpy_chk () can be optimized into
12773 (void) __memcpy_chk (). */
12774 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12775 if (!fn)
12776 return NULL_TREE;
12778 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12780 return NULL_TREE;
12783 else
12784 maxlen = len;
12786 if (tree_int_cst_lt (size, maxlen))
12787 return NULL_TREE;
12790 fn = NULL_TREE;
12791 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12792 mem{cpy,pcpy,move,set} is available. */
12793 switch (fcode)
12795 case BUILT_IN_MEMCPY_CHK:
12796 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12797 break;
12798 case BUILT_IN_MEMPCPY_CHK:
12799 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12800 break;
12801 case BUILT_IN_MEMMOVE_CHK:
12802 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12803 break;
12804 case BUILT_IN_MEMSET_CHK:
12805 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12806 break;
12807 default:
12808 break;
12811 if (!fn)
12812 return NULL_TREE;
12814 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12817 /* Fold a call to the __st[rp]cpy_chk builtin.
12818 DEST, SRC, and SIZE are the arguments to the call.
12819 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12820 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12821 strings passed as second argument. */
12823 tree
12824 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12825 tree src, tree size,
12826 tree maxlen, bool ignore,
12827 enum built_in_function fcode)
12829 tree len, fn;
12831 if (!validate_arg (dest, POINTER_TYPE)
12832 || !validate_arg (src, POINTER_TYPE)
12833 || !validate_arg (size, INTEGER_TYPE))
12834 return NULL_TREE;
12836 /* If SRC and DEST are the same (and not volatile), return DEST. */
12837 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12838 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12840 if (! host_integerp (size, 1))
12841 return NULL_TREE;
12843 if (! integer_all_onesp (size))
12845 len = c_strlen (src, 1);
12846 if (! len || ! host_integerp (len, 1))
12848 /* If LEN is not constant, try MAXLEN too.
12849 For MAXLEN only allow optimizing into non-_ocs function
12850 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12851 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12853 if (fcode == BUILT_IN_STPCPY_CHK)
12855 if (! ignore)
12856 return NULL_TREE;
12858 /* If return value of __stpcpy_chk is ignored,
12859 optimize into __strcpy_chk. */
12860 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12861 if (!fn)
12862 return NULL_TREE;
12864 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12867 if (! len || TREE_SIDE_EFFECTS (len))
12868 return NULL_TREE;
12870 /* If c_strlen returned something, but not a constant,
12871 transform __strcpy_chk into __memcpy_chk. */
12872 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12873 if (!fn)
12874 return NULL_TREE;
12876 len = fold_convert_loc (loc, size_type_node, len);
12877 len = size_binop_loc (loc, PLUS_EXPR, len,
12878 build_int_cst (size_type_node, 1));
12879 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12880 build_call_expr_loc (loc, fn, 4,
12881 dest, src, len, size));
12884 else
12885 maxlen = len;
12887 if (! tree_int_cst_lt (maxlen, size))
12888 return NULL_TREE;
12891 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12892 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12893 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12894 if (!fn)
12895 return NULL_TREE;
12897 return build_call_expr_loc (loc, fn, 2, dest, src);
12900 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12901 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12902 length passed as third argument. IGNORE is true if return value can be
12903 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12905 tree
12906 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12907 tree len, tree size, tree maxlen, bool ignore,
12908 enum built_in_function fcode)
12910 tree fn;
12912 if (!validate_arg (dest, POINTER_TYPE)
12913 || !validate_arg (src, POINTER_TYPE)
12914 || !validate_arg (len, INTEGER_TYPE)
12915 || !validate_arg (size, INTEGER_TYPE))
12916 return NULL_TREE;
12918 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12920 /* If return value of __stpncpy_chk is ignored,
12921 optimize into __strncpy_chk. */
12922 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12923 if (fn)
12924 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12927 if (! host_integerp (size, 1))
12928 return NULL_TREE;
12930 if (! integer_all_onesp (size))
12932 if (! host_integerp (len, 1))
12934 /* If LEN is not constant, try MAXLEN too.
12935 For MAXLEN only allow optimizing into non-_ocs function
12936 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12937 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12938 return NULL_TREE;
12940 else
12941 maxlen = len;
12943 if (tree_int_cst_lt (size, maxlen))
12944 return NULL_TREE;
12947 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12948 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12949 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12950 if (!fn)
12951 return NULL_TREE;
12953 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12956 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12957 are the arguments to the call. */
12959 static tree
12960 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12961 tree src, tree size)
12963 tree fn;
12964 const char *p;
12966 if (!validate_arg (dest, POINTER_TYPE)
12967 || !validate_arg (src, POINTER_TYPE)
12968 || !validate_arg (size, INTEGER_TYPE))
12969 return NULL_TREE;
12971 p = c_getstr (src);
12972 /* If the SRC parameter is "", return DEST. */
12973 if (p && *p == '\0')
12974 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12976 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12977 return NULL_TREE;
12979 /* If __builtin_strcat_chk is used, assume strcat is available. */
12980 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12981 if (!fn)
12982 return NULL_TREE;
12984 return build_call_expr_loc (loc, fn, 2, dest, src);
12987 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12988 LEN, and SIZE. */
12990 static tree
12991 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12992 tree dest, tree src, tree len, tree size)
12994 tree fn;
12995 const char *p;
12997 if (!validate_arg (dest, POINTER_TYPE)
12998 || !validate_arg (src, POINTER_TYPE)
12999 || !validate_arg (size, INTEGER_TYPE)
13000 || !validate_arg (size, INTEGER_TYPE))
13001 return NULL_TREE;
13003 p = c_getstr (src);
13004 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13005 if (p && *p == '\0')
13006 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13007 else if (integer_zerop (len))
13008 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13010 if (! host_integerp (size, 1))
13011 return NULL_TREE;
13013 if (! integer_all_onesp (size))
13015 tree src_len = c_strlen (src, 1);
13016 if (src_len
13017 && host_integerp (src_len, 1)
13018 && host_integerp (len, 1)
13019 && ! tree_int_cst_lt (len, src_len))
13021 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13022 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13023 if (!fn)
13024 return NULL_TREE;
13026 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13028 return NULL_TREE;
13031 /* If __builtin_strncat_chk is used, assume strncat is available. */
13032 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13033 if (!fn)
13034 return NULL_TREE;
13036 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13039 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13040 Return NULL_TREE if a normal call should be emitted rather than
13041 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13042 or BUILT_IN_VSPRINTF_CHK. */
13044 static tree
13045 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13046 enum built_in_function fcode)
13048 tree dest, size, len, fn, fmt, flag;
13049 const char *fmt_str;
13051 /* Verify the required arguments in the original call. */
13052 if (nargs < 4)
13053 return NULL_TREE;
13054 dest = args[0];
13055 if (!validate_arg (dest, POINTER_TYPE))
13056 return NULL_TREE;
13057 flag = args[1];
13058 if (!validate_arg (flag, INTEGER_TYPE))
13059 return NULL_TREE;
13060 size = args[2];
13061 if (!validate_arg (size, INTEGER_TYPE))
13062 return NULL_TREE;
13063 fmt = args[3];
13064 if (!validate_arg (fmt, POINTER_TYPE))
13065 return NULL_TREE;
13067 if (! host_integerp (size, 1))
13068 return NULL_TREE;
13070 len = NULL_TREE;
13072 if (!init_target_chars ())
13073 return NULL_TREE;
13075 /* Check whether the format is a literal string constant. */
13076 fmt_str = c_getstr (fmt);
13077 if (fmt_str != NULL)
13079 /* If the format doesn't contain % args or %%, we know the size. */
13080 if (strchr (fmt_str, target_percent) == 0)
13082 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13083 len = build_int_cstu (size_type_node, strlen (fmt_str));
13085 /* If the format is "%s" and first ... argument is a string literal,
13086 we know the size too. */
13087 else if (fcode == BUILT_IN_SPRINTF_CHK
13088 && strcmp (fmt_str, target_percent_s) == 0)
13090 tree arg;
13092 if (nargs == 5)
13094 arg = args[4];
13095 if (validate_arg (arg, POINTER_TYPE))
13097 len = c_strlen (arg, 1);
13098 if (! len || ! host_integerp (len, 1))
13099 len = NULL_TREE;
13105 if (! integer_all_onesp (size))
13107 if (! len || ! tree_int_cst_lt (len, size))
13108 return NULL_TREE;
13111 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13112 or if format doesn't contain % chars or is "%s". */
13113 if (! integer_zerop (flag))
13115 if (fmt_str == NULL)
13116 return NULL_TREE;
13117 if (strchr (fmt_str, target_percent) != NULL
13118 && strcmp (fmt_str, target_percent_s))
13119 return NULL_TREE;
13122 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13123 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13124 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13125 if (!fn)
13126 return NULL_TREE;
13128 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13131 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13132 a normal call should be emitted rather than expanding the function
13133 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13135 static tree
13136 fold_builtin_sprintf_chk (location_t loc, tree exp,
13137 enum built_in_function fcode)
13139 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13140 CALL_EXPR_ARGP (exp), fcode);
13143 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13144 NULL_TREE if a normal call should be emitted rather than expanding
13145 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13146 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13147 passed as second argument. */
13149 static tree
13150 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13151 tree maxlen, enum built_in_function fcode)
13153 tree dest, size, len, fn, fmt, flag;
13154 const char *fmt_str;
13156 /* Verify the required arguments in the original call. */
13157 if (nargs < 5)
13158 return NULL_TREE;
13159 dest = args[0];
13160 if (!validate_arg (dest, POINTER_TYPE))
13161 return NULL_TREE;
13162 len = args[1];
13163 if (!validate_arg (len, INTEGER_TYPE))
13164 return NULL_TREE;
13165 flag = args[2];
13166 if (!validate_arg (flag, INTEGER_TYPE))
13167 return NULL_TREE;
13168 size = args[3];
13169 if (!validate_arg (size, INTEGER_TYPE))
13170 return NULL_TREE;
13171 fmt = args[4];
13172 if (!validate_arg (fmt, POINTER_TYPE))
13173 return NULL_TREE;
13175 if (! host_integerp (size, 1))
13176 return NULL_TREE;
13178 if (! integer_all_onesp (size))
13180 if (! host_integerp (len, 1))
13182 /* If LEN is not constant, try MAXLEN too.
13183 For MAXLEN only allow optimizing into non-_ocs function
13184 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13185 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13186 return NULL_TREE;
13188 else
13189 maxlen = len;
13191 if (tree_int_cst_lt (size, maxlen))
13192 return NULL_TREE;
13195 if (!init_target_chars ())
13196 return NULL_TREE;
13198 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13199 or if format doesn't contain % chars or is "%s". */
13200 if (! integer_zerop (flag))
13202 fmt_str = c_getstr (fmt);
13203 if (fmt_str == NULL)
13204 return NULL_TREE;
13205 if (strchr (fmt_str, target_percent) != NULL
13206 && strcmp (fmt_str, target_percent_s))
13207 return NULL_TREE;
13210 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13211 available. */
13212 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13213 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13214 if (!fn)
13215 return NULL_TREE;
13217 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13220 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13221 a normal call should be emitted rather than expanding the function
13222 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13223 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13224 passed as second argument. */
13226 tree
13227 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13228 enum built_in_function fcode)
13230 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13231 CALL_EXPR_ARGP (exp), maxlen, fcode);
13234 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13235 FMT and ARG are the arguments to the call; we don't fold cases with
13236 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13238 Return NULL_TREE if no simplification was possible, otherwise return the
13239 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13240 code of the function to be simplified. */
13242 static tree
13243 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13244 tree arg, bool ignore,
13245 enum built_in_function fcode)
13247 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13248 const char *fmt_str = NULL;
13250 /* If the return value is used, don't do the transformation. */
13251 if (! ignore)
13252 return NULL_TREE;
13254 /* Verify the required arguments in the original call. */
13255 if (!validate_arg (fmt, POINTER_TYPE))
13256 return NULL_TREE;
13258 /* Check whether the format is a literal string constant. */
13259 fmt_str = c_getstr (fmt);
13260 if (fmt_str == NULL)
13261 return NULL_TREE;
13263 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13265 /* If we're using an unlocked function, assume the other
13266 unlocked functions exist explicitly. */
13267 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13268 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13270 else
13272 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13273 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13276 if (!init_target_chars ())
13277 return NULL_TREE;
13279 if (strcmp (fmt_str, target_percent_s) == 0
13280 || strchr (fmt_str, target_percent) == NULL)
13282 const char *str;
13284 if (strcmp (fmt_str, target_percent_s) == 0)
13286 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13287 return NULL_TREE;
13289 if (!arg || !validate_arg (arg, POINTER_TYPE))
13290 return NULL_TREE;
13292 str = c_getstr (arg);
13293 if (str == NULL)
13294 return NULL_TREE;
13296 else
13298 /* The format specifier doesn't contain any '%' characters. */
13299 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13300 && arg)
13301 return NULL_TREE;
13302 str = fmt_str;
13305 /* If the string was "", printf does nothing. */
13306 if (str[0] == '\0')
13307 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13309 /* If the string has length of 1, call putchar. */
13310 if (str[1] == '\0')
13312 /* Given printf("c"), (where c is any one character,)
13313 convert "c"[0] to an int and pass that to the replacement
13314 function. */
13315 newarg = build_int_cst (integer_type_node, str[0]);
13316 if (fn_putchar)
13317 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13319 else
13321 /* If the string was "string\n", call puts("string"). */
13322 size_t len = strlen (str);
13323 if ((unsigned char)str[len - 1] == target_newline
13324 && (size_t) (int) len == len
13325 && (int) len > 0)
13327 char *newstr;
13328 tree offset_node, string_cst;
13330 /* Create a NUL-terminated string that's one char shorter
13331 than the original, stripping off the trailing '\n'. */
13332 newarg = build_string_literal (len, str);
13333 string_cst = string_constant (newarg, &offset_node);
13334 gcc_checking_assert (string_cst
13335 && (TREE_STRING_LENGTH (string_cst)
13336 == (int) len)
13337 && integer_zerop (offset_node)
13338 && (unsigned char)
13339 TREE_STRING_POINTER (string_cst)[len - 1]
13340 == target_newline);
13341 /* build_string_literal creates a new STRING_CST,
13342 modify it in place to avoid double copying. */
13343 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13344 newstr[len - 1] = '\0';
13345 if (fn_puts)
13346 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13348 else
13349 /* We'd like to arrange to call fputs(string,stdout) here,
13350 but we need stdout and don't have a way to get it yet. */
13351 return NULL_TREE;
13355 /* The other optimizations can be done only on the non-va_list variants. */
13356 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13357 return NULL_TREE;
13359 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13360 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13362 if (!arg || !validate_arg (arg, POINTER_TYPE))
13363 return NULL_TREE;
13364 if (fn_puts)
13365 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13368 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13369 else if (strcmp (fmt_str, target_percent_c) == 0)
13371 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13372 return NULL_TREE;
13373 if (fn_putchar)
13374 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13377 if (!call)
13378 return NULL_TREE;
13380 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13383 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13384 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13385 more than 3 arguments, and ARG may be null in the 2-argument case.
13387 Return NULL_TREE if no simplification was possible, otherwise return the
13388 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13389 code of the function to be simplified. */
13391 static tree
13392 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13393 tree fmt, tree arg, bool ignore,
13394 enum built_in_function fcode)
13396 tree fn_fputc, fn_fputs, call = NULL_TREE;
13397 const char *fmt_str = NULL;
13399 /* If the return value is used, don't do the transformation. */
13400 if (! ignore)
13401 return NULL_TREE;
13403 /* Verify the required arguments in the original call. */
13404 if (!validate_arg (fp, POINTER_TYPE))
13405 return NULL_TREE;
13406 if (!validate_arg (fmt, POINTER_TYPE))
13407 return NULL_TREE;
13409 /* Check whether the format is a literal string constant. */
13410 fmt_str = c_getstr (fmt);
13411 if (fmt_str == NULL)
13412 return NULL_TREE;
13414 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13416 /* If we're using an unlocked function, assume the other
13417 unlocked functions exist explicitly. */
13418 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13419 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13421 else
13423 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13424 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13427 if (!init_target_chars ())
13428 return NULL_TREE;
13430 /* If the format doesn't contain % args or %%, use strcpy. */
13431 if (strchr (fmt_str, target_percent) == NULL)
13433 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13434 && arg)
13435 return NULL_TREE;
13437 /* If the format specifier was "", fprintf does nothing. */
13438 if (fmt_str[0] == '\0')
13440 /* If FP has side-effects, just wait until gimplification is
13441 done. */
13442 if (TREE_SIDE_EFFECTS (fp))
13443 return NULL_TREE;
13445 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13448 /* When "string" doesn't contain %, replace all cases of
13449 fprintf (fp, string) with fputs (string, fp). The fputs
13450 builtin will take care of special cases like length == 1. */
13451 if (fn_fputs)
13452 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13455 /* The other optimizations can be done only on the non-va_list variants. */
13456 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13457 return NULL_TREE;
13459 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13460 else if (strcmp (fmt_str, target_percent_s) == 0)
13462 if (!arg || !validate_arg (arg, POINTER_TYPE))
13463 return NULL_TREE;
13464 if (fn_fputs)
13465 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13468 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13469 else if (strcmp (fmt_str, target_percent_c) == 0)
13471 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13472 return NULL_TREE;
13473 if (fn_fputc)
13474 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13477 if (!call)
13478 return NULL_TREE;
13479 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13482 /* Initialize format string characters in the target charset. */
13484 static bool
13485 init_target_chars (void)
13487 static bool init;
13488 if (!init)
13490 target_newline = lang_hooks.to_target_charset ('\n');
13491 target_percent = lang_hooks.to_target_charset ('%');
13492 target_c = lang_hooks.to_target_charset ('c');
13493 target_s = lang_hooks.to_target_charset ('s');
13494 if (target_newline == 0 || target_percent == 0 || target_c == 0
13495 || target_s == 0)
13496 return false;
13498 target_percent_c[0] = target_percent;
13499 target_percent_c[1] = target_c;
13500 target_percent_c[2] = '\0';
13502 target_percent_s[0] = target_percent;
13503 target_percent_s[1] = target_s;
13504 target_percent_s[2] = '\0';
13506 target_percent_s_newline[0] = target_percent;
13507 target_percent_s_newline[1] = target_s;
13508 target_percent_s_newline[2] = target_newline;
13509 target_percent_s_newline[3] = '\0';
13511 init = true;
13513 return true;
13516 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13517 and no overflow/underflow occurred. INEXACT is true if M was not
13518 exactly calculated. TYPE is the tree type for the result. This
13519 function assumes that you cleared the MPFR flags and then
13520 calculated M to see if anything subsequently set a flag prior to
13521 entering this function. Return NULL_TREE if any checks fail. */
13523 static tree
13524 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13526 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13527 overflow/underflow occurred. If -frounding-math, proceed iff the
13528 result of calling FUNC was exact. */
13529 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13530 && (!flag_rounding_math || !inexact))
13532 REAL_VALUE_TYPE rr;
13534 real_from_mpfr (&rr, m, type, GMP_RNDN);
13535 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13536 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13537 but the mpft_t is not, then we underflowed in the
13538 conversion. */
13539 if (real_isfinite (&rr)
13540 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13542 REAL_VALUE_TYPE rmode;
13544 real_convert (&rmode, TYPE_MODE (type), &rr);
13545 /* Proceed iff the specified mode can hold the value. */
13546 if (real_identical (&rmode, &rr))
13547 return build_real (type, rmode);
13550 return NULL_TREE;
13553 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13554 number and no overflow/underflow occurred. INEXACT is true if M
13555 was not exactly calculated. TYPE is the tree type for the result.
13556 This function assumes that you cleared the MPFR flags and then
13557 calculated M to see if anything subsequently set a flag prior to
13558 entering this function. Return NULL_TREE if any checks fail, if
13559 FORCE_CONVERT is true, then bypass the checks. */
13561 static tree
13562 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13564 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13565 overflow/underflow occurred. If -frounding-math, proceed iff the
13566 result of calling FUNC was exact. */
13567 if (force_convert
13568 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13569 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13570 && (!flag_rounding_math || !inexact)))
13572 REAL_VALUE_TYPE re, im;
13574 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13575 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13576 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13577 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13578 but the mpft_t is not, then we underflowed in the
13579 conversion. */
13580 if (force_convert
13581 || (real_isfinite (&re) && real_isfinite (&im)
13582 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13583 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13585 REAL_VALUE_TYPE re_mode, im_mode;
13587 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13588 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13589 /* Proceed iff the specified mode can hold the value. */
13590 if (force_convert
13591 || (real_identical (&re_mode, &re)
13592 && real_identical (&im_mode, &im)))
13593 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13594 build_real (TREE_TYPE (type), im_mode));
13597 return NULL_TREE;
13600 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13601 FUNC on it and return the resulting value as a tree with type TYPE.
13602 If MIN and/or MAX are not NULL, then the supplied ARG must be
13603 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13604 acceptable values, otherwise they are not. The mpfr precision is
13605 set to the precision of TYPE. We assume that function FUNC returns
13606 zero if the result could be calculated exactly within the requested
13607 precision. */
13609 static tree
13610 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13611 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13612 bool inclusive)
13614 tree result = NULL_TREE;
13616 STRIP_NOPS (arg);
13618 /* To proceed, MPFR must exactly represent the target floating point
13619 format, which only happens when the target base equals two. */
13620 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13621 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13623 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13625 if (real_isfinite (ra)
13626 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13627 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13629 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13630 const int prec = fmt->p;
13631 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13632 int inexact;
13633 mpfr_t m;
13635 mpfr_init2 (m, prec);
13636 mpfr_from_real (m, ra, GMP_RNDN);
13637 mpfr_clear_flags ();
13638 inexact = func (m, m, rnd);
13639 result = do_mpfr_ckconv (m, type, inexact);
13640 mpfr_clear (m);
13644 return result;
13647 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13648 FUNC on it and return the resulting value as a tree with type TYPE.
13649 The mpfr precision is set to the precision of TYPE. We assume that
13650 function FUNC returns zero if the result could be calculated
13651 exactly within the requested precision. */
13653 static tree
13654 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13655 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13657 tree result = NULL_TREE;
13659 STRIP_NOPS (arg1);
13660 STRIP_NOPS (arg2);
13662 /* To proceed, MPFR must exactly represent the target floating point
13663 format, which only happens when the target base equals two. */
13664 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13665 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13666 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13668 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13669 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13671 if (real_isfinite (ra1) && real_isfinite (ra2))
13673 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13674 const int prec = fmt->p;
13675 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13676 int inexact;
13677 mpfr_t m1, m2;
13679 mpfr_inits2 (prec, m1, m2, NULL);
13680 mpfr_from_real (m1, ra1, GMP_RNDN);
13681 mpfr_from_real (m2, ra2, GMP_RNDN);
13682 mpfr_clear_flags ();
13683 inexact = func (m1, m1, m2, rnd);
13684 result = do_mpfr_ckconv (m1, type, inexact);
13685 mpfr_clears (m1, m2, NULL);
13689 return result;
13692 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13693 FUNC on it and return the resulting value as a tree with type TYPE.
13694 The mpfr precision is set to the precision of TYPE. We assume that
13695 function FUNC returns zero if the result could be calculated
13696 exactly within the requested precision. */
13698 static tree
13699 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13700 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13702 tree result = NULL_TREE;
13704 STRIP_NOPS (arg1);
13705 STRIP_NOPS (arg2);
13706 STRIP_NOPS (arg3);
13708 /* To proceed, MPFR must exactly represent the target floating point
13709 format, which only happens when the target base equals two. */
13710 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13711 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13712 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13713 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13715 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13716 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13717 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13719 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13721 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13722 const int prec = fmt->p;
13723 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13724 int inexact;
13725 mpfr_t m1, m2, m3;
13727 mpfr_inits2 (prec, m1, m2, m3, NULL);
13728 mpfr_from_real (m1, ra1, GMP_RNDN);
13729 mpfr_from_real (m2, ra2, GMP_RNDN);
13730 mpfr_from_real (m3, ra3, GMP_RNDN);
13731 mpfr_clear_flags ();
13732 inexact = func (m1, m1, m2, m3, rnd);
13733 result = do_mpfr_ckconv (m1, type, inexact);
13734 mpfr_clears (m1, m2, m3, NULL);
13738 return result;
13741 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13742 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13743 If ARG_SINP and ARG_COSP are NULL then the result is returned
13744 as a complex value.
13745 The type is taken from the type of ARG and is used for setting the
13746 precision of the calculation and results. */
13748 static tree
13749 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13751 tree const type = TREE_TYPE (arg);
13752 tree result = NULL_TREE;
13754 STRIP_NOPS (arg);
13756 /* To proceed, MPFR must exactly represent the target floating point
13757 format, which only happens when the target base equals two. */
13758 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13759 && TREE_CODE (arg) == REAL_CST
13760 && !TREE_OVERFLOW (arg))
13762 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13764 if (real_isfinite (ra))
13766 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13767 const int prec = fmt->p;
13768 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13769 tree result_s, result_c;
13770 int inexact;
13771 mpfr_t m, ms, mc;
13773 mpfr_inits2 (prec, m, ms, mc, NULL);
13774 mpfr_from_real (m, ra, GMP_RNDN);
13775 mpfr_clear_flags ();
13776 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13777 result_s = do_mpfr_ckconv (ms, type, inexact);
13778 result_c = do_mpfr_ckconv (mc, type, inexact);
13779 mpfr_clears (m, ms, mc, NULL);
13780 if (result_s && result_c)
13782 /* If we are to return in a complex value do so. */
13783 if (!arg_sinp && !arg_cosp)
13784 return build_complex (build_complex_type (type),
13785 result_c, result_s);
13787 /* Dereference the sin/cos pointer arguments. */
13788 arg_sinp = build_fold_indirect_ref (arg_sinp);
13789 arg_cosp = build_fold_indirect_ref (arg_cosp);
13790 /* Proceed if valid pointer type were passed in. */
13791 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13792 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13794 /* Set the values. */
13795 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13796 result_s);
13797 TREE_SIDE_EFFECTS (result_s) = 1;
13798 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13799 result_c);
13800 TREE_SIDE_EFFECTS (result_c) = 1;
13801 /* Combine the assignments into a compound expr. */
13802 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13803 result_s, result_c));
13808 return result;
13811 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13812 two-argument mpfr order N Bessel function FUNC on them and return
13813 the resulting value as a tree with type TYPE. The mpfr precision
13814 is set to the precision of TYPE. We assume that function FUNC
13815 returns zero if the result could be calculated exactly within the
13816 requested precision. */
13817 static tree
13818 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13819 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13820 const REAL_VALUE_TYPE *min, bool inclusive)
13822 tree result = NULL_TREE;
13824 STRIP_NOPS (arg1);
13825 STRIP_NOPS (arg2);
13827 /* To proceed, MPFR must exactly represent the target floating point
13828 format, which only happens when the target base equals two. */
13829 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13830 && host_integerp (arg1, 0)
13831 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13833 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13834 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13836 if (n == (long)n
13837 && real_isfinite (ra)
13838 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13840 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13841 const int prec = fmt->p;
13842 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13843 int inexact;
13844 mpfr_t m;
13846 mpfr_init2 (m, prec);
13847 mpfr_from_real (m, ra, GMP_RNDN);
13848 mpfr_clear_flags ();
13849 inexact = func (m, n, m, rnd);
13850 result = do_mpfr_ckconv (m, type, inexact);
13851 mpfr_clear (m);
13855 return result;
13858 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13859 the pointer *(ARG_QUO) and return the result. The type is taken
13860 from the type of ARG0 and is used for setting the precision of the
13861 calculation and results. */
13863 static tree
13864 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13866 tree const type = TREE_TYPE (arg0);
13867 tree result = NULL_TREE;
13869 STRIP_NOPS (arg0);
13870 STRIP_NOPS (arg1);
13872 /* To proceed, MPFR must exactly represent the target floating point
13873 format, which only happens when the target base equals two. */
13874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13875 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13876 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13878 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13879 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13881 if (real_isfinite (ra0) && real_isfinite (ra1))
13883 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13884 const int prec = fmt->p;
13885 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13886 tree result_rem;
13887 long integer_quo;
13888 mpfr_t m0, m1;
13890 mpfr_inits2 (prec, m0, m1, NULL);
13891 mpfr_from_real (m0, ra0, GMP_RNDN);
13892 mpfr_from_real (m1, ra1, GMP_RNDN);
13893 mpfr_clear_flags ();
13894 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13895 /* Remquo is independent of the rounding mode, so pass
13896 inexact=0 to do_mpfr_ckconv(). */
13897 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13898 mpfr_clears (m0, m1, NULL);
13899 if (result_rem)
13901 /* MPFR calculates quo in the host's long so it may
13902 return more bits in quo than the target int can hold
13903 if sizeof(host long) > sizeof(target int). This can
13904 happen even for native compilers in LP64 mode. In
13905 these cases, modulo the quo value with the largest
13906 number that the target int can hold while leaving one
13907 bit for the sign. */
13908 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13909 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13911 /* Dereference the quo pointer argument. */
13912 arg_quo = build_fold_indirect_ref (arg_quo);
13913 /* Proceed iff a valid pointer type was passed in. */
13914 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13916 /* Set the value. */
13917 tree result_quo
13918 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13919 build_int_cst (TREE_TYPE (arg_quo),
13920 integer_quo));
13921 TREE_SIDE_EFFECTS (result_quo) = 1;
13922 /* Combine the quo assignment with the rem. */
13923 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13924 result_quo, result_rem));
13929 return result;
13932 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13933 resulting value as a tree with type TYPE. The mpfr precision is
13934 set to the precision of TYPE. We assume that this mpfr function
13935 returns zero if the result could be calculated exactly within the
13936 requested precision. In addition, the integer pointer represented
13937 by ARG_SG will be dereferenced and set to the appropriate signgam
13938 (-1,1) value. */
13940 static tree
13941 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13943 tree result = NULL_TREE;
13945 STRIP_NOPS (arg);
13947 /* To proceed, MPFR must exactly represent the target floating point
13948 format, which only happens when the target base equals two. Also
13949 verify ARG is a constant and that ARG_SG is an int pointer. */
13950 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13951 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13952 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13953 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13955 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13957 /* In addition to NaN and Inf, the argument cannot be zero or a
13958 negative integer. */
13959 if (real_isfinite (ra)
13960 && ra->cl != rvc_zero
13961 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13963 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13964 const int prec = fmt->p;
13965 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13966 int inexact, sg;
13967 mpfr_t m;
13968 tree result_lg;
13970 mpfr_init2 (m, prec);
13971 mpfr_from_real (m, ra, GMP_RNDN);
13972 mpfr_clear_flags ();
13973 inexact = mpfr_lgamma (m, &sg, m, rnd);
13974 result_lg = do_mpfr_ckconv (m, type, inexact);
13975 mpfr_clear (m);
13976 if (result_lg)
13978 tree result_sg;
13980 /* Dereference the arg_sg pointer argument. */
13981 arg_sg = build_fold_indirect_ref (arg_sg);
13982 /* Assign the signgam value into *arg_sg. */
13983 result_sg = fold_build2 (MODIFY_EXPR,
13984 TREE_TYPE (arg_sg), arg_sg,
13985 build_int_cst (TREE_TYPE (arg_sg), sg));
13986 TREE_SIDE_EFFECTS (result_sg) = 1;
13987 /* Combine the signgam assignment with the lgamma result. */
13988 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13989 result_sg, result_lg));
13994 return result;
13997 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13998 function FUNC on it and return the resulting value as a tree with
13999 type TYPE. The mpfr precision is set to the precision of TYPE. We
14000 assume that function FUNC returns zero if the result could be
14001 calculated exactly within the requested precision. */
14003 static tree
14004 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14006 tree result = NULL_TREE;
14008 STRIP_NOPS (arg);
14010 /* To proceed, MPFR must exactly represent the target floating point
14011 format, which only happens when the target base equals two. */
14012 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14014 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14016 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14017 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14019 if (real_isfinite (re) && real_isfinite (im))
14021 const struct real_format *const fmt =
14022 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14023 const int prec = fmt->p;
14024 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14025 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14026 int inexact;
14027 mpc_t m;
14029 mpc_init2 (m, prec);
14030 mpfr_from_real (mpc_realref (m), re, rnd);
14031 mpfr_from_real (mpc_imagref (m), im, rnd);
14032 mpfr_clear_flags ();
14033 inexact = func (m, m, crnd);
14034 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14035 mpc_clear (m);
14039 return result;
14042 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14043 mpc function FUNC on it and return the resulting value as a tree
14044 with type TYPE. The mpfr precision is set to the precision of
14045 TYPE. We assume that function FUNC returns zero if the result
14046 could be calculated exactly within the requested precision. If
14047 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14048 in the arguments and/or results. */
14050 tree
14051 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14052 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14054 tree result = NULL_TREE;
14056 STRIP_NOPS (arg0);
14057 STRIP_NOPS (arg1);
14059 /* To proceed, MPFR must exactly represent the target floating point
14060 format, which only happens when the target base equals two. */
14061 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14062 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14063 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14065 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14067 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14068 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14069 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14070 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14072 if (do_nonfinite
14073 || (real_isfinite (re0) && real_isfinite (im0)
14074 && real_isfinite (re1) && real_isfinite (im1)))
14076 const struct real_format *const fmt =
14077 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14078 const int prec = fmt->p;
14079 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14080 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14081 int inexact;
14082 mpc_t m0, m1;
14084 mpc_init2 (m0, prec);
14085 mpc_init2 (m1, prec);
14086 mpfr_from_real (mpc_realref (m0), re0, rnd);
14087 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14088 mpfr_from_real (mpc_realref (m1), re1, rnd);
14089 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14090 mpfr_clear_flags ();
14091 inexact = func (m0, m0, m1, crnd);
14092 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14093 mpc_clear (m0);
14094 mpc_clear (m1);
14098 return result;
14101 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14102 a normal call should be emitted rather than expanding the function
14103 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14105 static tree
14106 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14108 int nargs = gimple_call_num_args (stmt);
14110 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14111 (nargs > 0
14112 ? gimple_call_arg_ptr (stmt, 0)
14113 : &error_mark_node), fcode);
14116 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14117 a normal call should be emitted rather than expanding the function
14118 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14119 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14120 passed as second argument. */
14122 tree
14123 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14124 enum built_in_function fcode)
14126 int nargs = gimple_call_num_args (stmt);
14128 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14129 (nargs > 0
14130 ? gimple_call_arg_ptr (stmt, 0)
14131 : &error_mark_node), maxlen, fcode);
14134 /* Builtins with folding operations that operate on "..." arguments
14135 need special handling; we need to store the arguments in a convenient
14136 data structure before attempting any folding. Fortunately there are
14137 only a few builtins that fall into this category. FNDECL is the
14138 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14139 result of the function call is ignored. */
14141 static tree
14142 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14143 bool ignore ATTRIBUTE_UNUSED)
14145 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14146 tree ret = NULL_TREE;
14148 switch (fcode)
14150 case BUILT_IN_SPRINTF_CHK:
14151 case BUILT_IN_VSPRINTF_CHK:
14152 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14153 break;
14155 case BUILT_IN_SNPRINTF_CHK:
14156 case BUILT_IN_VSNPRINTF_CHK:
14157 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14159 default:
14160 break;
14162 if (ret)
14164 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14165 TREE_NO_WARNING (ret) = 1;
14166 return ret;
14168 return NULL_TREE;
14171 /* A wrapper function for builtin folding that prevents warnings for
14172 "statement without effect" and the like, caused by removing the
14173 call node earlier than the warning is generated. */
14175 tree
14176 fold_call_stmt (gimple stmt, bool ignore)
14178 tree ret = NULL_TREE;
14179 tree fndecl = gimple_call_fndecl (stmt);
14180 location_t loc = gimple_location (stmt);
14181 if (fndecl
14182 && TREE_CODE (fndecl) == FUNCTION_DECL
14183 && DECL_BUILT_IN (fndecl)
14184 && !gimple_call_va_arg_pack_p (stmt))
14186 int nargs = gimple_call_num_args (stmt);
14187 tree *args = (nargs > 0
14188 ? gimple_call_arg_ptr (stmt, 0)
14189 : &error_mark_node);
14191 if (avoid_folding_inline_builtin (fndecl))
14192 return NULL_TREE;
14193 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14195 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14197 else
14199 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14200 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14201 if (!ret)
14202 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14203 if (ret)
14205 /* Propagate location information from original call to
14206 expansion of builtin. Otherwise things like
14207 maybe_emit_chk_warning, that operate on the expansion
14208 of a builtin, will use the wrong location information. */
14209 if (gimple_has_location (stmt))
14211 tree realret = ret;
14212 if (TREE_CODE (ret) == NOP_EXPR)
14213 realret = TREE_OPERAND (ret, 0);
14214 if (CAN_HAVE_LOCATION_P (realret)
14215 && !EXPR_HAS_LOCATION (realret))
14216 SET_EXPR_LOCATION (realret, loc);
14217 return realret;
14219 return ret;
14223 return NULL_TREE;
14226 /* Look up the function in builtin_decl that corresponds to DECL
14227 and set ASMSPEC as its user assembler name. DECL must be a
14228 function decl that declares a builtin. */
14230 void
14231 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14233 tree builtin;
14234 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14235 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14236 && asmspec != 0);
14238 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14239 set_user_assembler_name (builtin, asmspec);
14240 switch (DECL_FUNCTION_CODE (decl))
14242 case BUILT_IN_MEMCPY:
14243 init_block_move_fn (asmspec);
14244 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14245 break;
14246 case BUILT_IN_MEMSET:
14247 init_block_clear_fn (asmspec);
14248 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14249 break;
14250 case BUILT_IN_MEMMOVE:
14251 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14252 break;
14253 case BUILT_IN_MEMCMP:
14254 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14255 break;
14256 case BUILT_IN_ABORT:
14257 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14258 break;
14259 case BUILT_IN_FFS:
14260 if (INT_TYPE_SIZE < BITS_PER_WORD)
14262 set_user_assembler_libfunc ("ffs", asmspec);
14263 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14264 MODE_INT, 0), "ffs");
14266 break;
14267 default:
14268 break;
14272 /* Return true if DECL is a builtin that expands to a constant or similarly
14273 simple code. */
14274 bool
14275 is_simple_builtin (tree decl)
14277 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14278 switch (DECL_FUNCTION_CODE (decl))
14280 /* Builtins that expand to constants. */
14281 case BUILT_IN_CONSTANT_P:
14282 case BUILT_IN_EXPECT:
14283 case BUILT_IN_OBJECT_SIZE:
14284 case BUILT_IN_UNREACHABLE:
14285 /* Simple register moves or loads from stack. */
14286 case BUILT_IN_ASSUME_ALIGNED:
14287 case BUILT_IN_RETURN_ADDRESS:
14288 case BUILT_IN_EXTRACT_RETURN_ADDR:
14289 case BUILT_IN_FROB_RETURN_ADDR:
14290 case BUILT_IN_RETURN:
14291 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14292 case BUILT_IN_FRAME_ADDRESS:
14293 case BUILT_IN_VA_END:
14294 case BUILT_IN_STACK_SAVE:
14295 case BUILT_IN_STACK_RESTORE:
14296 /* Exception state returns or moves registers around. */
14297 case BUILT_IN_EH_FILTER:
14298 case BUILT_IN_EH_POINTER:
14299 case BUILT_IN_EH_COPY_VALUES:
14300 return true;
14302 default:
14303 return false;
14306 return false;
14309 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14310 most probably expanded inline into reasonably simple code. This is a
14311 superset of is_simple_builtin. */
14312 bool
14313 is_inexpensive_builtin (tree decl)
14315 if (!decl)
14316 return false;
14317 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14318 return true;
14319 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14320 switch (DECL_FUNCTION_CODE (decl))
14322 case BUILT_IN_ABS:
14323 case BUILT_IN_ALLOCA:
14324 case BUILT_IN_ALLOCA_WITH_ALIGN:
14325 case BUILT_IN_BSWAP16:
14326 case BUILT_IN_BSWAP32:
14327 case BUILT_IN_BSWAP64:
14328 case BUILT_IN_CLZ:
14329 case BUILT_IN_CLZIMAX:
14330 case BUILT_IN_CLZL:
14331 case BUILT_IN_CLZLL:
14332 case BUILT_IN_CTZ:
14333 case BUILT_IN_CTZIMAX:
14334 case BUILT_IN_CTZL:
14335 case BUILT_IN_CTZLL:
14336 case BUILT_IN_FFS:
14337 case BUILT_IN_FFSIMAX:
14338 case BUILT_IN_FFSL:
14339 case BUILT_IN_FFSLL:
14340 case BUILT_IN_IMAXABS:
14341 case BUILT_IN_FINITE:
14342 case BUILT_IN_FINITEF:
14343 case BUILT_IN_FINITEL:
14344 case BUILT_IN_FINITED32:
14345 case BUILT_IN_FINITED64:
14346 case BUILT_IN_FINITED128:
14347 case BUILT_IN_FPCLASSIFY:
14348 case BUILT_IN_ISFINITE:
14349 case BUILT_IN_ISINF_SIGN:
14350 case BUILT_IN_ISINF:
14351 case BUILT_IN_ISINFF:
14352 case BUILT_IN_ISINFL:
14353 case BUILT_IN_ISINFD32:
14354 case BUILT_IN_ISINFD64:
14355 case BUILT_IN_ISINFD128:
14356 case BUILT_IN_ISNAN:
14357 case BUILT_IN_ISNANF:
14358 case BUILT_IN_ISNANL:
14359 case BUILT_IN_ISNAND32:
14360 case BUILT_IN_ISNAND64:
14361 case BUILT_IN_ISNAND128:
14362 case BUILT_IN_ISNORMAL:
14363 case BUILT_IN_ISGREATER:
14364 case BUILT_IN_ISGREATEREQUAL:
14365 case BUILT_IN_ISLESS:
14366 case BUILT_IN_ISLESSEQUAL:
14367 case BUILT_IN_ISLESSGREATER:
14368 case BUILT_IN_ISUNORDERED:
14369 case BUILT_IN_VA_ARG_PACK:
14370 case BUILT_IN_VA_ARG_PACK_LEN:
14371 case BUILT_IN_VA_COPY:
14372 case BUILT_IN_TRAP:
14373 case BUILT_IN_SAVEREGS:
14374 case BUILT_IN_POPCOUNTL:
14375 case BUILT_IN_POPCOUNTLL:
14376 case BUILT_IN_POPCOUNTIMAX:
14377 case BUILT_IN_POPCOUNT:
14378 case BUILT_IN_PARITYL:
14379 case BUILT_IN_PARITYLL:
14380 case BUILT_IN_PARITYIMAX:
14381 case BUILT_IN_PARITY:
14382 case BUILT_IN_LABS:
14383 case BUILT_IN_LLABS:
14384 case BUILT_IN_PREFETCH:
14385 return true;
14387 default:
14388 return is_simple_builtin (decl);
14391 return false;