cgraph.c (cgraph_turn_edge_to_speculative): Fix debug output.
[official-gcc.git] / gcc / builtins.c
blobd8baad15e8efb2e4eeba8f5a438ed248a4e7bd1d
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-mudflap.h"
47 #include "tree-flow.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
53 #ifndef PAD_VARARGS_DOWN
54 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
55 #endif
56 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
58 struct target_builtins default_target_builtins;
59 #if SWITCHABLE_TARGET
60 struct target_builtins *this_target_builtins = &default_target_builtins;
61 #endif
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 builtin_info_type builtin_info;
78 /* Non-zero if __builtin_constant_p should be folded right away. */
79 bool force_folding_builtin_constant_p;
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strcmp (tree, rtx);
114 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
115 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
116 static rtx expand_builtin_memcpy (tree, rtx);
117 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
119 enum machine_mode, int);
120 static rtx expand_builtin_strcpy (tree, rtx);
121 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
122 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strncpy (tree, rtx);
124 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
125 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
127 static rtx expand_builtin_bzero (tree);
128 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, bool);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static tree stabilize_va_list_loc (location_t, tree, int);
133 static rtx expand_builtin_expect (tree, rtx);
134 static tree fold_builtin_constant_p (tree);
135 static tree fold_builtin_expect (location_t, tree, tree);
136 static tree fold_builtin_classify_type (tree);
137 static tree fold_builtin_strlen (location_t, tree, tree);
138 static tree fold_builtin_inf (location_t, tree, int);
139 static tree fold_builtin_nan (tree, tree, int);
140 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
141 static bool validate_arg (const_tree, enum tree_code code);
142 static bool integer_valued_real_p (tree);
143 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
144 static bool readonly_data_expr (tree);
145 static rtx expand_builtin_fabs (tree, rtx, rtx);
146 static rtx expand_builtin_signbit (tree, rtx);
147 static tree fold_builtin_sqrt (location_t, tree, tree);
148 static tree fold_builtin_cbrt (location_t, tree, tree);
149 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_cos (location_t, tree, tree, tree);
152 static tree fold_builtin_cosh (location_t, tree, tree, tree);
153 static tree fold_builtin_tan (tree, tree);
154 static tree fold_builtin_trunc (location_t, tree, tree);
155 static tree fold_builtin_floor (location_t, tree, tree);
156 static tree fold_builtin_ceil (location_t, tree, tree);
157 static tree fold_builtin_round (location_t, tree, tree);
158 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
159 static tree fold_builtin_bitop (tree, tree);
160 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
161 static tree fold_builtin_strchr (location_t, tree, tree, tree);
162 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
164 static tree fold_builtin_strcmp (location_t, tree, tree);
165 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
166 static tree fold_builtin_signbit (location_t, tree, tree);
167 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
168 static tree fold_builtin_isascii (location_t, tree);
169 static tree fold_builtin_toascii (location_t, tree);
170 static tree fold_builtin_isdigit (location_t, tree);
171 static tree fold_builtin_fabs (location_t, tree, tree);
172 static tree fold_builtin_abs (location_t, tree, tree);
173 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 enum tree_code);
175 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
176 static tree fold_builtin_0 (location_t, tree, bool);
177 static tree fold_builtin_1 (location_t, tree, tree, bool);
178 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
179 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
180 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
181 static tree fold_builtin_varargs (location_t, tree, tree, bool);
183 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
184 static tree fold_builtin_strstr (location_t, tree, tree, tree);
185 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
186 static tree fold_builtin_strcat (location_t, tree, tree);
187 static tree fold_builtin_strncat (location_t, tree, tree, tree);
188 static tree fold_builtin_strspn (location_t, tree, tree);
189 static tree fold_builtin_strcspn (location_t, tree, tree);
190 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
191 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME begins with one of the reserved built-in
   prefixes: "__builtin_", "__sync_" or "__atomic_".  Such names are
   expanded specially regardless of optimization level.  */

static bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };
  size_t i;

  for (i = 0; i < sizeof (prefixes) / sizeof (prefixes[0]); i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 /* By default we assume that c99 functions are present at the runtime,
253 but sincos is not. */
254 bool
255 default_libc_has_function (enum function_class fn_class)
257 if (fn_class == function_c94
258 || fn_class == function_c99_misc
259 || fn_class == function_c99_math_complex)
260 return true;
262 return false;
265 bool
266 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
268 return true;
271 bool
272 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
274 return false;
277 /* Return true if NODE should be considered for inline expansion regardless
278 of the optimization level. This means whenever a function is invoked with
279 its "internal" name, which normally contains the prefix "__builtin". */
281 static bool
282 called_as_built_in (tree node)
284 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
285 we want the name used to call the function, not the name it
286 will have. */
287 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
288 return is_builtin_name (name);
291 /* Compute values M and N such that M divides (address of EXP - N) and such
292 that N < M. If these numbers can be determined, store M in alignp and N in
293 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
294 *alignp and any bit-offset to *bitposp.
296 Note that the address (and thus the alignment) computed here is based
297 on the address to which a symbol resolves, whereas DECL_ALIGN is based
298 on the address at which an object is actually located. These two
299 addresses are not always the same. For example, on ARM targets,
300 the address &foo of a Thumb function foo() has the lowest bit set,
301 whereas foo() itself starts on an even address.
303 If ADDR_P is true we are taking the address of the memory reference EXP
304 and thus cannot rely on the access taking place. */
306 static bool
307 get_object_alignment_2 (tree exp, unsigned int *alignp,
308 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
310 HOST_WIDE_INT bitsize, bitpos;
311 tree offset;
312 enum machine_mode mode;
313 int unsignedp, volatilep;
314 unsigned int inner, align = BITS_PER_UNIT;
315 bool known_alignment = false;
317 /* Get the innermost object and the constant (bitpos) and possibly
318 variable (offset) offset of the access. */
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
322 /* Extract alignment information from the innermost object and
323 possibly adjust bitpos and offset. */
324 if (TREE_CODE (exp) == FUNCTION_DECL)
326 /* Function addresses can encode extra information besides their
327 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
328 allows the low bit to be used as a virtual bit, we know
329 that the address itself must be at least 2-byte aligned. */
330 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
331 align = 2 * BITS_PER_UNIT;
333 else if (TREE_CODE (exp) == LABEL_DECL)
335 else if (TREE_CODE (exp) == CONST_DECL)
337 /* The alignment of a CONST_DECL is determined by its initializer. */
338 exp = DECL_INITIAL (exp);
339 align = TYPE_ALIGN (TREE_TYPE (exp));
340 #ifdef CONSTANT_ALIGNMENT
341 if (CONSTANT_CLASS_P (exp))
342 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
343 #endif
344 known_alignment = true;
346 else if (DECL_P (exp))
348 align = DECL_ALIGN (exp);
349 known_alignment = true;
351 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
353 align = TYPE_ALIGN (TREE_TYPE (exp));
355 else if (TREE_CODE (exp) == INDIRECT_REF
356 || TREE_CODE (exp) == MEM_REF
357 || TREE_CODE (exp) == TARGET_MEM_REF)
359 tree addr = TREE_OPERAND (exp, 0);
360 unsigned ptr_align;
361 unsigned HOST_WIDE_INT ptr_bitpos;
363 if (TREE_CODE (addr) == BIT_AND_EXPR
364 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
366 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
367 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
368 align *= BITS_PER_UNIT;
369 addr = TREE_OPERAND (addr, 0);
372 known_alignment
373 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
374 align = MAX (ptr_align, align);
376 /* The alignment of the pointer operand in a TARGET_MEM_REF
377 has to take the variable offset parts into account. */
378 if (TREE_CODE (exp) == TARGET_MEM_REF)
380 if (TMR_INDEX (exp))
382 unsigned HOST_WIDE_INT step = 1;
383 if (TMR_STEP (exp))
384 step = TREE_INT_CST_LOW (TMR_STEP (exp));
385 align = MIN (align, (step & -step) * BITS_PER_UNIT);
387 if (TMR_INDEX2 (exp))
388 align = BITS_PER_UNIT;
389 known_alignment = false;
392 /* When EXP is an actual memory reference then we can use
393 TYPE_ALIGN of a pointer indirection to derive alignment.
394 Do so only if get_pointer_alignment_1 did not reveal absolute
395 alignment knowledge and if using that alignment would
396 improve the situation. */
397 if (!addr_p && !known_alignment
398 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
399 align = TYPE_ALIGN (TREE_TYPE (exp));
400 else
402 /* Else adjust bitpos accordingly. */
403 bitpos += ptr_bitpos;
404 if (TREE_CODE (exp) == MEM_REF
405 || TREE_CODE (exp) == TARGET_MEM_REF)
406 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
409 else if (TREE_CODE (exp) == STRING_CST)
411 /* STRING_CST are the only constant objects we allow to be not
412 wrapped inside a CONST_DECL. */
413 align = TYPE_ALIGN (TREE_TYPE (exp));
414 #ifdef CONSTANT_ALIGNMENT
415 if (CONSTANT_CLASS_P (exp))
416 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
417 #endif
418 known_alignment = true;
421 /* If there is a non-constant offset part extract the maximum
422 alignment that can prevail. */
423 inner = ~0U;
424 while (offset)
426 tree next_offset;
428 if (TREE_CODE (offset) == PLUS_EXPR)
430 next_offset = TREE_OPERAND (offset, 0);
431 offset = TREE_OPERAND (offset, 1);
433 else
434 next_offset = NULL;
435 if (host_integerp (offset, 1))
437 /* Any overflow in calculating offset_bits won't change
438 the alignment. */
439 unsigned offset_bits
440 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
442 if (offset_bits)
443 inner = MIN (inner, (offset_bits & -offset_bits));
445 else if (TREE_CODE (offset) == MULT_EXPR
446 && host_integerp (TREE_OPERAND (offset, 1), 1))
448 /* Any overflow in calculating offset_factor won't change
449 the alignment. */
450 unsigned offset_factor
451 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
452 * BITS_PER_UNIT);
454 if (offset_factor)
455 inner = MIN (inner, (offset_factor & -offset_factor));
457 else
459 inner = MIN (inner, BITS_PER_UNIT);
460 break;
462 offset = next_offset;
464 /* Alignment is innermost object alignment adjusted by the constant
465 and non-constant offset parts. */
466 align = MIN (align, inner);
468 *alignp = align;
469 *bitposp = bitpos & (*alignp - 1);
470 return known_alignment;
473 /* For a memory reference expression EXP compute values M and N such that M
474 divides (&EXP - N) and such that N < M. If these numbers can be determined,
475 store M in alignp and N in *BITPOSP and return true. Otherwise return false
476 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
478 bool
479 get_object_alignment_1 (tree exp, unsigned int *alignp,
480 unsigned HOST_WIDE_INT *bitposp)
482 return get_object_alignment_2 (exp, alignp, bitposp, false);
485 /* Return the alignment in bits of EXP, an object. */
487 unsigned int
488 get_object_alignment (tree exp)
490 unsigned HOST_WIDE_INT bitpos = 0;
491 unsigned int align;
493 get_object_alignment_1 (exp, &align, &bitpos);
495 /* align and bitpos now specify known low bits of the pointer.
496 ptr & (align - 1) == bitpos. */
498 if (bitpos != 0)
499 align = (bitpos & -bitpos);
500 return align;
503 /* For a pointer valued expression EXP compute values M and N such that M
504 divides (EXP - N) and such that N < M. If these numbers can be determined,
505 store M in alignp and N in *BITPOSP and return true. Return false if
506 the results are just a conservative approximation.
508 If EXP is not a pointer, false is returned too. */
510 bool
511 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
512 unsigned HOST_WIDE_INT *bitposp)
514 STRIP_NOPS (exp);
516 if (TREE_CODE (exp) == ADDR_EXPR)
517 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
518 alignp, bitposp, true);
519 else if (TREE_CODE (exp) == SSA_NAME
520 && POINTER_TYPE_P (TREE_TYPE (exp)))
522 unsigned int ptr_align, ptr_misalign;
523 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
525 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
527 *bitposp = ptr_misalign * BITS_PER_UNIT;
528 *alignp = ptr_align * BITS_PER_UNIT;
529 /* We cannot really tell whether this result is an approximation. */
530 return true;
532 else
534 *bitposp = 0;
535 *alignp = BITS_PER_UNIT;
536 return false;
539 else if (TREE_CODE (exp) == INTEGER_CST)
541 *alignp = BIGGEST_ALIGNMENT;
542 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
543 & (BIGGEST_ALIGNMENT - 1));
544 return true;
547 *bitposp = 0;
548 *alignp = BITS_PER_UNIT;
549 return false;
552 /* Return the alignment in bits of EXP, a pointer valued expression.
553 The alignment returned is, by default, the alignment of the thing that
554 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
556 Otherwise, look at the expression to see if we can do better, i.e., if the
557 expression is actually pointing at an object whose alignment is tighter. */
559 unsigned int
560 get_pointer_alignment (tree exp)
562 unsigned HOST_WIDE_INT bitpos = 0;
563 unsigned int align;
565 get_pointer_alignment_1 (exp, &align, &bitpos);
567 /* align and bitpos now specify known low bits of the pointer.
568 ptr & (align - 1) == bitpos. */
570 if (bitpos != 0)
571 align = (bitpos & -bitpos);
573 return align;
576 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
577 way, because it could contain a zero byte in the middle.
578 TREE_STRING_LENGTH is the size of the character array, not the string.
580 ONLY_VALUE should be nonzero if the result is not going to be emitted
581 into the instruction stream and zero if it is going to be expanded.
582 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
583 is returned, otherwise NULL, since
584 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
585 evaluate the side-effects.
587 The value returned is of type `ssizetype'.
589 Unfortunately, string_constant can't access the values of const char
590 arrays with initializers, so neither can we do so here. */
592 tree
593 c_strlen (tree src, int only_value)
595 tree offset_node;
596 HOST_WIDE_INT offset;
597 int max;
598 const char *ptr;
599 location_t loc;
601 STRIP_NOPS (src);
602 if (TREE_CODE (src) == COND_EXPR
603 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
605 tree len1, len2;
607 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
608 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
609 if (tree_int_cst_equal (len1, len2))
610 return len1;
613 if (TREE_CODE (src) == COMPOUND_EXPR
614 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
615 return c_strlen (TREE_OPERAND (src, 1), only_value);
617 loc = EXPR_LOC_OR_HERE (src);
619 src = string_constant (src, &offset_node);
620 if (src == 0)
621 return NULL_TREE;
623 max = TREE_STRING_LENGTH (src) - 1;
624 ptr = TREE_STRING_POINTER (src);
626 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
628 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
629 compute the offset to the following null if we don't know where to
630 start searching for it. */
631 int i;
633 for (i = 0; i < max; i++)
634 if (ptr[i] == 0)
635 return NULL_TREE;
637 /* We don't know the starting offset, but we do know that the string
638 has no internal zero bytes. We can assume that the offset falls
639 within the bounds of the string; otherwise, the programmer deserves
640 what he gets. Subtract the offset from the length of the string,
641 and return that. This would perhaps not be valid if we were dealing
642 with named arrays in addition to literal string constants. */
644 return size_diffop_loc (loc, size_int (max), offset_node);
647 /* We have a known offset into the string. Start searching there for
648 a null character if we can represent it as a single HOST_WIDE_INT. */
649 if (offset_node == 0)
650 offset = 0;
651 else if (! host_integerp (offset_node, 0))
652 offset = -1;
653 else
654 offset = tree_low_cst (offset_node, 0);
656 /* If the offset is known to be out of bounds, warn, and call strlen at
657 runtime. */
658 if (offset < 0 || offset > max)
660 /* Suppress multiple warnings for propagated constant strings. */
661 if (! TREE_NO_WARNING (src))
663 warning_at (loc, 0, "offset outside bounds of constant string");
664 TREE_NO_WARNING (src) = 1;
666 return NULL_TREE;
669 /* Use strlen to search for the first zero byte. Since any strings
670 constructed with build_string will have nulls appended, we win even
671 if we get handed something like (char[4])"abcd".
673 Since OFFSET is our starting index into the string, no further
674 calculation is needed. */
675 return ssize_int (strlen (ptr + offset));
678 /* Return a char pointer for a C string if it is a string constant
679 or sum of string constant and integer constant. */
681 static const char *
682 c_getstr (tree src)
684 tree offset_node;
686 src = string_constant (src, &offset_node);
687 if (src == 0)
688 return 0;
690 if (offset_node == 0)
691 return TREE_STRING_POINTER (src);
692 else if (!host_integerp (offset_node, 1)
693 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
694 return 0;
696 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
699 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
700 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
702 static rtx
703 c_readstr (const char *str, enum machine_mode mode)
705 HOST_WIDE_INT c[2];
706 HOST_WIDE_INT ch;
707 unsigned int i, j;
709 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
711 c[0] = 0;
712 c[1] = 0;
713 ch = 1;
714 for (i = 0; i < GET_MODE_SIZE (mode); i++)
716 j = i;
717 if (WORDS_BIG_ENDIAN)
718 j = GET_MODE_SIZE (mode) - i - 1;
719 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
720 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
721 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
722 j *= BITS_PER_UNIT;
723 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
725 if (ch)
726 ch = (unsigned char) str[i];
727 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
729 return immed_double_const (c[0], c[1], mode);
732 /* Cast a target constant CST to target CHAR and if that value fits into
733 host char type, return zero and put that value into variable pointed to by
734 P. */
736 static int
737 target_char_cast (tree cst, char *p)
739 unsigned HOST_WIDE_INT val, hostval;
741 if (TREE_CODE (cst) != INTEGER_CST
742 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
743 return 1;
745 val = TREE_INT_CST_LOW (cst);
746 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
747 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
749 hostval = val;
750 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
751 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
753 if (val != hostval)
754 return 1;
756 *p = hostval;
757 return 0;
760 /* Similar to save_expr, but assumes that arbitrary code is not executed
761 in between the multiple evaluations. In particular, we assume that a
762 non-addressable local variable will not be modified. */
764 static tree
765 builtin_save_expr (tree exp)
767 if (TREE_CODE (exp) == SSA_NAME
768 || (TREE_ADDRESSABLE (exp) == 0
769 && (TREE_CODE (exp) == PARM_DECL
770 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
771 return exp;
773 return save_expr (exp);
776 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
777 times to get the address of either a higher stack frame, or a return
778 address located within it (depending on FNDECL_CODE). */
780 static rtx
781 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
783 int i;
785 #ifdef INITIAL_FRAME_ADDRESS_RTX
786 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
787 #else
788 rtx tem;
790 /* For a zero count with __builtin_return_address, we don't care what
791 frame address we return, because target-specific definitions will
792 override us. Therefore frame pointer elimination is OK, and using
793 the soft frame pointer is OK.
795 For a nonzero count, or a zero count with __builtin_frame_address,
796 we require a stable offset from the current frame pointer to the
797 previous one, so we must use the hard frame pointer, and
798 we must disable frame pointer elimination. */
799 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
800 tem = frame_pointer_rtx;
801 else
803 tem = hard_frame_pointer_rtx;
805 /* Tell reload not to eliminate the frame pointer. */
806 crtl->accesses_prior_frames = 1;
808 #endif
810 /* Some machines need special handling before we can access
811 arbitrary frames. For example, on the SPARC, we must first flush
812 all register windows to the stack. */
813 #ifdef SETUP_FRAME_ADDRESSES
814 if (count > 0)
815 SETUP_FRAME_ADDRESSES ();
816 #endif
818 /* On the SPARC, the return address is not in the frame, it is in a
819 register. There is no way to access it off of the current frame
820 pointer, but it can be accessed off the previous frame pointer by
821 reading the value from the register window save area. */
822 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
823 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
824 count--;
825 #endif
827 /* Scan back COUNT frames to the specified frame. */
828 for (i = 0; i < count; i++)
830 /* Assume the dynamic chain pointer is in the word that the
831 frame address points to, unless otherwise specified. */
832 #ifdef DYNAMIC_CHAIN_ADDRESS
833 tem = DYNAMIC_CHAIN_ADDRESS (tem);
834 #endif
835 tem = memory_address (Pmode, tem);
836 tem = gen_frame_mem (Pmode, tem);
837 tem = copy_to_reg (tem);
840 /* For __builtin_frame_address, return what we've got. But, on
841 the SPARC for example, we may have to add a bias. */
842 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
843 #ifdef FRAME_ADDR_RTX
844 return FRAME_ADDR_RTX (tem);
845 #else
846 return tem;
847 #endif
849 /* For __builtin_return_address, get the return address from that frame. */
850 #ifdef RETURN_ADDR_RTX
851 tem = RETURN_ADDR_RTX (count, tem);
852 #else
853 tem = memory_address (Pmode,
854 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
855 tem = gen_frame_mem (Pmode, tem);
856 #endif
857 return tem;
860 /* Alias set used for setjmp buffer. */
861 static alias_set_type setjmp_alias_set = -1;
863 /* Construct the leading half of a __builtin_setjmp call. Control will
864 return to RECEIVER_LABEL. This is also called directly by the SJLJ
865 exception handling code. */
867 void
868 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
870 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
871 rtx stack_save;
872 rtx mem;
874 if (setjmp_alias_set == -1)
875 setjmp_alias_set = new_alias_set ();
877 buf_addr = convert_memory_address (Pmode, buf_addr);
879 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
881 /* We store the frame pointer and the address of receiver_label in
882 the buffer and use the rest of it for the stack save area, which
883 is machine-dependent. */
885 mem = gen_rtx_MEM (Pmode, buf_addr);
886 set_mem_alias_set (mem, setjmp_alias_set);
887 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
889 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
890 GET_MODE_SIZE (Pmode))),
891 set_mem_alias_set (mem, setjmp_alias_set);
893 emit_move_insn (validize_mem (mem),
894 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
896 stack_save = gen_rtx_MEM (sa_mode,
897 plus_constant (Pmode, buf_addr,
898 2 * GET_MODE_SIZE (Pmode)));
899 set_mem_alias_set (stack_save, setjmp_alias_set);
900 emit_stack_save (SAVE_NONLOCAL, &stack_save);
902 /* If there is further processing to do, do it. */
903 #ifdef HAVE_builtin_setjmp_setup
904 if (HAVE_builtin_setjmp_setup)
905 emit_insn (gen_builtin_setjmp_setup (buf_addr));
906 #endif
908 /* We have a nonlocal label. */
909 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.

   Emits the fixup code that runs at the landing point: re-establishes the
   frame pointer, optionally the argument pointer, then emits the target's
   receiver pattern (if any) and a scheduling blockage.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (fixed_regs[ARG_POINTER_REGNUM])
	{
#ifdef ELIMINABLE_REGS
	  /* If the argument pointer can be eliminated in favor of the
	     frame pointer, we don't need to restore it.  We assume here
	     that if such an elimination is present, it can always be used.
	     This is the case on all known machines; if we don't make this
	     assumption, we do unnecessary saving on many machines.  */
	  size_t i;
	  static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

	  for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	    if (elim_regs[i].from == ARG_POINTER_REGNUM
		&& elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	      break;

	  if (i == ARRAY_SIZE (elim_regs))
#endif
	    {
	      /* Now restore our arg pointer from the address at which it
		 was saved in our stack frame.  */
	      emit_move_insn (crtl->args.internal_arg_pointer,
			      copy_to_reg (get_arg_pointer_save_area ()));
	    }
	}
#endif
    }

  /* Prefer the setjmp-specific receiver pattern when there is a label;
     otherwise fall back to the generic nonlocal-goto receiver, if any.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the address of the jmp_buf written by
   expand_builtin_setjmp_setup: word 0 = frame pointer, word 1 = receiver
   label, rest = stack save area.  VALUE must be const1_rtx (asserted),
   because that is what builtin_setjmp returns on the longjmp path.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* LAST predates everything emitted above, so the scan must find a
	 jump (or call) before reaching it.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR; its two arguments must both be POINTER_TYPE or we
   refuse to expand (return NULL_RTX).  The save area holds the saved frame
   pointer at offset 0 and the saved stack pointer at the next Pmode word.
   Always returns const0_rtx on success.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1166 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1167 (not all will be used on all machines) that was passed to __builtin_setjmp.
1168 It updates the stack pointer in that block to correspond to the current
1169 stack pointer. */
1171 static void
1172 expand_builtin_update_setjmp_buf (rtx buf_addr)
1174 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1175 rtx stack_save
1176 = gen_rtx_MEM (sa_mode,
1177 memory_address
1178 (sa_mode,
1179 plus_constant (Pmode, buf_addr,
1180 2 * GET_MODE_SIZE (Pmode))));
1182 emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.

   EXP is the CALL_EXPR.  Argument 0 is the address; argument 1 (optional,
   default 0) is the read/write flag and must be the constant 0 or 1;
   argument 2 (optional, default 3) is the locality hint and must be a
   constant in [0, 3].  Invalid constants produce a warning and are
   replaced by zero; non-constants produce an error.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.

   The returned MEM is in BLKmode with alias set 0 (may alias anything);
   memory attributes are derived from a synthesized char-array MEM_REF
   over EXP so that alignment/attribute information survives.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Rebuild the MEM_REF over the base object with an unbounded
	 (NULL upper bound) range, since the exact extent is unknown.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* Shorthands for the per-target arrays recording, for each hard register,
   the machine mode in which it is saved/restored for __builtin_apply_args
   and __builtin_apply.  VOIDmode marks a register that does not
   participate (see apply_args_size / apply_result_size, which fill
   these in).  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1327 /* Return the size required for the block returned by __builtin_apply_args,
1328 and initialize apply_args_mode. */
1330 static int
1331 apply_args_size (void)
1333 static int size = -1;
1334 int align;
1335 unsigned int regno;
1336 enum machine_mode mode;
1338 /* The values computed by this function never change. */
1339 if (size < 0)
1341 /* The first value is the incoming arg-pointer. */
1342 size = GET_MODE_SIZE (Pmode);
1344 /* The second value is the structure value address unless this is
1345 passed as an "invisible" first argument. */
1346 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1347 size += GET_MODE_SIZE (Pmode);
1349 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1350 if (FUNCTION_ARG_REGNO_P (regno))
1352 mode = targetm.calls.get_raw_arg_mode (regno);
1354 gcc_assert (mode != VOIDmode);
1356 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1357 if (size % align != 0)
1358 size = CEIL (size, align) * align;
1359 size += GET_MODE_SIZE (mode);
1360 apply_args_mode[regno] = mode;
1362 else
1364 apply_args_mode[regno] = VOIDmode;
1367 return size;
1370 /* Return the size required for the block returned by __builtin_apply,
1371 and initialize apply_result_mode. */
1373 static int
1374 apply_result_size (void)
1376 static int size = -1;
1377 int align, regno;
1378 enum machine_mode mode;
1380 /* The values computed by this function never change. */
1381 if (size < 0)
1383 size = 0;
1385 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1386 if (targetm.calls.function_value_regno_p (regno))
1388 mode = targetm.calls.get_raw_result_mode (regno);
1390 gcc_assert (mode != VOIDmode);
1392 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1393 if (size % align != 0)
1394 size = CEIL (size, align) * align;
1395 size += GET_MODE_SIZE (mode);
1396 apply_result_mode[regno] = mode;
1398 else
1399 apply_result_mode[regno] = VOIDmode;
1401 /* Allow targets that use untyped_call and untyped_return to override
1402 the size so that machine-specific information can be stored here. */
1403 #ifdef APPLY_RESULT_SIZE
1404 size = APPLY_RESULT_SIZE;
1405 #endif
1407 return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Build a PARALLEL of SETs describing the result block RESULT.  When
   SAVEP is nonzero each SET stores a return register into the block
   (used to save the values); otherwise each SET reloads a register from
   the block (used to restore the values).  */

static rtx
result_vector (int savep, rtx result)
{
  rtx *sets = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
  int count = 0;
  int offset = 0;
  int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum machine_mode mode = apply_result_mode[regno];
      int align;
      rtx reg, mem;

      if (mode == VOIDmode)
	continue;

      /* Round the running offset up to this value's alignment.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (offset % align != 0)
	offset = CEIL (offset, align) * align;

      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = adjust_address (result, mode, offset);
      sets[count++] = (savep
		       ? gen_rtx_SET (VOIDmode, mem, reg)
		       : gen_rtx_SET (VOIDmode, reg, mem));
      offset += GET_MODE_SIZE (mode);
    }

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count, sets));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Emits code that fills a stack block (laid out by apply_args_size):
   slot 0 = the incoming arg pointer, then optionally the structure value
   address, then each incoming argument register.  Returns a pseudo
   holding the block's address.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1502 /* __builtin_apply_args returns block of memory allocated on
1503 the stack into which is stored the arg pointer, structure
1504 value address, static chain, and all the registers that might
1505 possibly be used in performing a function call. The code is
1506 moved to the start of the function so the incoming values are
1507 saved. */
1509 static rtx
1510 expand_builtin_apply_args (void)
1512 /* Don't do __builtin_apply_args more than once in a function.
1513 Save the result of the first call and reuse it. */
1514 if (apply_args_value != 0)
1515 return apply_args_value;
1517 /* When this function is called, it means that registers must be
1518 saved on entry to this function. So we migrate the
1519 call to the first insn of this function. */
1520 rtx temp;
1521 rtx seq;
1523 start_sequence ();
1524 temp = expand_builtin_apply_args_1 ();
1525 seq = get_insns ();
1526 end_sequence ();
1528 apply_args_value = temp;
1530 /* Put the insns after the NOTE that starts the function.
1531 If this is inside a start_sequence, make the outer-level insn
1532 chain current, so the code is placed at the start of the
1533 function. If internal_arg_pointer is a non-virtual pseudo,
1534 it needs to be placed after the function that initializes
1535 that pseudo. */
1536 push_topmost_sequence ();
1537 if (REG_P (crtl->args.internal_arg_pointer)
1538 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1539 emit_insn_before (seq, parm_birth_insn);
1540 else
1541 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1542 pop_topmost_sequence ();
1543 return temp;
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the callee's address, ARGUMENTS the block produced by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments to
   push.  Returns (in ptr_mode) the address of a stack block holding the
   callee's raw return registers, laid out by apply_result_size.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  RESULT is the address of the block saved by
   __builtin_apply; the saved return registers are reloaded from it and
   control jumps straight to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized before it is read below.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE for this register in a separate sequence so
	   all USEs can be emitted together after the reloads.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1766 static enum type_class
1767 type_to_class (tree type)
1769 switch (TREE_CODE (type))
1771 case VOID_TYPE: return void_type_class;
1772 case INTEGER_TYPE: return integer_type_class;
1773 case ENUMERAL_TYPE: return enumeral_type_class;
1774 case BOOLEAN_TYPE: return boolean_type_class;
1775 case POINTER_TYPE: return pointer_type_class;
1776 case REFERENCE_TYPE: return reference_type_class;
1777 case OFFSET_TYPE: return offset_type_class;
1778 case REAL_TYPE: return real_type_class;
1779 case COMPLEX_TYPE: return complex_type_class;
1780 case FUNCTION_TYPE: return function_type_class;
1781 case METHOD_TYPE: return method_type_class;
1782 case RECORD_TYPE: return record_type_class;
1783 case UNION_TYPE:
1784 case QUAL_UNION_TYPE: return union_type_class;
1785 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1786 ? string_type_class : array_type_class);
1787 case LANG_TYPE: return lang_type_class;
1788 default: return no_type_class;
1792 /* Expand a call EXP to __builtin_classify_type. */
1794 static rtx
1795 expand_builtin_classify_type (tree exp)
1797 if (call_expr_nargs (exp))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1799 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  Each expansion
   produces three case labels for the enclosing switch and assigns the
   double/float/long-double codes to the locals fcode, fcodef and
   fcodel.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1816 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1817 if available. If IMPLICIT is true use the implicit builtin declaration,
1818 otherwise use the explicit declaration. If we can't do the conversion,
1819 return zero. */
1821 static tree
1822 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1824 enum built_in_function fcode, fcodef, fcodel, fcode2;
1826 switch (fn)
1828 CASE_MATHFN (BUILT_IN_ACOS)
1829 CASE_MATHFN (BUILT_IN_ACOSH)
1830 CASE_MATHFN (BUILT_IN_ASIN)
1831 CASE_MATHFN (BUILT_IN_ASINH)
1832 CASE_MATHFN (BUILT_IN_ATAN)
1833 CASE_MATHFN (BUILT_IN_ATAN2)
1834 CASE_MATHFN (BUILT_IN_ATANH)
1835 CASE_MATHFN (BUILT_IN_CBRT)
1836 CASE_MATHFN (BUILT_IN_CEIL)
1837 CASE_MATHFN (BUILT_IN_CEXPI)
1838 CASE_MATHFN (BUILT_IN_COPYSIGN)
1839 CASE_MATHFN (BUILT_IN_COS)
1840 CASE_MATHFN (BUILT_IN_COSH)
1841 CASE_MATHFN (BUILT_IN_DREM)
1842 CASE_MATHFN (BUILT_IN_ERF)
1843 CASE_MATHFN (BUILT_IN_ERFC)
1844 CASE_MATHFN (BUILT_IN_EXP)
1845 CASE_MATHFN (BUILT_IN_EXP10)
1846 CASE_MATHFN (BUILT_IN_EXP2)
1847 CASE_MATHFN (BUILT_IN_EXPM1)
1848 CASE_MATHFN (BUILT_IN_FABS)
1849 CASE_MATHFN (BUILT_IN_FDIM)
1850 CASE_MATHFN (BUILT_IN_FLOOR)
1851 CASE_MATHFN (BUILT_IN_FMA)
1852 CASE_MATHFN (BUILT_IN_FMAX)
1853 CASE_MATHFN (BUILT_IN_FMIN)
1854 CASE_MATHFN (BUILT_IN_FMOD)
1855 CASE_MATHFN (BUILT_IN_FREXP)
1856 CASE_MATHFN (BUILT_IN_GAMMA)
1857 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1858 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1859 CASE_MATHFN (BUILT_IN_HYPOT)
1860 CASE_MATHFN (BUILT_IN_ILOGB)
1861 CASE_MATHFN (BUILT_IN_ICEIL)
1862 CASE_MATHFN (BUILT_IN_IFLOOR)
1863 CASE_MATHFN (BUILT_IN_INF)
1864 CASE_MATHFN (BUILT_IN_IRINT)
1865 CASE_MATHFN (BUILT_IN_IROUND)
1866 CASE_MATHFN (BUILT_IN_ISINF)
1867 CASE_MATHFN (BUILT_IN_J0)
1868 CASE_MATHFN (BUILT_IN_J1)
1869 CASE_MATHFN (BUILT_IN_JN)
1870 CASE_MATHFN (BUILT_IN_LCEIL)
1871 CASE_MATHFN (BUILT_IN_LDEXP)
1872 CASE_MATHFN (BUILT_IN_LFLOOR)
1873 CASE_MATHFN (BUILT_IN_LGAMMA)
1874 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1875 CASE_MATHFN (BUILT_IN_LLCEIL)
1876 CASE_MATHFN (BUILT_IN_LLFLOOR)
1877 CASE_MATHFN (BUILT_IN_LLRINT)
1878 CASE_MATHFN (BUILT_IN_LLROUND)
1879 CASE_MATHFN (BUILT_IN_LOG)
1880 CASE_MATHFN (BUILT_IN_LOG10)
1881 CASE_MATHFN (BUILT_IN_LOG1P)
1882 CASE_MATHFN (BUILT_IN_LOG2)
1883 CASE_MATHFN (BUILT_IN_LOGB)
1884 CASE_MATHFN (BUILT_IN_LRINT)
1885 CASE_MATHFN (BUILT_IN_LROUND)
1886 CASE_MATHFN (BUILT_IN_MODF)
1887 CASE_MATHFN (BUILT_IN_NAN)
1888 CASE_MATHFN (BUILT_IN_NANS)
1889 CASE_MATHFN (BUILT_IN_NEARBYINT)
1890 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1891 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1892 CASE_MATHFN (BUILT_IN_POW)
1893 CASE_MATHFN (BUILT_IN_POWI)
1894 CASE_MATHFN (BUILT_IN_POW10)
1895 CASE_MATHFN (BUILT_IN_REMAINDER)
1896 CASE_MATHFN (BUILT_IN_REMQUO)
1897 CASE_MATHFN (BUILT_IN_RINT)
1898 CASE_MATHFN (BUILT_IN_ROUND)
1899 CASE_MATHFN (BUILT_IN_SCALB)
1900 CASE_MATHFN (BUILT_IN_SCALBLN)
1901 CASE_MATHFN (BUILT_IN_SCALBN)
1902 CASE_MATHFN (BUILT_IN_SIGNBIT)
1903 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1904 CASE_MATHFN (BUILT_IN_SIN)
1905 CASE_MATHFN (BUILT_IN_SINCOS)
1906 CASE_MATHFN (BUILT_IN_SINH)
1907 CASE_MATHFN (BUILT_IN_SQRT)
1908 CASE_MATHFN (BUILT_IN_TAN)
1909 CASE_MATHFN (BUILT_IN_TANH)
1910 CASE_MATHFN (BUILT_IN_TGAMMA)
1911 CASE_MATHFN (BUILT_IN_TRUNC)
1912 CASE_MATHFN (BUILT_IN_Y0)
1913 CASE_MATHFN (BUILT_IN_Y1)
1914 CASE_MATHFN (BUILT_IN_YN)
1916 default:
1917 return NULL_TREE;
1920 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1921 fcode2 = fcode;
1922 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1923 fcode2 = fcodef;
1924 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1925 fcode2 = fcodel;
1926 else
1927 return NULL_TREE;
1929 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1930 return NULL_TREE;
1932 return builtin_decl_explicit (fcode2);
1935 /* Like mathfn_built_in_1(), but always use the implicit array. */
1937 tree
1938 mathfn_built_in (tree type, enum built_in_function fn)
1940 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1943 /* If errno must be maintained, expand the RTL to check if the result,
1944 TARGET, of a built-in function call, EXP, is NaN, and if so set
1945 errno to EDOM. */
1947 static void
1948 expand_errno_check (tree exp, rtx target)
1950 rtx lab = gen_label_rtx ();
1952 /* Test the result; if it is NaN, set errno=EDOM because
1953 the argument was not in the domain. */
1954 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1955 NULL_RTX, NULL_RTX, lab,
1956 /* The jump is very likely. */
1957 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1959 #ifdef TARGET_EDOM
1960 /* If this built-in doesn't throw an exception, set errno directly. */
1961 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1963 #ifdef GEN_ERRNO_RTX
1964 rtx errno_rtx = GEN_ERRNO_RTX;
1965 #else
1966 rtx errno_rtx
1967 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1968 #endif
1969 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1970 emit_label (lab);
1971 return;
1973 #endif
1975 /* Make sure the library call isn't expanded as a tail call. */
1976 CALL_EXPR_TAILCALL (exp) = 0;
1978 /* We can't set errno=EDOM directly; let the library call do it.
1979 Pop the arguments right away in case the call gets deleted. */
1980 NO_DEFER_POP;
1981 expand_call (exp, target, 0);
1982 OK_DEFER_POP;
1983 emit_label (lab);
1986 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1987 Return NULL_RTX if a normal call should be emitted rather than expanding
1988 the function in-line. EXP is the expression that is a call to the builtin
1989 function; if convenient, the result should be placed in TARGET.
1990 SUBTARGET may be used as the target for computing one of EXP's operands. */
1992 static rtx
1993 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1995 optab builtin_optab;
1996 rtx op0, insns;
1997 tree fndecl = get_callee_fndecl (exp);
1998 enum machine_mode mode;
1999 bool errno_set = false;
2000 bool try_widening = false;
2001 tree arg;
2003 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2004 return NULL_RTX;
2006 arg = CALL_EXPR_ARG (exp, 0);
2008 switch (DECL_FUNCTION_CODE (fndecl))
2010 CASE_FLT_FN (BUILT_IN_SQRT):
2011 errno_set = ! tree_expr_nonnegative_p (arg);
2012 try_widening = true;
2013 builtin_optab = sqrt_optab;
2014 break;
2015 CASE_FLT_FN (BUILT_IN_EXP):
2016 errno_set = true; builtin_optab = exp_optab; break;
2017 CASE_FLT_FN (BUILT_IN_EXP10):
2018 CASE_FLT_FN (BUILT_IN_POW10):
2019 errno_set = true; builtin_optab = exp10_optab; break;
2020 CASE_FLT_FN (BUILT_IN_EXP2):
2021 errno_set = true; builtin_optab = exp2_optab; break;
2022 CASE_FLT_FN (BUILT_IN_EXPM1):
2023 errno_set = true; builtin_optab = expm1_optab; break;
2024 CASE_FLT_FN (BUILT_IN_LOGB):
2025 errno_set = true; builtin_optab = logb_optab; break;
2026 CASE_FLT_FN (BUILT_IN_LOG):
2027 errno_set = true; builtin_optab = log_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOG10):
2029 errno_set = true; builtin_optab = log10_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG2):
2031 errno_set = true; builtin_optab = log2_optab; break;
2032 CASE_FLT_FN (BUILT_IN_LOG1P):
2033 errno_set = true; builtin_optab = log1p_optab; break;
2034 CASE_FLT_FN (BUILT_IN_ASIN):
2035 builtin_optab = asin_optab; break;
2036 CASE_FLT_FN (BUILT_IN_ACOS):
2037 builtin_optab = acos_optab; break;
2038 CASE_FLT_FN (BUILT_IN_TAN):
2039 builtin_optab = tan_optab; break;
2040 CASE_FLT_FN (BUILT_IN_ATAN):
2041 builtin_optab = atan_optab; break;
2042 CASE_FLT_FN (BUILT_IN_FLOOR):
2043 builtin_optab = floor_optab; break;
2044 CASE_FLT_FN (BUILT_IN_CEIL):
2045 builtin_optab = ceil_optab; break;
2046 CASE_FLT_FN (BUILT_IN_TRUNC):
2047 builtin_optab = btrunc_optab; break;
2048 CASE_FLT_FN (BUILT_IN_ROUND):
2049 builtin_optab = round_optab; break;
2050 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2051 builtin_optab = nearbyint_optab;
2052 if (flag_trapping_math)
2053 break;
2054 /* Else fallthrough and expand as rint. */
2055 CASE_FLT_FN (BUILT_IN_RINT):
2056 builtin_optab = rint_optab; break;
2057 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2058 builtin_optab = significand_optab; break;
2059 default:
2060 gcc_unreachable ();
2063 /* Make a suitable register to place result in. */
2064 mode = TYPE_MODE (TREE_TYPE (exp));
2066 if (! flag_errno_math || ! HONOR_NANS (mode))
2067 errno_set = false;
2069 /* Before working hard, check whether the instruction is available, but try
2070 to widen the mode for specific operations. */
2071 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2072 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2073 && (!errno_set || !optimize_insn_for_size_p ()))
2075 rtx result = gen_reg_rtx (mode);
2077 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2078 need to expand the argument again. This way, we will not perform
2079 side-effects more the once. */
2080 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2082 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2084 start_sequence ();
2086 /* Compute into RESULT.
2087 Set RESULT to wherever the result comes back. */
2088 result = expand_unop (mode, builtin_optab, op0, result, 0);
2090 if (result != 0)
2092 if (errno_set)
2093 expand_errno_check (exp, result);
2095 /* Output the entire sequence. */
2096 insns = get_insns ();
2097 end_sequence ();
2098 emit_insn (insns);
2099 return result;
2102 /* If we were unable to expand via the builtin, stop the sequence
2103 (without outputting the insns) and call to the library function
2104 with the stabilized argument list. */
2105 end_sequence ();
2108 return expand_call (exp, target, target == const0_rtx);
2111 /* Expand a call to the builtin binary math functions (pow and atan2).
2112 Return NULL_RTX if a normal call should be emitted rather than expanding the
2113 function in-line. EXP is the expression that is a call to the builtin
2114 function; if convenient, the result should be placed in TARGET.
2115 SUBTARGET may be used as the target for computing one of EXP's
2116 operands. */
2118 static rtx
2119 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2121 optab builtin_optab;
2122 rtx op0, op1, insns, result;
2123 int op1_type = REAL_TYPE;
2124 tree fndecl = get_callee_fndecl (exp);
2125 tree arg0, arg1;
2126 enum machine_mode mode;
2127 bool errno_set = true;
2129 switch (DECL_FUNCTION_CODE (fndecl))
2131 CASE_FLT_FN (BUILT_IN_SCALBN):
2132 CASE_FLT_FN (BUILT_IN_SCALBLN):
2133 CASE_FLT_FN (BUILT_IN_LDEXP):
2134 op1_type = INTEGER_TYPE;
2135 default:
2136 break;
2139 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2140 return NULL_RTX;
2142 arg0 = CALL_EXPR_ARG (exp, 0);
2143 arg1 = CALL_EXPR_ARG (exp, 1);
2145 switch (DECL_FUNCTION_CODE (fndecl))
2147 CASE_FLT_FN (BUILT_IN_POW):
2148 builtin_optab = pow_optab; break;
2149 CASE_FLT_FN (BUILT_IN_ATAN2):
2150 builtin_optab = atan2_optab; break;
2151 CASE_FLT_FN (BUILT_IN_SCALB):
2152 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2153 return 0;
2154 builtin_optab = scalb_optab; break;
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2158 return 0;
2159 /* Fall through... */
2160 CASE_FLT_FN (BUILT_IN_LDEXP):
2161 builtin_optab = ldexp_optab; break;
2162 CASE_FLT_FN (BUILT_IN_FMOD):
2163 builtin_optab = fmod_optab; break;
2164 CASE_FLT_FN (BUILT_IN_REMAINDER):
2165 CASE_FLT_FN (BUILT_IN_DREM):
2166 builtin_optab = remainder_optab; break;
2167 default:
2168 gcc_unreachable ();
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Before working hard, check whether the instruction is available. */
2175 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2176 return NULL_RTX;
2178 result = gen_reg_rtx (mode);
2180 if (! flag_errno_math || ! HONOR_NANS (mode))
2181 errno_set = false;
2183 if (errno_set && optimize_insn_for_size_p ())
2184 return 0;
2186 /* Always stabilize the argument list. */
2187 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2188 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2190 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2191 op1 = expand_normal (arg1);
2193 start_sequence ();
2195 /* Compute into RESULT.
2196 Set RESULT to wherever the result comes back. */
2197 result = expand_binop (mode, builtin_optab, op0, op1,
2198 result, 0, OPTAB_DIRECT);
2200 /* If we were unable to expand via the builtin, stop the sequence
2201 (without outputting the insns) and call to the library function
2202 with the stabilized argument list. */
2203 if (result == 0)
2205 end_sequence ();
2206 return expand_call (exp, target, target == const0_rtx);
2209 if (errno_set)
2210 expand_errno_check (exp, result);
2212 /* Output the entire sequence. */
2213 insns = get_insns ();
2214 end_sequence ();
2215 emit_insn (insns);
2217 return result;
2220 /* Expand a call to the builtin trinary math functions (fma).
2221 Return NULL_RTX if a normal call should be emitted rather than expanding the
2222 function in-line. EXP is the expression that is a call to the builtin
2223 function; if convenient, the result should be placed in TARGET.
2224 SUBTARGET may be used as the target for computing one of EXP's
2225 operands. */
2227 static rtx
2228 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2230 optab builtin_optab;
2231 rtx op0, op1, op2, insns, result;
2232 tree fndecl = get_callee_fndecl (exp);
2233 tree arg0, arg1, arg2;
2234 enum machine_mode mode;
2236 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2237 return NULL_RTX;
2239 arg0 = CALL_EXPR_ARG (exp, 0);
2240 arg1 = CALL_EXPR_ARG (exp, 1);
2241 arg2 = CALL_EXPR_ARG (exp, 2);
2243 switch (DECL_FUNCTION_CODE (fndecl))
2245 CASE_FLT_FN (BUILT_IN_FMA):
2246 builtin_optab = fma_optab; break;
2247 default:
2248 gcc_unreachable ();
2251 /* Make a suitable register to place result in. */
2252 mode = TYPE_MODE (TREE_TYPE (exp));
2254 /* Before working hard, check whether the instruction is available. */
2255 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2256 return NULL_RTX;
2258 result = gen_reg_rtx (mode);
2260 /* Always stabilize the argument list. */
2261 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2262 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2263 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2265 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2266 op1 = expand_normal (arg1);
2267 op2 = expand_normal (arg2);
2269 start_sequence ();
2271 /* Compute into RESULT.
2272 Set RESULT to wherever the result comes back. */
2273 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2274 result, 0);
2276 /* If we were unable to expand via the builtin, stop the sequence
2277 (without outputting the insns) and call to the library function
2278 with the stabilized argument list. */
2279 if (result == 0)
2281 end_sequence ();
2282 return expand_call (exp, target, target == const0_rtx);
2285 /* Output the entire sequence. */
2286 insns = get_insns ();
2287 end_sequence ();
2288 emit_insn (insns);
2290 return result;
2293 /* Expand a call to the builtin sin and cos math functions.
2294 Return NULL_RTX if a normal call should be emitted rather than expanding the
2295 function in-line. EXP is the expression that is a call to the builtin
2296 function; if convenient, the result should be placed in TARGET.
2297 SUBTARGET may be used as the target for computing one of EXP's
2298 operands. */
2300 static rtx
2301 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2303 optab builtin_optab;
2304 rtx op0, insns;
2305 tree fndecl = get_callee_fndecl (exp);
2306 enum machine_mode mode;
2307 tree arg;
2309 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2310 return NULL_RTX;
2312 arg = CALL_EXPR_ARG (exp, 0);
2314 switch (DECL_FUNCTION_CODE (fndecl))
2316 CASE_FLT_FN (BUILT_IN_SIN):
2317 CASE_FLT_FN (BUILT_IN_COS):
2318 builtin_optab = sincos_optab; break;
2319 default:
2320 gcc_unreachable ();
2323 /* Make a suitable register to place result in. */
2324 mode = TYPE_MODE (TREE_TYPE (exp));
2326 /* Check if sincos insn is available, otherwise fallback
2327 to sin or cos insn. */
2328 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2329 switch (DECL_FUNCTION_CODE (fndecl))
2331 CASE_FLT_FN (BUILT_IN_SIN):
2332 builtin_optab = sin_optab; break;
2333 CASE_FLT_FN (BUILT_IN_COS):
2334 builtin_optab = cos_optab; break;
2335 default:
2336 gcc_unreachable ();
2339 /* Before working hard, check whether the instruction is available. */
2340 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2342 rtx result = gen_reg_rtx (mode);
2344 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2345 need to expand the argument again. This way, we will not perform
2346 side-effects more the once. */
2347 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2349 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2351 start_sequence ();
2353 /* Compute into RESULT.
2354 Set RESULT to wherever the result comes back. */
2355 if (builtin_optab == sincos_optab)
2357 int ok;
2359 switch (DECL_FUNCTION_CODE (fndecl))
2361 CASE_FLT_FN (BUILT_IN_SIN):
2362 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2363 break;
2364 CASE_FLT_FN (BUILT_IN_COS):
2365 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2366 break;
2367 default:
2368 gcc_unreachable ();
2370 gcc_assert (ok);
2372 else
2373 result = expand_unop (mode, builtin_optab, op0, result, 0);
2375 if (result != 0)
2377 /* Output the entire sequence. */
2378 insns = get_insns ();
2379 end_sequence ();
2380 emit_insn (insns);
2381 return result;
2384 /* If we were unable to expand via the builtin, stop the sequence
2385 (without outputting the insns) and call to the library function
2386 with the stabilized argument list. */
2387 end_sequence ();
2390 return expand_call (exp, target, target == const0_rtx);
2393 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2394 return an RTL instruction code that implements the functionality.
2395 If that isn't possible or available return CODE_FOR_nothing. */
2397 static enum insn_code
2398 interclass_mathfn_icode (tree arg, tree fndecl)
2400 bool errno_set = false;
2401 optab builtin_optab = unknown_optab;
2402 enum machine_mode mode;
2404 switch (DECL_FUNCTION_CODE (fndecl))
2406 CASE_FLT_FN (BUILT_IN_ILOGB):
2407 errno_set = true; builtin_optab = ilogb_optab; break;
2408 CASE_FLT_FN (BUILT_IN_ISINF):
2409 builtin_optab = isinf_optab; break;
2410 case BUILT_IN_ISNORMAL:
2411 case BUILT_IN_ISFINITE:
2412 CASE_FLT_FN (BUILT_IN_FINITE):
2413 case BUILT_IN_FINITED32:
2414 case BUILT_IN_FINITED64:
2415 case BUILT_IN_FINITED128:
2416 case BUILT_IN_ISINFD32:
2417 case BUILT_IN_ISINFD64:
2418 case BUILT_IN_ISINFD128:
2419 /* These builtins have no optabs (yet). */
2420 break;
2421 default:
2422 gcc_unreachable ();
2425 /* There's no easy way to detect the case we need to set EDOM. */
2426 if (flag_errno_math && errno_set)
2427 return CODE_FOR_nothing;
2429 /* Optab mode depends on the mode of the input argument. */
2430 mode = TYPE_MODE (TREE_TYPE (arg));
2432 if (builtin_optab)
2433 return optab_handler (builtin_optab, mode);
2434 return CODE_FOR_nothing;
2437 /* Expand a call to one of the builtin math functions that operate on
2438 floating point argument and output an integer result (ilogb, isinf,
2439 isnan, etc).
2440 Return 0 if a normal call should be emitted rather than expanding the
2441 function in-line. EXP is the expression that is a call to the builtin
2442 function; if convenient, the result should be placed in TARGET. */
2444 static rtx
2445 expand_builtin_interclass_mathfn (tree exp, rtx target)
2447 enum insn_code icode = CODE_FOR_nothing;
2448 rtx op0;
2449 tree fndecl = get_callee_fndecl (exp);
2450 enum machine_mode mode;
2451 tree arg;
2453 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2454 return NULL_RTX;
2456 arg = CALL_EXPR_ARG (exp, 0);
2457 icode = interclass_mathfn_icode (arg, fndecl);
2458 mode = TYPE_MODE (TREE_TYPE (arg));
2460 if (icode != CODE_FOR_nothing)
2462 struct expand_operand ops[1];
2463 rtx last = get_last_insn ();
2464 tree orig_arg = arg;
2466 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2467 need to expand the argument again. This way, we will not perform
2468 side-effects more the once. */
2469 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2471 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2473 if (mode != GET_MODE (op0))
2474 op0 = convert_to_mode (mode, op0, 0);
2476 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2477 if (maybe_legitimize_operands (icode, 0, 1, ops)
2478 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2479 return ops[0].value;
2481 delete_insns_since (last);
2482 CALL_EXPR_ARG (exp, 0) = orig_arg;
2485 return NULL_RTX;
2488 /* Expand a call to the builtin sincos math function.
2489 Return NULL_RTX if a normal call should be emitted rather than expanding the
2490 function in-line. EXP is the expression that is a call to the builtin
2491 function. */
2493 static rtx
2494 expand_builtin_sincos (tree exp)
2496 rtx op0, op1, op2, target1, target2;
2497 enum machine_mode mode;
2498 tree arg, sinp, cosp;
2499 int result;
2500 location_t loc = EXPR_LOCATION (exp);
2501 tree alias_type, alias_off;
2503 if (!validate_arglist (exp, REAL_TYPE,
2504 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2505 return NULL_RTX;
2507 arg = CALL_EXPR_ARG (exp, 0);
2508 sinp = CALL_EXPR_ARG (exp, 1);
2509 cosp = CALL_EXPR_ARG (exp, 2);
2511 /* Make a suitable register to place result in. */
2512 mode = TYPE_MODE (TREE_TYPE (arg));
2514 /* Check if sincos insn is available, otherwise emit the call. */
2515 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2516 return NULL_RTX;
2518 target1 = gen_reg_rtx (mode);
2519 target2 = gen_reg_rtx (mode);
2521 op0 = expand_normal (arg);
2522 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2523 alias_off = build_int_cst (alias_type, 0);
2524 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2525 sinp, alias_off));
2526 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2527 cosp, alias_off));
2529 /* Compute into target1 and target2.
2530 Set TARGET to wherever the result comes back. */
2531 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2532 gcc_assert (result);
2534 /* Move target1 and target2 to the memory locations indicated
2535 by op1 and op2. */
2536 emit_move_insn (op1, target1);
2537 emit_move_insn (op2, target2);
2539 return const0_rtx;
2542 /* Expand a call to the internal cexpi builtin to the sincos math function.
2543 EXP is the expression that is a call to the builtin function; if convenient,
2544 the result should be placed in TARGET. */
2546 static rtx
2547 expand_builtin_cexpi (tree exp, rtx target)
2549 tree fndecl = get_callee_fndecl (exp);
2550 tree arg, type;
2551 enum machine_mode mode;
2552 rtx op0, op1, op2;
2553 location_t loc = EXPR_LOCATION (exp);
2555 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2556 return NULL_RTX;
2558 arg = CALL_EXPR_ARG (exp, 0);
2559 type = TREE_TYPE (arg);
2560 mode = TYPE_MODE (TREE_TYPE (arg));
2562 /* Try expanding via a sincos optab, fall back to emitting a libcall
2563 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2564 is only generated from sincos, cexp or if we have either of them. */
2565 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2567 op1 = gen_reg_rtx (mode);
2568 op2 = gen_reg_rtx (mode);
2570 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2572 /* Compute into op1 and op2. */
2573 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2575 else if (targetm.libc_has_function (function_sincos))
2577 tree call, fn = NULL_TREE;
2578 tree top1, top2;
2579 rtx op1a, op2a;
2581 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2582 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2583 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2584 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2587 else
2588 gcc_unreachable ();
2590 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2591 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2592 op1a = copy_addr_to_reg (XEXP (op1, 0));
2593 op2a = copy_addr_to_reg (XEXP (op2, 0));
2594 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2595 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2597 /* Make sure not to fold the sincos call again. */
2598 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2599 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2600 call, 3, arg, top1, top2));
2602 else
2604 tree call, fn = NULL_TREE, narg;
2605 tree ctype = build_complex_type (type);
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2613 else
2614 gcc_unreachable ();
2616 /* If we don't have a decl for cexp create one. This is the
2617 friendliest fallback if the user calls __builtin_cexpi
2618 without full target C99 function support. */
2619 if (fn == NULL_TREE)
2621 tree fntype;
2622 const char *name = NULL;
2624 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2625 name = "cexpf";
2626 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2627 name = "cexp";
2628 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2629 name = "cexpl";
2631 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2632 fn = build_fn_decl (name, fntype);
2635 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2636 build_real (type, dconst0), arg);
2638 /* Make sure not to fold the cexp call again. */
2639 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2640 return expand_expr (build_call_nary (ctype, call, 1, narg),
2641 target, VOIDmode, EXPAND_NORMAL);
2644 /* Now build the proper return type. */
2645 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2646 make_tree (TREE_TYPE (arg), op2),
2647 make_tree (TREE_TYPE (arg), op1)),
2648 target, VOIDmode, EXPAND_NORMAL);
2651 /* Conveniently construct a function call expression. FNDECL names the
2652 function to be called, N is the number of arguments, and the "..."
2653 parameters are the argument expressions. Unlike build_call_exr
2654 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2656 static tree
2657 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2659 va_list ap;
2660 tree fntype = TREE_TYPE (fndecl);
2661 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2663 va_start (ap, n);
2664 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2665 va_end (ap);
2666 SET_EXPR_LOCATION (fn, loc);
2667 return fn;
2670 /* Expand a call to one of the builtin rounding functions gcc defines
2671 as an extension (lfloor and lceil). As these are gcc extensions we
2672 do not need to worry about setting errno to EDOM.
2673 If expanding via optab fails, lower expression to (int)(floor(x)).
2674 EXP is the expression that is a call to the builtin function;
2675 if convenient, the result should be placed in TARGET. */
2677 static rtx
2678 expand_builtin_int_roundingfn (tree exp, rtx target)
2680 convert_optab builtin_optab;
2681 rtx op0, insns, tmp;
2682 tree fndecl = get_callee_fndecl (exp);
2683 enum built_in_function fallback_fn;
2684 tree fallback_fndecl;
2685 enum machine_mode mode;
2686 tree arg;
2688 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2689 gcc_unreachable ();
2691 arg = CALL_EXPR_ARG (exp, 0);
2693 switch (DECL_FUNCTION_CODE (fndecl))
2695 CASE_FLT_FN (BUILT_IN_ICEIL):
2696 CASE_FLT_FN (BUILT_IN_LCEIL):
2697 CASE_FLT_FN (BUILT_IN_LLCEIL):
2698 builtin_optab = lceil_optab;
2699 fallback_fn = BUILT_IN_CEIL;
2700 break;
2702 CASE_FLT_FN (BUILT_IN_IFLOOR):
2703 CASE_FLT_FN (BUILT_IN_LFLOOR):
2704 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2705 builtin_optab = lfloor_optab;
2706 fallback_fn = BUILT_IN_FLOOR;
2707 break;
2709 default:
2710 gcc_unreachable ();
2713 /* Make a suitable register to place result in. */
2714 mode = TYPE_MODE (TREE_TYPE (exp));
2716 target = gen_reg_rtx (mode);
2718 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2719 need to expand the argument again. This way, we will not perform
2720 side-effects more the once. */
2721 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2723 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2725 start_sequence ();
2727 /* Compute into TARGET. */
2728 if (expand_sfix_optab (target, op0, builtin_optab))
2730 /* Output the entire sequence. */
2731 insns = get_insns ();
2732 end_sequence ();
2733 emit_insn (insns);
2734 return target;
2737 /* If we were unable to expand via the builtin, stop the sequence
2738 (without outputting the insns). */
2739 end_sequence ();
2741 /* Fall back to floating point rounding optab. */
2742 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2744 /* For non-C99 targets we may end up without a fallback fndecl here
2745 if the user called __builtin_lfloor directly. In this case emit
2746 a call to the floor/ceil variants nevertheless. This should result
2747 in the best user experience for not full C99 targets. */
2748 if (fallback_fndecl == NULL_TREE)
2750 tree fntype;
2751 const char *name = NULL;
2753 switch (DECL_FUNCTION_CODE (fndecl))
2755 case BUILT_IN_ICEIL:
2756 case BUILT_IN_LCEIL:
2757 case BUILT_IN_LLCEIL:
2758 name = "ceil";
2759 break;
2760 case BUILT_IN_ICEILF:
2761 case BUILT_IN_LCEILF:
2762 case BUILT_IN_LLCEILF:
2763 name = "ceilf";
2764 break;
2765 case BUILT_IN_ICEILL:
2766 case BUILT_IN_LCEILL:
2767 case BUILT_IN_LLCEILL:
2768 name = "ceill";
2769 break;
2770 case BUILT_IN_IFLOOR:
2771 case BUILT_IN_LFLOOR:
2772 case BUILT_IN_LLFLOOR:
2773 name = "floor";
2774 break;
2775 case BUILT_IN_IFLOORF:
2776 case BUILT_IN_LFLOORF:
2777 case BUILT_IN_LLFLOORF:
2778 name = "floorf";
2779 break;
2780 case BUILT_IN_IFLOORL:
2781 case BUILT_IN_LFLOORL:
2782 case BUILT_IN_LLFLOORL:
2783 name = "floorl";
2784 break;
2785 default:
2786 gcc_unreachable ();
2789 fntype = build_function_type_list (TREE_TYPE (arg),
2790 TREE_TYPE (arg), NULL_TREE);
2791 fallback_fndecl = build_fn_decl (name, fntype);
2794 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2796 tmp = expand_normal (exp);
2797 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2799 /* Truncate the result of floating point optab to integer
2800 via expand_fix (). */
2801 target = gen_reg_rtx (mode);
2802 expand_fix (target, tmp, 0);
2804 return target;
2807 /* Expand a call to one of the builtin math functions doing integer
2808 conversion (lrint).
2809 Return 0 if a normal call should be emitted rather than expanding the
2810 function in-line. EXP is the expression that is a call to the builtin
2811 function; if convenient, the result should be placed in TARGET. */
2813 static rtx
2814 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2816 convert_optab builtin_optab;
2817 rtx op0, insns;
2818 tree fndecl = get_callee_fndecl (exp);
2819 tree arg;
2820 enum machine_mode mode;
2821 enum built_in_function fallback_fn = BUILT_IN_NONE;
2823 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2824 gcc_unreachable ();
2826 arg = CALL_EXPR_ARG (exp, 0);
2828 switch (DECL_FUNCTION_CODE (fndecl))
2830 CASE_FLT_FN (BUILT_IN_IRINT):
2831 fallback_fn = BUILT_IN_LRINT;
2832 /* FALLTHRU */
2833 CASE_FLT_FN (BUILT_IN_LRINT):
2834 CASE_FLT_FN (BUILT_IN_LLRINT):
2835 builtin_optab = lrint_optab;
2836 break;
2838 CASE_FLT_FN (BUILT_IN_IROUND):
2839 fallback_fn = BUILT_IN_LROUND;
2840 /* FALLTHRU */
2841 CASE_FLT_FN (BUILT_IN_LROUND):
2842 CASE_FLT_FN (BUILT_IN_LLROUND):
2843 builtin_optab = lround_optab;
2844 break;
2846 default:
2847 gcc_unreachable ();
2850 /* There's no easy way to detect the case we need to set EDOM. */
2851 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2852 return NULL_RTX;
2854 /* Make a suitable register to place result in. */
2855 mode = TYPE_MODE (TREE_TYPE (exp));
2857 /* There's no easy way to detect the case we need to set EDOM. */
2858 if (!flag_errno_math)
2860 rtx result = gen_reg_rtx (mode);
2862 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2863 need to expand the argument again. This way, we will not perform
2864 side-effects more the once. */
2865 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2867 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2869 start_sequence ();
2871 if (expand_sfix_optab (result, op0, builtin_optab))
2873 /* Output the entire sequence. */
2874 insns = get_insns ();
2875 end_sequence ();
2876 emit_insn (insns);
2877 return result;
2880 /* If we were unable to expand via the builtin, stop the sequence
2881 (without outputting the insns) and call to the library function
2882 with the stabilized argument list. */
2883 end_sequence ();
2886 if (fallback_fn != BUILT_IN_NONE)
2888 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2889 targets, (int) round (x) should never be transformed into
2890 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2891 a call to lround in the hope that the target provides at least some
2892 C99 functions. This should result in the best user experience for
2893 not full C99 targets. */
2894 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2895 fallback_fn, 0);
2897 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2898 fallback_fndecl, 1, arg);
2900 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2901 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2902 return convert_to_mode (mode, target, 0);
2905 return expand_call (exp, target, target == const0_rtx);
2908 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2909 a normal call should be emitted rather than expanding the function
2910 in-line. EXP is the expression that is a call to the builtin
2911 function; if convenient, the result should be placed in TARGET. */
2913 static rtx
2914 expand_builtin_powi (tree exp, rtx target)
2916 tree arg0, arg1;
2917 rtx op0, op1;
2918 enum machine_mode mode;
2919 enum machine_mode mode2;
2921 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2922 return NULL_RTX;
2924 arg0 = CALL_EXPR_ARG (exp, 0);
2925 arg1 = CALL_EXPR_ARG (exp, 1);
2926 mode = TYPE_MODE (TREE_TYPE (exp));
2928 /* Emit a libcall to libgcc. */
2930 /* Mode of the 2nd argument must match that of an int. */
2931 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2933 if (target == NULL_RTX)
2934 target = gen_reg_rtx (mode);
2936 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2937 if (GET_MODE (op0) != mode)
2938 op0 = convert_to_mode (mode, op0, 0);
2939 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2940 if (GET_MODE (op1) != mode2)
2941 op1 = convert_to_mode (mode2, op1, 0);
2943 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2944 target, LCT_CONST, mode, 2,
2945 op0, mode, op1, mode2);
2947 return target;
2950 /* Expand expression EXP which is a call to the strlen builtin. Return
2951 NULL_RTX if we failed the caller should emit a normal call, otherwise
2952 try to get the result in TARGET, if convenient. */
2954 static rtx
2955 expand_builtin_strlen (tree exp, rtx target,
2956 enum machine_mode target_mode)
2958 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2960 else
2962 struct expand_operand ops[4];
2963 rtx pat;
2964 tree len;
2965 tree src = CALL_EXPR_ARG (exp, 0);
2966 rtx src_reg, before_strlen;
2967 enum machine_mode insn_mode = target_mode;
2968 enum insn_code icode = CODE_FOR_nothing;
2969 unsigned int align;
2971 /* If the length can be computed at compile-time, return it. */
2972 len = c_strlen (src, 0);
2973 if (len)
2974 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2976 /* If the length can be computed at compile-time and is constant
2977 integer, but there are side-effects in src, evaluate
2978 src for side-effects, then return len.
2979 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2980 can be optimized into: i++; x = 3; */
2981 len = c_strlen (src, 1);
2982 if (len && TREE_CODE (len) == INTEGER_CST)
2984 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2985 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2988 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2990 /* If SRC is not a pointer type, don't do this operation inline. */
2991 if (align == 0)
2992 return NULL_RTX;
2994 /* Bail out if we can't compute strlen in the right mode. */
2995 while (insn_mode != VOIDmode)
2997 icode = optab_handler (strlen_optab, insn_mode);
2998 if (icode != CODE_FOR_nothing)
2999 break;
3001 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3003 if (insn_mode == VOIDmode)
3004 return NULL_RTX;
3006 /* Make a place to hold the source address. We will not expand
3007 the actual source until we are sure that the expansion will
3008 not fail -- there are trees that cannot be expanded twice. */
3009 src_reg = gen_reg_rtx (Pmode);
3011 /* Mark the beginning of the strlen sequence so we can emit the
3012 source operand later. */
3013 before_strlen = get_last_insn ();
3015 create_output_operand (&ops[0], target, insn_mode);
3016 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3017 create_integer_operand (&ops[2], 0);
3018 create_integer_operand (&ops[3], align);
3019 if (!maybe_expand_insn (icode, 4, ops))
3020 return NULL_RTX;
3022 /* Now that we are assured of success, expand the source. */
3023 start_sequence ();
3024 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3025 if (pat != src_reg)
3027 #ifdef POINTERS_EXTEND_UNSIGNED
3028 if (GET_MODE (pat) != Pmode)
3029 pat = convert_to_mode (Pmode, pat,
3030 POINTERS_EXTEND_UNSIGNED);
3031 #endif
3032 emit_move_insn (src_reg, pat);
3034 pat = get_insns ();
3035 end_sequence ();
3037 if (before_strlen)
3038 emit_insn_after (pat, before_strlen);
3039 else
3040 emit_insn_before (pat, get_insns ());
3042 /* Return the value in the proper mode for this function. */
3043 if (GET_MODE (ops[0].value) == target_mode)
3044 target = ops[0].value;
3045 else if (target != 0)
3046 convert_move (target, ops[0].value, 0);
3047 else
3048 target = convert_to_mode (target_mode, ops[0].value, 0);
3050 return target;
3054 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3055 bytes from constant string DATA + OFFSET and return it as target
3056 constant. */
3058 static rtx
3059 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3060 enum machine_mode mode)
3062 const char *str = (const char *) data;
3064 gcc_assert (offset >= 0
3065 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3066 <= strlen (str) + 1));
3068 return c_readstr (str + offset, mode);
3071 /* Expand a call EXP to the memcpy builtin.
3072 Return NULL_RTX if we failed, the caller should emit a normal call,
3073 otherwise try to get the result in TARGET, if convenient (and in
3074 mode MODE if that's convenient). */
3076 static rtx
3077 expand_builtin_memcpy (tree exp, rtx target)
3079 if (!validate_arglist (exp,
3080 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3081 return NULL_RTX;
3082 else
3084 tree dest = CALL_EXPR_ARG (exp, 0);
3085 tree src = CALL_EXPR_ARG (exp, 1);
3086 tree len = CALL_EXPR_ARG (exp, 2);
3087 const char *src_str;
3088 unsigned int src_align = get_pointer_alignment (src);
3089 unsigned int dest_align = get_pointer_alignment (dest);
3090 rtx dest_mem, src_mem, dest_addr, len_rtx;
3091 HOST_WIDE_INT expected_size = -1;
3092 unsigned int expected_align = 0;
3094 /* If DEST is not a pointer type, call the normal function. */
3095 if (dest_align == 0)
3096 return NULL_RTX;
3098 /* If either SRC is not a pointer type, don't do this
3099 operation in-line. */
3100 if (src_align == 0)
3101 return NULL_RTX;
3103 if (currently_expanding_gimple_stmt)
3104 stringop_block_profile (currently_expanding_gimple_stmt,
3105 &expected_align, &expected_size);
3107 if (expected_align < dest_align)
3108 expected_align = dest_align;
3109 dest_mem = get_memory_rtx (dest, len);
3110 set_mem_align (dest_mem, dest_align);
3111 len_rtx = expand_normal (len);
3112 src_str = c_getstr (src);
3114 /* If SRC is a string constant and block move would be done
3115 by pieces, we can avoid loading the string from memory
3116 and only stored the computed constants. */
3117 if (src_str
3118 && CONST_INT_P (len_rtx)
3119 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3120 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3121 CONST_CAST (char *, src_str),
3122 dest_align, false))
3124 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3125 builtin_memcpy_read_str,
3126 CONST_CAST (char *, src_str),
3127 dest_align, false, 0);
3128 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3129 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3130 return dest_mem;
3133 src_mem = get_memory_rtx (src, len);
3134 set_mem_align (src_mem, src_align);
3136 /* Copy word part most expediently. */
3137 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3138 CALL_EXPR_TAILCALL (exp)
3139 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3140 expected_align, expected_size);
3142 if (dest_addr == 0)
3144 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3145 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3147 return dest_addr;
3151 /* Expand a call EXP to the mempcpy builtin.
3152 Return NULL_RTX if we failed; the caller should emit a normal call,
3153 otherwise try to get the result in TARGET, if convenient (and in
3154 mode MODE if that's convenient). If ENDP is 0 return the
3155 destination pointer, if ENDP is 1 return the end pointer ala
3156 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3157 stpcpy. */
3159 static rtx
3160 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3162 if (!validate_arglist (exp,
3163 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3164 return NULL_RTX;
3165 else
3167 tree dest = CALL_EXPR_ARG (exp, 0);
3168 tree src = CALL_EXPR_ARG (exp, 1);
3169 tree len = CALL_EXPR_ARG (exp, 2);
3170 return expand_builtin_mempcpy_args (dest, src, len,
3171 target, mode, /*endp=*/ 1);
3175 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3176 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3177 so that this can also be called without constructing an actual CALL_EXPR.
3178 The other arguments and return value are the same as for
3179 expand_builtin_mempcpy. */
3181 static rtx
3182 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3183 rtx target, enum machine_mode mode, int endp)
3185 /* If return value is ignored, transform mempcpy into memcpy. */
3186 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3188 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3189 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3190 dest, src, len);
3191 return expand_expr (result, target, mode, EXPAND_NORMAL);
3193 else
3195 const char *src_str;
3196 unsigned int src_align = get_pointer_alignment (src);
3197 unsigned int dest_align = get_pointer_alignment (dest);
3198 rtx dest_mem, src_mem, len_rtx;
3200 /* If either SRC or DEST is not a pointer type, don't do this
3201 operation in-line. */
3202 if (dest_align == 0 || src_align == 0)
3203 return NULL_RTX;
3205 /* If LEN is not constant, call the normal function. */
3206 if (! host_integerp (len, 1))
3207 return NULL_RTX;
3209 len_rtx = expand_normal (len);
3210 src_str = c_getstr (src);
3212 /* If SRC is a string constant and block move would be done
3213 by pieces, we can avoid loading the string from memory
3214 and only stored the computed constants. */
3215 if (src_str
3216 && CONST_INT_P (len_rtx)
3217 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3218 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3219 CONST_CAST (char *, src_str),
3220 dest_align, false))
3222 dest_mem = get_memory_rtx (dest, len);
3223 set_mem_align (dest_mem, dest_align);
3224 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3225 builtin_memcpy_read_str,
3226 CONST_CAST (char *, src_str),
3227 dest_align, false, endp);
3228 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3229 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3230 return dest_mem;
3233 if (CONST_INT_P (len_rtx)
3234 && can_move_by_pieces (INTVAL (len_rtx),
3235 MIN (dest_align, src_align)))
3237 dest_mem = get_memory_rtx (dest, len);
3238 set_mem_align (dest_mem, dest_align);
3239 src_mem = get_memory_rtx (src, len);
3240 set_mem_align (src_mem, src_align);
3241 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3242 MIN (dest_align, src_align), endp);
3243 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3244 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3245 return dest_mem;
3248 return NULL_RTX;
3252 #ifndef HAVE_movstr
3253 # define HAVE_movstr 0
3254 # define CODE_FOR_movstr CODE_FOR_nothing
3255 #endif
3257 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3258 we failed, the caller should emit a normal call, otherwise try to
3259 get the result in TARGET, if convenient. If ENDP is 0 return the
3260 destination pointer, if ENDP is 1 return the end pointer ala
3261 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3262 stpcpy. */
3264 static rtx
3265 expand_movstr (tree dest, tree src, rtx target, int endp)
3267 struct expand_operand ops[3];
3268 rtx dest_mem;
3269 rtx src_mem;
3271 if (!HAVE_movstr)
3272 return NULL_RTX;
3274 dest_mem = get_memory_rtx (dest, NULL);
3275 src_mem = get_memory_rtx (src, NULL);
3276 if (!endp)
3278 target = force_reg (Pmode, XEXP (dest_mem, 0));
3279 dest_mem = replace_equiv_address (dest_mem, target);
3282 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3283 create_fixed_operand (&ops[1], dest_mem);
3284 create_fixed_operand (&ops[2], src_mem);
3285 expand_insn (CODE_FOR_movstr, 3, ops);
3287 if (endp && target != const0_rtx)
3289 target = ops[0].value;
3290 /* movstr is supposed to set end to the address of the NUL
3291 terminator. If the caller requested a mempcpy-like return value,
3292 adjust it. */
3293 if (endp == 1)
3295 rtx tem = plus_constant (GET_MODE (target),
3296 gen_lowpart (GET_MODE (target), target), 1);
3297 emit_move_insn (target, force_operand (tem, NULL_RTX));
3300 return target;
3303 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3304 NULL_RTX if we failed the caller should emit a normal call, otherwise
3305 try to get the result in TARGET, if convenient (and in mode MODE if that's
3306 convenient). */
3308 static rtx
3309 expand_builtin_strcpy (tree exp, rtx target)
3311 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3313 tree dest = CALL_EXPR_ARG (exp, 0);
3314 tree src = CALL_EXPR_ARG (exp, 1);
3315 return expand_builtin_strcpy_args (dest, src, target);
3317 return NULL_RTX;
3320 /* Helper function to do the actual work for expand_builtin_strcpy. The
3321 arguments to the builtin_strcpy call DEST and SRC are broken out
3322 so that this can also be called without constructing an actual CALL_EXPR.
3323 The other arguments and return value are the same as for
3324 expand_builtin_strcpy. */
3326 static rtx
3327 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3329 return expand_movstr (dest, src, target, /*endp=*/0);
3332 /* Expand a call EXP to the stpcpy builtin.
3333 Return NULL_RTX if we failed the caller should emit a normal call,
3334 otherwise try to get the result in TARGET, if convenient (and in
3335 mode MODE if that's convenient). */
3337 static rtx
3338 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3340 tree dst, src;
3341 location_t loc = EXPR_LOCATION (exp);
3343 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3344 return NULL_RTX;
3346 dst = CALL_EXPR_ARG (exp, 0);
3347 src = CALL_EXPR_ARG (exp, 1);
3349 /* If return value is ignored, transform stpcpy into strcpy. */
3350 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3352 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3353 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3354 return expand_expr (result, target, mode, EXPAND_NORMAL);
3356 else
3358 tree len, lenp1;
3359 rtx ret;
3361 /* Ensure we get an actual string whose length can be evaluated at
3362 compile-time, not an expression containing a string. This is
3363 because the latter will potentially produce pessimized code
3364 when used to produce the return value. */
3365 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3366 return expand_movstr (dst, src, target, /*endp=*/2);
3368 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3369 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3370 target, mode, /*endp=*/2);
3372 if (ret)
3373 return ret;
3375 if (TREE_CODE (len) == INTEGER_CST)
3377 rtx len_rtx = expand_normal (len);
3379 if (CONST_INT_P (len_rtx))
3381 ret = expand_builtin_strcpy_args (dst, src, target);
3383 if (ret)
3385 if (! target)
3387 if (mode != VOIDmode)
3388 target = gen_reg_rtx (mode);
3389 else
3390 target = gen_reg_rtx (GET_MODE (ret));
3392 if (GET_MODE (target) != GET_MODE (ret))
3393 ret = gen_lowpart (GET_MODE (target), ret);
3395 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3396 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3397 gcc_assert (ret);
3399 return target;
3404 return expand_movstr (dst, src, target, /*endp=*/2);
3408 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3409 bytes from constant string DATA + OFFSET and return it as target
3410 constant. */
3413 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3414 enum machine_mode mode)
3416 const char *str = (const char *) data;
3418 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3419 return const0_rtx;
3421 return c_readstr (str + offset, mode);
3424 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3425 NULL_RTX if we failed the caller should emit a normal call. */
3427 static rtx
3428 expand_builtin_strncpy (tree exp, rtx target)
3430 location_t loc = EXPR_LOCATION (exp);
3432 if (validate_arglist (exp,
3433 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3435 tree dest = CALL_EXPR_ARG (exp, 0);
3436 tree src = CALL_EXPR_ARG (exp, 1);
3437 tree len = CALL_EXPR_ARG (exp, 2);
3438 tree slen = c_strlen (src, 1);
3440 /* We must be passed a constant len and src parameter. */
3441 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3442 return NULL_RTX;
3444 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3446 /* We're required to pad with trailing zeros if the requested
3447 len is greater than strlen(s2)+1. In that case try to
3448 use store_by_pieces, if it fails, punt. */
3449 if (tree_int_cst_lt (slen, len))
3451 unsigned int dest_align = get_pointer_alignment (dest);
3452 const char *p = c_getstr (src);
3453 rtx dest_mem;
3455 if (!p || dest_align == 0 || !host_integerp (len, 1)
3456 || !can_store_by_pieces (tree_low_cst (len, 1),
3457 builtin_strncpy_read_str,
3458 CONST_CAST (char *, p),
3459 dest_align, false))
3460 return NULL_RTX;
3462 dest_mem = get_memory_rtx (dest, len);
3463 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3464 builtin_strncpy_read_str,
3465 CONST_CAST (char *, p), dest_align, false, 0);
3466 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3467 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3468 return dest_mem;
3471 return NULL_RTX;
3474 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3475 bytes from constant string DATA + OFFSET and return it as target
3476 constant. */
3479 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3480 enum machine_mode mode)
3482 const char *c = (const char *) data;
3483 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3485 memset (p, *c, GET_MODE_SIZE (mode));
3487 return c_readstr (p, mode);
3490 /* Callback routine for store_by_pieces. Return the RTL of a register
3491 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3492 char value given in the RTL register data. For example, if mode is
3493 4 bytes wide, return the RTL for 0x01010101*data. */
3495 static rtx
3496 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3497 enum machine_mode mode)
3499 rtx target, coeff;
3500 size_t size;
3501 char *p;
3503 size = GET_MODE_SIZE (mode);
3504 if (size == 1)
3505 return (rtx) data;
3507 p = XALLOCAVEC (char, size);
3508 memset (p, 1, size);
3509 coeff = c_readstr (p, mode);
3511 target = convert_to_mode (mode, (rtx) data, 1);
3512 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3513 return force_reg (mode, target);
3516 /* Expand expression EXP, which is a call to the memset builtin. Return
3517 NULL_RTX if we failed the caller should emit a normal call, otherwise
3518 try to get the result in TARGET, if convenient (and in mode MODE if that's
3519 convenient). */
3521 static rtx
3522 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3527 else
3529 tree dest = CALL_EXPR_ARG (exp, 0);
3530 tree val = CALL_EXPR_ARG (exp, 1);
3531 tree len = CALL_EXPR_ARG (exp, 2);
3532 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3536 /* Helper function to do the actual work for expand_builtin_memset. The
3537 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3538 so that this can also be called without constructing an actual CALL_EXPR.
3539 The other arguments and return value are the same as for
3540 expand_builtin_memset. */
3542 static rtx
3543 expand_builtin_memset_args (tree dest, tree val, tree len,
3544 rtx target, enum machine_mode mode, tree orig_exp)
3546 tree fndecl, fn;
3547 enum built_in_function fcode;
3548 enum machine_mode val_mode;
3549 char c;
3550 unsigned int dest_align;
3551 rtx dest_mem, dest_addr, len_rtx;
3552 HOST_WIDE_INT expected_size = -1;
3553 unsigned int expected_align = 0;
3555 dest_align = get_pointer_alignment (dest);
3557 /* If DEST is not a pointer type, don't do this operation in-line. */
3558 if (dest_align == 0)
3559 return NULL_RTX;
3561 if (currently_expanding_gimple_stmt)
3562 stringop_block_profile (currently_expanding_gimple_stmt,
3563 &expected_align, &expected_size);
3565 if (expected_align < dest_align)
3566 expected_align = dest_align;
3568 /* If the LEN parameter is zero, return DEST. */
3569 if (integer_zerop (len))
3571 /* Evaluate and ignore VAL in case it has side-effects. */
3572 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3573 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3576 /* Stabilize the arguments in case we fail. */
3577 dest = builtin_save_expr (dest);
3578 val = builtin_save_expr (val);
3579 len = builtin_save_expr (len);
3581 len_rtx = expand_normal (len);
3582 dest_mem = get_memory_rtx (dest, len);
3583 val_mode = TYPE_MODE (unsigned_char_type_node);
3585 if (TREE_CODE (val) != INTEGER_CST)
3587 rtx val_rtx;
3589 val_rtx = expand_normal (val);
3590 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3592 /* Assume that we can memset by pieces if we can store
3593 * the coefficients by pieces (in the required modes).
3594 * We can't pass builtin_memset_gen_str as that emits RTL. */
3595 c = 1;
3596 if (host_integerp (len, 1)
3597 && can_store_by_pieces (tree_low_cst (len, 1),
3598 builtin_memset_read_str, &c, dest_align,
3599 true))
3601 val_rtx = force_reg (val_mode, val_rtx);
3602 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3603 builtin_memset_gen_str, val_rtx, dest_align,
3604 true, 0);
3606 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3607 dest_align, expected_align,
3608 expected_size))
3609 goto do_libcall;
3611 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3612 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3613 return dest_mem;
3616 if (target_char_cast (val, &c))
3617 goto do_libcall;
3619 if (c)
3621 if (host_integerp (len, 1)
3622 && can_store_by_pieces (tree_low_cst (len, 1),
3623 builtin_memset_read_str, &c, dest_align,
3624 true))
3625 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3626 builtin_memset_read_str, &c, dest_align, true, 0);
3627 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3628 gen_int_mode (c, val_mode),
3629 dest_align, expected_align,
3630 expected_size))
3631 goto do_libcall;
3633 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3634 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3635 return dest_mem;
3638 set_mem_align (dest_mem, dest_align);
3639 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3640 CALL_EXPR_TAILCALL (orig_exp)
3641 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3642 expected_align, expected_size);
3644 if (dest_addr == 0)
3646 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3647 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3650 return dest_addr;
3652 do_libcall:
3653 fndecl = get_callee_fndecl (orig_exp);
3654 fcode = DECL_FUNCTION_CODE (fndecl);
3655 if (fcode == BUILT_IN_MEMSET)
3656 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3657 dest, val, len);
3658 else if (fcode == BUILT_IN_BZERO)
3659 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3660 dest, len);
3661 else
3662 gcc_unreachable ();
3663 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3664 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3665 return expand_call (fn, target, target == const0_rtx);
3668 /* Expand expression EXP, which is a call to the bzero builtin. Return
3669 NULL_RTX if we failed the caller should emit a normal call. */
3671 static rtx
3672 expand_builtin_bzero (tree exp)
3674 tree dest, size;
3675 location_t loc = EXPR_LOCATION (exp);
3677 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3678 return NULL_RTX;
3680 dest = CALL_EXPR_ARG (exp, 0);
3681 size = CALL_EXPR_ARG (exp, 1);
3683 /* New argument list transforming bzero(ptr x, int y) to
3684 memset(ptr x, int 0, size_t y). This is done this way
3685 so that if it isn't expanded inline, we fallback to
3686 calling bzero instead of memset. */
3688 return expand_builtin_memset_args (dest, integer_zero_node,
3689 fold_convert_loc (loc,
3690 size_type_node, size),
3691 const0_rtx, VOIDmode, exp);
3694 /* Expand expression EXP, which is a call to the memcmp built-in function.
3695 Return NULL_RTX if we failed and the caller should emit a normal call,
3696 otherwise try to get the result in TARGET, if convenient (and in mode
3697 MODE, if that's convenient). */
3699 static rtx
3700 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3701 ATTRIBUTE_UNUSED enum machine_mode mode)
3703 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3705 if (!validate_arglist (exp,
3706 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3707 return NULL_RTX;
3709 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3710 implementing memcmp because it will stop if it encounters two
3711 zero bytes. */
3712 #if defined HAVE_cmpmemsi
3714 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3715 rtx result;
3716 rtx insn;
3717 tree arg1 = CALL_EXPR_ARG (exp, 0);
3718 tree arg2 = CALL_EXPR_ARG (exp, 1);
3719 tree len = CALL_EXPR_ARG (exp, 2);
3721 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3722 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3723 enum machine_mode insn_mode;
3725 if (HAVE_cmpmemsi)
3726 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3727 else
3728 return NULL_RTX;
3730 /* If we don't have POINTER_TYPE, call the function. */
3731 if (arg1_align == 0 || arg2_align == 0)
3732 return NULL_RTX;
3734 /* Make a place to write the result of the instruction. */
3735 result = target;
3736 if (! (result != 0
3737 && REG_P (result) && GET_MODE (result) == insn_mode
3738 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3739 result = gen_reg_rtx (insn_mode);
3741 arg1_rtx = get_memory_rtx (arg1, len);
3742 arg2_rtx = get_memory_rtx (arg2, len);
3743 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3745 /* Set MEM_SIZE as appropriate. */
3746 if (CONST_INT_P (arg3_rtx))
3748 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3749 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3752 if (HAVE_cmpmemsi)
3753 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3754 GEN_INT (MIN (arg1_align, arg2_align)));
3755 else
3756 gcc_unreachable ();
3758 if (insn)
3759 emit_insn (insn);
3760 else
3761 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3762 TYPE_MODE (integer_type_node), 3,
3763 XEXP (arg1_rtx, 0), Pmode,
3764 XEXP (arg2_rtx, 0), Pmode,
3765 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3766 TYPE_UNSIGNED (sizetype)),
3767 TYPE_MODE (sizetype));
3769 /* Return the value in the proper mode for this function. */
3770 mode = TYPE_MODE (TREE_TYPE (exp));
3771 if (GET_MODE (result) == mode)
3772 return result;
3773 else if (target != 0)
3775 convert_move (target, result, 0);
3776 return target;
3778 else
3779 return convert_to_mode (mode, result, 0);
3781 #endif /* HAVE_cmpmemsi. */
3783 return NULL_RTX;
3786 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3787 if we failed the caller should emit a normal call, otherwise try to get
3788 the result in TARGET, if convenient. */
3790 static rtx
3791 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3793 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3794 return NULL_RTX;
3796 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3797 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3798 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3800 rtx arg1_rtx, arg2_rtx;
3801 rtx result, insn = NULL_RTX;
3802 tree fndecl, fn;
3803 tree arg1 = CALL_EXPR_ARG (exp, 0);
3804 tree arg2 = CALL_EXPR_ARG (exp, 1);
3806 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3807 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3809 /* If we don't have POINTER_TYPE, call the function. */
3810 if (arg1_align == 0 || arg2_align == 0)
3811 return NULL_RTX;
3813 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3814 arg1 = builtin_save_expr (arg1);
3815 arg2 = builtin_save_expr (arg2);
3817 arg1_rtx = get_memory_rtx (arg1, NULL);
3818 arg2_rtx = get_memory_rtx (arg2, NULL);
3820 #ifdef HAVE_cmpstrsi
3821 /* Try to call cmpstrsi. */
3822 if (HAVE_cmpstrsi)
3824 enum machine_mode insn_mode
3825 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3827 /* Make a place to write the result of the instruction. */
3828 result = target;
3829 if (! (result != 0
3830 && REG_P (result) && GET_MODE (result) == insn_mode
3831 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3832 result = gen_reg_rtx (insn_mode);
3834 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3835 GEN_INT (MIN (arg1_align, arg2_align)));
3837 #endif
3838 #ifdef HAVE_cmpstrnsi
3839 /* Try to determine at least one length and call cmpstrnsi. */
3840 if (!insn && HAVE_cmpstrnsi)
3842 tree len;
3843 rtx arg3_rtx;
3845 enum machine_mode insn_mode
3846 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3847 tree len1 = c_strlen (arg1, 1);
3848 tree len2 = c_strlen (arg2, 1);
3850 if (len1)
3851 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3852 if (len2)
3853 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3855 /* If we don't have a constant length for the first, use the length
3856 of the second, if we know it. We don't require a constant for
3857 this case; some cost analysis could be done if both are available
3858 but neither is constant. For now, assume they're equally cheap,
3859 unless one has side effects. If both strings have constant lengths,
3860 use the smaller. */
3862 if (!len1)
3863 len = len2;
3864 else if (!len2)
3865 len = len1;
3866 else if (TREE_SIDE_EFFECTS (len1))
3867 len = len2;
3868 else if (TREE_SIDE_EFFECTS (len2))
3869 len = len1;
3870 else if (TREE_CODE (len1) != INTEGER_CST)
3871 len = len2;
3872 else if (TREE_CODE (len2) != INTEGER_CST)
3873 len = len1;
3874 else if (tree_int_cst_lt (len1, len2))
3875 len = len1;
3876 else
3877 len = len2;
3879 /* If both arguments have side effects, we cannot optimize. */
3880 if (!len || TREE_SIDE_EFFECTS (len))
3881 goto do_libcall;
3883 arg3_rtx = expand_normal (len);
3885 /* Make a place to write the result of the instruction. */
3886 result = target;
3887 if (! (result != 0
3888 && REG_P (result) && GET_MODE (result) == insn_mode
3889 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3890 result = gen_reg_rtx (insn_mode);
3892 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3893 GEN_INT (MIN (arg1_align, arg2_align)));
3895 #endif
3897 if (insn)
3899 enum machine_mode mode;
3900 emit_insn (insn);
3902 /* Return the value in the proper mode for this function. */
3903 mode = TYPE_MODE (TREE_TYPE (exp));
3904 if (GET_MODE (result) == mode)
3905 return result;
3906 if (target == 0)
3907 return convert_to_mode (mode, result, 0);
3908 convert_move (target, result, 0);
3909 return target;
3912 /* Expand the library call ourselves using a stabilized argument
3913 list to avoid re-evaluating the function's arguments twice. */
3914 #ifdef HAVE_cmpstrnsi
3915 do_libcall:
3916 #endif
3917 fndecl = get_callee_fndecl (exp);
3918 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3919 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3920 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3921 return expand_call (fn, target, target == const0_rtx);
3923 #endif
3924 return NULL_RTX;
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  MODE is the
   mode of the call's result type.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  /* strncmp takes (const char *, const char *, size_t).  */
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Account for the terminating NUL byte in each known length.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* gen_cmpstrnsi declined; expand the library call ourselves using a
	 stabilized argument list to avoid re-evaluating the function's
	 arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  /* Capture the insns emitted by the target hook.  */
  seq = get_insns ();
  end_sequence ();

  /* Cache the result for any later __builtin_saveregs in this function.  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4085 /* Expand a call to __builtin_next_arg. */
4087 static rtx
4088 expand_builtin_next_arg (void)
4090 /* Checking arguments is already done in fold_builtin_next_arg
4091 that must be called before this function. */
4092 return expand_binop (ptr_mode, add_optab,
4093 crtl->args.internal_arg_pointer,
4094 crtl->args.arg_offset_rtx,
4095 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the folded
   trees.  If NEEDS_LVALUE is nonzero, the result must be usable as an
   lvalue; otherwise a plain rvalue may be returned.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      /* Non-array va_list: work through a pointer to it.  */
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* A side-effect-free rvalue can be returned unchanged.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the va_list object as *(&valist + 0).  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4148 /* The "standard" definition of va_list is void*. */
4150 tree
4151 std_build_builtin_va_list (void)
4153 return ptr_type_node;
4156 /* The "standard" abi va_list is va_list_type_node. */
4158 tree
4159 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4161 return va_list_type_node;
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE (possibly behind a level of indirection)
   matches it, and NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection off TYPE so we compare the pointed-to
     type where appropriate.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  /* WTYPE is the "wanted" type, HTYPE the type we "have".  */
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Ignore qualifier differences when comparing the two types.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4199 /* The "standard" implementation of va_start: just assign `nextarg' to
4200 the variable. */
4202 void
4203 std_expand_builtin_va_start (tree valist, rtx nextarg)
4205 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4206 convert_move (va_r, nextarg, 0);
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnostics are emitted for malformed calls.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  /* va_start needs the va_list and the last named parameter.  */
  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; bail out if
     it reported a problem.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target hook when one is provided.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  TYPE is
   the type of the value to fetch, VALIST the va_list expression; setup
   statements are appended to PRE_P and postincrement statements may be
   shared with POST_P.  Returns a tree dereferencing the fetched slot.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1).  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      /* valist_tmp = valist_tmp & -boundary (round down to BOUNDARY).  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, ADDR points at a pointer to the value;
     add one more dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4335 /* Build an indirect-ref expression over the given TREE, which represents a
4336 piece of a va_arg() expansion. */
4337 tree
4338 build_va_arg_indirect_ref (tree addr)
4340 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4342 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4343 mf_mark (addr);
4345 return addr;
4348 /* Return a dummy expression of type TYPE in order to keep going after an
4349 error. */
4351 static tree
4352 dummy_object (tree type)
4354 tree t = build_int_cst (build_pointer_type (type), 0);
4355 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is the
   VA_ARG_EXPR to rewrite; generated statements go on PRE_P/POST_P.
   Returns a gimplify status (GS_ERROR on bad input, GS_ALL_DONE or
   GS_OK otherwise).  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "pass the promoted type" hint only once per run.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the va_arg access.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4451 /* Expand EXP, a call to __builtin_va_end. */
4453 static rtx
4454 expand_builtin_va_end (tree exp)
4456 tree valist = CALL_EXPR_ARG (exp, 0);
4458 /* Evaluate for side effects, if needed. I hate macros that don't
4459 do that. */
4460 if (TREE_SIDE_EFFECTS (valist))
4461 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4463 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar/record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy it as a block of memory.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL identifies which of the two; EXP is
   the call.  Returns const0_rtx (after a diagnostic) for bad or
   unsupported arguments.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy a non-register, non-constant
	 address into a register so callers get a usable value.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
4565 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4566 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4567 is the same as for allocate_dynamic_stack_space. */
4569 static rtx
4570 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4572 rtx op0;
4573 rtx result;
4574 bool valid_arglist;
4575 unsigned int align;
4576 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4577 == BUILT_IN_ALLOCA_WITH_ALIGN);
4579 /* Emit normal call if we use mudflap. */
4580 if (flag_mudflap)
4581 return NULL_RTX;
4583 valid_arglist
4584 = (alloca_with_align
4585 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4586 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4588 if (!valid_arglist)
4589 return NULL_RTX;
4591 /* Compute the argument. */
4592 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4594 /* Compute the alignment. */
4595 align = (alloca_with_align
4596 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4597 : BIGGEST_ALIGNMENT);
4599 /* Allocate the desired space. */
4600 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4601 result = convert_memory_address (ptr_mode, result);
4603 return result;
4606 /* Expand a call to bswap builtin in EXP.
4607 Return NULL_RTX if a normal call should be emitted rather than expanding the
4608 function in-line. If convenient, the result should be placed in TARGET.
4609 SUBTARGET may be used as the target for computing one of EXP's operands. */
4611 static rtx
4612 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4613 rtx subtarget)
4615 tree arg;
4616 rtx op0;
4618 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4619 return NULL_RTX;
4621 arg = CALL_EXPR_ARG (exp, 0);
4622 op0 = expand_expr (arg,
4623 subtarget && GET_MODE (subtarget) == target_mode
4624 ? subtarget : NULL_RTX,
4625 target_mode, EXPAND_NORMAL);
4626 if (GET_MODE (op0) != target_mode)
4627 op0 = convert_to_mode (target_mode, op0, 1);
4629 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4631 gcc_assert (target);
4633 return convert_to_mode (target_mode, target, 1);
4636 /* Expand a call to a unary builtin in EXP.
4637 Return NULL_RTX if a normal call should be emitted rather than expanding the
4638 function in-line. If convenient, the result should be placed in TARGET.
4639 SUBTARGET may be used as the target for computing one of EXP's operands. */
4641 static rtx
4642 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4643 rtx subtarget, optab op_optab)
4645 rtx op0;
4647 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4648 return NULL_RTX;
4650 /* Compute the argument. */
4651 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4652 (subtarget
4653 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4654 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4655 VOIDmode, EXPAND_NORMAL);
4656 /* Compute op, into TARGET if possible.
4657 Set TARGET to wherever the result comes back. */
4658 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4659 op_optab, op0, target, op_optab != clrsb_optab);
4660 gcc_assert (target);
4662 return convert_to_mode (target_mode, target, 0);
4665 /* Expand a call to __builtin_expect. We just return our argument
4666 as the builtin_expect semantic should've been already executed by
4667 tree branch prediction pass. */
4669 static rtx
4670 expand_builtin_expect (tree exp, rtx target)
4672 tree arg;
4674 if (call_expr_nargs (exp) < 2)
4675 return const0_rtx;
4676 arg = CALL_EXPR_ARG (exp, 0);
4678 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4679 /* When guessing was done, the hints should be already stripped away. */
4680 gcc_assert (!flag_guess_branch_prob
4681 || optimize == 0 || seen_error ());
4682 return target;
4685 /* Expand a call to __builtin_assume_aligned. We just return our first
4686 argument as the builtin_assume_aligned semantic should've been already
4687 executed by CCP. */
4689 static rtx
4690 expand_builtin_assume_aligned (tree exp, rtx target)
4692 if (call_expr_nargs (exp) < 2)
4693 return const0_rtx;
4694 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4695 EXPAND_NORMAL);
4696 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4697 && (call_expr_nargs (exp) < 3
4698 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4699 return target;
/* Expand a call to __builtin_trap: emit the target's trap insn when one
   exists, otherwise a library call to abort, followed by a barrier.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    /* No trap insn available (or HAVE_trap is false): call abort.  */
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  /* Control does not continue past a trap.  */
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  Nothing is emitted except a
   barrier noting that control flow does not pass this point.

   It is the responsibility of the program being compiled to ensure
   that control flow never actually reaches __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4732 /* Expand EXP, a call to fabs, fabsf or fabsl.
4733 Return NULL_RTX if a normal call should be emitted rather than expanding
4734 the function inline. If convenient, the result should be placed
4735 in TARGET. SUBTARGET may be used as the target for computing
4736 the operand. */
4738 static rtx
4739 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4741 enum machine_mode mode;
4742 tree arg;
4743 rtx op0;
4745 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4746 return NULL_RTX;
4748 arg = CALL_EXPR_ARG (exp, 0);
4749 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4750 mode = TYPE_MODE (TREE_TYPE (arg));
4751 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4752 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4755 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4756 Return NULL is a normal call should be emitted rather than expanding the
4757 function inline. If convenient, the result should be placed in TARGET.
4758 SUBTARGET may be used as the target for computing the operand. */
4760 static rtx
4761 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4763 rtx op0, op1;
4764 tree arg;
4766 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4767 return NULL_RTX;
4769 arg = CALL_EXPR_ARG (exp, 0);
4770 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4772 arg = CALL_EXPR_ARG (exp, 1);
4773 op1 = expand_normal (arg);
4775 return expand_copysign (op0, op1, target);
4778 /* Create a new constant string literal and return a char* pointer to it.
4779 The STRING_CST value is the LEN characters at STR. */
4780 tree
4781 build_string_literal (int len, const char *str)
4783 tree t, elem, index, type;
4785 t = build_string (len, str);
4786 elem = build_type_variant (char_type_node, 1, 0);
4787 index = build_index_type (size_int (len - 1));
4788 type = build_array_type (elem, index);
4789 TREE_TYPE (t) = type;
4790 TREE_CONSTANT (t) = 1;
4791 TREE_READONLY (t) = 1;
4792 TREE_STATIC (t) = 1;
4794 type = build_pointer_type (elem);
4795 t = build1 (ADDR_EXPR, type,
4796 build4 (ARRAY_REF, elem,
4797 t, integer_zero_node, NULL_TREE, NULL_TREE));
4798 return t;
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when a
   library call should be emitted instead, const0_rtx otherwise.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      /* Expand the begin/end pointer arguments.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4850 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4852 static rtx
4853 round_trampoline_addr (rtx tramp)
4855 rtx temp, addend, mask;
4857 /* If we don't need too much alignment, we'll have been guaranteed
4858 proper alignment by get_trampoline_type. */
4859 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4860 return tramp;
4862 /* Round address up to desired boundary. */
4863 temp = gen_reg_rtx (Pmode);
4864 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4865 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4867 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4868 temp, 0, OPTAB_LIB_WIDEN);
4869 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4870 temp, 0, OPTAB_LIB_WIDEN);
4872 return tramp;
/* Expand EXP, a call to a trampoline-initialization builtin.  ONSTACK is
   true for __builtin_init_trampoline (trampoline in the local frame) and
   false for the heap variant.  Returns NULL_RTX on malformed arglist so
   the caller emits a normal call.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Arguments: trampoline storage, nested function, static chain.  */
  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      /* Rounding moved the address; rebuild the MEM with the new
	 address and the alignment we just established.  */
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that an executable stack trampoline exists and warn
	 under -Wtrampolines.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4932 static rtx
4933 expand_builtin_adjust_trampoline (tree exp)
4935 rtx tramp;
4937 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4938 return NULL_RTX;
4940 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4941 tramp = round_trampoline_addr (tramp);
4942 if (targetm.calls.trampoline_adjust_address)
4943 tramp = targetm.calls.trampoline_adjust_address (tramp);
4945 return tramp;
4948 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4949 function. The function first checks whether the back end provides
4950 an insn to implement signbit for the respective mode. If not, it
4951 checks whether the floating point format of the value is such that
4952 the sign bit can be extracted. If that is not the case, the
4953 function returns NULL_RTX to indicate that a normal call should be
4954 emitted rather than expanding the function in-line. EXP is the
4955 expression that is a call to the builtin function; if convenient,
4956 the result should be placed in TARGET. */
4957 static rtx
4958 expand_builtin_signbit (tree exp, rtx target)
4960 const struct real_format *fmt;
4961 enum machine_mode fmode, imode, rmode;
4962 tree arg;
4963 int word, bitpos;
4964 enum insn_code icode;
4965 rtx temp;
4966 location_t loc = EXPR_LOCATION (exp);
4968 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4969 return NULL_RTX;
/* FMODE is the float argument's mode, RMODE the integer result's mode.  */
4971 arg = CALL_EXPR_ARG (exp, 0);
4972 fmode = TYPE_MODE (TREE_TYPE (arg));
4973 rmode = TYPE_MODE (TREE_TYPE (exp));
4974 fmt = REAL_MODE_FORMAT (fmode);
4976 arg = builtin_save_expr (arg);
4978 /* Expand the argument yielding a RTX expression. */
4979 temp = expand_normal (arg);
4981 /* Check if the back end provides an insn that handles signbit for the
4982 argument's mode. */
4983 icode = optab_handler (signbit_optab, fmode);
4984 if (icode != CODE_FOR_nothing)
4986 rtx last = get_last_insn ();
4987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4988 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4989 return target;
/* The insn expansion failed; discard any partial sequence.  */
4990 delete_insns_since (last);
4993 /* For floating point formats without a sign bit, implement signbit
4994 as "ARG < 0.0". */
4995 bitpos = fmt->signbit_ro;
4996 if (bitpos < 0)
4998 /* But we can't do this if the format supports signed zero. */
4999 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5000 return NULL_RTX;
5002 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5003 build_real (TREE_TYPE (arg), dconst0));
5004 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Otherwise extract the sign bit directly.  Reinterpret the float as
   an integer: as one integer mode if it fits in a word, else pick out
   the word containing the sign bit.  */
5007 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5009 imode = int_mode_for_mode (fmode);
5010 if (imode == BLKmode)
5011 return NULL_RTX;
5012 temp = gen_lowpart (imode, temp);
5014 else
5016 imode = word_mode;
5017 /* Handle targets with different FP word orders. */
5018 if (FLOAT_WORDS_BIG_ENDIAN)
5019 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5020 else
5021 word = bitpos / BITS_PER_WORD;
5022 temp = operand_subword_force (temp, word, fmode);
5023 bitpos = bitpos % BITS_PER_WORD;
5026 /* Force the intermediate word_mode (or narrower) result into a
5027 register. This avoids attempting to create paradoxical SUBREGs
5028 of floating point modes below. */
5029 temp = force_reg (imode, temp);
5031 /* If the bitpos is within the "result mode" lowpart, the operation
5032 can be implemented with a single bitwise AND. Otherwise, we need
5033 a right shift and an AND. */
5035 if (bitpos < GET_MODE_BITSIZE (rmode))
5037 double_int mask = double_int_zero.set_bit (bitpos);
5039 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5040 temp = gen_lowpart (rmode, temp);
5041 temp = expand_binop (rmode, and_optab, temp,
5042 immed_double_int_const (mask, rmode),
5043 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5045 else
5047 /* Perform a logical right shift to place the signbit in the least
5048 significant bit, then truncate the result to the desired mode
5049 and mask just this bit. */
5050 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5051 temp = gen_lowpart (rmode, temp);
5052 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5053 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5056 return temp;
5059 /* Expand fork or exec calls. TARGET is the desired target of the
5060 call. EXP is the call. FN is the
5061 identificator of the actual function. IGNORE is nonzero if the
5062 value is to be ignored. */
5064 static rtx
5065 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5067 tree id, decl;
5068 tree call;
5070 /* If we are not profiling, just call the function. */
5071 if (!profile_arc_flag)
5072 return NULL_RTX;
5074 /* Otherwise call the wrapper. This should be equivalent for the rest of
5075 compiler, so the code does not diverge, and the wrapper may run the
5076 code necessary for keeping the profiling sane. */
/* Map each builtin to its libgcov wrapper, which flushes/forks the
   profiling counters around the fork/exec.  */
5078 switch (DECL_FUNCTION_CODE (fn))
5080 case BUILT_IN_FORK:
5081 id = get_identifier ("__gcov_fork");
5082 break;
5084 case BUILT_IN_EXECL:
5085 id = get_identifier ("__gcov_execl");
5086 break;
5088 case BUILT_IN_EXECV:
5089 id = get_identifier ("__gcov_execv");
5090 break;
5092 case BUILT_IN_EXECLP:
5093 id = get_identifier ("__gcov_execlp");
5094 break;
5096 case BUILT_IN_EXECLE:
5097 id = get_identifier ("__gcov_execle");
5098 break;
5100 case BUILT_IN_EXECVP:
5101 id = get_identifier ("__gcov_execvp");
5102 break;
5104 case BUILT_IN_EXECVE:
5105 id = get_identifier ("__gcov_execve");
5106 break;
5108 default:
5109 gcc_unreachable ();
/* Synthesize an extern declaration for the wrapper with the same type
   as the original builtin, then redirect the call to it.  */
5112 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5113 FUNCTION_DECL, id, TREE_TYPE (fn));
5114 DECL_EXTERNAL (decl) = 1;
5115 TREE_PUBLIC (decl) = 1;
5116 DECL_ARTIFICIAL (decl) = 1;
5117 TREE_NOTHROW (decl) = 1;
5118 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5119 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5120 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5121 return expand_call (call, target, ignore);
5126 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5127 the pointer in these functions is void*, the tree optimizers may remove
5128 casts. The mode computed in expand_builtin isn't reliable either, due
5129 to __sync_bool_compare_and_swap.
5131 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5132 group of builtins. This gives us log2 of the mode size. */
5134 static inline enum machine_mode
5135 get_builtin_sync_mode (int fcode_diff)
5137 /* The size is not negotiable, so ask not to get BLKmode in return
5138 if the target indicates that a smaller size would be better. */
5139 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5142 /* Expand the memory expression LOC and return the appropriate memory operand
5143 for the builtin_sync operations. */
5145 static rtx
5146 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5148 rtx addr, mem;
5150 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5151 addr = convert_memory_address (Pmode, addr);
5153 /* Note that we explicitly do not want any alias information for this
5154 memory, so that we kill all other live memories. Otherwise we don't
5155 satisfy the full barrier semantics of the intrinsic. */
5156 mem = validize_mem (gen_rtx_MEM (mode, addr));
5158 /* The alignment needs to be at least according to that of the mode. */
5159 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5160 get_pointer_alignment (loc)));
5161 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5162 MEM_VOLATILE_P (mem) = 1;
5164 return mem;
5167 /* Make sure an argument is in the right mode.
5168 EXP is the tree argument.
5169 MODE is the mode it should be in. */
5171 static rtx
5172 expand_expr_force_mode (tree exp, enum machine_mode mode)
5174 rtx val;
5175 enum machine_mode old_mode;
5177 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5178 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5179 of CONST_INTs, where we know the old_mode only from the call argument. */
5181 old_mode = GET_MODE (val);
5182 if (old_mode == VOIDmode)
5183 old_mode = TYPE_MODE (TREE_TYPE (exp));
5184 val = convert_modes (mode, old_mode, val, 1);
5185 return val;
5189 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5190 EXP is the CALL_EXPR. CODE is the rtx code
5191 that corresponds to the arithmetic or logical operation from the name;
5192 an exception here is that NOT actually means NAND. TARGET is an optional
5193 place for us to store the results; AFTER is true if this is the
5194 fetch_and_xxx form. */
5196 static rtx
5197 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5198 enum rtx_code code, bool after,
5199 rtx target)
5201 rtx val, mem;
5202 location_t loc = EXPR_LOCATION (exp);
/* -Wsync-nand: the NAND builtins changed meaning in GCC 4.4; tell the
   user once per compilation for each of the two builtin families.  */
5204 if (code == NOT && warn_sync_nand)
5206 tree fndecl = get_callee_fndecl (exp);
5207 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Static flags ensure each note is emitted at most once.  */
5209 static bool warned_f_a_n, warned_n_a_f;
5211 switch (fcode)
5213 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5217 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5218 if (warned_f_a_n)
5219 break;
5221 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5222 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5223 warned_f_a_n = true;
5224 break;
5226 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5230 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5231 if (warned_n_a_f)
5232 break;
5234 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5235 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5236 warned_n_a_f = true;
5237 break;
5239 default:
5240 gcc_unreachable ();
5244 /* Expand the operands. */
5245 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5246 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
/* The legacy __sync builtins are always sequentially consistent.  */
5248 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5249 after);
5252 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5253 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5254 true if this is the boolean form. TARGET is a place for us to store the
5255 results; this is NOT optional if IS_BOOL is true. */
5257 static rtx
5258 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5259 bool is_bool, rtx target)
5261 rtx old_val, new_val, mem;
5262 rtx *pbool, *poval;
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5269 pbool = poval = NULL;
5270 if (target != const0_rtx)
5272 if (is_bool)
5273 pbool = &target;
5274 else
5275 poval = &target;
5277 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5278 false, MEMMODEL_SEQ_CST,
5279 MEMMODEL_SEQ_CST))
5280 return NULL_RTX;
5282 return target;
5285 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5286 general form is actually an atomic exchange, and some targets only
5287 support a reduced form with the second argument being a constant 1.
5288 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5289 the results. */
5291 static rtx
5292 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5293 rtx target)
5295 rtx val, mem;
5297 /* Expand the operands. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5299 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5301 return expand_sync_lock_test_and_set (target, mem, val);
5304 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5306 static void
5307 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5309 rtx mem;
5311 /* Expand the operands. */
5312 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5314 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5317 /* Given an integer representing an ``enum memmodel'', verify its
5318 correctness and return the memory model enum. */
5320 static enum memmodel
5321 get_memmodel (tree exp)
5323 rtx op;
5324 unsigned HOST_WIDE_INT val;
5326 /* If the parameter is not a constant, it's a run time value so we'll just
5327 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5328 if (TREE_CODE (exp) != INTEGER_CST)
5329 return MEMMODEL_SEQ_CST;
5331 op = expand_normal (exp);
5333 val = INTVAL (op);
5334 if (targetm.memmodel_check)
5335 val = targetm.memmodel_check (val);
5336 else if (val & ~MEMMODEL_MASK)
5338 warning (OPT_Winvalid_memory_model,
5339 "Unknown architecture specifier in memory model to builtin.");
5340 return MEMMODEL_SEQ_CST;
5343 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5345 warning (OPT_Winvalid_memory_model,
5346 "invalid memory model argument to builtin");
5347 return MEMMODEL_SEQ_CST;
5350 return (enum memmodel) val;
5353 /* Expand the __atomic_exchange intrinsic:
5354 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5355 EXP is the CALL_EXPR.
5356 TARGET is an optional place for us to store the results. */
5358 static rtx
5359 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5361 rtx val, mem;
5362 enum memmodel model;
5364 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5365 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5367 error ("invalid memory model for %<__atomic_exchange%>");
5368 return NULL_RTX;
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5378 return expand_atomic_exchange (target, mem, val, model);
5381 /* Expand the __atomic_compare_exchange intrinsic:
5382 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5383 TYPE desired, BOOL weak,
5384 enum memmodel success,
5385 enum memmodel failure)
5386 EXP is the CALL_EXPR.
5387 TARGET is an optional place for us to store the results. */
5389 static rtx
5390 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5391 rtx target)
5393 rtx expect, desired, mem, oldval;
5394 enum memmodel success, failure;
5395 tree weak;
5396 bool is_weak;
5398 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5399 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
/* The failure ordering may not include a release component, and may
   not be stronger than the success ordering.  */
5401 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5402 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL
5404 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5405 return NULL_RTX;
5408 if (failure > success)
5410 error ("failure memory model cannot be stronger than success "
5411 "memory model for %<__atomic_compare_exchange%>");
5412 return NULL_RTX;
5415 if (!flag_inline_atomics)
5416 return NULL_RTX;
5418 /* Expand the operands. */
5419 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* EXPECT is a pointer; build a MEM so the old value can be written
   back through it on failure, as the builtin's contract requires.  */
5421 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5422 expect = convert_memory_address (Pmode, expect);
5423 expect = gen_rtx_MEM (mode, expect);
5424 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5426 weak = CALL_EXPR_ARG (exp, 3);
5427 is_weak = false;
5428 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5429 is_weak = true;
/* OLDVAL starts as the *expect MEM; the expander may replace it with
   a register holding the value actually observed.  */
5431 oldval = expect;
5432 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5433 &oldval, mem, oldval, desired,
5434 is_weak, success, failure))
5435 return NULL_RTX;
/* If the observed value came back somewhere other than *expect,
   store it back so the caller sees it.  */
5437 if (oldval != expect)
5438 emit_move_insn (expect, oldval);
5440 return target;
5443 /* Expand the __atomic_load intrinsic:
5444 TYPE __atomic_load (TYPE *object, enum memmodel)
5445 EXP is the CALL_EXPR.
5446 TARGET is an optional place for us to store the results. */
5448 static rtx
5449 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5451 rtx mem;
5452 enum memmodel model;
5454 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5455 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5456 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5458 error ("invalid memory model for %<__atomic_load%>");
5459 return NULL_RTX;
5462 if (!flag_inline_atomics)
5463 return NULL_RTX;
5465 /* Expand the operand. */
5466 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468 return expand_atomic_load (target, mem, model);
5472 /* Expand the __atomic_store intrinsic:
5473 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5474 EXP is the CALL_EXPR.
5475 TARGET is an optional place for us to store the results. */
5477 static rtx
5478 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5480 rtx mem, val;
5481 enum memmodel model;
5483 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5484 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5485 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5486 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5488 error ("invalid memory model for %<__atomic_store%>");
5489 return NULL_RTX;
5492 if (!flag_inline_atomics)
5493 return NULL_RTX;
5495 /* Expand the operands. */
5496 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5497 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5499 return expand_atomic_store (mem, val, model, false);
5502 /* Expand the __atomic_fetch_XXX intrinsic:
5503 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5504 EXP is the CALL_EXPR.
5505 TARGET is an optional place for us to store the results.
5506 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5507 FETCH_AFTER is true if returning the result of the operation.
5508 FETCH_AFTER is false if returning the value before the operation.
5509 IGNORE is true if the result is not used.
5510 EXT_CALL is the correct builtin for an external call if this cannot be
5511 resolved to an instruction sequence. */
5513 static rtx
5514 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5515 enum rtx_code code, bool fetch_after,
5516 bool ignore, enum built_in_function ext_call)
5518 rtx val, mem, ret;
5519 enum memmodel model;
5520 tree fndecl;
5521 tree addr;
5523 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5525 /* Expand the operands. */
5526 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5527 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529 /* Only try generating instructions if inlining is turned on. */
5530 if (flag_inline_atomics)
5532 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5533 if (ret)
5534 return ret;
5537 /* Return if a different routine isn't needed for the library call. */
5538 if (ext_call == BUILT_IN_NONE)
5539 return NULL_RTX;
5541 /* Change the call to the specified function. */
/* Temporarily redirect the CALL_EXPR's function operand to the
   external library routine named by EXT_CALL, expand the call, then
   restore the original callee.  */
5542 fndecl = get_callee_fndecl (exp);
5543 addr = CALL_EXPR_FN (exp);
5544 STRIP_NOPS (addr);
5546 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5547 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5549 /* Expand the call here so we can emit trailing code. */
5550 ret = expand_call (exp, target, ignore);
5552 /* Replace the original function just in case it matters. */
5553 TREE_OPERAND (addr, 0) = fndecl;
5555 /* Then issue the arithmetic correction to return the right result. */
/* The library routine computes only one of the fetch-before /
   fetch-after values; recover the other from it.  For NOT (i.e. NAND)
   the correction is ~(ret & val); for the rest it is "ret CODE val".  */
5556 if (!ignore)
5558 if (code == NOT)
5560 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5561 OPTAB_LIB_WIDEN);
5562 ret = expand_simple_unop (mode, NOT, ret, target, true);
5564 else
5565 ret = expand_simple_binop (mode, code, ret, val, target, true,
5566 OPTAB_LIB_WIDEN);
5568 return ret;
5572 #ifndef HAVE_atomic_clear
5573 # define HAVE_atomic_clear 0
5574 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5575 #endif
5577 /* Expand an atomic clear operation.
5578 void _atomic_clear (BOOL *obj, enum memmodel)
5579 EXP is the call expression. */
5581 static rtx
5582 expand_builtin_atomic_clear (tree exp)
5584 enum machine_mode mode;
5585 rtx mem, ret;
5586 enum memmodel model;
5588 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5589 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5590 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5592 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5593 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5595 error ("invalid memory model for %<__atomic_store%>");
5596 return const0_rtx;
5599 if (HAVE_atomic_clear)
5601 emit_insn (gen_atomic_clear (mem, model));
5602 return const0_rtx;
5605 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5606 Failing that, a store is issued by __atomic_store. The only way this can
5607 fail is if the bool type is larger than a word size. Unlikely, but
5608 handle it anyway for completeness. Assume a single threaded model since
5609 there is no atomic support in this case, and no barriers are required. */
5610 ret = expand_atomic_store (mem, const0_rtx, model, true);
5611 if (!ret)
5612 emit_move_insn (mem, const0_rtx);
5613 return const0_rtx;
5616 /* Expand an atomic test_and_set operation.
5617 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5618 EXP is the call expression. */
5620 static rtx
5621 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5623 rtx mem;
5624 enum memmodel model;
5625 enum machine_mode mode;
5627 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5628 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5631 return expand_atomic_test_and_set (target, mem, model);
5635 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5636 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5638 static tree
5639 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5641 int size;
5642 enum machine_mode mode;
5643 unsigned int mode_align, type_align;
/* The size must be a compile-time constant to fold.  */
5645 if (TREE_CODE (arg0) != INTEGER_CST)
5646 return NULL_TREE;
5648 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5649 mode = mode_for_size (size, MODE_INT, 0);
5650 mode_align = GET_MODE_ALIGNMENT (mode);
/* A null object pointer (constant 0) means "assume the typical
   alignment of an object of this size", i.e. the mode's alignment.  */
5652 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5653 type_align = mode_align;
5654 else
5656 tree ttype = TREE_TYPE (arg1);
5658 /* This function is usually invoked and folded immediately by the front
5659 end before anything else has a chance to look at it. The pointer
5660 parameter at this point is usually cast to a void *, so check for that
5661 and look past the cast. */
5662 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5663 && VOID_TYPE_P (TREE_TYPE (ttype)))
5664 arg1 = TREE_OPERAND (arg1, 0);
5666 ttype = TREE_TYPE (arg1);
5667 gcc_assert (POINTER_TYPE_P (ttype));
5669 /* Get the underlying type of the object. */
5670 ttype = TREE_TYPE (ttype);
5671 type_align = TYPE_ALIGN (ttype);
5674 /* If the object has smaller alignment, the lock free routines cannot
5675 be used. */
5676 if (type_align < mode_align)
5677 return boolean_false_node;
5679 /* Check if a compare_and_swap pattern exists for the mode which represents
5680 the required size. The pattern is not allowed to fail, so the existence
5681 of the pattern indicates support is present. */
5682 if (can_compare_and_swap_p (mode, true))
5683 return boolean_true_node;
5684 else
5685 return boolean_false_node;
5688 /* Return true if the parameters to call EXP represent an object which will
5689 always generate lock free instructions. The first argument represents the
5690 size of the object, and the second parameter is a pointer to the object
5691 itself. If NULL is passed for the object, then the result is based on
5692 typical alignment for an object of the specified size. Otherwise return
5693 false. */
5695 static rtx
5696 expand_builtin_atomic_always_lock_free (tree exp)
5698 tree size;
5699 tree arg0 = CALL_EXPR_ARG (exp, 0);
5700 tree arg1 = CALL_EXPR_ARG (exp, 1);
5702 if (TREE_CODE (arg0) != INTEGER_CST)
5704 error ("non-constant argument 1 to __atomic_always_lock_free");
5705 return const0_rtx;
5708 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5709 if (size == boolean_true_node)
5710 return const1_rtx;
5711 return const0_rtx;
5714 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5715 is lock free on this architecture. */
5717 static tree
5718 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
/* Without inline atomics this must remain a library call.  */
5720 if (!flag_inline_atomics)
5721 return NULL_TREE;
/* Fold to true only when the object is provably always lock free;
   otherwise return NULL_TREE so a runtime library call decides.  */
5723 /* If it isn't always lock free, don't generate a result. */
5724 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5725 return boolean_true_node;
5727 return NULL_TREE;
5730 /* Return true if the parameters to call EXP represent an object which will
5731 always generate lock free instructions. The first argument represents the
5732 size of the object, and the second parameter is a pointer to the object
5733 itself. If NULL is passed for the object, then the result is based on
5734 typical alignment for an object of the specified size. Otherwise return
5735 NULL*/
5737 static rtx
5738 expand_builtin_atomic_is_lock_free (tree exp)
5740 tree size;
5741 tree arg0 = CALL_EXPR_ARG (exp, 0);
5742 tree arg1 = CALL_EXPR_ARG (exp, 1);
5744 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5746 error ("non-integer argument 1 to __atomic_is_lock_free");
5747 return NULL_RTX;
5750 if (!flag_inline_atomics)
5751 return NULL_RTX;
5753 /* If the value is known at compile time, return the RTX for it. */
5754 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5755 if (size == boolean_true_node)
5756 return const1_rtx;
5758 return NULL_RTX;
5761 /* Expand the __atomic_thread_fence intrinsic:
5762 void __atomic_thread_fence (enum memmodel)
5763 EXP is the CALL_EXPR. */
5765 static void
5766 expand_builtin_atomic_thread_fence (tree exp)
5768 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5769 expand_mem_thread_fence (model);
5772 /* Expand the __atomic_signal_fence intrinsic:
5773 void __atomic_signal_fence (enum memmodel)
5774 EXP is the CALL_EXPR. */
5776 static void
5777 expand_builtin_atomic_signal_fence (tree exp)
5779 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5780 expand_mem_signal_fence (model);
5783 /* Expand the __sync_synchronize intrinsic. */
5785 static void
5786 expand_builtin_sync_synchronize (void)
5788 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5791 static rtx
5792 expand_builtin_thread_pointer (tree exp, rtx target)
5794 enum insn_code icode;
5795 if (!validate_arglist (exp, VOID_TYPE))
5796 return const0_rtx;
5797 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5798 if (icode != CODE_FOR_nothing)
5800 struct expand_operand op;
5801 if (!REG_P (target) || GET_MODE (target) != Pmode)
5802 target = gen_reg_rtx (Pmode);
5803 create_output_operand (&op, target, Pmode);
5804 expand_insn (icode, 1, &op);
5805 return target;
5807 error ("__builtin_thread_pointer is not supported on this target");
5808 return const0_rtx;
5811 static void
5812 expand_builtin_set_thread_pointer (tree exp)
5814 enum insn_code icode;
5815 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5816 return;
5817 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5818 if (icode != CODE_FOR_nothing)
5820 struct expand_operand op;
5821 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5822 Pmode, EXPAND_NORMAL);
5823 create_input_operand (&op, val, Pmode);
5824 expand_insn (icode, 1, &op);
5825 return;
5827 error ("__builtin_set_thread_pointer is not supported on this target");
5831 /* Expand an expression EXP that calls a built-in function,
5832 with result going to TARGET if that's convenient
5833 (and in mode MODE if that's convenient).
5834 SUBTARGET may be used as the target for computing one of EXP's operands.
5835 IGNORE is nonzero if the value is to be ignored. */
5838 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5839 int ignore)
5841 tree fndecl = get_callee_fndecl (exp);
5842 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5843 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5844 int flags;
5846 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5847 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5849 /* When not optimizing, generate calls to library functions for a certain
5850 set of builtins. */
5851 if (!optimize
5852 && !called_as_built_in (fndecl)
5853 && fcode != BUILT_IN_ALLOCA
5854 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5855 && fcode != BUILT_IN_FREE)
5856 return expand_call (exp, target, ignore);
5858 /* The built-in function expanders test for target == const0_rtx
5859 to determine whether the function's result will be ignored. */
5860 if (ignore)
5861 target = const0_rtx;
5863 /* If the result of a pure or const built-in function is ignored, and
5864 none of its arguments are volatile, we can avoid expanding the
5865 built-in call and just evaluate the arguments for side-effects. */
5866 if (target == const0_rtx
5867 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5868 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5870 bool volatilep = false;
5871 tree arg;
5872 call_expr_arg_iterator iter;
5874 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5875 if (TREE_THIS_VOLATILE (arg))
5877 volatilep = true;
5878 break;
5881 if (! volatilep)
5883 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5884 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5885 return const0_rtx;
5889 switch (fcode)
5891 CASE_FLT_FN (BUILT_IN_FABS):
5892 case BUILT_IN_FABSD32:
5893 case BUILT_IN_FABSD64:
5894 case BUILT_IN_FABSD128:
5895 target = expand_builtin_fabs (exp, target, subtarget);
5896 if (target)
5897 return target;
5898 break;
5900 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5901 target = expand_builtin_copysign (exp, target, subtarget);
5902 if (target)
5903 return target;
5904 break;
5906 /* Just do a normal library call if we were unable to fold
5907 the values. */
5908 CASE_FLT_FN (BUILT_IN_CABS):
5909 break;
5911 CASE_FLT_FN (BUILT_IN_EXP):
5912 CASE_FLT_FN (BUILT_IN_EXP10):
5913 CASE_FLT_FN (BUILT_IN_POW10):
5914 CASE_FLT_FN (BUILT_IN_EXP2):
5915 CASE_FLT_FN (BUILT_IN_EXPM1):
5916 CASE_FLT_FN (BUILT_IN_LOGB):
5917 CASE_FLT_FN (BUILT_IN_LOG):
5918 CASE_FLT_FN (BUILT_IN_LOG10):
5919 CASE_FLT_FN (BUILT_IN_LOG2):
5920 CASE_FLT_FN (BUILT_IN_LOG1P):
5921 CASE_FLT_FN (BUILT_IN_TAN):
5922 CASE_FLT_FN (BUILT_IN_ASIN):
5923 CASE_FLT_FN (BUILT_IN_ACOS):
5924 CASE_FLT_FN (BUILT_IN_ATAN):
5925 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5926 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5927 because of possible accuracy problems. */
5928 if (! flag_unsafe_math_optimizations)
5929 break;
5930 CASE_FLT_FN (BUILT_IN_SQRT):
5931 CASE_FLT_FN (BUILT_IN_FLOOR):
5932 CASE_FLT_FN (BUILT_IN_CEIL):
5933 CASE_FLT_FN (BUILT_IN_TRUNC):
5934 CASE_FLT_FN (BUILT_IN_ROUND):
5935 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5936 CASE_FLT_FN (BUILT_IN_RINT):
5937 target = expand_builtin_mathfn (exp, target, subtarget);
5938 if (target)
5939 return target;
5940 break;
5942 CASE_FLT_FN (BUILT_IN_FMA):
5943 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5944 if (target)
5945 return target;
5946 break;
5948 CASE_FLT_FN (BUILT_IN_ILOGB):
5949 if (! flag_unsafe_math_optimizations)
5950 break;
5951 CASE_FLT_FN (BUILT_IN_ISINF):
5952 CASE_FLT_FN (BUILT_IN_FINITE):
5953 case BUILT_IN_ISFINITE:
5954 case BUILT_IN_ISNORMAL:
5955 target = expand_builtin_interclass_mathfn (exp, target);
5956 if (target)
5957 return target;
5958 break;
5960 CASE_FLT_FN (BUILT_IN_ICEIL):
5961 CASE_FLT_FN (BUILT_IN_LCEIL):
5962 CASE_FLT_FN (BUILT_IN_LLCEIL):
5963 CASE_FLT_FN (BUILT_IN_LFLOOR):
5964 CASE_FLT_FN (BUILT_IN_IFLOOR):
5965 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5966 target = expand_builtin_int_roundingfn (exp, target);
5967 if (target)
5968 return target;
5969 break;
5971 CASE_FLT_FN (BUILT_IN_IRINT):
5972 CASE_FLT_FN (BUILT_IN_LRINT):
5973 CASE_FLT_FN (BUILT_IN_LLRINT):
5974 CASE_FLT_FN (BUILT_IN_IROUND):
5975 CASE_FLT_FN (BUILT_IN_LROUND):
5976 CASE_FLT_FN (BUILT_IN_LLROUND):
5977 target = expand_builtin_int_roundingfn_2 (exp, target);
5978 if (target)
5979 return target;
5980 break;
5982 CASE_FLT_FN (BUILT_IN_POWI):
5983 target = expand_builtin_powi (exp, target);
5984 if (target)
5985 return target;
5986 break;
5988 CASE_FLT_FN (BUILT_IN_ATAN2):
5989 CASE_FLT_FN (BUILT_IN_LDEXP):
5990 CASE_FLT_FN (BUILT_IN_SCALB):
5991 CASE_FLT_FN (BUILT_IN_SCALBN):
5992 CASE_FLT_FN (BUILT_IN_SCALBLN):
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5996 CASE_FLT_FN (BUILT_IN_FMOD):
5997 CASE_FLT_FN (BUILT_IN_REMAINDER):
5998 CASE_FLT_FN (BUILT_IN_DREM):
5999 CASE_FLT_FN (BUILT_IN_POW):
6000 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6001 if (target)
6002 return target;
6003 break;
6005 CASE_FLT_FN (BUILT_IN_CEXPI):
6006 target = expand_builtin_cexpi (exp, target);
6007 gcc_assert (target);
6008 return target;
6010 CASE_FLT_FN (BUILT_IN_SIN):
6011 CASE_FLT_FN (BUILT_IN_COS):
6012 if (! flag_unsafe_math_optimizations)
6013 break;
6014 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6015 if (target)
6016 return target;
6017 break;
6019 CASE_FLT_FN (BUILT_IN_SINCOS):
6020 if (! flag_unsafe_math_optimizations)
6021 break;
6022 target = expand_builtin_sincos (exp);
6023 if (target)
6024 return target;
6025 break;
6027 case BUILT_IN_APPLY_ARGS:
6028 return expand_builtin_apply_args ();
6030 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6031 FUNCTION with a copy of the parameters described by
6032 ARGUMENTS, and ARGSIZE. It returns a block of memory
6033 allocated on the stack into which is stored all the registers
6034 that might possibly be used for returning the result of a
6035 function. ARGUMENTS is the value returned by
6036 __builtin_apply_args. ARGSIZE is the number of bytes of
6037 arguments that must be copied. ??? How should this value be
6038 computed? We'll also need a safe worst case value for varargs
6039 functions. */
6040 case BUILT_IN_APPLY:
6041 if (!validate_arglist (exp, POINTER_TYPE,
6042 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6043 && !validate_arglist (exp, REFERENCE_TYPE,
6044 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6045 return const0_rtx;
6046 else
6048 rtx ops[3];
6050 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6051 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6052 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6054 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6057 /* __builtin_return (RESULT) causes the function to return the
6058 value described by RESULT. RESULT is address of the block of
6059 memory returned by __builtin_apply. */
6060 case BUILT_IN_RETURN:
6061 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6062 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6063 return const0_rtx;
6065 case BUILT_IN_SAVEREGS:
6066 return expand_builtin_saveregs ();
6068 case BUILT_IN_VA_ARG_PACK:
6069 /* All valid uses of __builtin_va_arg_pack () are removed during
6070 inlining. */
6071 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6072 return const0_rtx;
6074 case BUILT_IN_VA_ARG_PACK_LEN:
6075 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6076 inlining. */
6077 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6078 return const0_rtx;
6080 /* Return the address of the first anonymous stack arg. */
6081 case BUILT_IN_NEXT_ARG:
6082 if (fold_builtin_next_arg (exp, false))
6083 return const0_rtx;
6084 return expand_builtin_next_arg ();
6086 case BUILT_IN_CLEAR_CACHE:
6087 target = expand_builtin___clear_cache (exp);
6088 if (target)
6089 return target;
6090 break;
6092 case BUILT_IN_CLASSIFY_TYPE:
6093 return expand_builtin_classify_type (exp);
6095 case BUILT_IN_CONSTANT_P:
6096 return const0_rtx;
6098 case BUILT_IN_FRAME_ADDRESS:
6099 case BUILT_IN_RETURN_ADDRESS:
6100 return expand_builtin_frame_address (fndecl, exp);
6102 /* Returns the address of the area where the structure is returned.
6103 0 otherwise. */
6104 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6105 if (call_expr_nargs (exp) != 0
6106 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6107 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6108 return const0_rtx;
6109 else
6110 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6112 case BUILT_IN_ALLOCA:
6113 case BUILT_IN_ALLOCA_WITH_ALIGN:
6114 /* If the allocation stems from the declaration of a variable-sized
6115 object, it cannot accumulate. */
6116 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6117 if (target)
6118 return target;
6119 break;
6121 case BUILT_IN_STACK_SAVE:
6122 return expand_stack_save ();
6124 case BUILT_IN_STACK_RESTORE:
6125 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6126 return const0_rtx;
6128 case BUILT_IN_BSWAP16:
6129 case BUILT_IN_BSWAP32:
6130 case BUILT_IN_BSWAP64:
6131 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6132 if (target)
6133 return target;
6134 break;
6136 CASE_INT_FN (BUILT_IN_FFS):
6137 target = expand_builtin_unop (target_mode, exp, target,
6138 subtarget, ffs_optab);
6139 if (target)
6140 return target;
6141 break;
6143 CASE_INT_FN (BUILT_IN_CLZ):
6144 target = expand_builtin_unop (target_mode, exp, target,
6145 subtarget, clz_optab);
6146 if (target)
6147 return target;
6148 break;
6150 CASE_INT_FN (BUILT_IN_CTZ):
6151 target = expand_builtin_unop (target_mode, exp, target,
6152 subtarget, ctz_optab);
6153 if (target)
6154 return target;
6155 break;
6157 CASE_INT_FN (BUILT_IN_CLRSB):
6158 target = expand_builtin_unop (target_mode, exp, target,
6159 subtarget, clrsb_optab);
6160 if (target)
6161 return target;
6162 break;
6164 CASE_INT_FN (BUILT_IN_POPCOUNT):
6165 target = expand_builtin_unop (target_mode, exp, target,
6166 subtarget, popcount_optab);
6167 if (target)
6168 return target;
6169 break;
6171 CASE_INT_FN (BUILT_IN_PARITY):
6172 target = expand_builtin_unop (target_mode, exp, target,
6173 subtarget, parity_optab);
6174 if (target)
6175 return target;
6176 break;
6178 case BUILT_IN_STRLEN:
6179 target = expand_builtin_strlen (exp, target, target_mode);
6180 if (target)
6181 return target;
6182 break;
6184 case BUILT_IN_STRCPY:
6185 target = expand_builtin_strcpy (exp, target);
6186 if (target)
6187 return target;
6188 break;
6190 case BUILT_IN_STRNCPY:
6191 target = expand_builtin_strncpy (exp, target);
6192 if (target)
6193 return target;
6194 break;
6196 case BUILT_IN_STPCPY:
6197 target = expand_builtin_stpcpy (exp, target, mode);
6198 if (target)
6199 return target;
6200 break;
6202 case BUILT_IN_MEMCPY:
6203 target = expand_builtin_memcpy (exp, target);
6204 if (target)
6205 return target;
6206 break;
6208 case BUILT_IN_MEMPCPY:
6209 target = expand_builtin_mempcpy (exp, target, mode);
6210 if (target)
6211 return target;
6212 break;
6214 case BUILT_IN_MEMSET:
6215 target = expand_builtin_memset (exp, target, mode);
6216 if (target)
6217 return target;
6218 break;
6220 case BUILT_IN_BZERO:
6221 target = expand_builtin_bzero (exp);
6222 if (target)
6223 return target;
6224 break;
6226 case BUILT_IN_STRCMP:
6227 target = expand_builtin_strcmp (exp, target);
6228 if (target)
6229 return target;
6230 break;
6232 case BUILT_IN_STRNCMP:
6233 target = expand_builtin_strncmp (exp, target, mode);
6234 if (target)
6235 return target;
6236 break;
6238 case BUILT_IN_BCMP:
6239 case BUILT_IN_MEMCMP:
6240 target = expand_builtin_memcmp (exp, target, mode);
6241 if (target)
6242 return target;
6243 break;
6245 case BUILT_IN_SETJMP:
6246 /* This should have been lowered to the builtins below. */
6247 gcc_unreachable ();
6249 case BUILT_IN_SETJMP_SETUP:
6250 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6251 and the receiver label. */
6252 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6254 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6255 VOIDmode, EXPAND_NORMAL);
6256 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6257 rtx label_r = label_rtx (label);
6259 /* This is copied from the handling of non-local gotos. */
6260 expand_builtin_setjmp_setup (buf_addr, label_r);
6261 nonlocal_goto_handler_labels
6262 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6263 nonlocal_goto_handler_labels);
6264 /* ??? Do not let expand_label treat us as such since we would
6265 not want to be both on the list of non-local labels and on
6266 the list of forced labels. */
6267 FORCED_LABEL (label) = 0;
6268 return const0_rtx;
6270 break;
6272 case BUILT_IN_SETJMP_DISPATCHER:
6273 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6274 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6276 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6277 rtx label_r = label_rtx (label);
6279 /* Remove the dispatcher label from the list of non-local labels
6280 since the receiver labels have been added to it above. */
6281 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6282 return const0_rtx;
6284 break;
6286 case BUILT_IN_SETJMP_RECEIVER:
6287 /* __builtin_setjmp_receiver is passed the receiver label. */
6288 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6290 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6291 rtx label_r = label_rtx (label);
6293 expand_builtin_setjmp_receiver (label_r);
6294 return const0_rtx;
6296 break;
6298 /* __builtin_longjmp is passed a pointer to an array of five words.
6299 It's similar to the C library longjmp function but works with
6300 __builtin_setjmp above. */
6301 case BUILT_IN_LONGJMP:
6302 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6304 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6305 VOIDmode, EXPAND_NORMAL);
6306 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6308 if (value != const1_rtx)
6310 error ("%<__builtin_longjmp%> second argument must be 1");
6311 return const0_rtx;
6314 expand_builtin_longjmp (buf_addr, value);
6315 return const0_rtx;
6317 break;
6319 case BUILT_IN_NONLOCAL_GOTO:
6320 target = expand_builtin_nonlocal_goto (exp);
6321 if (target)
6322 return target;
6323 break;
6325 /* This updates the setjmp buffer that is its argument with the value
6326 of the current stack pointer. */
6327 case BUILT_IN_UPDATE_SETJMP_BUF:
6328 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6330 rtx buf_addr
6331 = expand_normal (CALL_EXPR_ARG (exp, 0));
6333 expand_builtin_update_setjmp_buf (buf_addr);
6334 return const0_rtx;
6336 break;
6338 case BUILT_IN_TRAP:
6339 expand_builtin_trap ();
6340 return const0_rtx;
6342 case BUILT_IN_UNREACHABLE:
6343 expand_builtin_unreachable ();
6344 return const0_rtx;
6346 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6347 case BUILT_IN_SIGNBITD32:
6348 case BUILT_IN_SIGNBITD64:
6349 case BUILT_IN_SIGNBITD128:
6350 target = expand_builtin_signbit (exp, target);
6351 if (target)
6352 return target;
6353 break;
6355 /* Various hooks for the DWARF 2 __throw routine. */
6356 case BUILT_IN_UNWIND_INIT:
6357 expand_builtin_unwind_init ();
6358 return const0_rtx;
6359 case BUILT_IN_DWARF_CFA:
6360 return virtual_cfa_rtx;
6361 #ifdef DWARF2_UNWIND_INFO
6362 case BUILT_IN_DWARF_SP_COLUMN:
6363 return expand_builtin_dwarf_sp_column ();
6364 case BUILT_IN_INIT_DWARF_REG_SIZES:
6365 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6366 return const0_rtx;
6367 #endif
6368 case BUILT_IN_FROB_RETURN_ADDR:
6369 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6370 case BUILT_IN_EXTRACT_RETURN_ADDR:
6371 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6372 case BUILT_IN_EH_RETURN:
6373 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6374 CALL_EXPR_ARG (exp, 1));
6375 return const0_rtx;
6376 #ifdef EH_RETURN_DATA_REGNO
6377 case BUILT_IN_EH_RETURN_DATA_REGNO:
6378 return expand_builtin_eh_return_data_regno (exp);
6379 #endif
6380 case BUILT_IN_EXTEND_POINTER:
6381 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6382 case BUILT_IN_EH_POINTER:
6383 return expand_builtin_eh_pointer (exp);
6384 case BUILT_IN_EH_FILTER:
6385 return expand_builtin_eh_filter (exp);
6386 case BUILT_IN_EH_COPY_VALUES:
6387 return expand_builtin_eh_copy_values (exp);
6389 case BUILT_IN_VA_START:
6390 return expand_builtin_va_start (exp);
6391 case BUILT_IN_VA_END:
6392 return expand_builtin_va_end (exp);
6393 case BUILT_IN_VA_COPY:
6394 return expand_builtin_va_copy (exp);
6395 case BUILT_IN_EXPECT:
6396 return expand_builtin_expect (exp, target);
6397 case BUILT_IN_ASSUME_ALIGNED:
6398 return expand_builtin_assume_aligned (exp, target);
6399 case BUILT_IN_PREFETCH:
6400 expand_builtin_prefetch (exp);
6401 return const0_rtx;
6403 case BUILT_IN_INIT_TRAMPOLINE:
6404 return expand_builtin_init_trampoline (exp, true);
6405 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6406 return expand_builtin_init_trampoline (exp, false);
6407 case BUILT_IN_ADJUST_TRAMPOLINE:
6408 return expand_builtin_adjust_trampoline (exp);
6410 case BUILT_IN_FORK:
6411 case BUILT_IN_EXECL:
6412 case BUILT_IN_EXECV:
6413 case BUILT_IN_EXECLP:
6414 case BUILT_IN_EXECLE:
6415 case BUILT_IN_EXECVP:
6416 case BUILT_IN_EXECVE:
6417 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6423 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6424 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6425 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6426 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6428 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6429 if (target)
6430 return target;
6431 break;
6433 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6434 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6435 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6436 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6437 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6439 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6440 if (target)
6441 return target;
6442 break;
6444 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6445 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6446 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6447 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6448 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6450 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6456 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6457 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6458 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6459 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6461 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6467 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6468 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6469 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6470 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6472 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6473 if (target)
6474 return target;
6475 break;
6477 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6478 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6479 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6480 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6481 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6483 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6489 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6490 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6491 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6492 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6494 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6500 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6501 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6502 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6503 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6504 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6505 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6511 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6512 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6513 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6514 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6516 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6517 if (target)
6518 return target;
6519 break;
6521 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6522 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6523 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6524 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6525 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6527 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6528 if (target)
6529 return target;
6530 break;
6532 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6533 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6534 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6535 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6536 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6538 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6539 if (target)
6540 return target;
6541 break;
6543 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6544 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6545 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6546 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6547 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6549 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6550 if (target)
6551 return target;
6552 break;
6554 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6555 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6556 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6557 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6558 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6559 if (mode == VOIDmode)
6560 mode = TYPE_MODE (boolean_type_node);
6561 if (!target || !register_operand (target, mode))
6562 target = gen_reg_rtx (mode);
6564 mode = get_builtin_sync_mode
6565 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6566 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6567 if (target)
6568 return target;
6569 break;
6571 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6572 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6573 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6574 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6575 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6576 mode = get_builtin_sync_mode
6577 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6578 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6579 if (target)
6580 return target;
6581 break;
6583 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6584 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6585 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6586 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6587 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6589 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6590 if (target)
6591 return target;
6592 break;
6594 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6595 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6596 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6597 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6598 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6600 expand_builtin_sync_lock_release (mode, exp);
6601 return const0_rtx;
6603 case BUILT_IN_SYNC_SYNCHRONIZE:
6604 expand_builtin_sync_synchronize ();
6605 return const0_rtx;
6607 case BUILT_IN_ATOMIC_EXCHANGE_1:
6608 case BUILT_IN_ATOMIC_EXCHANGE_2:
6609 case BUILT_IN_ATOMIC_EXCHANGE_4:
6610 case BUILT_IN_ATOMIC_EXCHANGE_8:
6611 case BUILT_IN_ATOMIC_EXCHANGE_16:
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6613 target = expand_builtin_atomic_exchange (mode, exp, target);
6614 if (target)
6615 return target;
6616 break;
6618 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6619 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6621 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6622 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6624 unsigned int nargs, z;
6625 vec<tree, va_gc> *vec;
6627 mode =
6628 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6629 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6630 if (target)
6631 return target;
6633 /* If this is turned into an external library call, the weak parameter
6634 must be dropped to match the expected parameter list. */
6635 nargs = call_expr_nargs (exp);
6636 vec_alloc (vec, nargs - 1);
6637 for (z = 0; z < 3; z++)
6638 vec->quick_push (CALL_EXPR_ARG (exp, z));
6639 /* Skip the boolean weak parameter. */
6640 for (z = 4; z < 6; z++)
6641 vec->quick_push (CALL_EXPR_ARG (exp, z));
6642 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6643 break;
6646 case BUILT_IN_ATOMIC_LOAD_1:
6647 case BUILT_IN_ATOMIC_LOAD_2:
6648 case BUILT_IN_ATOMIC_LOAD_4:
6649 case BUILT_IN_ATOMIC_LOAD_8:
6650 case BUILT_IN_ATOMIC_LOAD_16:
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6652 target = expand_builtin_atomic_load (mode, exp, target);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_ATOMIC_STORE_1:
6658 case BUILT_IN_ATOMIC_STORE_2:
6659 case BUILT_IN_ATOMIC_STORE_4:
6660 case BUILT_IN_ATOMIC_STORE_8:
6661 case BUILT_IN_ATOMIC_STORE_16:
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6663 target = expand_builtin_atomic_store (mode, exp);
6664 if (target)
6665 return const0_rtx;
6666 break;
6668 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6669 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6670 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6671 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6672 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6674 enum built_in_function lib;
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6676 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6677 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6678 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6679 ignore, lib);
6680 if (target)
6681 return target;
6682 break;
6684 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6685 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6686 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6687 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6688 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6690 enum built_in_function lib;
6691 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6692 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6693 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6695 ignore, lib);
6696 if (target)
6697 return target;
6698 break;
6700 case BUILT_IN_ATOMIC_AND_FETCH_1:
6701 case BUILT_IN_ATOMIC_AND_FETCH_2:
6702 case BUILT_IN_ATOMIC_AND_FETCH_4:
6703 case BUILT_IN_ATOMIC_AND_FETCH_8:
6704 case BUILT_IN_ATOMIC_AND_FETCH_16:
6706 enum built_in_function lib;
6707 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6708 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6709 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6710 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6711 ignore, lib);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6717 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6718 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6719 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6720 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6722 enum built_in_function lib;
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6724 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6725 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6726 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6727 ignore, lib);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6733 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6734 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6735 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6736 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6738 enum built_in_function lib;
6739 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6740 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6741 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6743 ignore, lib);
6744 if (target)
6745 return target;
6746 break;
6748 case BUILT_IN_ATOMIC_OR_FETCH_1:
6749 case BUILT_IN_ATOMIC_OR_FETCH_2:
6750 case BUILT_IN_ATOMIC_OR_FETCH_4:
6751 case BUILT_IN_ATOMIC_OR_FETCH_8:
6752 case BUILT_IN_ATOMIC_OR_FETCH_16:
6754 enum built_in_function lib;
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6756 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6757 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6758 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6759 ignore, lib);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6765 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6766 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6767 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6768 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6769 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6770 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6771 ignore, BUILT_IN_NONE);
6772 if (target)
6773 return target;
6774 break;
6776 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6777 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6778 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6779 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6780 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6782 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6783 ignore, BUILT_IN_NONE);
6784 if (target)
6785 return target;
6786 break;
6788 case BUILT_IN_ATOMIC_FETCH_AND_1:
6789 case BUILT_IN_ATOMIC_FETCH_AND_2:
6790 case BUILT_IN_ATOMIC_FETCH_AND_4:
6791 case BUILT_IN_ATOMIC_FETCH_AND_8:
6792 case BUILT_IN_ATOMIC_FETCH_AND_16:
6793 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6794 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6795 ignore, BUILT_IN_NONE);
6796 if (target)
6797 return target;
6798 break;
6800 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6801 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6802 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6803 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6804 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6805 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6806 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6807 ignore, BUILT_IN_NONE);
6808 if (target)
6809 return target;
6810 break;
6812 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6813 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6814 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6815 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6816 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6817 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6818 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6819 ignore, BUILT_IN_NONE);
6820 if (target)
6821 return target;
6822 break;
6824 case BUILT_IN_ATOMIC_FETCH_OR_1:
6825 case BUILT_IN_ATOMIC_FETCH_OR_2:
6826 case BUILT_IN_ATOMIC_FETCH_OR_4:
6827 case BUILT_IN_ATOMIC_FETCH_OR_8:
6828 case BUILT_IN_ATOMIC_FETCH_OR_16:
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6830 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6831 ignore, BUILT_IN_NONE);
6832 if (target)
6833 return target;
6834 break;
6836 case BUILT_IN_ATOMIC_TEST_AND_SET:
6837 return expand_builtin_atomic_test_and_set (exp, target);
6839 case BUILT_IN_ATOMIC_CLEAR:
6840 return expand_builtin_atomic_clear (exp);
6842 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6843 return expand_builtin_atomic_always_lock_free (exp);
6845 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6846 target = expand_builtin_atomic_is_lock_free (exp);
6847 if (target)
6848 return target;
6849 break;
6851 case BUILT_IN_ATOMIC_THREAD_FENCE:
6852 expand_builtin_atomic_thread_fence (exp);
6853 return const0_rtx;
6855 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6856 expand_builtin_atomic_signal_fence (exp);
6857 return const0_rtx;
6859 case BUILT_IN_OBJECT_SIZE:
6860 return expand_builtin_object_size (exp);
6862 case BUILT_IN_MEMCPY_CHK:
6863 case BUILT_IN_MEMPCPY_CHK:
6864 case BUILT_IN_MEMMOVE_CHK:
6865 case BUILT_IN_MEMSET_CHK:
6866 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6867 if (target)
6868 return target;
6869 break;
6871 case BUILT_IN_STRCPY_CHK:
6872 case BUILT_IN_STPCPY_CHK:
6873 case BUILT_IN_STRNCPY_CHK:
6874 case BUILT_IN_STPNCPY_CHK:
6875 case BUILT_IN_STRCAT_CHK:
6876 case BUILT_IN_STRNCAT_CHK:
6877 case BUILT_IN_SNPRINTF_CHK:
6878 case BUILT_IN_VSNPRINTF_CHK:
6879 maybe_emit_chk_warning (exp, fcode);
6880 break;
6882 case BUILT_IN_SPRINTF_CHK:
6883 case BUILT_IN_VSPRINTF_CHK:
6884 maybe_emit_sprintf_chk_warning (exp, fcode);
6885 break;
6887 case BUILT_IN_FREE:
6888 if (warn_free_nonheap_object)
6889 maybe_emit_free_warning (exp);
6890 break;
6892 case BUILT_IN_THREAD_POINTER:
6893 return expand_builtin_thread_pointer (exp, target);
6895 case BUILT_IN_SET_THREAD_POINTER:
6896 expand_builtin_set_thread_pointer (exp);
6897 return const0_rtx;
6899 default: /* just do library call, if unknown builtin */
6900 break;
6903 /* The switch statement above can drop through to cause the function
6904 to be called normally. */
6905 return expand_call (exp, target, ignore);
6908 /* Determine whether a tree node represents a call to a built-in
6909 function. If the tree T is a call to a built-in function with
6910 the right number of arguments of the appropriate types, return
6911 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6912 Otherwise the return value is END_BUILTINS. */
6914 enum built_in_function
6915 builtin_mathfn_code (const_tree t)
6917 const_tree fndecl, arg, parmlist;
6918 const_tree argtype, parmtype;
6919 const_call_expr_arg_iterator iter;
6921 if (TREE_CODE (t) != CALL_EXPR
6922 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6923 return END_BUILTINS;
6925 fndecl = get_callee_fndecl (t);
6926 if (fndecl == NULL_TREE
6927 || TREE_CODE (fndecl) != FUNCTION_DECL
6928 || ! DECL_BUILT_IN (fndecl)
6929 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6930 return END_BUILTINS;
6932 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6933 init_const_call_expr_arg_iterator (t, &iter);
6934 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6936 /* If a function doesn't take a variable number of arguments,
6937 the last element in the list will have type `void'. */
6938 parmtype = TREE_VALUE (parmlist);
6939 if (VOID_TYPE_P (parmtype))
6941 if (more_const_call_expr_args_p (&iter))
6942 return END_BUILTINS;
6943 return DECL_FUNCTION_CODE (fndecl);
6946 if (! more_const_call_expr_args_p (&iter))
6947 return END_BUILTINS;
6949 arg = next_const_call_expr_arg (&iter);
6950 argtype = TREE_TYPE (arg);
6952 if (SCALAR_FLOAT_TYPE_P (parmtype))
6954 if (! SCALAR_FLOAT_TYPE_P (argtype))
6955 return END_BUILTINS;
6957 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6959 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6960 return END_BUILTINS;
6962 else if (POINTER_TYPE_P (parmtype))
6964 if (! POINTER_TYPE_P (argtype))
6965 return END_BUILTINS;
6967 else if (INTEGRAL_TYPE_P (parmtype))
6969 if (! INTEGRAL_TYPE_P (argtype))
6970 return END_BUILTINS;
6972 else
6973 return END_BUILTINS;
6976 /* Variable-length argument list. */
6977 return DECL_FUNCTION_CODE (fndecl);
6980 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6981 evaluate to a constant. */
6983 static tree
6984 fold_builtin_constant_p (tree arg)
6986 /* We return 1 for a numeric type that's known to be a constant
6987 value at compile-time or for an aggregate type that's a
6988 literal constant. */
6989 STRIP_NOPS (arg);
6991 /* If we know this is a constant, emit the constant of one. */
6992 if (CONSTANT_CLASS_P (arg)
6993 || (TREE_CODE (arg) == CONSTRUCTOR
6994 && TREE_CONSTANT (arg)))
6995 return integer_one_node;
6996 if (TREE_CODE (arg) == ADDR_EXPR)
6998 tree op = TREE_OPERAND (arg, 0);
6999 if (TREE_CODE (op) == STRING_CST
7000 || (TREE_CODE (op) == ARRAY_REF
7001 && integer_zerop (TREE_OPERAND (op, 1))
7002 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7003 return integer_one_node;
7006 /* If this expression has side effects, show we don't know it to be a
7007 constant. Likewise if it's a pointer or aggregate type since in
7008 those case we only want literals, since those are only optimized
7009 when generating RTL, not later.
7010 And finally, if we are compiling an initializer, not code, we
7011 need to return a definite result now; there's not going to be any
7012 more optimization done. */
7013 if (TREE_SIDE_EFFECTS (arg)
7014 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7015 || POINTER_TYPE_P (TREE_TYPE (arg))
7016 || cfun == 0
7017 || folding_initializer
7018 || force_folding_builtin_constant_p)
7019 return integer_zero_node;
7021 return NULL_TREE;
7024 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7025 return it as a truthvalue. */
7027 static tree
7028 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7030 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7032 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7033 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7034 ret_type = TREE_TYPE (TREE_TYPE (fn));
7035 pred_type = TREE_VALUE (arg_types);
7036 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7038 pred = fold_convert_loc (loc, pred_type, pred);
7039 expected = fold_convert_loc (loc, expected_type, expected);
7040 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7042 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7043 build_int_cst (ret_type, 0));
7046 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7047 NULL_TREE if no simplification is possible. */
7049 static tree
7050 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7052 tree inner, fndecl, inner_arg0;
7053 enum tree_code code;
7055 /* Distribute the expected value over short-circuiting operators.
7056 See through the cast from truthvalue_type_node to long. */
7057 inner_arg0 = arg0;
7058 while (TREE_CODE (inner_arg0) == NOP_EXPR
7059 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7060 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7061 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7063 /* If this is a builtin_expect within a builtin_expect keep the
7064 inner one. See through a comparison against a constant. It
7065 might have been added to create a thruthvalue. */
7066 inner = inner_arg0;
7068 if (COMPARISON_CLASS_P (inner)
7069 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7070 inner = TREE_OPERAND (inner, 0);
7072 if (TREE_CODE (inner) == CALL_EXPR
7073 && (fndecl = get_callee_fndecl (inner))
7074 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7075 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7076 return arg0;
7078 inner = inner_arg0;
7079 code = TREE_CODE (inner);
7080 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7082 tree op0 = TREE_OPERAND (inner, 0);
7083 tree op1 = TREE_OPERAND (inner, 1);
7085 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7086 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7087 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7089 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7092 /* If the argument isn't invariant then there's nothing else we can do. */
7093 if (!TREE_CONSTANT (inner_arg0))
7094 return NULL_TREE;
7096 /* If we expect that a comparison against the argument will fold to
7097 a constant return the constant. In practice, this means a true
7098 constant or the address of a non-weak symbol. */
7099 inner = inner_arg0;
7100 STRIP_NOPS (inner);
7101 if (TREE_CODE (inner) == ADDR_EXPR)
7105 inner = TREE_OPERAND (inner, 0);
7107 while (TREE_CODE (inner) == COMPONENT_REF
7108 || TREE_CODE (inner) == ARRAY_REF);
7109 if ((TREE_CODE (inner) == VAR_DECL
7110 || TREE_CODE (inner) == FUNCTION_DECL)
7111 && DECL_WEAK (inner))
7112 return NULL_TREE;
7115 /* Otherwise, ARG0 already has the proper type for the return value. */
7116 return arg0;
7119 /* Fold a call to __builtin_classify_type with argument ARG. */
7121 static tree
7122 fold_builtin_classify_type (tree arg)
7124 if (arg == 0)
7125 return build_int_cst (integer_type_node, no_type_class);
7127 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7130 /* Fold a call to __builtin_strlen with argument ARG. */
7132 static tree
7133 fold_builtin_strlen (location_t loc, tree type, tree arg)
7135 if (!validate_arg (arg, POINTER_TYPE))
7136 return NULL_TREE;
7137 else
7139 tree len = c_strlen (arg, 0);
7141 if (len)
7142 return fold_convert_loc (loc, type, len);
7144 return NULL_TREE;
7148 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7150 static tree
7151 fold_builtin_inf (location_t loc, tree type, int warn)
7153 REAL_VALUE_TYPE real;
7155 /* __builtin_inff is intended to be usable to define INFINITY on all
7156 targets. If an infinity is not available, INFINITY expands "to a
7157 positive constant of type float that overflows at translation
7158 time", footnote "In this case, using INFINITY will violate the
7159 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7160 Thus we pedwarn to ensure this constraint violation is
7161 diagnosed. */
7162 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7163 pedwarn (loc, 0, "target format does not support infinity");
7165 real_inf (&real);
7166 return build_real (type, real);
7169 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7171 static tree
7172 fold_builtin_nan (tree arg, tree type, int quiet)
7174 REAL_VALUE_TYPE real;
7175 const char *str;
7177 if (!validate_arg (arg, POINTER_TYPE))
7178 return NULL_TREE;
7179 str = c_getstr (arg);
7180 if (!str)
7181 return NULL_TREE;
7183 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7184 return NULL_TREE;
7186 return build_real (type, real);
7189 /* Return true if the floating point expression T has an integer value.
7190 We also allow +Inf, -Inf and NaN to be considered integer values. */
7192 static bool
7193 integer_valued_real_p (tree t)
7195 switch (TREE_CODE (t))
7197 case FLOAT_EXPR:
7198 return true;
7200 case ABS_EXPR:
7201 case SAVE_EXPR:
7202 return integer_valued_real_p (TREE_OPERAND (t, 0));
7204 case COMPOUND_EXPR:
7205 case MODIFY_EXPR:
7206 case BIND_EXPR:
7207 return integer_valued_real_p (TREE_OPERAND (t, 1));
7209 case PLUS_EXPR:
7210 case MINUS_EXPR:
7211 case MULT_EXPR:
7212 case MIN_EXPR:
7213 case MAX_EXPR:
7214 return integer_valued_real_p (TREE_OPERAND (t, 0))
7215 && integer_valued_real_p (TREE_OPERAND (t, 1));
7217 case COND_EXPR:
7218 return integer_valued_real_p (TREE_OPERAND (t, 1))
7219 && integer_valued_real_p (TREE_OPERAND (t, 2));
7221 case REAL_CST:
7222 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7224 case NOP_EXPR:
7226 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7227 if (TREE_CODE (type) == INTEGER_TYPE)
7228 return true;
7229 if (TREE_CODE (type) == REAL_TYPE)
7230 return integer_valued_real_p (TREE_OPERAND (t, 0));
7231 break;
7234 case CALL_EXPR:
7235 switch (builtin_mathfn_code (t))
7237 CASE_FLT_FN (BUILT_IN_CEIL):
7238 CASE_FLT_FN (BUILT_IN_FLOOR):
7239 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7240 CASE_FLT_FN (BUILT_IN_RINT):
7241 CASE_FLT_FN (BUILT_IN_ROUND):
7242 CASE_FLT_FN (BUILT_IN_TRUNC):
7243 return true;
7245 CASE_FLT_FN (BUILT_IN_FMIN):
7246 CASE_FLT_FN (BUILT_IN_FMAX):
7247 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7248 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7250 default:
7251 break;
7253 break;
7255 default:
7256 break;
7258 return false;
7261 /* FNDECL is assumed to be a builtin where truncation can be propagated
7262 across (for instance floor((double)f) == (double)floorf (f).
7263 Do the transformation for a call with argument ARG. */
7265 static tree
7266 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7268 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7270 if (!validate_arg (arg, REAL_TYPE))
7271 return NULL_TREE;
7273 /* Integer rounding functions are idempotent. */
7274 if (fcode == builtin_mathfn_code (arg))
7275 return arg;
7277 /* If argument is already integer valued, and we don't need to worry
7278 about setting errno, there's no need to perform rounding. */
7279 if (! flag_errno_math && integer_valued_real_p (arg))
7280 return arg;
7282 if (optimize)
7284 tree arg0 = strip_float_extensions (arg);
7285 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7286 tree newtype = TREE_TYPE (arg0);
7287 tree decl;
7289 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7290 && (decl = mathfn_built_in (newtype, fcode)))
7291 return fold_convert_loc (loc, ftype,
7292 build_call_expr_loc (loc, decl, 1,
7293 fold_convert_loc (loc,
7294 newtype,
7295 arg0)));
7297 return NULL_TREE;
7300 /* FNDECL is assumed to be builtin which can narrow the FP type of
7301 the argument, for instance lround((double)f) -> lroundf (f).
7302 Do the transformation for a call with argument ARG. */
7304 static tree
7305 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7307 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7309 if (!validate_arg (arg, REAL_TYPE))
7310 return NULL_TREE;
7312 /* If argument is already integer valued, and we don't need to worry
7313 about setting errno, there's no need to perform rounding. */
7314 if (! flag_errno_math && integer_valued_real_p (arg))
7315 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7316 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7318 if (optimize)
7320 tree ftype = TREE_TYPE (arg);
7321 tree arg0 = strip_float_extensions (arg);
7322 tree newtype = TREE_TYPE (arg0);
7323 tree decl;
7325 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7326 && (decl = mathfn_built_in (newtype, fcode)))
7327 return build_call_expr_loc (loc, decl, 1,
7328 fold_convert_loc (loc, newtype, arg0));
7331 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7332 sizeof (int) == sizeof (long). */
7333 if (TYPE_PRECISION (integer_type_node)
7334 == TYPE_PRECISION (long_integer_type_node))
7336 tree newfn = NULL_TREE;
7337 switch (fcode)
7339 CASE_FLT_FN (BUILT_IN_ICEIL):
7340 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7341 break;
7343 CASE_FLT_FN (BUILT_IN_IFLOOR):
7344 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7345 break;
7347 CASE_FLT_FN (BUILT_IN_IROUND):
7348 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7349 break;
7351 CASE_FLT_FN (BUILT_IN_IRINT):
7352 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7353 break;
7355 default:
7356 break;
7359 if (newfn)
7361 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7362 return fold_convert_loc (loc,
7363 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7367 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7368 sizeof (long long) == sizeof (long). */
7369 if (TYPE_PRECISION (long_long_integer_type_node)
7370 == TYPE_PRECISION (long_integer_type_node))
7372 tree newfn = NULL_TREE;
7373 switch (fcode)
7375 CASE_FLT_FN (BUILT_IN_LLCEIL):
7376 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7377 break;
7379 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7380 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7381 break;
7383 CASE_FLT_FN (BUILT_IN_LLROUND):
7384 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7385 break;
7387 CASE_FLT_FN (BUILT_IN_LLRINT):
7388 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7389 break;
7391 default:
7392 break;
7395 if (newfn)
7397 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7398 return fold_convert_loc (loc,
7399 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7403 return NULL_TREE;
7406 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7407 return type. Return NULL_TREE if no simplification can be made. */
7409 static tree
7410 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7412 tree res;
7414 if (!validate_arg (arg, COMPLEX_TYPE)
7415 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7416 return NULL_TREE;
7418 /* Calculate the result when the argument is a constant. */
7419 if (TREE_CODE (arg) == COMPLEX_CST
7420 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7421 type, mpfr_hypot)))
7422 return res;
7424 if (TREE_CODE (arg) == COMPLEX_EXPR)
7426 tree real = TREE_OPERAND (arg, 0);
7427 tree imag = TREE_OPERAND (arg, 1);
7429 /* If either part is zero, cabs is fabs of the other. */
7430 if (real_zerop (real))
7431 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7432 if (real_zerop (imag))
7433 return fold_build1_loc (loc, ABS_EXPR, type, real);
7435 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7436 if (flag_unsafe_math_optimizations
7437 && operand_equal_p (real, imag, OEP_PURE_SAME))
7439 const REAL_VALUE_TYPE sqrt2_trunc
7440 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7441 STRIP_NOPS (real);
7442 return fold_build2_loc (loc, MULT_EXPR, type,
7443 fold_build1_loc (loc, ABS_EXPR, type, real),
7444 build_real (type, sqrt2_trunc));
7448 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7449 if (TREE_CODE (arg) == NEGATE_EXPR
7450 || TREE_CODE (arg) == CONJ_EXPR)
7451 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7453 /* Don't do this when optimizing for size. */
7454 if (flag_unsafe_math_optimizations
7455 && optimize && optimize_function_for_speed_p (cfun))
7457 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7459 if (sqrtfn != NULL_TREE)
7461 tree rpart, ipart, result;
7463 arg = builtin_save_expr (arg);
7465 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7466 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7468 rpart = builtin_save_expr (rpart);
7469 ipart = builtin_save_expr (ipart);
7471 result = fold_build2_loc (loc, PLUS_EXPR, type,
7472 fold_build2_loc (loc, MULT_EXPR, type,
7473 rpart, rpart),
7474 fold_build2_loc (loc, MULT_EXPR, type,
7475 ipart, ipart));
7477 return build_call_expr_loc (loc, sqrtfn, 1, result);
7481 return NULL_TREE;
7484 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7485 complex tree type of the result. If NEG is true, the imaginary
7486 zero is negative. */
7488 static tree
7489 build_complex_cproj (tree type, bool neg)
7491 REAL_VALUE_TYPE rinf, rzero = dconst0;
7493 real_inf (&rinf);
7494 rzero.sign = neg;
7495 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7496 build_real (TREE_TYPE (type), rzero));
7499 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7500 return type. Return NULL_TREE if no simplification can be made. */
7502 static tree
7503 fold_builtin_cproj (location_t loc, tree arg, tree type)
7505 if (!validate_arg (arg, COMPLEX_TYPE)
7506 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7507 return NULL_TREE;
7509 /* If there are no infinities, return arg. */
7510 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7511 return non_lvalue_loc (loc, arg);
7513 /* Calculate the result when the argument is a constant. */
7514 if (TREE_CODE (arg) == COMPLEX_CST)
7516 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7517 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7519 if (real_isinf (real) || real_isinf (imag))
7520 return build_complex_cproj (type, imag->sign);
7521 else
7522 return arg;
7524 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7526 tree real = TREE_OPERAND (arg, 0);
7527 tree imag = TREE_OPERAND (arg, 1);
7529 STRIP_NOPS (real);
7530 STRIP_NOPS (imag);
7532 /* If the real part is inf and the imag part is known to be
7533 nonnegative, return (inf + 0i). Remember side-effects are
7534 possible in the imag part. */
7535 if (TREE_CODE (real) == REAL_CST
7536 && real_isinf (TREE_REAL_CST_PTR (real))
7537 && tree_expr_nonnegative_p (imag))
7538 return omit_one_operand_loc (loc, type,
7539 build_complex_cproj (type, false),
7540 arg);
7542 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7543 Remember side-effects are possible in the real part. */
7544 if (TREE_CODE (imag) == REAL_CST
7545 && real_isinf (TREE_REAL_CST_PTR (imag)))
7546 return
7547 omit_one_operand_loc (loc, type,
7548 build_complex_cproj (type, TREE_REAL_CST_PTR
7549 (imag)->sign), arg);
7552 return NULL_TREE;
7555 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7556 Return NULL_TREE if no simplification can be made. */
7558 static tree
7559 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7562 enum built_in_function fcode;
7563 tree res;
7565 if (!validate_arg (arg, REAL_TYPE))
7566 return NULL_TREE;
7568 /* Calculate the result when the argument is a constant. */
7569 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7570 return res;
7572 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7573 fcode = builtin_mathfn_code (arg);
7574 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7576 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7577 arg = fold_build2_loc (loc, MULT_EXPR, type,
7578 CALL_EXPR_ARG (arg, 0),
7579 build_real (type, dconsthalf));
7580 return build_call_expr_loc (loc, expfn, 1, arg);
7583 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7584 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7586 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7588 if (powfn)
7590 tree arg0 = CALL_EXPR_ARG (arg, 0);
7591 tree tree_root;
7592 /* The inner root was either sqrt or cbrt. */
7593 /* This was a conditional expression but it triggered a bug
7594 in Sun C 5.5. */
7595 REAL_VALUE_TYPE dconstroot;
7596 if (BUILTIN_SQRT_P (fcode))
7597 dconstroot = dconsthalf;
7598 else
7599 dconstroot = dconst_third ();
7601 /* Adjust for the outer root. */
7602 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7603 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7604 tree_root = build_real (type, dconstroot);
7605 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7609 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7610 if (flag_unsafe_math_optimizations
7611 && (fcode == BUILT_IN_POW
7612 || fcode == BUILT_IN_POWF
7613 || fcode == BUILT_IN_POWL))
7615 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7616 tree arg0 = CALL_EXPR_ARG (arg, 0);
7617 tree arg1 = CALL_EXPR_ARG (arg, 1);
7618 tree narg1;
7619 if (!tree_expr_nonnegative_p (arg0))
7620 arg0 = build1 (ABS_EXPR, type, arg0);
7621 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7622 build_real (type, dconsthalf));
7623 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7626 return NULL_TREE;
7629 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7630 Return NULL_TREE if no simplification can be made. */
7632 static tree
7633 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7635 const enum built_in_function fcode = builtin_mathfn_code (arg);
7636 tree res;
7638 if (!validate_arg (arg, REAL_TYPE))
7639 return NULL_TREE;
7641 /* Calculate the result when the argument is a constant. */
7642 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7643 return res;
7645 if (flag_unsafe_math_optimizations)
7647 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7648 if (BUILTIN_EXPONENT_P (fcode))
7650 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7651 const REAL_VALUE_TYPE third_trunc =
7652 real_value_truncate (TYPE_MODE (type), dconst_third ());
7653 arg = fold_build2_loc (loc, MULT_EXPR, type,
7654 CALL_EXPR_ARG (arg, 0),
7655 build_real (type, third_trunc));
7656 return build_call_expr_loc (loc, expfn, 1, arg);
7659 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7660 if (BUILTIN_SQRT_P (fcode))
7662 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7664 if (powfn)
7666 tree arg0 = CALL_EXPR_ARG (arg, 0);
7667 tree tree_root;
7668 REAL_VALUE_TYPE dconstroot = dconst_third ();
7670 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7671 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7672 tree_root = build_real (type, dconstroot);
7673 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7677 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7678 if (BUILTIN_CBRT_P (fcode))
7680 tree arg0 = CALL_EXPR_ARG (arg, 0);
7681 if (tree_expr_nonnegative_p (arg0))
7683 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7685 if (powfn)
7687 tree tree_root;
7688 REAL_VALUE_TYPE dconstroot;
7690 real_arithmetic (&dconstroot, MULT_EXPR,
7691 dconst_third_ptr (), dconst_third_ptr ());
7692 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7693 tree_root = build_real (type, dconstroot);
7694 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7699 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7700 if (fcode == BUILT_IN_POW
7701 || fcode == BUILT_IN_POWF
7702 || fcode == BUILT_IN_POWL)
7704 tree arg00 = CALL_EXPR_ARG (arg, 0);
7705 tree arg01 = CALL_EXPR_ARG (arg, 1);
7706 if (tree_expr_nonnegative_p (arg00))
7708 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7709 const REAL_VALUE_TYPE dconstroot
7710 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7711 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7712 build_real (type, dconstroot));
7713 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7717 return NULL_TREE;
7720 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7721 TYPE is the type of the return value. Return NULL_TREE if no
7722 simplification can be made. */
7724 static tree
7725 fold_builtin_cos (location_t loc,
7726 tree arg, tree type, tree fndecl)
7728 tree res, narg;
7730 if (!validate_arg (arg, REAL_TYPE))
7731 return NULL_TREE;
7733 /* Calculate the result when the argument is a constant. */
7734 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7735 return res;
7737 /* Optimize cos(-x) into cos (x). */
7738 if ((narg = fold_strip_sign_ops (arg)))
7739 return build_call_expr_loc (loc, fndecl, 1, narg);
7741 return NULL_TREE;
7744 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7745 Return NULL_TREE if no simplification can be made. */
7747 static tree
7748 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7750 if (validate_arg (arg, REAL_TYPE))
7752 tree res, narg;
7754 /* Calculate the result when the argument is a constant. */
7755 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7756 return res;
7758 /* Optimize cosh(-x) into cosh (x). */
7759 if ((narg = fold_strip_sign_ops (arg)))
7760 return build_call_expr_loc (loc, fndecl, 1, narg);
7763 return NULL_TREE;
7766 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7767 argument ARG. TYPE is the type of the return value. Return
7768 NULL_TREE if no simplification can be made. */
7770 static tree
7771 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7772 bool hyper)
7774 if (validate_arg (arg, COMPLEX_TYPE)
7775 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7777 tree tmp;
7779 /* Calculate the result when the argument is a constant. */
7780 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7781 return tmp;
7783 /* Optimize fn(-x) into fn(x). */
7784 if ((tmp = fold_strip_sign_ops (arg)))
7785 return build_call_expr_loc (loc, fndecl, 1, tmp);
7788 return NULL_TREE;
7791 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7792 Return NULL_TREE if no simplification can be made. */
7794 static tree
7795 fold_builtin_tan (tree arg, tree type)
7797 enum built_in_function fcode;
7798 tree res;
7800 if (!validate_arg (arg, REAL_TYPE))
7801 return NULL_TREE;
7803 /* Calculate the result when the argument is a constant. */
7804 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7805 return res;
7807 /* Optimize tan(atan(x)) = x. */
7808 fcode = builtin_mathfn_code (arg);
7809 if (flag_unsafe_math_optimizations
7810 && (fcode == BUILT_IN_ATAN
7811 || fcode == BUILT_IN_ATANF
7812 || fcode == BUILT_IN_ATANL))
7813 return CALL_EXPR_ARG (arg, 0);
7815 return NULL_TREE;
7818 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7819 NULL_TREE if no simplification can be made. */
7821 static tree
7822 fold_builtin_sincos (location_t loc,
7823 tree arg0, tree arg1, tree arg2)
7825 tree type;
7826 tree res, fn, call;
7828 if (!validate_arg (arg0, REAL_TYPE)
7829 || !validate_arg (arg1, POINTER_TYPE)
7830 || !validate_arg (arg2, POINTER_TYPE))
7831 return NULL_TREE;
7833 type = TREE_TYPE (arg0);
7835 /* Calculate the result when the argument is a constant. */
7836 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7837 return res;
7839 /* Canonicalize sincos to cexpi. */
7840 if (!targetm.libc_has_function (function_c99_math_complex))
7841 return NULL_TREE;
7842 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7843 if (!fn)
7844 return NULL_TREE;
7846 call = build_call_expr_loc (loc, fn, 1, arg0);
7847 call = builtin_save_expr (call);
7849 return build2 (COMPOUND_EXPR, void_type_node,
7850 build2 (MODIFY_EXPR, void_type_node,
7851 build_fold_indirect_ref_loc (loc, arg1),
7852 build1 (IMAGPART_EXPR, type, call)),
7853 build2 (MODIFY_EXPR, void_type_node,
7854 build_fold_indirect_ref_loc (loc, arg2),
7855 build1 (REALPART_EXPR, type, call)));
7858 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7859 NULL_TREE if no simplification can be made. */
7861 static tree
7862 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7864 tree rtype;
7865 tree realp, imagp, ifn;
7866 tree res;
7868 if (!validate_arg (arg0, COMPLEX_TYPE)
7869 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7870 return NULL_TREE;
7872 /* Calculate the result when the argument is a constant. */
7873 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7874 return res;
7876 rtype = TREE_TYPE (TREE_TYPE (arg0));
7878 /* In case we can figure out the real part of arg0 and it is constant zero
7879 fold to cexpi. */
7880 if (!targetm.libc_has_function (function_c99_math_complex))
7881 return NULL_TREE;
7882 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7883 if (!ifn)
7884 return NULL_TREE;
7886 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7887 && real_zerop (realp))
7889 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7890 return build_call_expr_loc (loc, ifn, 1, narg);
7893 /* In case we can easily decompose real and imaginary parts split cexp
7894 to exp (r) * cexpi (i). */
7895 if (flag_unsafe_math_optimizations
7896 && realp)
7898 tree rfn, rcall, icall;
7900 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7901 if (!rfn)
7902 return NULL_TREE;
7904 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7905 if (!imagp)
7906 return NULL_TREE;
7908 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7909 icall = builtin_save_expr (icall);
7910 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7911 rcall = builtin_save_expr (rcall);
7912 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7913 fold_build2_loc (loc, MULT_EXPR, rtype,
7914 rcall,
7915 fold_build1_loc (loc, REALPART_EXPR,
7916 rtype, icall)),
7917 fold_build2_loc (loc, MULT_EXPR, rtype,
7918 rcall,
7919 fold_build1_loc (loc, IMAGPART_EXPR,
7920 rtype, icall)));
7923 return NULL_TREE;
7926 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7927 Return NULL_TREE if no simplification can be made. */
7929 static tree
7930 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7932 if (!validate_arg (arg, REAL_TYPE))
7933 return NULL_TREE;
7935 /* Optimize trunc of constant value. */
7936 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7938 REAL_VALUE_TYPE r, x;
7939 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7941 x = TREE_REAL_CST (arg);
7942 real_trunc (&r, TYPE_MODE (type), &x);
7943 return build_real (type, r);
7946 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7949 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7950 Return NULL_TREE if no simplification can be made. */
7952 static tree
7953 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7955 if (!validate_arg (arg, REAL_TYPE))
7956 return NULL_TREE;
7958 /* Optimize floor of constant value. */
7959 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7961 REAL_VALUE_TYPE x;
7963 x = TREE_REAL_CST (arg);
7964 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7966 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7967 REAL_VALUE_TYPE r;
7969 real_floor (&r, TYPE_MODE (type), &x);
7970 return build_real (type, r);
7974 /* Fold floor (x) where x is nonnegative to trunc (x). */
7975 if (tree_expr_nonnegative_p (arg))
7977 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7978 if (truncfn)
7979 return build_call_expr_loc (loc, truncfn, 1, arg);
7982 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7985 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7986 Return NULL_TREE if no simplification can be made. */
7988 static tree
7989 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7991 if (!validate_arg (arg, REAL_TYPE))
7992 return NULL_TREE;
7994 /* Optimize ceil of constant value. */
7995 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7997 REAL_VALUE_TYPE x;
7999 x = TREE_REAL_CST (arg);
8000 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8002 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8003 REAL_VALUE_TYPE r;
8005 real_ceil (&r, TYPE_MODE (type), &x);
8006 return build_real (type, r);
8010 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8013 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8014 Return NULL_TREE if no simplification can be made. */
8016 static tree
8017 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8019 if (!validate_arg (arg, REAL_TYPE))
8020 return NULL_TREE;
8022 /* Optimize round of constant value. */
8023 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8025 REAL_VALUE_TYPE x;
8027 x = TREE_REAL_CST (arg);
8028 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8030 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8031 REAL_VALUE_TYPE r;
8033 real_round (&r, TYPE_MODE (type), &x);
8034 return build_real (type, r);
8038 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8041 /* Fold function call to builtin lround, lroundf or lroundl (or the
8042 corresponding long long versions) and other rounding functions. ARG
8043 is the argument to the call. Return NULL_TREE if no simplification
8044 can be made. */
8046 static tree
8047 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8049 if (!validate_arg (arg, REAL_TYPE))
8050 return NULL_TREE;
8052 /* Optimize lround of constant value. */
8053 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8055 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8057 if (real_isfinite (&x))
8059 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8060 tree ftype = TREE_TYPE (arg);
8061 double_int val;
8062 REAL_VALUE_TYPE r;
8064 switch (DECL_FUNCTION_CODE (fndecl))
8066 CASE_FLT_FN (BUILT_IN_IFLOOR):
8067 CASE_FLT_FN (BUILT_IN_LFLOOR):
8068 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8069 real_floor (&r, TYPE_MODE (ftype), &x);
8070 break;
8072 CASE_FLT_FN (BUILT_IN_ICEIL):
8073 CASE_FLT_FN (BUILT_IN_LCEIL):
8074 CASE_FLT_FN (BUILT_IN_LLCEIL):
8075 real_ceil (&r, TYPE_MODE (ftype), &x);
8076 break;
8078 CASE_FLT_FN (BUILT_IN_IROUND):
8079 CASE_FLT_FN (BUILT_IN_LROUND):
8080 CASE_FLT_FN (BUILT_IN_LLROUND):
8081 real_round (&r, TYPE_MODE (ftype), &x);
8082 break;
8084 default:
8085 gcc_unreachable ();
8088 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8089 if (double_int_fits_to_tree_p (itype, val))
8090 return double_int_to_tree (itype, val);
8094 switch (DECL_FUNCTION_CODE (fndecl))
8096 CASE_FLT_FN (BUILT_IN_LFLOOR):
8097 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8098 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8099 if (tree_expr_nonnegative_p (arg))
8100 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8101 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8102 break;
8103 default:;
8106 return fold_fixed_mathfn (loc, fndecl, arg);
8109 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8110 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8111 the argument to the call. Return NULL_TREE if no simplification can
8112 be made. */
8114 static tree
8115 fold_builtin_bitop (tree fndecl, tree arg)
8117 if (!validate_arg (arg, INTEGER_TYPE))
8118 return NULL_TREE;
8120 /* Optimize for constant argument. */
8121 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8123 HOST_WIDE_INT hi, width, result;
8124 unsigned HOST_WIDE_INT lo;
8125 tree type;
8127 type = TREE_TYPE (arg);
8128 width = TYPE_PRECISION (type);
8129 lo = TREE_INT_CST_LOW (arg);
8131 /* Clear all the bits that are beyond the type's precision. */
8132 if (width > HOST_BITS_PER_WIDE_INT)
8134 hi = TREE_INT_CST_HIGH (arg);
8135 if (width < HOST_BITS_PER_DOUBLE_INT)
8136 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8138 else
8140 hi = 0;
8141 if (width < HOST_BITS_PER_WIDE_INT)
8142 lo &= ~(HOST_WIDE_INT_M1U << width);
8145 switch (DECL_FUNCTION_CODE (fndecl))
8147 CASE_INT_FN (BUILT_IN_FFS):
8148 if (lo != 0)
8149 result = ffs_hwi (lo);
8150 else if (hi != 0)
8151 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8152 else
8153 result = 0;
8154 break;
8156 CASE_INT_FN (BUILT_IN_CLZ):
8157 if (hi != 0)
8158 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8159 else if (lo != 0)
8160 result = width - floor_log2 (lo) - 1;
8161 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8162 result = width;
8163 break;
8165 CASE_INT_FN (BUILT_IN_CTZ):
8166 if (lo != 0)
8167 result = ctz_hwi (lo);
8168 else if (hi != 0)
8169 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8170 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8171 result = width;
8172 break;
8174 CASE_INT_FN (BUILT_IN_CLRSB):
8175 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8176 return NULL_TREE;
8177 if (width > HOST_BITS_PER_WIDE_INT
8178 && (hi & ((unsigned HOST_WIDE_INT) 1
8179 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8181 hi = ~hi & ~(HOST_WIDE_INT_M1U
8182 << (width - HOST_BITS_PER_WIDE_INT - 1));
8183 lo = ~lo;
8185 else if (width <= HOST_BITS_PER_WIDE_INT
8186 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8187 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8188 if (hi != 0)
8189 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8190 else if (lo != 0)
8191 result = width - floor_log2 (lo) - 2;
8192 else
8193 result = width - 1;
8194 break;
8196 CASE_INT_FN (BUILT_IN_POPCOUNT):
8197 result = 0;
8198 while (lo)
8199 result++, lo &= lo - 1;
8200 while (hi)
8201 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8202 break;
8204 CASE_INT_FN (BUILT_IN_PARITY):
8205 result = 0;
8206 while (lo)
8207 result++, lo &= lo - 1;
8208 while (hi)
8209 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8210 result &= 1;
8211 break;
8213 default:
8214 gcc_unreachable ();
8217 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8220 return NULL_TREE;
8223 /* Fold function call to builtin_bswap and the short, long and long long
8224 variants. Return NULL_TREE if no simplification can be made. */
8225 static tree
8226 fold_builtin_bswap (tree fndecl, tree arg)
8228 if (! validate_arg (arg, INTEGER_TYPE))
8229 return NULL_TREE;
8231 /* Optimize constant value. */
8232 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8234 HOST_WIDE_INT hi, width, r_hi = 0;
8235 unsigned HOST_WIDE_INT lo, r_lo = 0;
8236 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 width = TYPE_PRECISION (type);
8239 lo = TREE_INT_CST_LOW (arg);
8240 hi = TREE_INT_CST_HIGH (arg);
8242 switch (DECL_FUNCTION_CODE (fndecl))
8244 case BUILT_IN_BSWAP16:
8245 case BUILT_IN_BSWAP32:
8246 case BUILT_IN_BSWAP64:
8248 int s;
8250 for (s = 0; s < width; s += 8)
8252 int d = width - s - 8;
8253 unsigned HOST_WIDE_INT byte;
8255 if (s < HOST_BITS_PER_WIDE_INT)
8256 byte = (lo >> s) & 0xff;
8257 else
8258 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8260 if (d < HOST_BITS_PER_WIDE_INT)
8261 r_lo |= byte << d;
8262 else
8263 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8267 break;
8269 default:
8270 gcc_unreachable ();
8273 if (width < HOST_BITS_PER_WIDE_INT)
8274 return build_int_cst (type, r_lo);
8275 else
8276 return build_int_cst_wide (type, r_lo, r_hi);
8279 return NULL_TREE;
8282 /* A subroutine of fold_builtin to fold the various logarithmic
8283 functions. Return NULL_TREE if no simplification can me made.
8284 FUNC is the corresponding MPFR logarithm function. */
8286 static tree
8287 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8288 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8290 if (validate_arg (arg, REAL_TYPE))
8292 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8293 tree res;
8294 const enum built_in_function fcode = builtin_mathfn_code (arg);
8296 /* Calculate the result when the argument is a constant. */
8297 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8298 return res;
8300 /* Special case, optimize logN(expN(x)) = x. */
8301 if (flag_unsafe_math_optimizations
8302 && ((func == mpfr_log
8303 && (fcode == BUILT_IN_EXP
8304 || fcode == BUILT_IN_EXPF
8305 || fcode == BUILT_IN_EXPL))
8306 || (func == mpfr_log2
8307 && (fcode == BUILT_IN_EXP2
8308 || fcode == BUILT_IN_EXP2F
8309 || fcode == BUILT_IN_EXP2L))
8310 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8311 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8313 /* Optimize logN(func()) for various exponential functions. We
8314 want to determine the value "x" and the power "exponent" in
8315 order to transform logN(x**exponent) into exponent*logN(x). */
8316 if (flag_unsafe_math_optimizations)
8318 tree exponent = 0, x = 0;
8320 switch (fcode)
8322 CASE_FLT_FN (BUILT_IN_EXP):
8323 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8324 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8325 dconst_e ()));
8326 exponent = CALL_EXPR_ARG (arg, 0);
8327 break;
8328 CASE_FLT_FN (BUILT_IN_EXP2):
8329 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8330 x = build_real (type, dconst2);
8331 exponent = CALL_EXPR_ARG (arg, 0);
8332 break;
8333 CASE_FLT_FN (BUILT_IN_EXP10):
8334 CASE_FLT_FN (BUILT_IN_POW10):
8335 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8337 REAL_VALUE_TYPE dconst10;
8338 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8339 x = build_real (type, dconst10);
8341 exponent = CALL_EXPR_ARG (arg, 0);
8342 break;
8343 CASE_FLT_FN (BUILT_IN_SQRT):
8344 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8345 x = CALL_EXPR_ARG (arg, 0);
8346 exponent = build_real (type, dconsthalf);
8347 break;
8348 CASE_FLT_FN (BUILT_IN_CBRT):
8349 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8350 x = CALL_EXPR_ARG (arg, 0);
8351 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8352 dconst_third ()));
8353 break;
8354 CASE_FLT_FN (BUILT_IN_POW):
8355 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8356 x = CALL_EXPR_ARG (arg, 0);
8357 exponent = CALL_EXPR_ARG (arg, 1);
8358 break;
8359 default:
8360 break;
8363 /* Now perform the optimization. */
8364 if (x && exponent)
8366 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8367 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8372 return NULL_TREE;
8375 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8376 NULL_TREE if no simplification can be made. */
8378 static tree
8379 fold_builtin_hypot (location_t loc, tree fndecl,
8380 tree arg0, tree arg1, tree type)
8382 tree res, narg0, narg1;
8384 if (!validate_arg (arg0, REAL_TYPE)
8385 || !validate_arg (arg1, REAL_TYPE))
8386 return NULL_TREE;
8388 /* Calculate the result when the argument is a constant. */
8389 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8390 return res;
8392 /* If either argument to hypot has a negate or abs, strip that off.
8393 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8394 narg0 = fold_strip_sign_ops (arg0);
8395 narg1 = fold_strip_sign_ops (arg1);
8396 if (narg0 || narg1)
8398 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8399 narg1 ? narg1 : arg1);
8402 /* If either argument is zero, hypot is fabs of the other. */
8403 if (real_zerop (arg0))
8404 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8405 else if (real_zerop (arg1))
8406 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8408 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8409 if (flag_unsafe_math_optimizations
8410 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8412 const REAL_VALUE_TYPE sqrt2_trunc
8413 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8414 return fold_build2_loc (loc, MULT_EXPR, type,
8415 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8416 build_real (type, sqrt2_trunc));
8419 return NULL_TREE;
8423 /* Fold a builtin function call to pow, powf, or powl. Return
8424 NULL_TREE if no simplification can be made. */
8425 static tree
8426 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8428 tree res;
8430 if (!validate_arg (arg0, REAL_TYPE)
8431 || !validate_arg (arg1, REAL_TYPE))
8432 return NULL_TREE;
8434 /* Calculate the result when the argument is a constant. */
8435 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8436 return res;
8438 /* Optimize pow(1.0,y) = 1.0. */
8439 if (real_onep (arg0))
8440 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8442 if (TREE_CODE (arg1) == REAL_CST
8443 && !TREE_OVERFLOW (arg1))
8445 REAL_VALUE_TYPE cint;
8446 REAL_VALUE_TYPE c;
8447 HOST_WIDE_INT n;
8449 c = TREE_REAL_CST (arg1);
8451 /* Optimize pow(x,0.0) = 1.0. */
8452 if (REAL_VALUES_EQUAL (c, dconst0))
8453 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8454 arg0);
8456 /* Optimize pow(x,1.0) = x. */
8457 if (REAL_VALUES_EQUAL (c, dconst1))
8458 return arg0;
8460 /* Optimize pow(x,-1.0) = 1.0/x. */
8461 if (REAL_VALUES_EQUAL (c, dconstm1))
8462 return fold_build2_loc (loc, RDIV_EXPR, type,
8463 build_real (type, dconst1), arg0);
8465 /* Optimize pow(x,0.5) = sqrt(x). */
8466 if (flag_unsafe_math_optimizations
8467 && REAL_VALUES_EQUAL (c, dconsthalf))
8469 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8471 if (sqrtfn != NULL_TREE)
8472 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8475 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8476 if (flag_unsafe_math_optimizations)
8478 const REAL_VALUE_TYPE dconstroot
8479 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8481 if (REAL_VALUES_EQUAL (c, dconstroot))
8483 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8484 if (cbrtfn != NULL_TREE)
8485 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8489 /* Check for an integer exponent. */
8490 n = real_to_integer (&c);
8491 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8492 if (real_identical (&c, &cint))
8494 /* Attempt to evaluate pow at compile-time, unless this should
8495 raise an exception. */
8496 if (TREE_CODE (arg0) == REAL_CST
8497 && !TREE_OVERFLOW (arg0)
8498 && (n > 0
8499 || (!flag_trapping_math && !flag_errno_math)
8500 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8502 REAL_VALUE_TYPE x;
8503 bool inexact;
8505 x = TREE_REAL_CST (arg0);
8506 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8507 if (flag_unsafe_math_optimizations || !inexact)
8508 return build_real (type, x);
8511 /* Strip sign ops from even integer powers. */
8512 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8514 tree narg0 = fold_strip_sign_ops (arg0);
8515 if (narg0)
8516 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8521 if (flag_unsafe_math_optimizations)
8523 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8525 /* Optimize pow(expN(x),y) = expN(x*y). */
8526 if (BUILTIN_EXPONENT_P (fcode))
8528 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8529 tree arg = CALL_EXPR_ARG (arg0, 0);
8530 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8531 return build_call_expr_loc (loc, expfn, 1, arg);
8534 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8535 if (BUILTIN_SQRT_P (fcode))
8537 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8538 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8539 build_real (type, dconsthalf));
8540 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8543 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8544 if (BUILTIN_CBRT_P (fcode))
8546 tree arg = CALL_EXPR_ARG (arg0, 0);
8547 if (tree_expr_nonnegative_p (arg))
8549 const REAL_VALUE_TYPE dconstroot
8550 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8551 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8552 build_real (type, dconstroot));
8553 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8557 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8558 if (fcode == BUILT_IN_POW
8559 || fcode == BUILT_IN_POWF
8560 || fcode == BUILT_IN_POWL)
8562 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8563 if (tree_expr_nonnegative_p (arg00))
8565 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8566 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8567 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8572 return NULL_TREE;
8575 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8576 Return NULL_TREE if no simplification can be made. */
8577 static tree
8578 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8579 tree arg0, tree arg1, tree type)
8581 if (!validate_arg (arg0, REAL_TYPE)
8582 || !validate_arg (arg1, INTEGER_TYPE))
8583 return NULL_TREE;
8585 /* Optimize pow(1.0,y) = 1.0. */
8586 if (real_onep (arg0))
8587 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8589 if (host_integerp (arg1, 0))
8591 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8593 /* Evaluate powi at compile-time. */
8594 if (TREE_CODE (arg0) == REAL_CST
8595 && !TREE_OVERFLOW (arg0))
8597 REAL_VALUE_TYPE x;
8598 x = TREE_REAL_CST (arg0);
8599 real_powi (&x, TYPE_MODE (type), &x, c);
8600 return build_real (type, x);
8603 /* Optimize pow(x,0) = 1.0. */
8604 if (c == 0)
8605 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8606 arg0);
8608 /* Optimize pow(x,1) = x. */
8609 if (c == 1)
8610 return arg0;
8612 /* Optimize pow(x,-1) = 1.0/x. */
8613 if (c == -1)
8614 return fold_build2_loc (loc, RDIV_EXPR, type,
8615 build_real (type, dconst1), arg0);
8618 return NULL_TREE;
8621 /* A subroutine of fold_builtin to fold the various exponent
8622 functions. Return NULL_TREE if no simplification can be made.
8623 FUNC is the corresponding MPFR exponent function. */
8625 static tree
8626 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8627 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8629 if (validate_arg (arg, REAL_TYPE))
8631 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8632 tree res;
8634 /* Calculate the result when the argument is a constant. */
8635 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8636 return res;
8638 /* Optimize expN(logN(x)) = x. */
8639 if (flag_unsafe_math_optimizations)
8641 const enum built_in_function fcode = builtin_mathfn_code (arg);
8643 if ((func == mpfr_exp
8644 && (fcode == BUILT_IN_LOG
8645 || fcode == BUILT_IN_LOGF
8646 || fcode == BUILT_IN_LOGL))
8647 || (func == mpfr_exp2
8648 && (fcode == BUILT_IN_LOG2
8649 || fcode == BUILT_IN_LOG2F
8650 || fcode == BUILT_IN_LOG2L))
8651 || (func == mpfr_exp10
8652 && (fcode == BUILT_IN_LOG10
8653 || fcode == BUILT_IN_LOG10F
8654 || fcode == BUILT_IN_LOG10L)))
8655 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8659 return NULL_TREE;
8662 /* Return true if VAR is a VAR_DECL or a component thereof. */
8664 static bool
8665 var_decl_component_p (tree var)
8667 tree inner = var;
8668 while (handled_component_p (inner))
8669 inner = TREE_OPERAND (inner, 0);
8670 return SSA_VAR_P (inner);
8673 /* Fold function call to builtin memset. Return
8674 NULL_TREE if no simplification can be made. */
8676 static tree
8677 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8678 tree type, bool ignore)
8680 tree var, ret, etype;
8681 unsigned HOST_WIDE_INT length, cval;
8683 if (! validate_arg (dest, POINTER_TYPE)
8684 || ! validate_arg (c, INTEGER_TYPE)
8685 || ! validate_arg (len, INTEGER_TYPE))
8686 return NULL_TREE;
8688 if (! host_integerp (len, 1))
8689 return NULL_TREE;
8691 /* If the LEN parameter is zero, return DEST. */
8692 if (integer_zerop (len))
8693 return omit_one_operand_loc (loc, type, dest, c);
8695 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8696 return NULL_TREE;
8698 var = dest;
8699 STRIP_NOPS (var);
8700 if (TREE_CODE (var) != ADDR_EXPR)
8701 return NULL_TREE;
8703 var = TREE_OPERAND (var, 0);
8704 if (TREE_THIS_VOLATILE (var))
8705 return NULL_TREE;
8707 etype = TREE_TYPE (var);
8708 if (TREE_CODE (etype) == ARRAY_TYPE)
8709 etype = TREE_TYPE (etype);
8711 if (!INTEGRAL_TYPE_P (etype)
8712 && !POINTER_TYPE_P (etype))
8713 return NULL_TREE;
8715 if (! var_decl_component_p (var))
8716 return NULL_TREE;
8718 length = tree_low_cst (len, 1);
8719 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8720 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8721 return NULL_TREE;
8723 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8724 return NULL_TREE;
8726 if (integer_zerop (c))
8727 cval = 0;
8728 else
8730 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8731 return NULL_TREE;
8733 cval = TREE_INT_CST_LOW (c);
8734 cval &= 0xff;
8735 cval |= cval << 8;
8736 cval |= cval << 16;
8737 cval |= (cval << 31) << 1;
8740 ret = build_int_cst_type (etype, cval);
8741 var = build_fold_indirect_ref_loc (loc,
8742 fold_convert_loc (loc,
8743 build_pointer_type (etype),
8744 dest));
8745 ret = build2 (MODIFY_EXPR, etype, var, ret);
8746 if (ignore)
8747 return ret;
8749 return omit_one_operand_loc (loc, type, dest, ret);
8752 /* Fold function call to builtin memset. Return
8753 NULL_TREE if no simplification can be made. */
8755 static tree
8756 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8758 if (! validate_arg (dest, POINTER_TYPE)
8759 || ! validate_arg (size, INTEGER_TYPE))
8760 return NULL_TREE;
8762 if (!ignore)
8763 return NULL_TREE;
8765 /* New argument list transforming bzero(ptr x, int y) to
8766 memset(ptr x, int 0, size_t y). This is done this way
8767 so that if it isn't expanded inline, we fallback to
8768 calling bzero instead of memset. */
8770 return fold_builtin_memset (loc, dest, integer_zero_node,
8771 fold_convert_loc (loc, size_type_node, size),
8772 void_type_node, ignore);
8775 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8776 NULL_TREE if no simplification can be made.
8777 If ENDP is 0, return DEST (like memcpy).
8778 If ENDP is 1, return DEST+LEN (like mempcpy).
8779 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8780 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8781 (memmove). */
8783 static tree
8784 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8785 tree len, tree type, bool ignore, int endp)
8787 tree destvar, srcvar, expr;
8789 if (! validate_arg (dest, POINTER_TYPE)
8790 || ! validate_arg (src, POINTER_TYPE)
8791 || ! validate_arg (len, INTEGER_TYPE))
8792 return NULL_TREE;
8794 /* If the LEN parameter is zero, return DEST. */
8795 if (integer_zerop (len))
8796 return omit_one_operand_loc (loc, type, dest, src);
8798 /* If SRC and DEST are the same (and not volatile), return
8799 DEST{,+LEN,+LEN-1}. */
8800 if (operand_equal_p (src, dest, 0))
8801 expr = len;
8802 else
8804 tree srctype, desttype;
8805 unsigned int src_align, dest_align;
8806 tree off0;
8808 if (endp == 3)
8810 src_align = get_pointer_alignment (src);
8811 dest_align = get_pointer_alignment (dest);
8813 /* Both DEST and SRC must be pointer types.
8814 ??? This is what old code did. Is the testing for pointer types
8815 really mandatory?
8817 If either SRC is readonly or length is 1, we can use memcpy. */
8818 if (!dest_align || !src_align)
8819 return NULL_TREE;
8820 if (readonly_data_expr (src)
8821 || (host_integerp (len, 1)
8822 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8823 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8825 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8826 if (!fn)
8827 return NULL_TREE;
8828 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8831 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8832 if (TREE_CODE (src) == ADDR_EXPR
8833 && TREE_CODE (dest) == ADDR_EXPR)
8835 tree src_base, dest_base, fn;
8836 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8837 HOST_WIDE_INT size = -1;
8838 HOST_WIDE_INT maxsize = -1;
8840 srcvar = TREE_OPERAND (src, 0);
8841 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8842 &size, &maxsize);
8843 destvar = TREE_OPERAND (dest, 0);
8844 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8845 &size, &maxsize);
8846 if (host_integerp (len, 1))
8847 maxsize = tree_low_cst (len, 1);
8848 else
8849 maxsize = -1;
8850 src_offset /= BITS_PER_UNIT;
8851 dest_offset /= BITS_PER_UNIT;
8852 if (SSA_VAR_P (src_base)
8853 && SSA_VAR_P (dest_base))
8855 if (operand_equal_p (src_base, dest_base, 0)
8856 && ranges_overlap_p (src_offset, maxsize,
8857 dest_offset, maxsize))
8858 return NULL_TREE;
8860 else if (TREE_CODE (src_base) == MEM_REF
8861 && TREE_CODE (dest_base) == MEM_REF)
8863 double_int off;
8864 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8865 TREE_OPERAND (dest_base, 0), 0))
8866 return NULL_TREE;
8867 off = mem_ref_offset (src_base) +
8868 double_int::from_shwi (src_offset);
8869 if (!off.fits_shwi ())
8870 return NULL_TREE;
8871 src_offset = off.low;
8872 off = mem_ref_offset (dest_base) +
8873 double_int::from_shwi (dest_offset);
8874 if (!off.fits_shwi ())
8875 return NULL_TREE;
8876 dest_offset = off.low;
8877 if (ranges_overlap_p (src_offset, maxsize,
8878 dest_offset, maxsize))
8879 return NULL_TREE;
8881 else
8882 return NULL_TREE;
8884 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8885 if (!fn)
8886 return NULL_TREE;
8887 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8890 /* If the destination and source do not alias optimize into
8891 memcpy as well. */
8892 if ((is_gimple_min_invariant (dest)
8893 || TREE_CODE (dest) == SSA_NAME)
8894 && (is_gimple_min_invariant (src)
8895 || TREE_CODE (src) == SSA_NAME))
8897 ao_ref destr, srcr;
8898 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8899 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8900 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8902 tree fn;
8903 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8904 if (!fn)
8905 return NULL_TREE;
8906 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8910 return NULL_TREE;
8913 if (!host_integerp (len, 0))
8914 return NULL_TREE;
8915 /* FIXME:
8916 This logic lose for arguments like (type *)malloc (sizeof (type)),
8917 since we strip the casts of up to VOID return value from malloc.
8918 Perhaps we ought to inherit type from non-VOID argument here? */
8919 STRIP_NOPS (src);
8920 STRIP_NOPS (dest);
8921 if (!POINTER_TYPE_P (TREE_TYPE (src))
8922 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8923 return NULL_TREE;
8924 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8925 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8927 tree tem = TREE_OPERAND (src, 0);
8928 STRIP_NOPS (tem);
8929 if (tem != TREE_OPERAND (src, 0))
8930 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8932 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8934 tree tem = TREE_OPERAND (dest, 0);
8935 STRIP_NOPS (tem);
8936 if (tem != TREE_OPERAND (dest, 0))
8937 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8939 srctype = TREE_TYPE (TREE_TYPE (src));
8940 if (TREE_CODE (srctype) == ARRAY_TYPE
8941 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8943 srctype = TREE_TYPE (srctype);
8944 STRIP_NOPS (src);
8945 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8947 desttype = TREE_TYPE (TREE_TYPE (dest));
8948 if (TREE_CODE (desttype) == ARRAY_TYPE
8949 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8951 desttype = TREE_TYPE (desttype);
8952 STRIP_NOPS (dest);
8953 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8955 if (TREE_ADDRESSABLE (srctype)
8956 || TREE_ADDRESSABLE (desttype))
8957 return NULL_TREE;
8959 src_align = get_pointer_alignment (src);
8960 dest_align = get_pointer_alignment (dest);
8961 if (dest_align < TYPE_ALIGN (desttype)
8962 || src_align < TYPE_ALIGN (srctype))
8963 return NULL_TREE;
8965 if (!ignore)
8966 dest = builtin_save_expr (dest);
8968 /* Build accesses at offset zero with a ref-all character type. */
8969 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8970 ptr_mode, true), 0);
8972 destvar = dest;
8973 STRIP_NOPS (destvar);
8974 if (TREE_CODE (destvar) == ADDR_EXPR
8975 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8977 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8978 else
8979 destvar = NULL_TREE;
8981 srcvar = src;
8982 STRIP_NOPS (srcvar);
8983 if (TREE_CODE (srcvar) == ADDR_EXPR
8984 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8985 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8987 if (!destvar
8988 || src_align >= TYPE_ALIGN (desttype))
8989 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8990 srcvar, off0);
8991 else if (!STRICT_ALIGNMENT)
8993 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8994 src_align);
8995 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8997 else
8998 srcvar = NULL_TREE;
9000 else
9001 srcvar = NULL_TREE;
9003 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9004 return NULL_TREE;
9006 if (srcvar == NULL_TREE)
9008 STRIP_NOPS (src);
9009 if (src_align >= TYPE_ALIGN (desttype))
9010 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9011 else
9013 if (STRICT_ALIGNMENT)
9014 return NULL_TREE;
9015 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9016 src_align);
9017 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9020 else if (destvar == NULL_TREE)
9022 STRIP_NOPS (dest);
9023 if (dest_align >= TYPE_ALIGN (srctype))
9024 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9025 else
9027 if (STRICT_ALIGNMENT)
9028 return NULL_TREE;
9029 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9030 dest_align);
9031 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9035 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9038 if (ignore)
9039 return expr;
9041 if (endp == 0 || endp == 3)
9042 return omit_one_operand_loc (loc, type, dest, expr);
9044 if (expr == len)
9045 expr = NULL_TREE;
9047 if (endp == 2)
9048 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9049 ssize_int (1));
9051 dest = fold_build_pointer_plus_loc (loc, dest, len);
9052 dest = fold_convert_loc (loc, type, dest);
9053 if (expr)
9054 dest = omit_one_operand_loc (loc, type, dest, expr);
9055 return dest;
9058 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9059 If LEN is not NULL, it represents the length of the string to be
9060 copied. Return NULL_TREE if no simplification can be made. */
9062 tree
9063 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9065 tree fn;
9067 if (!validate_arg (dest, POINTER_TYPE)
9068 || !validate_arg (src, POINTER_TYPE))
9069 return NULL_TREE;
9071 /* If SRC and DEST are the same (and not volatile), return DEST. */
9072 if (operand_equal_p (src, dest, 0))
9073 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9075 if (optimize_function_for_size_p (cfun))
9076 return NULL_TREE;
9078 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9079 if (!fn)
9080 return NULL_TREE;
9082 if (!len)
9084 len = c_strlen (src, 1);
9085 if (! len || TREE_SIDE_EFFECTS (len))
9086 return NULL_TREE;
9089 len = fold_convert_loc (loc, size_type_node, len);
9090 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9091 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9092 build_call_expr_loc (loc, fn, 3, dest, src, len));
9095 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9096 Return NULL_TREE if no simplification can be made. */
9098 static tree
9099 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9101 tree fn, len, lenp1, call, type;
9103 if (!validate_arg (dest, POINTER_TYPE)
9104 || !validate_arg (src, POINTER_TYPE))
9105 return NULL_TREE;
9107 len = c_strlen (src, 1);
9108 if (!len
9109 || TREE_CODE (len) != INTEGER_CST)
9110 return NULL_TREE;
9112 if (optimize_function_for_size_p (cfun)
9113 /* If length is zero it's small enough. */
9114 && !integer_zerop (len))
9115 return NULL_TREE;
9117 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9118 if (!fn)
9119 return NULL_TREE;
9121 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9122 fold_convert_loc (loc, size_type_node, len),
9123 build_int_cst (size_type_node, 1));
9124 /* We use dest twice in building our expression. Save it from
9125 multiple expansions. */
9126 dest = builtin_save_expr (dest);
9127 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9129 type = TREE_TYPE (TREE_TYPE (fndecl));
9130 dest = fold_build_pointer_plus_loc (loc, dest, len);
9131 dest = fold_convert_loc (loc, type, dest);
9132 dest = omit_one_operand_loc (loc, type, dest, call);
9133 return dest;
9136 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9137 If SLEN is not NULL, it represents the length of the source string.
9138 Return NULL_TREE if no simplification can be made. */
9140 tree
9141 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9142 tree src, tree len, tree slen)
9144 tree fn;
9146 if (!validate_arg (dest, POINTER_TYPE)
9147 || !validate_arg (src, POINTER_TYPE)
9148 || !validate_arg (len, INTEGER_TYPE))
9149 return NULL_TREE;
9151 /* If the LEN parameter is zero, return DEST. */
9152 if (integer_zerop (len))
9153 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9155 /* We can't compare slen with len as constants below if len is not a
9156 constant. */
9157 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9158 return NULL_TREE;
9160 if (!slen)
9161 slen = c_strlen (src, 1);
9163 /* Now, we must be passed a constant src ptr parameter. */
9164 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9165 return NULL_TREE;
9167 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9169 /* We do not support simplification of this case, though we do
9170 support it when expanding trees into RTL. */
9171 /* FIXME: generate a call to __builtin_memset. */
9172 if (tree_int_cst_lt (slen, len))
9173 return NULL_TREE;
9175 /* OK transform into builtin memcpy. */
9176 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9177 if (!fn)
9178 return NULL_TREE;
9180 len = fold_convert_loc (loc, size_type_node, len);
9181 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9182 build_call_expr_loc (loc, fn, 3, dest, src, len));
9185 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9186 arguments to the call, and TYPE is its return type.
9187 Return NULL_TREE if no simplification can be made. */
9189 static tree
9190 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9192 if (!validate_arg (arg1, POINTER_TYPE)
9193 || !validate_arg (arg2, INTEGER_TYPE)
9194 || !validate_arg (len, INTEGER_TYPE))
9195 return NULL_TREE;
9196 else
9198 const char *p1;
9200 if (TREE_CODE (arg2) != INTEGER_CST
9201 || !host_integerp (len, 1))
9202 return NULL_TREE;
9204 p1 = c_getstr (arg1);
9205 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9207 char c;
9208 const char *r;
9209 tree tem;
9211 if (target_char_cast (arg2, &c))
9212 return NULL_TREE;
9214 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9216 if (r == NULL)
9217 return build_int_cst (TREE_TYPE (arg1), 0);
9219 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9220 return fold_convert_loc (loc, type, tem);
9222 return NULL_TREE;
9226 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9227 Return NULL_TREE if no simplification can be made. */
9229 static tree
9230 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9232 const char *p1, *p2;
9234 if (!validate_arg (arg1, POINTER_TYPE)
9235 || !validate_arg (arg2, POINTER_TYPE)
9236 || !validate_arg (len, INTEGER_TYPE))
9237 return NULL_TREE;
9239 /* If the LEN parameter is zero, return zero. */
9240 if (integer_zerop (len))
9241 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9242 arg1, arg2);
9244 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9245 if (operand_equal_p (arg1, arg2, 0))
9246 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9248 p1 = c_getstr (arg1);
9249 p2 = c_getstr (arg2);
9251 /* If all arguments are constant, and the value of len is not greater
9252 than the lengths of arg1 and arg2, evaluate at compile-time. */
9253 if (host_integerp (len, 1) && p1 && p2
9254 && compare_tree_int (len, strlen (p1) + 1) <= 0
9255 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9257 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9259 if (r > 0)
9260 return integer_one_node;
9261 else if (r < 0)
9262 return integer_minus_one_node;
9263 else
9264 return integer_zero_node;
9267 /* If len parameter is one, return an expression corresponding to
9268 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9269 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9271 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9272 tree cst_uchar_ptr_node
9273 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9275 tree ind1
9276 = fold_convert_loc (loc, integer_type_node,
9277 build1 (INDIRECT_REF, cst_uchar_node,
9278 fold_convert_loc (loc,
9279 cst_uchar_ptr_node,
9280 arg1)));
9281 tree ind2
9282 = fold_convert_loc (loc, integer_type_node,
9283 build1 (INDIRECT_REF, cst_uchar_node,
9284 fold_convert_loc (loc,
9285 cst_uchar_ptr_node,
9286 arg2)));
9287 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9290 return NULL_TREE;
9293 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9294 Return NULL_TREE if no simplification can be made. */
9296 static tree
9297 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9299 const char *p1, *p2;
9301 if (!validate_arg (arg1, POINTER_TYPE)
9302 || !validate_arg (arg2, POINTER_TYPE))
9303 return NULL_TREE;
9305 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9306 if (operand_equal_p (arg1, arg2, 0))
9307 return integer_zero_node;
9309 p1 = c_getstr (arg1);
9310 p2 = c_getstr (arg2);
9312 if (p1 && p2)
9314 const int i = strcmp (p1, p2);
9315 if (i < 0)
9316 return integer_minus_one_node;
9317 else if (i > 0)
9318 return integer_one_node;
9319 else
9320 return integer_zero_node;
9323 /* If the second arg is "", return *(const unsigned char*)arg1. */
9324 if (p2 && *p2 == '\0')
9326 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9327 tree cst_uchar_ptr_node
9328 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9330 return fold_convert_loc (loc, integer_type_node,
9331 build1 (INDIRECT_REF, cst_uchar_node,
9332 fold_convert_loc (loc,
9333 cst_uchar_ptr_node,
9334 arg1)));
9337 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9338 if (p1 && *p1 == '\0')
9340 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9341 tree cst_uchar_ptr_node
9342 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9344 tree temp
9345 = fold_convert_loc (loc, integer_type_node,
9346 build1 (INDIRECT_REF, cst_uchar_node,
9347 fold_convert_loc (loc,
9348 cst_uchar_ptr_node,
9349 arg2)));
9350 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9353 return NULL_TREE;
9356 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9357 Return NULL_TREE if no simplification can be made. */
9359 static tree
9360 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9362 const char *p1, *p2;
9364 if (!validate_arg (arg1, POINTER_TYPE)
9365 || !validate_arg (arg2, POINTER_TYPE)
9366 || !validate_arg (len, INTEGER_TYPE))
9367 return NULL_TREE;
9369 /* If the LEN parameter is zero, return zero. */
9370 if (integer_zerop (len))
9371 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9372 arg1, arg2);
9374 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9375 if (operand_equal_p (arg1, arg2, 0))
9376 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9378 p1 = c_getstr (arg1);
9379 p2 = c_getstr (arg2);
9381 if (host_integerp (len, 1) && p1 && p2)
9383 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9384 if (i > 0)
9385 return integer_one_node;
9386 else if (i < 0)
9387 return integer_minus_one_node;
9388 else
9389 return integer_zero_node;
9392 /* If the second arg is "", and the length is greater than zero,
9393 return *(const unsigned char*)arg1. */
9394 if (p2 && *p2 == '\0'
9395 && TREE_CODE (len) == INTEGER_CST
9396 && tree_int_cst_sgn (len) == 1)
9398 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9399 tree cst_uchar_ptr_node
9400 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9402 return fold_convert_loc (loc, integer_type_node,
9403 build1 (INDIRECT_REF, cst_uchar_node,
9404 fold_convert_loc (loc,
9405 cst_uchar_ptr_node,
9406 arg1)));
9409 /* If the first arg is "", and the length is greater than zero,
9410 return -*(const unsigned char*)arg2. */
9411 if (p1 && *p1 == '\0'
9412 && TREE_CODE (len) == INTEGER_CST
9413 && tree_int_cst_sgn (len) == 1)
9415 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9416 tree cst_uchar_ptr_node
9417 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9419 tree temp = fold_convert_loc (loc, integer_type_node,
9420 build1 (INDIRECT_REF, cst_uchar_node,
9421 fold_convert_loc (loc,
9422 cst_uchar_ptr_node,
9423 arg2)));
9424 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9427 /* If len parameter is one, return an expression corresponding to
9428 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9429 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9431 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9432 tree cst_uchar_ptr_node
9433 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9435 tree ind1 = fold_convert_loc (loc, integer_type_node,
9436 build1 (INDIRECT_REF, cst_uchar_node,
9437 fold_convert_loc (loc,
9438 cst_uchar_ptr_node,
9439 arg1)));
9440 tree ind2 = fold_convert_loc (loc, integer_type_node,
9441 build1 (INDIRECT_REF, cst_uchar_node,
9442 fold_convert_loc (loc,
9443 cst_uchar_ptr_node,
9444 arg2)));
9445 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9448 return NULL_TREE;
9451 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9452 ARG. Return NULL_TREE if no simplification can be made. */
9454 static tree
9455 fold_builtin_signbit (location_t loc, tree arg, tree type)
9457 if (!validate_arg (arg, REAL_TYPE))
9458 return NULL_TREE;
9460 /* If ARG is a compile-time constant, determine the result. */
9461 if (TREE_CODE (arg) == REAL_CST
9462 && !TREE_OVERFLOW (arg))
9464 REAL_VALUE_TYPE c;
9466 c = TREE_REAL_CST (arg);
9467 return (REAL_VALUE_NEGATIVE (c)
9468 ? build_one_cst (type)
9469 : build_zero_cst (type));
9472 /* If ARG is non-negative, the result is always zero. */
9473 if (tree_expr_nonnegative_p (arg))
9474 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9476 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9477 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9478 return fold_convert (type,
9479 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9480 build_real (TREE_TYPE (arg), dconst0)));
9482 return NULL_TREE;
9485 /* Fold function call to builtin copysign, copysignf or copysignl with
9486 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9487 be made. */
9489 static tree
9490 fold_builtin_copysign (location_t loc, tree fndecl,
9491 tree arg1, tree arg2, tree type)
9493 tree tem;
9495 if (!validate_arg (arg1, REAL_TYPE)
9496 || !validate_arg (arg2, REAL_TYPE))
9497 return NULL_TREE;
9499 /* copysign(X,X) is X. */
9500 if (operand_equal_p (arg1, arg2, 0))
9501 return fold_convert_loc (loc, type, arg1);
9503 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9504 if (TREE_CODE (arg1) == REAL_CST
9505 && TREE_CODE (arg2) == REAL_CST
9506 && !TREE_OVERFLOW (arg1)
9507 && !TREE_OVERFLOW (arg2))
9509 REAL_VALUE_TYPE c1, c2;
9511 c1 = TREE_REAL_CST (arg1);
9512 c2 = TREE_REAL_CST (arg2);
9513 /* c1.sign := c2.sign. */
9514 real_copysign (&c1, &c2);
9515 return build_real (type, c1);
9518 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9519 Remember to evaluate Y for side-effects. */
9520 if (tree_expr_nonnegative_p (arg2))
9521 return omit_one_operand_loc (loc, type,
9522 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9523 arg2);
9525 /* Strip sign changing operations for the first argument. */
9526 tem = fold_strip_sign_ops (arg1);
9527 if (tem)
9528 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9530 return NULL_TREE;
9533 /* Fold a call to builtin isascii with argument ARG. */
9535 static tree
9536 fold_builtin_isascii (location_t loc, tree arg)
9538 if (!validate_arg (arg, INTEGER_TYPE))
9539 return NULL_TREE;
9540 else
9542 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9543 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9544 build_int_cst (integer_type_node,
9545 ~ (unsigned HOST_WIDE_INT) 0x7f));
9546 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9547 arg, integer_zero_node);
9551 /* Fold a call to builtin toascii with argument ARG. */
9553 static tree
9554 fold_builtin_toascii (location_t loc, tree arg)
9556 if (!validate_arg (arg, INTEGER_TYPE))
9557 return NULL_TREE;
9559 /* Transform toascii(c) -> (c & 0x7f). */
9560 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9561 build_int_cst (integer_type_node, 0x7f));
9564 /* Fold a call to builtin isdigit with argument ARG. */
9566 static tree
9567 fold_builtin_isdigit (location_t loc, tree arg)
9569 if (!validate_arg (arg, INTEGER_TYPE))
9570 return NULL_TREE;
9571 else
9573 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9574 /* According to the C standard, isdigit is unaffected by locale.
9575 However, it definitely is affected by the target character set. */
9576 unsigned HOST_WIDE_INT target_digit0
9577 = lang_hooks.to_target_charset ('0');
9579 if (target_digit0 == 0)
9580 return NULL_TREE;
9582 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9583 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9584 build_int_cst (unsigned_type_node, target_digit0));
9585 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9586 build_int_cst (unsigned_type_node, 9));
9590 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9592 static tree
9593 fold_builtin_fabs (location_t loc, tree arg, tree type)
9595 if (!validate_arg (arg, REAL_TYPE))
9596 return NULL_TREE;
9598 arg = fold_convert_loc (loc, type, arg);
9599 if (TREE_CODE (arg) == REAL_CST)
9600 return fold_abs_const (arg, type);
9601 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9604 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9606 static tree
9607 fold_builtin_abs (location_t loc, tree arg, tree type)
9609 if (!validate_arg (arg, INTEGER_TYPE))
9610 return NULL_TREE;
9612 arg = fold_convert_loc (loc, type, arg);
9613 if (TREE_CODE (arg) == INTEGER_CST)
9614 return fold_abs_const (arg, type);
9615 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9618 /* Fold a fma operation with arguments ARG[012]. */
9620 tree
9621 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9622 tree type, tree arg0, tree arg1, tree arg2)
9624 if (TREE_CODE (arg0) == REAL_CST
9625 && TREE_CODE (arg1) == REAL_CST
9626 && TREE_CODE (arg2) == REAL_CST)
9627 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9629 return NULL_TREE;
9632 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9634 static tree
9635 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9637 if (validate_arg (arg0, REAL_TYPE)
9638 && validate_arg(arg1, REAL_TYPE)
9639 && validate_arg(arg2, REAL_TYPE))
9641 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9642 if (tem)
9643 return tem;
9645 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9646 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9647 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9649 return NULL_TREE;
9652 /* Fold a call to builtin fmin or fmax. */
9654 static tree
9655 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9656 tree type, bool max)
9658 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9660 /* Calculate the result when the argument is a constant. */
9661 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9663 if (res)
9664 return res;
9666 /* If either argument is NaN, return the other one. Avoid the
9667 transformation if we get (and honor) a signalling NaN. Using
9668 omit_one_operand() ensures we create a non-lvalue. */
9669 if (TREE_CODE (arg0) == REAL_CST
9670 && real_isnan (&TREE_REAL_CST (arg0))
9671 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9672 || ! TREE_REAL_CST (arg0).signalling))
9673 return omit_one_operand_loc (loc, type, arg1, arg0);
9674 if (TREE_CODE (arg1) == REAL_CST
9675 && real_isnan (&TREE_REAL_CST (arg1))
9676 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9677 || ! TREE_REAL_CST (arg1).signalling))
9678 return omit_one_operand_loc (loc, type, arg0, arg1);
9680 /* Transform fmin/fmax(x,x) -> x. */
9681 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9682 return omit_one_operand_loc (loc, type, arg0, arg1);
9684 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9685 functions to return the numeric arg if the other one is NaN.
9686 These tree codes don't honor that, so only transform if
9687 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9688 handled, so we don't have to worry about it either. */
9689 if (flag_finite_math_only)
9690 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9691 fold_convert_loc (loc, type, arg0),
9692 fold_convert_loc (loc, type, arg1));
9694 return NULL_TREE;
9697 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9699 static tree
9700 fold_builtin_carg (location_t loc, tree arg, tree type)
9702 if (validate_arg (arg, COMPLEX_TYPE)
9703 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9705 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9707 if (atan2_fn)
9709 tree new_arg = builtin_save_expr (arg);
9710 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9711 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9712 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9716 return NULL_TREE;
9719 /* Fold a call to builtin logb/ilogb. */
9721 static tree
9722 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9724 if (! validate_arg (arg, REAL_TYPE))
9725 return NULL_TREE;
9727 STRIP_NOPS (arg);
9729 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9731 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9733 switch (value->cl)
9735 case rvc_nan:
9736 case rvc_inf:
9737 /* If arg is Inf or NaN and we're logb, return it. */
9738 if (TREE_CODE (rettype) == REAL_TYPE)
9740 /* For logb(-Inf) we have to return +Inf. */
9741 if (real_isinf (value) && real_isneg (value))
9743 REAL_VALUE_TYPE tem;
9744 real_inf (&tem);
9745 return build_real (rettype, tem);
9747 return fold_convert_loc (loc, rettype, arg);
9749 /* Fall through... */
9750 case rvc_zero:
9751 /* Zero may set errno and/or raise an exception for logb, also
9752 for ilogb we don't know FP_ILOGB0. */
9753 return NULL_TREE;
9754 case rvc_normal:
9755 /* For normal numbers, proceed iff radix == 2. In GCC,
9756 normalized significands are in the range [0.5, 1.0). We
9757 want the exponent as if they were [1.0, 2.0) so get the
9758 exponent and subtract 1. */
9759 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9760 return fold_convert_loc (loc, rettype,
9761 build_int_cst (integer_type_node,
9762 REAL_EXP (value)-1));
9763 break;
9767 return NULL_TREE;
9770 /* Fold a call to builtin significand, if radix == 2. */
9772 static tree
9773 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9775 if (! validate_arg (arg, REAL_TYPE))
9776 return NULL_TREE;
9778 STRIP_NOPS (arg);
9780 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9782 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9784 switch (value->cl)
9786 case rvc_zero:
9787 case rvc_nan:
9788 case rvc_inf:
9789 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9790 return fold_convert_loc (loc, rettype, arg);
9791 case rvc_normal:
9792 /* For normal numbers, proceed iff radix == 2. */
9793 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9795 REAL_VALUE_TYPE result = *value;
9796 /* In GCC, normalized significands are in the range [0.5,
9797 1.0). We want them to be [1.0, 2.0) so set the
9798 exponent to 1. */
9799 SET_REAL_EXP (&result, 1);
9800 return build_real (rettype, result);
9802 break;
9806 return NULL_TREE;
9809 /* Fold a call to builtin frexp, we can assume the base is 2. */
9811 static tree
9812 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9814 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9815 return NULL_TREE;
9817 STRIP_NOPS (arg0);
9819 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9820 return NULL_TREE;
9822 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9824 /* Proceed if a valid pointer type was passed in. */
9825 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9827 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9828 tree frac, exp;
9830 switch (value->cl)
9832 case rvc_zero:
9833 /* For +-0, return (*exp = 0, +-0). */
9834 exp = integer_zero_node;
9835 frac = arg0;
9836 break;
9837 case rvc_nan:
9838 case rvc_inf:
9839 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9840 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9841 case rvc_normal:
9843 /* Since the frexp function always expects base 2, and in
9844 GCC normalized significands are already in the range
9845 [0.5, 1.0), we have exactly what frexp wants. */
9846 REAL_VALUE_TYPE frac_rvt = *value;
9847 SET_REAL_EXP (&frac_rvt, 0);
9848 frac = build_real (rettype, frac_rvt);
9849 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9851 break;
9852 default:
9853 gcc_unreachable ();
9856 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9857 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9858 TREE_SIDE_EFFECTS (arg1) = 1;
9859 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9862 return NULL_TREE;
9865 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9866 then we can assume the base is two. If it's false, then we have to
9867 check the mode of the TYPE parameter in certain cases. */
9869 static tree
9870 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9871 tree type, bool ldexp)
9873 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9875 STRIP_NOPS (arg0);
9876 STRIP_NOPS (arg1);
9878 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9879 if (real_zerop (arg0) || integer_zerop (arg1)
9880 || (TREE_CODE (arg0) == REAL_CST
9881 && !real_isfinite (&TREE_REAL_CST (arg0))))
9882 return omit_one_operand_loc (loc, type, arg0, arg1);
9884 /* If both arguments are constant, then try to evaluate it. */
9885 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9886 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9887 && host_integerp (arg1, 0))
9889 /* Bound the maximum adjustment to twice the range of the
9890 mode's valid exponents. Use abs to ensure the range is
9891 positive as a sanity check. */
9892 const long max_exp_adj = 2 *
9893 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9894 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9896 /* Get the user-requested adjustment. */
9897 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9899 /* The requested adjustment must be inside this range. This
9900 is a preliminary cap to avoid things like overflow, we
9901 may still fail to compute the result for other reasons. */
9902 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9904 REAL_VALUE_TYPE initial_result;
9906 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9908 /* Ensure we didn't overflow. */
9909 if (! real_isinf (&initial_result))
9911 const REAL_VALUE_TYPE trunc_result
9912 = real_value_truncate (TYPE_MODE (type), initial_result);
9914 /* Only proceed if the target mode can hold the
9915 resulting value. */
9916 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9917 return build_real (type, trunc_result);
9923 return NULL_TREE;
9926 /* Fold a call to builtin modf. */
9928 static tree
9929 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9931 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9932 return NULL_TREE;
9934 STRIP_NOPS (arg0);
9936 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9937 return NULL_TREE;
9939 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9941 /* Proceed if a valid pointer type was passed in. */
9942 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9944 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9945 REAL_VALUE_TYPE trunc, frac;
9947 switch (value->cl)
9949 case rvc_nan:
9950 case rvc_zero:
9951 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9952 trunc = frac = *value;
9953 break;
9954 case rvc_inf:
9955 /* For +-Inf, return (*arg1 = arg0, +-0). */
9956 frac = dconst0;
9957 frac.sign = value->sign;
9958 trunc = *value;
9959 break;
9960 case rvc_normal:
9961 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9962 real_trunc (&trunc, VOIDmode, value);
9963 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9964 /* If the original number was negative and already
9965 integral, then the fractional part is -0.0. */
9966 if (value->sign && frac.cl == rvc_zero)
9967 frac.sign = value->sign;
9968 break;
9971 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9972 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9973 build_real (rettype, trunc));
9974 TREE_SIDE_EFFECTS (arg1) = 1;
9975 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9976 build_real (rettype, frac));
9979 return NULL_TREE;
9982 /* Given a location LOC, an interclass builtin function decl FNDECL
9983 and its single argument ARG, return an folded expression computing
9984 the same, or NULL_TREE if we either couldn't or didn't want to fold
9985 (the latter happen if there's an RTL instruction available). */
9987 static tree
9988 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9990 enum machine_mode mode;
9992 if (!validate_arg (arg, REAL_TYPE))
9993 return NULL_TREE;
9995 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9996 return NULL_TREE;
9998 mode = TYPE_MODE (TREE_TYPE (arg));
10000 /* If there is no optab, try generic code. */
10001 switch (DECL_FUNCTION_CODE (fndecl))
10003 tree result;
10005 CASE_FLT_FN (BUILT_IN_ISINF):
10007 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10008 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10009 tree const type = TREE_TYPE (arg);
10010 REAL_VALUE_TYPE r;
10011 char buf[128];
10013 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10014 real_from_string (&r, buf);
10015 result = build_call_expr (isgr_fn, 2,
10016 fold_build1_loc (loc, ABS_EXPR, type, arg),
10017 build_real (type, r));
10018 return result;
10020 CASE_FLT_FN (BUILT_IN_FINITE):
10021 case BUILT_IN_ISFINITE:
10023 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10024 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10025 tree const type = TREE_TYPE (arg);
10026 REAL_VALUE_TYPE r;
10027 char buf[128];
10029 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10030 real_from_string (&r, buf);
10031 result = build_call_expr (isle_fn, 2,
10032 fold_build1_loc (loc, ABS_EXPR, type, arg),
10033 build_real (type, r));
10034 /*result = fold_build2_loc (loc, UNGT_EXPR,
10035 TREE_TYPE (TREE_TYPE (fndecl)),
10036 fold_build1_loc (loc, ABS_EXPR, type, arg),
10037 build_real (type, r));
10038 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10039 TREE_TYPE (TREE_TYPE (fndecl)),
10040 result);*/
10041 return result;
10043 case BUILT_IN_ISNORMAL:
10045 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10046 islessequal(fabs(x),DBL_MAX). */
10047 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10048 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10049 tree const type = TREE_TYPE (arg);
10050 REAL_VALUE_TYPE rmax, rmin;
10051 char buf[128];
10053 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10054 real_from_string (&rmax, buf);
10055 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10056 real_from_string (&rmin, buf);
10057 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10058 result = build_call_expr (isle_fn, 2, arg,
10059 build_real (type, rmax));
10060 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10061 build_call_expr (isge_fn, 2, arg,
10062 build_real (type, rmin)));
10063 return result;
10065 default:
10066 break;
10069 return NULL_TREE;
10072 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10073 ARG is the argument for the call. */
10075 static tree
10076 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10078 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10079 REAL_VALUE_TYPE r;
10081 if (!validate_arg (arg, REAL_TYPE))
10082 return NULL_TREE;
10084 switch (builtin_index)
10086 case BUILT_IN_ISINF:
10087 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10090 if (TREE_CODE (arg) == REAL_CST)
10092 r = TREE_REAL_CST (arg);
10093 if (real_isinf (&r))
10094 return real_compare (GT_EXPR, &r, &dconst0)
10095 ? integer_one_node : integer_minus_one_node;
10096 else
10097 return integer_zero_node;
10100 return NULL_TREE;
10102 case BUILT_IN_ISINF_SIGN:
10104 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10105 /* In a boolean context, GCC will fold the inner COND_EXPR to
10106 1. So e.g. "if (isinf_sign(x))" would be folded to just
10107 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10108 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10109 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10110 tree tmp = NULL_TREE;
10112 arg = builtin_save_expr (arg);
10114 if (signbit_fn && isinf_fn)
10116 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10117 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10119 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10120 signbit_call, integer_zero_node);
10121 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10122 isinf_call, integer_zero_node);
10124 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10125 integer_minus_one_node, integer_one_node);
10126 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10127 isinf_call, tmp,
10128 integer_zero_node);
10131 return tmp;
10134 case BUILT_IN_ISFINITE:
10135 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10136 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10137 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10139 if (TREE_CODE (arg) == REAL_CST)
10141 r = TREE_REAL_CST (arg);
10142 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10145 return NULL_TREE;
10147 case BUILT_IN_ISNAN:
10148 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10149 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10151 if (TREE_CODE (arg) == REAL_CST)
10153 r = TREE_REAL_CST (arg);
10154 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10157 arg = builtin_save_expr (arg);
10158 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10160 default:
10161 gcc_unreachable ();
10165 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10166 This builtin will generate code to return the appropriate floating
10167 point classification depending on the value of the floating point
10168 number passed in. The possible return values must be supplied as
10169 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10170 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10171 one floating point argument which is "type generic". */
static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on |x| (saved so it is evaluated once) so a single >=
     comparison below separates normal from subnormal values.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Build the chain of COND_EXPRs innermost-first: zero vs. subnormal.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) plays the role of DBL_MIN for MODE in the comparison
     above: |x| at or above it classifies as FP_NORMAL.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only test for infinity when the mode can represent one.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  /* The NaN test goes outermost; ORDERED_EXPR (x, x) is false exactly
     when x is a NaN.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10233 /* Fold a call to an unordered comparison function such as
10234 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10235 being called and ARG0 and ARG1 are the arguments for the call.
10236 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10237 the opposite of the desired result. UNORDERED_CODE is used
10238 for modes that can hold NaNs and ORDERED_CODE is used for
10239 the rest. */
10241 static tree
10242 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10243 enum tree_code unordered_code,
10244 enum tree_code ordered_code)
10246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10247 enum tree_code code;
10248 tree type0, type1;
10249 enum tree_code code0, code1;
10250 tree cmp_type = NULL_TREE;
10252 type0 = TREE_TYPE (arg0);
10253 type1 = TREE_TYPE (arg1);
10255 code0 = TREE_CODE (type0);
10256 code1 = TREE_CODE (type1);
10258 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10259 /* Choose the wider of two real types. */
10260 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10261 ? type0 : type1;
10262 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10263 cmp_type = type0;
10264 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10265 cmp_type = type1;
10267 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10268 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10270 if (unordered_code == UNORDERED_EXPR)
10272 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10273 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10274 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10277 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10278 : ordered_code;
10279 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10280 fold_build2_loc (loc, code, type, arg0, arg1));
10283 /* Fold a call to built-in function FNDECL with 0 arguments.
10284 IGNORE is true if the result of the function call is ignored. This
10285 function returns NULL_TREE if no simplification was possible. */
10287 static tree
10288 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10290 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10292 switch (fcode)
10294 CASE_FLT_FN (BUILT_IN_INF):
10295 case BUILT_IN_INFD32:
10296 case BUILT_IN_INFD64:
10297 case BUILT_IN_INFD128:
10298 return fold_builtin_inf (loc, type, true);
10300 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10301 return fold_builtin_inf (loc, type, false);
10303 case BUILT_IN_CLASSIFY_TYPE:
10304 return fold_builtin_classify_type (NULL_TREE);
10306 default:
10307 break;
10309 return NULL_TREE;
10312 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10313 IGNORE is true if the result of the function call is ignored. This
10314 function returns NULL_TREE if no simplification was possible. */
10316 static tree
10317 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10319 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10320 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10321 switch (fcode)
10323 case BUILT_IN_CONSTANT_P:
10325 tree val = fold_builtin_constant_p (arg0);
10327 /* Gimplification will pull the CALL_EXPR for the builtin out of
10328 an if condition. When not optimizing, we'll not CSE it back.
10329 To avoid link error types of regressions, return false now. */
10330 if (!val && !optimize)
10331 val = integer_zero_node;
10333 return val;
10336 case BUILT_IN_CLASSIFY_TYPE:
10337 return fold_builtin_classify_type (arg0);
10339 case BUILT_IN_STRLEN:
10340 return fold_builtin_strlen (loc, type, arg0);
10342 CASE_FLT_FN (BUILT_IN_FABS):
10343 case BUILT_IN_FABSD32:
10344 case BUILT_IN_FABSD64:
10345 case BUILT_IN_FABSD128:
10346 return fold_builtin_fabs (loc, arg0, type);
10348 case BUILT_IN_ABS:
10349 case BUILT_IN_LABS:
10350 case BUILT_IN_LLABS:
10351 case BUILT_IN_IMAXABS:
10352 return fold_builtin_abs (loc, arg0, type);
10354 CASE_FLT_FN (BUILT_IN_CONJ):
10355 if (validate_arg (arg0, COMPLEX_TYPE)
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10357 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10358 break;
10360 CASE_FLT_FN (BUILT_IN_CREAL):
10361 if (validate_arg (arg0, COMPLEX_TYPE)
10362 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10363 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10364 break;
10366 CASE_FLT_FN (BUILT_IN_CIMAG):
10367 if (validate_arg (arg0, COMPLEX_TYPE)
10368 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10369 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10370 break;
10372 CASE_FLT_FN (BUILT_IN_CCOS):
10373 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10375 CASE_FLT_FN (BUILT_IN_CCOSH):
10376 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10378 CASE_FLT_FN (BUILT_IN_CPROJ):
10379 return fold_builtin_cproj(loc, arg0, type);
10381 CASE_FLT_FN (BUILT_IN_CSIN):
10382 if (validate_arg (arg0, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10384 return do_mpc_arg1 (arg0, type, mpc_sin);
10385 break;
10387 CASE_FLT_FN (BUILT_IN_CSINH):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return do_mpc_arg1 (arg0, type, mpc_sinh);
10391 break;
10393 CASE_FLT_FN (BUILT_IN_CTAN):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return do_mpc_arg1 (arg0, type, mpc_tan);
10397 break;
10399 CASE_FLT_FN (BUILT_IN_CTANH):
10400 if (validate_arg (arg0, COMPLEX_TYPE)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10402 return do_mpc_arg1 (arg0, type, mpc_tanh);
10403 break;
10405 CASE_FLT_FN (BUILT_IN_CLOG):
10406 if (validate_arg (arg0, COMPLEX_TYPE)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10408 return do_mpc_arg1 (arg0, type, mpc_log);
10409 break;
10411 CASE_FLT_FN (BUILT_IN_CSQRT):
10412 if (validate_arg (arg0, COMPLEX_TYPE)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10414 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10415 break;
10417 CASE_FLT_FN (BUILT_IN_CASIN):
10418 if (validate_arg (arg0, COMPLEX_TYPE)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10420 return do_mpc_arg1 (arg0, type, mpc_asin);
10421 break;
10423 CASE_FLT_FN (BUILT_IN_CACOS):
10424 if (validate_arg (arg0, COMPLEX_TYPE)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10426 return do_mpc_arg1 (arg0, type, mpc_acos);
10427 break;
10429 CASE_FLT_FN (BUILT_IN_CATAN):
10430 if (validate_arg (arg0, COMPLEX_TYPE)
10431 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10432 return do_mpc_arg1 (arg0, type, mpc_atan);
10433 break;
10435 CASE_FLT_FN (BUILT_IN_CASINH):
10436 if (validate_arg (arg0, COMPLEX_TYPE)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10438 return do_mpc_arg1 (arg0, type, mpc_asinh);
10439 break;
10441 CASE_FLT_FN (BUILT_IN_CACOSH):
10442 if (validate_arg (arg0, COMPLEX_TYPE)
10443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10444 return do_mpc_arg1 (arg0, type, mpc_acosh);
10445 break;
10447 CASE_FLT_FN (BUILT_IN_CATANH):
10448 if (validate_arg (arg0, COMPLEX_TYPE)
10449 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10450 return do_mpc_arg1 (arg0, type, mpc_atanh);
10451 break;
10453 CASE_FLT_FN (BUILT_IN_CABS):
10454 return fold_builtin_cabs (loc, arg0, type, fndecl);
10456 CASE_FLT_FN (BUILT_IN_CARG):
10457 return fold_builtin_carg (loc, arg0, type);
10459 CASE_FLT_FN (BUILT_IN_SQRT):
10460 return fold_builtin_sqrt (loc, arg0, type);
10462 CASE_FLT_FN (BUILT_IN_CBRT):
10463 return fold_builtin_cbrt (loc, arg0, type);
10465 CASE_FLT_FN (BUILT_IN_ASIN):
10466 if (validate_arg (arg0, REAL_TYPE))
10467 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10468 &dconstm1, &dconst1, true);
10469 break;
10471 CASE_FLT_FN (BUILT_IN_ACOS):
10472 if (validate_arg (arg0, REAL_TYPE))
10473 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10474 &dconstm1, &dconst1, true);
10475 break;
10477 CASE_FLT_FN (BUILT_IN_ATAN):
10478 if (validate_arg (arg0, REAL_TYPE))
10479 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10480 break;
10482 CASE_FLT_FN (BUILT_IN_ASINH):
10483 if (validate_arg (arg0, REAL_TYPE))
10484 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10485 break;
10487 CASE_FLT_FN (BUILT_IN_ACOSH):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10490 &dconst1, NULL, true);
10491 break;
10493 CASE_FLT_FN (BUILT_IN_ATANH):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10496 &dconstm1, &dconst1, false);
10497 break;
10499 CASE_FLT_FN (BUILT_IN_SIN):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10502 break;
10504 CASE_FLT_FN (BUILT_IN_COS):
10505 return fold_builtin_cos (loc, arg0, type, fndecl);
10507 CASE_FLT_FN (BUILT_IN_TAN):
10508 return fold_builtin_tan (arg0, type);
10510 CASE_FLT_FN (BUILT_IN_CEXP):
10511 return fold_builtin_cexp (loc, arg0, type);
10513 CASE_FLT_FN (BUILT_IN_CEXPI):
10514 if (validate_arg (arg0, REAL_TYPE))
10515 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10516 break;
10518 CASE_FLT_FN (BUILT_IN_SINH):
10519 if (validate_arg (arg0, REAL_TYPE))
10520 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10521 break;
10523 CASE_FLT_FN (BUILT_IN_COSH):
10524 return fold_builtin_cosh (loc, arg0, type, fndecl);
10526 CASE_FLT_FN (BUILT_IN_TANH):
10527 if (validate_arg (arg0, REAL_TYPE))
10528 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10529 break;
10531 CASE_FLT_FN (BUILT_IN_ERF):
10532 if (validate_arg (arg0, REAL_TYPE))
10533 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10534 break;
10536 CASE_FLT_FN (BUILT_IN_ERFC):
10537 if (validate_arg (arg0, REAL_TYPE))
10538 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10539 break;
10541 CASE_FLT_FN (BUILT_IN_TGAMMA):
10542 if (validate_arg (arg0, REAL_TYPE))
10543 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10544 break;
10546 CASE_FLT_FN (BUILT_IN_EXP):
10547 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10549 CASE_FLT_FN (BUILT_IN_EXP2):
10550 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10552 CASE_FLT_FN (BUILT_IN_EXP10):
10553 CASE_FLT_FN (BUILT_IN_POW10):
10554 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10556 CASE_FLT_FN (BUILT_IN_EXPM1):
10557 if (validate_arg (arg0, REAL_TYPE))
10558 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10559 break;
10561 CASE_FLT_FN (BUILT_IN_LOG):
10562 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10564 CASE_FLT_FN (BUILT_IN_LOG2):
10565 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10567 CASE_FLT_FN (BUILT_IN_LOG10):
10568 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10570 CASE_FLT_FN (BUILT_IN_LOG1P):
10571 if (validate_arg (arg0, REAL_TYPE))
10572 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10573 &dconstm1, NULL, false);
10574 break;
10576 CASE_FLT_FN (BUILT_IN_J0):
10577 if (validate_arg (arg0, REAL_TYPE))
10578 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10579 NULL, NULL, 0);
10580 break;
10582 CASE_FLT_FN (BUILT_IN_J1):
10583 if (validate_arg (arg0, REAL_TYPE))
10584 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10585 NULL, NULL, 0);
10586 break;
10588 CASE_FLT_FN (BUILT_IN_Y0):
10589 if (validate_arg (arg0, REAL_TYPE))
10590 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10591 &dconst0, NULL, false);
10592 break;
10594 CASE_FLT_FN (BUILT_IN_Y1):
10595 if (validate_arg (arg0, REAL_TYPE))
10596 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10597 &dconst0, NULL, false);
10598 break;
10600 CASE_FLT_FN (BUILT_IN_NAN):
10601 case BUILT_IN_NAND32:
10602 case BUILT_IN_NAND64:
10603 case BUILT_IN_NAND128:
10604 return fold_builtin_nan (arg0, type, true);
10606 CASE_FLT_FN (BUILT_IN_NANS):
10607 return fold_builtin_nan (arg0, type, false);
10609 CASE_FLT_FN (BUILT_IN_FLOOR):
10610 return fold_builtin_floor (loc, fndecl, arg0);
10612 CASE_FLT_FN (BUILT_IN_CEIL):
10613 return fold_builtin_ceil (loc, fndecl, arg0);
10615 CASE_FLT_FN (BUILT_IN_TRUNC):
10616 return fold_builtin_trunc (loc, fndecl, arg0);
10618 CASE_FLT_FN (BUILT_IN_ROUND):
10619 return fold_builtin_round (loc, fndecl, arg0);
10621 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10622 CASE_FLT_FN (BUILT_IN_RINT):
10623 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10625 CASE_FLT_FN (BUILT_IN_ICEIL):
10626 CASE_FLT_FN (BUILT_IN_LCEIL):
10627 CASE_FLT_FN (BUILT_IN_LLCEIL):
10628 CASE_FLT_FN (BUILT_IN_LFLOOR):
10629 CASE_FLT_FN (BUILT_IN_IFLOOR):
10630 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10631 CASE_FLT_FN (BUILT_IN_IROUND):
10632 CASE_FLT_FN (BUILT_IN_LROUND):
10633 CASE_FLT_FN (BUILT_IN_LLROUND):
10634 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10636 CASE_FLT_FN (BUILT_IN_IRINT):
10637 CASE_FLT_FN (BUILT_IN_LRINT):
10638 CASE_FLT_FN (BUILT_IN_LLRINT):
10639 return fold_fixed_mathfn (loc, fndecl, arg0);
10641 case BUILT_IN_BSWAP16:
10642 case BUILT_IN_BSWAP32:
10643 case BUILT_IN_BSWAP64:
10644 return fold_builtin_bswap (fndecl, arg0);
10646 CASE_INT_FN (BUILT_IN_FFS):
10647 CASE_INT_FN (BUILT_IN_CLZ):
10648 CASE_INT_FN (BUILT_IN_CTZ):
10649 CASE_INT_FN (BUILT_IN_CLRSB):
10650 CASE_INT_FN (BUILT_IN_POPCOUNT):
10651 CASE_INT_FN (BUILT_IN_PARITY):
10652 return fold_builtin_bitop (fndecl, arg0);
10654 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10655 return fold_builtin_signbit (loc, arg0, type);
10657 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10658 return fold_builtin_significand (loc, arg0, type);
10660 CASE_FLT_FN (BUILT_IN_ILOGB):
10661 CASE_FLT_FN (BUILT_IN_LOGB):
10662 return fold_builtin_logb (loc, arg0, type);
10664 case BUILT_IN_ISASCII:
10665 return fold_builtin_isascii (loc, arg0);
10667 case BUILT_IN_TOASCII:
10668 return fold_builtin_toascii (loc, arg0);
10670 case BUILT_IN_ISDIGIT:
10671 return fold_builtin_isdigit (loc, arg0);
10673 CASE_FLT_FN (BUILT_IN_FINITE):
10674 case BUILT_IN_FINITED32:
10675 case BUILT_IN_FINITED64:
10676 case BUILT_IN_FINITED128:
10677 case BUILT_IN_ISFINITE:
10679 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10680 if (ret)
10681 return ret;
10682 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10685 CASE_FLT_FN (BUILT_IN_ISINF):
10686 case BUILT_IN_ISINFD32:
10687 case BUILT_IN_ISINFD64:
10688 case BUILT_IN_ISINFD128:
10690 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10691 if (ret)
10692 return ret;
10693 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10696 case BUILT_IN_ISNORMAL:
10697 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10699 case BUILT_IN_ISINF_SIGN:
10700 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10702 CASE_FLT_FN (BUILT_IN_ISNAN):
10703 case BUILT_IN_ISNAND32:
10704 case BUILT_IN_ISNAND64:
10705 case BUILT_IN_ISNAND128:
10706 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10708 case BUILT_IN_PRINTF:
10709 case BUILT_IN_PRINTF_UNLOCKED:
10710 case BUILT_IN_VPRINTF:
10711 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10713 case BUILT_IN_FREE:
10714 if (integer_zerop (arg0))
10715 return build_empty_stmt (loc);
10716 break;
10718 default:
10719 break;
10722 return NULL_TREE;
10726 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10727 IGNORE is true if the result of the function call is ignored. This
10728 function returns NULL_TREE if no simplification was possible. */
10730 static tree
10731 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10734 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10736 switch (fcode)
10738 CASE_FLT_FN (BUILT_IN_JN):
10739 if (validate_arg (arg0, INTEGER_TYPE)
10740 && validate_arg (arg1, REAL_TYPE))
10741 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10742 break;
10744 CASE_FLT_FN (BUILT_IN_YN):
10745 if (validate_arg (arg0, INTEGER_TYPE)
10746 && validate_arg (arg1, REAL_TYPE))
10747 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10748 &dconst0, false);
10749 break;
10751 CASE_FLT_FN (BUILT_IN_DREM):
10752 CASE_FLT_FN (BUILT_IN_REMAINDER):
10753 if (validate_arg (arg0, REAL_TYPE)
10754 && validate_arg(arg1, REAL_TYPE))
10755 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10756 break;
10758 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10759 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10760 if (validate_arg (arg0, REAL_TYPE)
10761 && validate_arg(arg1, POINTER_TYPE))
10762 return do_mpfr_lgamma_r (arg0, arg1, type);
10763 break;
10765 CASE_FLT_FN (BUILT_IN_ATAN2):
10766 if (validate_arg (arg0, REAL_TYPE)
10767 && validate_arg(arg1, REAL_TYPE))
10768 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10769 break;
10771 CASE_FLT_FN (BUILT_IN_FDIM):
10772 if (validate_arg (arg0, REAL_TYPE)
10773 && validate_arg(arg1, REAL_TYPE))
10774 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10775 break;
10777 CASE_FLT_FN (BUILT_IN_HYPOT):
10778 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10780 CASE_FLT_FN (BUILT_IN_CPOW):
10781 if (validate_arg (arg0, COMPLEX_TYPE)
10782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10783 && validate_arg (arg1, COMPLEX_TYPE)
10784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10785 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10786 break;
10788 CASE_FLT_FN (BUILT_IN_LDEXP):
10789 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10790 CASE_FLT_FN (BUILT_IN_SCALBN):
10791 CASE_FLT_FN (BUILT_IN_SCALBLN):
10792 return fold_builtin_load_exponent (loc, arg0, arg1,
10793 type, /*ldexp=*/false);
10795 CASE_FLT_FN (BUILT_IN_FREXP):
10796 return fold_builtin_frexp (loc, arg0, arg1, type);
10798 CASE_FLT_FN (BUILT_IN_MODF):
10799 return fold_builtin_modf (loc, arg0, arg1, type);
10801 case BUILT_IN_BZERO:
10802 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10804 case BUILT_IN_FPUTS:
10805 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10807 case BUILT_IN_FPUTS_UNLOCKED:
10808 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10810 case BUILT_IN_STRSTR:
10811 return fold_builtin_strstr (loc, arg0, arg1, type);
10813 case BUILT_IN_STRCAT:
10814 return fold_builtin_strcat (loc, arg0, arg1);
10816 case BUILT_IN_STRSPN:
10817 return fold_builtin_strspn (loc, arg0, arg1);
10819 case BUILT_IN_STRCSPN:
10820 return fold_builtin_strcspn (loc, arg0, arg1);
10822 case BUILT_IN_STRCHR:
10823 case BUILT_IN_INDEX:
10824 return fold_builtin_strchr (loc, arg0, arg1, type);
10826 case BUILT_IN_STRRCHR:
10827 case BUILT_IN_RINDEX:
10828 return fold_builtin_strrchr (loc, arg0, arg1, type);
10830 case BUILT_IN_STRCPY:
10831 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10833 case BUILT_IN_STPCPY:
10834 if (ignore)
10836 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10837 if (!fn)
10838 break;
10840 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10842 else
10843 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10844 break;
10846 case BUILT_IN_STRCMP:
10847 return fold_builtin_strcmp (loc, arg0, arg1);
10849 case BUILT_IN_STRPBRK:
10850 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10852 case BUILT_IN_EXPECT:
10853 return fold_builtin_expect (loc, arg0, arg1);
10855 CASE_FLT_FN (BUILT_IN_POW):
10856 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10858 CASE_FLT_FN (BUILT_IN_POWI):
10859 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10861 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10862 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10864 CASE_FLT_FN (BUILT_IN_FMIN):
10865 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10867 CASE_FLT_FN (BUILT_IN_FMAX):
10868 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10870 case BUILT_IN_ISGREATER:
10871 return fold_builtin_unordered_cmp (loc, fndecl,
10872 arg0, arg1, UNLE_EXPR, LE_EXPR);
10873 case BUILT_IN_ISGREATEREQUAL:
10874 return fold_builtin_unordered_cmp (loc, fndecl,
10875 arg0, arg1, UNLT_EXPR, LT_EXPR);
10876 case BUILT_IN_ISLESS:
10877 return fold_builtin_unordered_cmp (loc, fndecl,
10878 arg0, arg1, UNGE_EXPR, GE_EXPR);
10879 case BUILT_IN_ISLESSEQUAL:
10880 return fold_builtin_unordered_cmp (loc, fndecl,
10881 arg0, arg1, UNGT_EXPR, GT_EXPR);
10882 case BUILT_IN_ISLESSGREATER:
10883 return fold_builtin_unordered_cmp (loc, fndecl,
10884 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10885 case BUILT_IN_ISUNORDERED:
10886 return fold_builtin_unordered_cmp (loc, fndecl,
10887 arg0, arg1, UNORDERED_EXPR,
10888 NOP_EXPR);
10890 /* We do the folding for va_start in the expander. */
10891 case BUILT_IN_VA_START:
10892 break;
10894 case BUILT_IN_SPRINTF:
10895 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10897 case BUILT_IN_OBJECT_SIZE:
10898 return fold_builtin_object_size (arg0, arg1);
10900 case BUILT_IN_PRINTF:
10901 case BUILT_IN_PRINTF_UNLOCKED:
10902 case BUILT_IN_VPRINTF:
10903 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10905 case BUILT_IN_PRINTF_CHK:
10906 case BUILT_IN_VPRINTF_CHK:
10907 if (!validate_arg (arg0, INTEGER_TYPE)
10908 || TREE_SIDE_EFFECTS (arg0))
10909 return NULL_TREE;
10910 else
10911 return fold_builtin_printf (loc, fndecl,
10912 arg1, NULL_TREE, ignore, fcode);
10913 break;
10915 case BUILT_IN_FPRINTF:
10916 case BUILT_IN_FPRINTF_UNLOCKED:
10917 case BUILT_IN_VFPRINTF:
10918 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10919 ignore, fcode);
10921 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10922 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10924 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10925 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10927 default:
10928 break;
10930 return NULL_TREE;
10933 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10934 and ARG2. IGNORE is true if the result of the function call is ignored.
10935 This function returns NULL_TREE if no simplification was possible. */
10937 static tree
10938 fold_builtin_3 (location_t loc, tree fndecl,
10939 tree arg0, tree arg1, tree arg2, bool ignore)
10941 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10942 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10943 switch (fcode)
10946 CASE_FLT_FN (BUILT_IN_SINCOS):
10947 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10949 CASE_FLT_FN (BUILT_IN_FMA):
10950 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10951 break;
10953 CASE_FLT_FN (BUILT_IN_REMQUO):
10954 if (validate_arg (arg0, REAL_TYPE)
10955 && validate_arg(arg1, REAL_TYPE)
10956 && validate_arg(arg2, POINTER_TYPE))
10957 return do_mpfr_remquo (arg0, arg1, arg2);
10958 break;
10960 case BUILT_IN_MEMSET:
10961 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10963 case BUILT_IN_BCOPY:
10964 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10965 void_type_node, true, /*endp=*/3);
10967 case BUILT_IN_MEMCPY:
10968 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10969 type, ignore, /*endp=*/0);
10971 case BUILT_IN_MEMPCPY:
10972 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10973 type, ignore, /*endp=*/1);
10975 case BUILT_IN_MEMMOVE:
10976 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10977 type, ignore, /*endp=*/3);
10979 case BUILT_IN_STRNCAT:
10980 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10982 case BUILT_IN_STRNCPY:
10983 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10985 case BUILT_IN_STRNCMP:
10986 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10988 case BUILT_IN_MEMCHR:
10989 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10991 case BUILT_IN_BCMP:
10992 case BUILT_IN_MEMCMP:
10993 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10995 case BUILT_IN_SPRINTF:
10996 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10998 case BUILT_IN_SNPRINTF:
10999 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11001 case BUILT_IN_STRCPY_CHK:
11002 case BUILT_IN_STPCPY_CHK:
11003 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11004 ignore, fcode);
11006 case BUILT_IN_STRCAT_CHK:
11007 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11009 case BUILT_IN_PRINTF_CHK:
11010 case BUILT_IN_VPRINTF_CHK:
11011 if (!validate_arg (arg0, INTEGER_TYPE)
11012 || TREE_SIDE_EFFECTS (arg0))
11013 return NULL_TREE;
11014 else
11015 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11016 break;
11018 case BUILT_IN_FPRINTF:
11019 case BUILT_IN_FPRINTF_UNLOCKED:
11020 case BUILT_IN_VFPRINTF:
11021 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11022 ignore, fcode);
11024 case BUILT_IN_FPRINTF_CHK:
11025 case BUILT_IN_VFPRINTF_CHK:
11026 if (!validate_arg (arg1, INTEGER_TYPE)
11027 || TREE_SIDE_EFFECTS (arg1))
11028 return NULL_TREE;
11029 else
11030 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11031 ignore, fcode);
11033 default:
11034 break;
11036 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Object-size-checked memory operations; the NULL_TREE slot is for
       a known destination length, unknown here.  */
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
				       ignore, fcode);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* Only fold when the flag argument (arg1) is a side-effect-free
	 integer constant.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11087 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11088 arguments, where NARGS <= 4. IGNORE is true if the result of the
11089 function call is ignored. This function returns NULL_TREE if no
11090 simplification was possible. Note that this only folds builtins with
11091 fixed argument patterns. Foldings that do varargs-to-varargs
11092 transformations, or that match calls with more than 4 arguments,
11093 need to be handled with fold_builtin_varargs instead. */
11095 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11097 static tree
11098 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11100 tree ret = NULL_TREE;
11102 switch (nargs)
11104 case 0:
11105 ret = fold_builtin_0 (loc, fndecl, ignore);
11106 break;
11107 case 1:
11108 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11109 break;
11110 case 2:
11111 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11112 break;
11113 case 3:
11114 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11115 break;
11116 case 4:
11117 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11118 ignore);
11119 break;
11120 default:
11121 break;
11123 if (ret)
11125 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11126 SET_EXPR_LOCATION (ret, loc);
11127 TREE_NO_WARNING (ret) = 1;
11128 return ret;
11130 return NULL_TREE;
11133 /* Builtins with folding operations that operate on "..." arguments
11134 need special handling; we need to store the arguments in a convenient
11135 data structure before attempting any folding. Fortunately there are
11136 only a few builtins that fall into this category. FNDECL is the
11137 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11138 result of the function call is ignored. */
11140 static tree
11141 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11142 bool ignore ATTRIBUTE_UNUSED)
11144 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11145 tree ret = NULL_TREE;
11147 switch (fcode)
11149 case BUILT_IN_SPRINTF_CHK:
11150 case BUILT_IN_VSPRINTF_CHK:
11151 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11152 break;
11154 case BUILT_IN_SNPRINTF_CHK:
11155 case BUILT_IN_VSNPRINTF_CHK:
11156 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11157 break;
11159 case BUILT_IN_FPCLASSIFY:
11160 ret = fold_builtin_fpclassify (loc, exp);
11161 break;
11163 default:
11164 break;
11166 if (ret)
11168 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11169 SET_EXPR_LOCATION (ret, loc);
11170 TREE_NO_WARNING (ret) = 1;
11171 return ret;
11173 return NULL_TREE;
11176 /* Return true if FNDECL shouldn't be folded right now.
11177 If a built-in function has an inline attribute always_inline
11178 wrapper, defer folding it after always_inline functions have
11179 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11180 might not be performed. */
11182 bool
11183 avoid_folding_inline_builtin (tree fndecl)
11185 return (DECL_DECLARED_INLINE_P (fndecl)
11186 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11187 && cfun
11188 && !cfun->always_inline_functions_inlined
11189 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* always_inline wrappers must be inlined before their builtin
	 body may be folded; see avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then the varargs ones.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11246 /* Conveniently construct a function call expression. FNDECL names the
11247 function to be called and N arguments are passed in the array
11248 ARGARRAY. */
11250 tree
11251 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11253 tree fntype = TREE_TYPE (fndecl);
11254 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11256 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11259 /* Conveniently construct a function call expression. FNDECL names the
11260 function to be called and the arguments are passed in the vector
11261 VEC. */
11263 tree
11264 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11266 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11267 vec_safe_address (vec));
11271 /* Conveniently construct a function call expression. FNDECL names the
11272 function to be called, N is the number of arguments, and the "..."
11273 parameters are the argument expressions. */
11275 tree
11276 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11278 va_list ap;
11279 tree *argarray = XALLOCAVEC (tree, n);
11280 int i;
11282 va_start (ap, n);
11283 for (i = 0; i < n; i++)
11284 argarray[i] = va_arg (ap, tree);
11285 va_end (ap);
11286 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11289 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11290 varargs macros aren't supported by all bootstrap compilers. */
11292 tree
11293 build_call_expr (tree fndecl, int n, ...)
11295 va_list ap;
11296 tree *argarray = XALLOCAVEC (tree, n);
11297 int i;
11299 va_start (ap, n);
11300 for (i = 0; i < n; i++)
11301 argarray[i] = va_arg (ap, tree);
11302 va_end (ap);
11303 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  The call is folded
   when FN is the address of a foldable builtin; otherwise (or when
   folding fails) a plain CALL_EXPR is returned.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Builtins like always_inline wrappers must not be folded away
	     (see avoid_folding_inline_builtin); build a plain call.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins are handed to the target hook.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a directly-addressed builtin: no folding to attempt.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
11364 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11365 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11366 of arguments in ARGS to be omitted. OLDNARGS is the number of
11367 elements in ARGS. */
11369 static tree
11370 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11371 int skip, tree fndecl, int n, va_list newargs)
11373 int nargs = oldnargs - skip + n;
11374 tree *buffer;
11376 if (n > 0)
11378 int i, j;
11380 buffer = XALLOCAVEC (tree, nargs);
11381 for (i = 0; i < n; i++)
11382 buffer[i] = va_arg (newargs, tree);
11383 for (j = skip; j < oldnargs; j++, i++)
11384 buffer[i] = args[j];
11386 else
11387 buffer = args + skip;
11389 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11392 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11393 list ARGS along with N new arguments specified as the "..."
11394 parameters. SKIP is the number of arguments in ARGS to be omitted.
11395 OLDNARGS is the number of elements in ARGS. */
11397 static tree
11398 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11399 int skip, tree fndecl, int n, ...)
11401 va_list ap;
11402 tree t;
11404 va_start (ap, n);
11405 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11406 va_end (ap);
11408 return t;
11411 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11412 along with N new arguments specified as the "..." parameters. SKIP
11413 is the number of arguments in EXP to be omitted. This function is used
11414 to do varargs-to-varargs transformations. */
11416 static tree
11417 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11419 va_list ap;
11420 tree t;
11422 va_start (ap, n);
11423 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11424 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11425 va_end (ap);
11427 return t;
11430 /* Validate a single argument ARG against a tree code CODE representing
11431 a type. */
11433 static bool
11434 validate_arg (const_tree arg, enum tree_code code)
11436 if (!arg)
11437 return false;
11438 else if (code == POINTER_TYPE)
11439 return POINTER_TYPE_P (TREE_TYPE (arg));
11440 else if (code == INTEGER_TYPE)
11441 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11442 return code == TREE_CODE (TREE_TYPE (arg));
11445 /* This function validates the types of a function call argument list
11446 against a specified list of tree_codes. If the last specifier is a 0,
11447 that represents an ellipses, otherwise the last specifier must be a
11448 VOID_TYPE.
11450 This is the GIMPLE version of validate_arglist. Eventually we want to
11451 completely convert builtins.c to work from GIMPLEs and the tree based
11452 validate_arglist will then be removed. */
11454 bool
11455 validate_gimple_arglist (const_gimple call, ...)
11457 enum tree_code code;
11458 bool res = 0;
11459 va_list ap;
11460 const_tree arg;
11461 size_t i;
11463 va_start (ap, call);
11464 i = 0;
11468 code = (enum tree_code) va_arg (ap, int);
11469 switch (code)
11471 case 0:
11472 /* This signifies an ellipses, any further arguments are all ok. */
11473 res = true;
11474 goto end;
11475 case VOID_TYPE:
11476 /* This signifies an endlink, if no arguments remain, return
11477 true, otherwise return false. */
11478 res = (i == gimple_call_num_args (call));
11479 goto end;
11480 default:
11481 /* If no parameters remain or the parameter's code does not
11482 match the specified code, return false. Otherwise continue
11483 checking any remaining arguments. */
11484 arg = gimple_call_arg (call, i++);
11485 if (!validate_arg (arg, code))
11486 goto end;
11487 break;
11490 while (1);
11492 /* We need gotos here since we can only have one VA_CLOSE in a
11493 function. */
11494 end: ;
11495 va_end (ap);
11497 return res;
11500 /* This function validates the types of a function call argument list
11501 against a specified list of tree_codes. If the last specifier is a 0,
11502 that represents an ellipses, otherwise the last specifier must be a
11503 VOID_TYPE. */
11505 bool
11506 validate_arglist (const_tree callexpr, ...)
11508 enum tree_code code;
11509 bool res = 0;
11510 va_list ap;
11511 const_call_expr_arg_iterator iter;
11512 const_tree arg;
11514 va_start (ap, callexpr);
11515 init_const_call_expr_arg_iterator (callexpr, &iter);
11519 code = (enum tree_code) va_arg (ap, int);
11520 switch (code)
11522 case 0:
11523 /* This signifies an ellipses, any further arguments are all ok. */
11524 res = true;
11525 goto end;
11526 case VOID_TYPE:
11527 /* This signifies an endlink, if no arguments remain, return
11528 true, otherwise return false. */
11529 res = !more_const_call_expr_args_p (&iter);
11530 goto end;
11531 default:
11532 /* If no parameters remain or the parameter's code does not
11533 match the specified code, return false. Otherwise continue
11534 checking any remaining arguments. */
11535 arg = next_const_call_expr_arg (&iter);
11536 if (!validate_arg (arg, code))
11537 goto end;
11538 break;
11541 while (1);
11543 /* We need gotos here since we can only have one VA_CLOSE in a
11544 function. */
11545 end: ;
11546 va_end (ap);
11548 return res;
/* Default target-specific builtin expander that does nothing.  Used as
   the fallback for targetm.expand_builtin; always returns NULL_RTX so
   the caller falls back to a normal call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
11563 /* Returns true is EXP represents data that would potentially reside
11564 in a readonly section. */
11566 static bool
11567 readonly_data_expr (tree exp)
11569 STRIP_NOPS (exp);
11571 if (TREE_CODE (exp) != ADDR_EXPR)
11572 return false;
11574 exp = get_base_address (TREE_OPERAND (exp, 0));
11575 if (!exp)
11576 return false;
11578 /* Make sure we call decl_readonly_section only for trees it
11579 can handle (since it returns true for everything it doesn't
11580 understand). */
11581 if (TREE_CODE (exp) == STRING_CST
11582 || TREE_CODE (exp) == CONSTRUCTOR
11583 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11584 return decl_readonly_section (exp, 0);
11585 else
11586 return false;
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The search string must be a known constant for any folding.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: evaluate strstr at compile time.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      /* Only a one-character search string can become strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The character searched for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert S2 to a host char, honoring the target charset;
	     punt if it doesn't fit.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character searched for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert S2 to a host char; punt if it doesn't fit.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The only remaining fold needs the character to be NUL:
	 searching for '\0' from either end finds the terminator.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a known constant for any folding.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: evaluate strpbrk at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Emit strcpy (dst + strlen (dst), src), then yield DST as
	     the value of the whole expression.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
11962 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11963 to the call.
11965 Return NULL_TREE if no simplification was possible, otherwise return the
11966 simplified form of the call as a tree.
11968 The simplified form may be a constant or other expression which
11969 computes the same value, but in a more efficient manner (including
11970 calls to other builtin functions).
11972 The call may contain arguments which need to be evaluated, but
11973 which are not useful to determine the result of the call. In
11974 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11975 COMPOUND_EXPR will be an argument which must be evaluated.
11976 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11977 COMPOUND_EXPR in the chain will contain the tree for the simplified
11978 form of the builtin function call. */
11980 static tree
11981 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11983 if (!validate_arg (s1, POINTER_TYPE)
11984 || !validate_arg (s2, POINTER_TYPE))
11985 return NULL_TREE;
11986 else
11988 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11990 /* If both arguments are constants, evaluate at compile-time. */
11991 if (p1 && p2)
11993 const size_t r = strspn (p1, p2);
11994 return build_int_cst (size_type_node, r);
11997 /* If either argument is "", return NULL_TREE. */
11998 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11999 /* Evaluate and ignore both arguments in case either one has
12000 side-effects. */
12001 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12002 s1, s2);
12003 return NULL_TREE;
12007 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12008 to the call.
12010 Return NULL_TREE if no simplification was possible, otherwise return the
12011 simplified form of the call as a tree.
12013 The simplified form may be a constant or other expression which
12014 computes the same value, but in a more efficient manner (including
12015 calls to other builtin functions).
12017 The call may contain arguments which need to be evaluated, but
12018 which are not useful to determine the result of the call. In
12019 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12020 COMPOUND_EXPR will be an argument which must be evaluated.
12021 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12022 COMPOUND_EXPR in the chain will contain the tree for the simplified
12023 form of the builtin function call. */
12025 static tree
12026 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
12028 if (!validate_arg (s1, POINTER_TYPE)
12029 || !validate_arg (s2, POINTER_TYPE))
12030 return NULL_TREE;
12031 else
12033 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12035 /* If both arguments are constants, evaluate at compile-time. */
12036 if (p1 && p2)
12038 const size_t r = strcspn (p1, p2);
12039 return build_int_cst (size_type_node, r);
12042 /* If the first argument is "", return NULL_TREE. */
12043 if (p1 && *p1 == '\0')
12045 /* Evaluate and ignore argument s2 in case it has
12046 side-effects. */
12047 return omit_one_operand_loc (loc, size_type_node,
12048 size_zero_node, s2);
12051 /* If the second argument is "", return __builtin_strlen(s1). */
12052 if (p2 && *p2 == '\0')
12054 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12056 /* If the replacement _DECL isn't initialized, don't do the
12057 transformation. */
12058 if (!fn)
12059 return NULL_TREE;
12061 return build_call_expr_loc (loc, fn, 1, s1);
12063 return NULL_TREE;
12067 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12068 to the call. IGNORE is true if the value returned
12069 by the builtin will be ignored. UNLOCKED is true is true if this
12070 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12071 the known length of the string. Return NULL_TREE if no simplification
12072 was possible. */
12074 tree
12075 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12076 bool ignore, bool unlocked, tree len)
12078 /* If we're using an unlocked function, assume the other unlocked
12079 functions exist explicitly. */
12080 tree const fn_fputc = (unlocked
12081 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12082 : builtin_decl_implicit (BUILT_IN_FPUTC));
12083 tree const fn_fwrite = (unlocked
12084 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12085 : builtin_decl_implicit (BUILT_IN_FWRITE));
12087 /* If the return value is used, don't do the transformation. */
12088 if (!ignore)
12089 return NULL_TREE;
12091 /* Verify the arguments in the original call. */
12092 if (!validate_arg (arg0, POINTER_TYPE)
12093 || !validate_arg (arg1, POINTER_TYPE))
12094 return NULL_TREE;
12096 if (! len)
12097 len = c_strlen (arg0, 0);
12099 /* Get the length of the string passed to fputs. If the length
12100 can't be determined, punt. */
12101 if (!len
12102 || TREE_CODE (len) != INTEGER_CST)
12103 return NULL_TREE;
12105 switch (compare_tree_int (len, 1))
12107 case -1: /* length is 0, delete the call entirely . */
12108 return omit_one_operand_loc (loc, integer_type_node,
12109 integer_zero_node, arg1);;
12111 case 0: /* length is 1, call fputc. */
12113 const char *p = c_getstr (arg0);
12115 if (p != NULL)
12117 if (fn_fputc)
12118 return build_call_expr_loc (loc, fn_fputc, 2,
12119 build_int_cst
12120 (integer_type_node, p[0]), arg1);
12121 else
12122 return NULL_TREE;
12125 /* FALLTHROUGH */
12126 case 1: /* length is greater than 1, call fwrite. */
12128 /* If optimizing for size keep fputs. */
12129 if (optimize_function_for_size_p (cfun))
12130 return NULL_TREE;
12131 /* New argument list transforming fputs(string, stream) to
12132 fwrite(string, 1, len, stream). */
12133 if (fn_fwrite)
12134 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12135 size_one_node, len, arg1);
12136 else
12137 return NULL_TREE;
12139 default:
12140 gcc_unreachable ();
12142 return NULL_TREE;
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  VA_START_P selects which builtin is
   being checked: true for __builtin_va_start, false for
   __builtin_next_arg.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      /* ARG is the parmN argument of va_start (ap, parmN).  */
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get the
	     not the last argument even though the user used the last
	     argument.  We just warn and set the arg to be the last
	     argument so that we will get wrong-code because of
	     it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
12256 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12257 ORIG may be null if this is a 2-argument call. We don't attempt to
12258 simplify calls with more than 3 arguments.
12260 Return NULL_TREE if no simplification was possible, otherwise return the
12261 simplified form of the call as a tree. If IGNORED is true, it means that
12262 the caller does not use the returned value of the function. */
12264 static tree
12265 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12266 tree orig, int ignored)
12268 tree call, retval;
12269 const char *fmt_str = NULL;
12271 /* Verify the required arguments in the original call. We deal with two
12272 types of sprintf() calls: 'sprintf (str, fmt)' and
12273 'sprintf (dest, "%s", orig)'. */
12274 if (!validate_arg (dest, POINTER_TYPE)
12275 || !validate_arg (fmt, POINTER_TYPE))
12276 return NULL_TREE;
12277 if (orig && !validate_arg (orig, POINTER_TYPE))
12278 return NULL_TREE;
12280 /* Check whether the format is a literal string constant. */
12281 fmt_str = c_getstr (fmt);
12282 if (fmt_str == NULL)
12283 return NULL_TREE;
12285 call = NULL_TREE;
12286 retval = NULL_TREE;
12288 if (!init_target_chars ())
12289 return NULL_TREE;
12291 /* If the format doesn't contain % args or %%, use strcpy. */
12292 if (strchr (fmt_str, target_percent) == NULL)
12294 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12296 if (!fn)
12297 return NULL_TREE;
12299 /* Don't optimize sprintf (buf, "abc", ptr++). */
12300 if (orig)
12301 return NULL_TREE;
12303 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12304 'format' is known to contain no % formats. */
12305 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
/* The return value of sprintf is the number of characters written,
   which for a %%-free format is just its literal length.  */
12306 if (!ignored)
12307 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12310 /* If the format is "%s", use strcpy if the result isn't used. */
/* NOTE(review): fmt_str is already known non-NULL here (checked above),
   so the repeated fmt_str test below is redundant but harmless.  */
12311 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12313 tree fn;
12314 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12316 if (!fn)
12317 return NULL_TREE;
12319 /* Don't crash on sprintf (str1, "%s"). */
12320 if (!orig)
12321 return NULL_TREE;
12323 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12324 if (!ignored)
/* The result is used, so we need a compile-time constant length of
   ORIG to stand in for sprintf's return value.  */
12326 retval = c_strlen (orig, 1);
12327 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12328 return NULL_TREE;
12330 call = build_call_expr_loc (loc, fn, 2, dest, orig);
/* If both a replacement call and a known return value exist, emit
   (call, retval) as a COMPOUND_EXPR, converting retval to sprintf's
   declared return type.  */
12333 if (call && retval)
12335 retval = fold_convert_loc
12336 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12337 retval);
12338 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12340 else
12341 return call;
12344 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12345 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12346 attempt to simplify calls with more than 4 arguments.
12348 Return NULL_TREE if no simplification was possible, otherwise return the
12349 simplified form of the call as a tree. If IGNORED is true, it means that
12350 the caller does not use the returned value of the function. */
12352 static tree
12353 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12354 tree orig, int ignored)
12356 tree call, retval;
12357 const char *fmt_str = NULL;
12358 unsigned HOST_WIDE_INT destlen;
12360 /* Verify the required arguments in the original call. We deal with two
12361 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12362 'snprintf (dest, cst, "%s", orig)'. */
12363 if (!validate_arg (dest, POINTER_TYPE)
12364 || !validate_arg (destsize, INTEGER_TYPE)
12365 || !validate_arg (fmt, POINTER_TYPE))
12366 return NULL_TREE;
12367 if (orig && !validate_arg (orig, POINTER_TYPE))
12368 return NULL_TREE;
/* The destination size must be a compile-time constant, or we cannot
   prove the copy fits.  */
12370 if (!host_integerp (destsize, 1))
12371 return NULL_TREE;
12373 /* Check whether the format is a literal string constant. */
12374 fmt_str = c_getstr (fmt);
12375 if (fmt_str == NULL)
12376 return NULL_TREE;
12378 call = NULL_TREE;
12379 retval = NULL_TREE;
12381 if (!init_target_chars ())
12382 return NULL_TREE;
/* Safe: host_integerp (destsize, 1) was checked above.  */
12384 destlen = tree_low_cst (destsize, 1);
12386 /* If the format doesn't contain % args or %%, use strcpy. */
12387 if (strchr (fmt_str, target_percent) == NULL)
12389 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12390 size_t len = strlen (fmt_str);
12392 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12393 if (orig)
12394 return NULL_TREE;
12396 /* We could expand this as
12397 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12398 or to
12399 memcpy (str, fmt_with_nul_at_cstm1, cst);
12400 but in the former case that might increase code size
12401 and in the latter case grow .rodata section too much.
12402 So punt for now. */
12403 if (len >= destlen)
12404 return NULL_TREE;
12406 if (!fn)
12407 return NULL_TREE;
12409 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12410 'format' is known to contain no % formats and
12411 strlen (fmt) < cst. */
12412 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12414 if (!ignored)
12415 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12418 /* If the format is "%s", use strcpy if the result isn't used. */
/* NOTE(review): fmt_str is already known non-NULL here (checked above),
   so the repeated fmt_str test below is redundant but harmless.  */
12419 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12421 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12422 unsigned HOST_WIDE_INT origlen;
12424 /* Don't crash on snprintf (str1, cst, "%s"). */
12425 if (!orig)
12426 return NULL_TREE;
12428 retval = c_strlen (orig, 1);
12429 if (!retval || !host_integerp (retval, 1))
12430 return NULL_TREE;
12432 origlen = tree_low_cst (retval, 1));
12433 /* We could expand this as
12434 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12435 or to
12436 memcpy (str1, str2_with_nul_at_cstm1, cst);
12437 but in the former case that might increase code size
12438 and in the latter case grow .rodata section too much.
12439 So punt for now. */
12440 if (origlen >= destlen)
12441 return NULL_TREE;
12443 /* Convert snprintf (str1, cst, "%s", str2) into
12444 strcpy (str1, str2) if strlen (str2) < cst. */
12445 if (!fn)
12446 return NULL_TREE;
12448 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12450 if (ignored)
12451 retval = NULL_TREE;
/* If both a replacement call and a known return value exist, emit
   (call, retval) as a COMPOUND_EXPR, converting retval to snprintf's
   declared return type.  */
12454 if (call && retval)
12456 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12457 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12458 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12460 else
12461 return call;
12464 /* Expand a call EXP to __builtin_object_size. */
12467 expand_builtin_object_size (tree exp)
12469 tree ost;
12470 int object_size_type;
12471 tree fndecl = get_callee_fndecl (exp);
12473 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12475 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12476 exp, fndecl);
12477 expand_builtin_trap ();
12478 return const0_rtx;
12481 ost = CALL_EXPR_ARG (exp, 1);
12482 STRIP_NOPS (ost);
12484 if (TREE_CODE (ost) != INTEGER_CST
12485 || tree_int_cst_sgn (ost) < 0
12486 || compare_tree_int (ost, 3) > 0)
12488 error ("%Klast argument of %D is not integer constant between 0 and 3",
12489 exp, fndecl);
12490 expand_builtin_trap ();
12491 return const0_rtx;
12494 object_size_type = tree_low_cst (ost, 0);
12496 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12499 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12500 FCODE is the BUILT_IN_* to use.
12501 Return NULL_RTX if we failed; the caller should emit a normal call,
12502 otherwise try to get the result in TARGET, if convenient (and in
12503 mode MODE if that's convenient). */
12505 static rtx
12506 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12507 enum built_in_function fcode)
12509 tree dest, src, len, size;
12511 if (!validate_arglist (exp,
12512 POINTER_TYPE,
12513 fcode == BUILT_IN_MEMSET_CHK
12514 ? INTEGER_TYPE : POINTER_TYPE,
12515 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12516 return NULL_RTX;
12518 dest = CALL_EXPR_ARG (exp, 0);
12519 src = CALL_EXPR_ARG (exp, 1);
12520 len = CALL_EXPR_ARG (exp, 2);
12521 size = CALL_EXPR_ARG (exp, 3);
/* Without a constant object size we cannot reason about the check.  */
12523 if (! host_integerp (size, 1))
12524 return NULL_RTX;
/* Constant LEN, or SIZE == (size_t) -1 meaning "object size unknown":
   either way the runtime check can be resolved at compile time.  */
12526 if (host_integerp (len, 1) || integer_all_onesp (size))
12528 tree fn;
12530 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12532 warning_at (tree_nonartificial_location (exp),
12533 0, "%Kcall to %D will always overflow destination buffer",
12534 exp, get_callee_fndecl (exp));
12535 return NULL_RTX;
12538 fn = NULL_TREE;
12539 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12540 mem{cpy,pcpy,move,set} is available. */
12541 switch (fcode)
12543 case BUILT_IN_MEMCPY_CHK:
12544 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12545 break;
12546 case BUILT_IN_MEMPCPY_CHK:
12547 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12548 break;
12549 case BUILT_IN_MEMMOVE_CHK:
12550 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12551 break;
12552 case BUILT_IN_MEMSET_CHK:
12553 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12554 break;
12555 default:
12556 break;
12559 if (! fn)
12560 return NULL_RTX;
/* Expand the un-checked variant in place, preserving tail-call info.  */
12562 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12563 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12564 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12565 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12567 else if (fcode == BUILT_IN_MEMSET_CHK)
12568 return NULL_RTX;
12569 else
12571 unsigned int dest_align = get_pointer_alignment (dest);
12573 /* If DEST is not a pointer type, call the normal function. */
12574 if (dest_align == 0)
12575 return NULL_RTX;
12577 /* If SRC and DEST are the same (and not volatile), do nothing. */
12578 if (operand_equal_p (src, dest, 0))
12580 tree expr;
12582 if (fcode != BUILT_IN_MEMPCPY_CHK)
12584 /* Evaluate and ignore LEN in case it has side-effects. */
12585 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12586 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
12589 expr = fold_build_pointer_plus (dest, len);
12590 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12593 /* __memmove_chk special case. */
12594 if (fcode == BUILT_IN_MEMMOVE_CHK)
12596 unsigned int src_align = get_pointer_alignment (src);
12598 if (src_align == 0)
12599 return NULL_RTX;
12601 /* If src is categorized for a readonly section we can use
12602 normal __memcpy_chk. */
12603 if (readonly_data_expr (src))
12605 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12606 if (!fn)
12607 return NULL_RTX;
12608 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12609 dest, src, len, size);
12610 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12611 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12612 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12615 return NULL_RTX;
12619 /* Emit warning if a buffer overflow is detected at compile time. */
12621 static void
12622 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12624 int is_strlen = 0;
12625 tree len, size;
12626 location_t loc = tree_nonartificial_location (exp);
12628 switch (fcode)
12630 case BUILT_IN_STRCPY_CHK:
12631 case BUILT_IN_STPCPY_CHK:
12632 /* For __strcat_chk the warning will be emitted only if overflowing
12633 by at least strlen (dest) + 1 bytes. */
12634 case BUILT_IN_STRCAT_CHK:
12635 len = CALL_EXPR_ARG (exp, 1);
12636 size = CALL_EXPR_ARG (exp, 2);
12637 is_strlen = 1;
12638 break;
12639 case BUILT_IN_STRNCAT_CHK:
12640 case BUILT_IN_STRNCPY_CHK:
12641 case BUILT_IN_STPNCPY_CHK:
12642 len = CALL_EXPR_ARG (exp, 2);
12643 size = CALL_EXPR_ARG (exp, 3);
12644 break;
12645 case BUILT_IN_SNPRINTF_CHK:
12646 case BUILT_IN_VSNPRINTF_CHK:
12647 len = CALL_EXPR_ARG (exp, 1);
12648 size = CALL_EXPR_ARG (exp, 3);
12649 break;
12650 default:
12651 gcc_unreachable ();
12654 if (!len || !size)
12655 return;
12657 if (! host_integerp (size, 1) || integer_all_onesp (size))
12658 return;
12660 if (is_strlen)
12662 len = c_strlen (len, 1);
12663 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12664 return;
12666 else if (fcode == BUILT_IN_STRNCAT_CHK)
12668 tree src = CALL_EXPR_ARG (exp, 1);
12669 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12670 return;
12671 src = c_strlen (src, 1);
12672 if (! src || ! host_integerp (src, 1))
12674 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12675 exp, get_callee_fndecl (exp));
12676 return;
12678 else if (tree_int_cst_lt (src, size))
12679 return;
12681 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12682 return;
12684 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12685 exp, get_callee_fndecl (exp));
12688 /* Emit warning if a buffer overflow is detected at compile time
12689 in __sprintf_chk/__vsprintf_chk calls. */
12691 static void
12692 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12694 tree size, len, fmt;
12695 const char *fmt_str;
12696 int nargs = call_expr_nargs (exp);
12698 /* Verify the required arguments in the original call. */
12700 if (nargs < 4)
12701 return;
12702 size = CALL_EXPR_ARG (exp, 2);
12703 fmt = CALL_EXPR_ARG (exp, 3);
12705 if (! host_integerp (size, 1) || integer_all_onesp (size))
12706 return;
12708 /* Check whether the format is a literal string constant. */
12709 fmt_str = c_getstr (fmt);
12710 if (fmt_str == NULL)
12711 return;
12713 if (!init_target_chars ())
12714 return;
12716 /* If the format doesn't contain % args or %%, we know its size. */
12717 if (strchr (fmt_str, target_percent) == 0)
12718 len = build_int_cstu (size_type_node, strlen (fmt_str));
12719 /* If the format is "%s" and first ... argument is a string literal,
12720 we know it too. */
12721 else if (fcode == BUILT_IN_SPRINTF_CHK
12722 && strcmp (fmt_str, target_percent_s) == 0)
12724 tree arg;
12726 if (nargs < 5)
12727 return;
12728 arg = CALL_EXPR_ARG (exp, 4);
12729 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12730 return;
12732 len = c_strlen (arg, 1);
12733 if (!len || ! host_integerp (len, 1))
12734 return;
12736 else
12737 return;
12739 if (! tree_int_cst_lt (len, size))
12740 warning_at (tree_nonartificial_location (exp),
12741 0, "%Kcall to %D will always overflow destination buffer",
12742 exp, get_callee_fndecl (exp));
12745 /* Emit warning if a free is called with address of a variable. */
12747 static void
12748 maybe_emit_free_warning (tree exp)
12750 tree arg = CALL_EXPR_ARG (exp, 0);
12752 STRIP_NOPS (arg);
12753 if (TREE_CODE (arg) != ADDR_EXPR)
12754 return;
12756 arg = get_base_address (TREE_OPERAND (arg, 0));
12757 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12758 return;
12760 if (SSA_VAR_P (arg))
12761 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12762 "%Kattempt to free a non-heap object %qD", exp, arg);
12763 else
12764 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12765 "%Kattempt to free a non-heap object", exp);
12768 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12769 if possible. */
12771 tree
12772 fold_builtin_object_size (tree ptr, tree ost)
12774 unsigned HOST_WIDE_INT bytes;
12775 int object_size_type;
12777 if (!validate_arg (ptr, POINTER_TYPE)
12778 || !validate_arg (ost, INTEGER_TYPE))
12779 return NULL_TREE;
12781 STRIP_NOPS (ost);
12783 if (TREE_CODE (ost) != INTEGER_CST
12784 || tree_int_cst_sgn (ost) < 0
12785 || compare_tree_int (ost, 3) > 0)
12786 return NULL_TREE;
12788 object_size_type = tree_low_cst (ost, 0);
12790 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12791 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12792 and (size_t) 0 for types 2 and 3. */
12793 if (TREE_SIDE_EFFECTS (ptr))
12794 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12796 if (TREE_CODE (ptr) == ADDR_EXPR)
12798 bytes = compute_builtin_object_size (ptr, object_size_type);
12799 if (double_int_fits_to_tree_p (size_type_node,
12800 double_int::from_uhwi (bytes)))
12801 return build_int_cstu (size_type_node, bytes);
12803 else if (TREE_CODE (ptr) == SSA_NAME)
12805 /* If object size is not known yet, delay folding until
12806 later. Maybe subsequent passes will help determining
12807 it. */
12808 bytes = compute_builtin_object_size (ptr, object_size_type);
12809 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12810 && double_int_fits_to_tree_p (size_type_node,
12811 double_int::from_uhwi (bytes)))
12812 return build_int_cstu (size_type_node, bytes);
12815 return NULL_TREE;
12818 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12819 DEST, SRC, LEN, and SIZE are the arguments to the call.
12820 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12821 code of the builtin. If MAXLEN is not NULL, it is maximum length
12822 passed as third argument. */
12824 tree
12825 fold_builtin_memory_chk (location_t loc, tree fndecl,
12826 tree dest, tree src, tree len, tree size,
12827 tree maxlen, bool ignore,
12828 enum built_in_function fcode)
12830 tree fn;
/* For memset the second argument is the fill byte, not a pointer.  */
12832 if (!validate_arg (dest, POINTER_TYPE)
12833 || !validate_arg (src,
12834 (fcode == BUILT_IN_MEMSET_CHK
12835 ? INTEGER_TYPE : POINTER_TYPE))
12836 || !validate_arg (len, INTEGER_TYPE)
12837 || !validate_arg (size, INTEGER_TYPE))
12838 return NULL_TREE;
12840 /* If SRC and DEST are the same (and not volatile), return DEST
12841 (resp. DEST+LEN for __mempcpy_chk). */
12842 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12844 if (fcode != BUILT_IN_MEMPCPY_CHK)
12845 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12846 dest, len);
12847 else
12849 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12850 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12854 if (! host_integerp (size, 1))
12855 return NULL_TREE;
/* SIZE == (size_t) -1 means the object size is unknown; then the check
   is a no-op and we may always drop to the plain builtin below.  */
12857 if (! integer_all_onesp (size))
12859 if (! host_integerp (len, 1))
12861 /* If LEN is not constant, try MAXLEN too.
12862 For MAXLEN only allow optimizing into non-_ocs function
12863 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12864 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12866 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12868 /* (void) __mempcpy_chk () can be optimized into
12869 (void) __memcpy_chk (). */
12870 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12871 if (!fn)
12872 return NULL_TREE;
12874 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12876 return NULL_TREE;
12879 else
12880 maxlen = len;
/* Never drop the check when the copy could exceed the object size.  */
12882 if (tree_int_cst_lt (size, maxlen))
12883 return NULL_TREE;
12886 fn = NULL_TREE;
12887 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12888 mem{cpy,pcpy,move,set} is available. */
12889 switch (fcode)
12891 case BUILT_IN_MEMCPY_CHK:
12892 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12893 break;
12894 case BUILT_IN_MEMPCPY_CHK:
12895 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12896 break;
12897 case BUILT_IN_MEMMOVE_CHK:
12898 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12899 break;
12900 case BUILT_IN_MEMSET_CHK:
12901 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12902 break;
12903 default:
12904 break;
12907 if (!fn)
12908 return NULL_TREE;
12910 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12913 /* Fold a call to the __st[rp]cpy_chk builtin.
12914 DEST, SRC, and SIZE are the arguments to the call.
12915 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12916 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12917 strings passed as second argument. */
12919 tree
12920 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12921 tree src, tree size,
12922 tree maxlen, bool ignore,
12923 enum built_in_function fcode)
12925 tree len, fn;
12927 if (!validate_arg (dest, POINTER_TYPE)
12928 || !validate_arg (src, POINTER_TYPE)
12929 || !validate_arg (size, INTEGER_TYPE))
12930 return NULL_TREE;
12932 /* If SRC and DEST are the same (and not volatile), return DEST. */
12933 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12934 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12936 if (! host_integerp (size, 1))
12937 return NULL_TREE;
/* SIZE == (size_t) -1 means the object size is unknown; then the check
   can always be dropped.  Otherwise prove strlen (SRC) < SIZE first.  */
12939 if (! integer_all_onesp (size))
12941 len = c_strlen (src, 1);
12942 if (! len || ! host_integerp (len, 1))
12944 /* If LEN is not constant, try MAXLEN too.
12945 For MAXLEN only allow optimizing into non-_ocs function
12946 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12947 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12949 if (fcode == BUILT_IN_STPCPY_CHK)
12951 if (! ignore)
12952 return NULL_TREE;
12954 /* If return value of __stpcpy_chk is ignored,
12955 optimize into __strcpy_chk. */
12956 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12957 if (!fn)
12958 return NULL_TREE;
12960 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12963 if (! len || TREE_SIDE_EFFECTS (len))
12964 return NULL_TREE;
12966 /* If c_strlen returned something, but not a constant,
12967 transform __strcpy_chk into __memcpy_chk. */
12968 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12969 if (!fn)
12970 return NULL_TREE;
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
12972 len = fold_convert_loc (loc, size_type_node, len);
12973 len = size_binop_loc (loc, PLUS_EXPR, len,
12974 build_int_cst (size_type_node, 1));
12975 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12976 build_call_expr_loc (loc, fn, 4,
12977 dest, src, len, size));
12980 else
12981 maxlen = len;
/* The copy fits only when strlen (SRC) < SIZE (room for the NUL).  */
12983 if (! tree_int_cst_lt (maxlen, size))
12984 return NULL_TREE;
12987 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12988 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12989 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12990 if (!fn)
12991 return NULL_TREE;
12993 return build_call_expr_loc (loc, fn, 2, dest, src);
12996 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12997 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12998 length passed as third argument. IGNORE is true if return value can be
12999 ignored. FCODE is the BUILT_IN_* code of the builtin. */
13001 tree
13002 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
13003 tree len, tree size, tree maxlen, bool ignore,
13004 enum built_in_function fcode)
13006 tree fn;
13008 if (!validate_arg (dest, POINTER_TYPE)
13009 || !validate_arg (src, POINTER_TYPE)
13010 || !validate_arg (len, INTEGER_TYPE)
13011 || !validate_arg (size, INTEGER_TYPE))
13012 return NULL_TREE;
13014 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13016 /* If return value of __stpncpy_chk is ignored,
13017 optimize into __strncpy_chk. */
13018 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13019 if (fn)
13020 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13023 if (! host_integerp (size, 1))
13024 return NULL_TREE;
13026 if (! integer_all_onesp (size))
13028 if (! host_integerp (len, 1))
13030 /* If LEN is not constant, try MAXLEN too.
13031 For MAXLEN only allow optimizing into non-_ocs function
13032 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13033 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13034 return NULL_TREE;
13036 else
13037 maxlen = len;
13039 if (tree_int_cst_lt (size, maxlen))
13040 return NULL_TREE;
13043 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13044 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13045 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13046 if (!fn)
13047 return NULL_TREE;
13049 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13052 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13053 are the arguments to the call. */
13055 static tree
13056 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13057 tree src, tree size)
13059 tree fn;
13060 const char *p;
13062 if (!validate_arg (dest, POINTER_TYPE)
13063 || !validate_arg (src, POINTER_TYPE)
13064 || !validate_arg (size, INTEGER_TYPE))
13065 return NULL_TREE;
13067 p = c_getstr (src);
13068 /* If the SRC parameter is "", return DEST. */
13069 if (p && *p == '\0')
13070 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13072 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13073 return NULL_TREE;
13075 /* If __builtin_strcat_chk is used, assume strcat is available. */
13076 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13077 if (!fn)
13078 return NULL_TREE;
13080 return build_call_expr_loc (loc, fn, 2, dest, src);
13083 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13084 LEN, and SIZE. */
13086 static tree
13087 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13088 tree dest, tree src, tree len, tree size)
13090 tree fn;
13091 const char *p;
13093 if (!validate_arg (dest, POINTER_TYPE)
13094 || !validate_arg (src, POINTER_TYPE)
13095 || !validate_arg (size, INTEGER_TYPE)
13096 || !validate_arg (size, INTEGER_TYPE))
13097 return NULL_TREE;
13099 p = c_getstr (src);
13100 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13101 if (p && *p == '\0')
13102 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13103 else if (integer_zerop (len))
13104 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13106 if (! host_integerp (size, 1))
13107 return NULL_TREE;
13109 if (! integer_all_onesp (size))
13111 tree src_len = c_strlen (src, 1);
13112 if (src_len
13113 && host_integerp (src_len, 1)
13114 && host_integerp (len, 1)
13115 && ! tree_int_cst_lt (len, src_len))
13117 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13118 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13119 if (!fn)
13120 return NULL_TREE;
13122 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13124 return NULL_TREE;
13127 /* If __builtin_strncat_chk is used, assume strncat is available. */
13128 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13129 if (!fn)
13130 return NULL_TREE;
13132 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13135 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13136 Return NULL_TREE if a normal call should be emitted rather than
13137 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13138 or BUILT_IN_VSPRINTF_CHK. */
13140 static tree
13141 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13142 enum built_in_function fcode)
13144 tree dest, size, len, fn, fmt, flag;
13145 const char *fmt_str;
13147 /* Verify the required arguments in the original call. */
13148 if (nargs < 4)
13149 return NULL_TREE;
13150 dest = args[0];
13151 if (!validate_arg (dest, POINTER_TYPE))
13152 return NULL_TREE;
13153 flag = args[1];
13154 if (!validate_arg (flag, INTEGER_TYPE))
13155 return NULL_TREE;
13156 size = args[2];
13157 if (!validate_arg (size, INTEGER_TYPE))
13158 return NULL_TREE;
13159 fmt = args[3];
13160 if (!validate_arg (fmt, POINTER_TYPE))
13161 return NULL_TREE;
13163 if (! host_integerp (size, 1))
13164 return NULL_TREE;
/* LEN, if determined below, is the number of characters the call will
   write (excluding the NUL).  */
13166 len = NULL_TREE;
13168 if (!init_target_chars ())
13169 return NULL_TREE;
13171 /* Check whether the format is a literal string constant. */
13172 fmt_str = c_getstr (fmt);
13173 if (fmt_str != NULL)
13175 /* If the format doesn't contain % args or %%, we know the size. */
13176 if (strchr (fmt_str, target_percent) == 0)
/* For sprintf_chk extra arguments past the format are suspicious when
   the format consumes none; only fold the exact 4-argument form.  */
13178 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13179 len = build_int_cstu (size_type_node, strlen (fmt_str));
13181 /* If the format is "%s" and first ... argument is a string literal,
13182 we know the size too. */
13183 else if (fcode == BUILT_IN_SPRINTF_CHK
13184 && strcmp (fmt_str, target_percent_s) == 0)
13186 tree arg;
13188 if (nargs == 5)
13190 arg = args[4];
13191 if (validate_arg (arg, POINTER_TYPE))
13193 len = c_strlen (arg, 1);
13194 if (! len || ! host_integerp (len, 1))
13195 len = NULL_TREE;
/* SIZE == (size_t) -1 means the object size is unknown; otherwise we
   must prove the output fits before dropping the check.  */
13201 if (! integer_all_onesp (size))
13203 if (! len || ! tree_int_cst_lt (len, size))
13204 return NULL_TREE;
13207 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13208 or if format doesn't contain % chars or is "%s". */
13209 if (! integer_zerop (flag))
13211 if (fmt_str == NULL)
13212 return NULL_TREE;
13213 if (strchr (fmt_str, target_percent) != NULL
13214 && strcmp (fmt_str, target_percent_s))
13215 return NULL_TREE;
13218 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13219 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13220 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13221 if (!fn)
13222 return NULL_TREE;
/* Rebuild the call dropping the flag and size arguments (args[1..2]).  */
13224 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13227 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13228 a normal call should be emitted rather than expanding the function
13229 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13231 static tree
13232 fold_builtin_sprintf_chk (location_t loc, tree exp,
13233 enum built_in_function fcode)
13235 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13236 CALL_EXPR_ARGP (exp), fcode);
13239 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13240 NULL_TREE if a normal call should be emitted rather than expanding
13241 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13242 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13243 passed as second argument. */
13245 static tree
13246 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13247 tree maxlen, enum built_in_function fcode)
13249 tree dest, size, len, fn, fmt, flag;
13250 const char *fmt_str;
13252 /* Verify the required arguments in the original call. */
13253 if (nargs < 5)
13254 return NULL_TREE;
13255 dest = args[0];
13256 if (!validate_arg (dest, POINTER_TYPE))
13257 return NULL_TREE;
13258 len = args[1];
13259 if (!validate_arg (len, INTEGER_TYPE))
13260 return NULL_TREE;
13261 flag = args[2];
13262 if (!validate_arg (flag, INTEGER_TYPE))
13263 return NULL_TREE;
13264 size = args[3];
13265 if (!validate_arg (size, INTEGER_TYPE))
13266 return NULL_TREE;
13267 fmt = args[4];
13268 if (!validate_arg (fmt, POINTER_TYPE))
13269 return NULL_TREE;
13271 if (! host_integerp (size, 1))
13272 return NULL_TREE;
/* SIZE == (size_t) -1 means the object size is unknown; otherwise we
   must prove LEN (or the fallback MAXLEN) cannot exceed SIZE.  */
13274 if (! integer_all_onesp (size))
13276 if (! host_integerp (len, 1))
13278 /* If LEN is not constant, try MAXLEN too.
13279 For MAXLEN only allow optimizing into non-_ocs function
13280 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13281 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13282 return NULL_TREE;
13284 else
13285 maxlen = len;
13287 if (tree_int_cst_lt (size, maxlen))
13288 return NULL_TREE;
13291 if (!init_target_chars ())
13292 return NULL_TREE;
13294 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13295 or if format doesn't contain % chars or is "%s". */
13296 if (! integer_zerop (flag))
13298 fmt_str = c_getstr (fmt);
13299 if (fmt_str == NULL)
13300 return NULL_TREE;
13301 if (strchr (fmt_str, target_percent) != NULL
13302 && strcmp (fmt_str, target_percent_s))
13303 return NULL_TREE;
13306 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13307 available. */
13308 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13309 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13310 if (!fn)
13311 return NULL_TREE;
/* Rebuild the call dropping the flag and size arguments (args[2..3]).  */
13313 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13316 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13317 a normal call should be emitted rather than expanding the function
13318 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13319 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13320 passed as second argument. */
13322 tree
13323 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13324 enum built_in_function fcode)
13326 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13327 CALL_EXPR_ARGP (exp), maxlen, fcode);
13330 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13331 FMT and ARG are the arguments to the call; we don't fold cases with
13332 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13334 Return NULL_TREE if no simplification was possible, otherwise return the
13335 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13336 code of the function to be simplified. */
13338 static tree
13339 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13340 tree arg, bool ignore,
13341 enum built_in_function fcode)
13343 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13344 const char *fmt_str = NULL;
13346 /* If the return value is used, don't do the transformation. */
13347 if (! ignore)
13348 return NULL_TREE;
13350 /* Verify the required arguments in the original call. */
13351 if (!validate_arg (fmt, POINTER_TYPE))
13352 return NULL_TREE;
13354 /* Check whether the format is a literal string constant. */
13355 fmt_str = c_getstr (fmt);
13356 if (fmt_str == NULL)
13357 return NULL_TREE;
13359 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13361 /* If we're using an unlocked function, assume the other
13362 unlocked functions exist explicitly. */
13363 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13364 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13366 else
13368 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13369 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13372 if (!init_target_chars ())
13373 return NULL_TREE;
13375 if (strcmp (fmt_str, target_percent_s) == 0
13376 || strchr (fmt_str, target_percent) == NULL)
13378 const char *str;
13380 if (strcmp (fmt_str, target_percent_s) == 0)
13382 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13383 return NULL_TREE;
13385 if (!arg || !validate_arg (arg, POINTER_TYPE))
13386 return NULL_TREE;
13388 str = c_getstr (arg);
13389 if (str == NULL)
13390 return NULL_TREE;
13392 else
13394 /* The format specifier doesn't contain any '%' characters. */
13395 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13396 && arg)
13397 return NULL_TREE;
13398 str = fmt_str;
13401 /* If the string was "", printf does nothing. */
13402 if (str[0] == '\0')
13403 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13405 /* If the string has length of 1, call putchar. */
13406 if (str[1] == '\0')
13408 /* Given printf("c"), (where c is any one character,)
13409 convert "c"[0] to an int and pass that to the replacement
13410 function. */
13411 newarg = build_int_cst (integer_type_node, str[0]);
13412 if (fn_putchar)
13413 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13415 else
13417 /* If the string was "string\n", call puts("string"). */
13418 size_t len = strlen (str);
13419 if ((unsigned char)str[len - 1] == target_newline
13420 && (size_t) (int) len == len
13421 && (int) len > 0)
13423 char *newstr;
13424 tree offset_node, string_cst;
13426 /* Create a NUL-terminated string that's one char shorter
13427 than the original, stripping off the trailing '\n'. */
13428 newarg = build_string_literal (len, str);
13429 string_cst = string_constant (newarg, &offset_node);
13430 gcc_checking_assert (string_cst
13431 && (TREE_STRING_LENGTH (string_cst)
13432 == (int) len)
13433 && integer_zerop (offset_node)
13434 && (unsigned char)
13435 TREE_STRING_POINTER (string_cst)[len - 1]
13436 == target_newline);
13437 /* build_string_literal creates a new STRING_CST,
13438 modify it in place to avoid double copying. */
13439 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13440 newstr[len - 1] = '\0';
13441 if (fn_puts)
13442 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13444 else
13445 /* We'd like to arrange to call fputs(string,stdout) here,
13446 but we need stdout and don't have a way to get it yet. */
13447 return NULL_TREE;
13451 /* The other optimizations can be done only on the non-va_list variants. */
13452 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13453 return NULL_TREE;
13455 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13456 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13458 if (!arg || !validate_arg (arg, POINTER_TYPE))
13459 return NULL_TREE;
13460 if (fn_puts)
13461 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13464 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13465 else if (strcmp (fmt_str, target_percent_c) == 0)
13467 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13468 return NULL_TREE;
13469 if (fn_putchar)
13470 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13473 if (!call)
13474 return NULL_TREE;
13476 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13479 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13480 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13481 more than 3 arguments, and ARG may be null in the 2-argument case.
13483 Return NULL_TREE if no simplification was possible, otherwise return the
13484 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13485 code of the function to be simplified. */
13487 static tree
13488 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13489 tree fmt, tree arg, bool ignore,
13490 enum built_in_function fcode)
13492 tree fn_fputc, fn_fputs, call = NULL_TREE;
13493 const char *fmt_str = NULL;
13495 /* If the return value is used, don't do the transformation. */
13496 if (! ignore)
13497 return NULL_TREE;
13499 /* Verify the required arguments in the original call. */
13500 if (!validate_arg (fp, POINTER_TYPE))
13501 return NULL_TREE;
13502 if (!validate_arg (fmt, POINTER_TYPE))
13503 return NULL_TREE;
13505 /* Check whether the format is a literal string constant. */
13506 fmt_str = c_getstr (fmt);
13507 if (fmt_str == NULL)
13508 return NULL_TREE;
13510 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13512 /* If we're using an unlocked function, assume the other
13513 unlocked functions exist explicitly. */
13514 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13515 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13517 else
13519 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13520 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13523 if (!init_target_chars ())
13524 return NULL_TREE;
13526 /* If the format doesn't contain % args or %%, use strcpy. */
13527 if (strchr (fmt_str, target_percent) == NULL)
13529 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13530 && arg)
13531 return NULL_TREE;
13533 /* If the format specifier was "", fprintf does nothing. */
13534 if (fmt_str[0] == '\0')
13536 /* If FP has side-effects, just wait until gimplification is
13537 done. */
13538 if (TREE_SIDE_EFFECTS (fp))
13539 return NULL_TREE;
13541 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13544 /* When "string" doesn't contain %, replace all cases of
13545 fprintf (fp, string) with fputs (string, fp). The fputs
13546 builtin will take care of special cases like length == 1. */
13547 if (fn_fputs)
13548 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13551 /* The other optimizations can be done only on the non-va_list variants. */
13552 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13553 return NULL_TREE;
13555 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13556 else if (strcmp (fmt_str, target_percent_s) == 0)
13558 if (!arg || !validate_arg (arg, POINTER_TYPE))
13559 return NULL_TREE;
13560 if (fn_fputs)
13561 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13564 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13565 else if (strcmp (fmt_str, target_percent_c) == 0)
13567 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13568 return NULL_TREE;
13569 if (fn_fputc)
13570 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13573 if (!call)
13574 return NULL_TREE;
13575 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13578 /* Initialize format string characters in the target charset. */
13580 static bool
13581 init_target_chars (void)
13583 static bool init;
13584 if (!init)
13586 target_newline = lang_hooks.to_target_charset ('\n');
13587 target_percent = lang_hooks.to_target_charset ('%');
13588 target_c = lang_hooks.to_target_charset ('c');
13589 target_s = lang_hooks.to_target_charset ('s');
13590 if (target_newline == 0 || target_percent == 0 || target_c == 0
13591 || target_s == 0)
13592 return false;
13594 target_percent_c[0] = target_percent;
13595 target_percent_c[1] = target_c;
13596 target_percent_c[2] = '\0';
13598 target_percent_s[0] = target_percent;
13599 target_percent_s[1] = target_s;
13600 target_percent_s[2] = '\0';
13602 target_percent_s_newline[0] = target_percent;
13603 target_percent_s_newline[1] = target_s;
13604 target_percent_s_newline[2] = target_newline;
13605 target_percent_s_newline[3] = '\0';
13607 init = true;
13609 return true;
13612 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13613 and no overflow/underflow occurred. INEXACT is true if M was not
13614 exactly calculated. TYPE is the tree type for the result. This
13615 function assumes that you cleared the MPFR flags and then
13616 calculated M to see if anything subsequently set a flag prior to
13617 entering this function. Return NULL_TREE if any checks fail. */
13619 static tree
13620 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13622 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13623 overflow/underflow occurred. If -frounding-math, proceed iff the
13624 result of calling FUNC was exact. */
13625 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13626 && (!flag_rounding_math || !inexact))
13628 REAL_VALUE_TYPE rr;
13630 real_from_mpfr (&rr, m, type, GMP_RNDN);
13631 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13632 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13633 but the mpft_t is not, then we underflowed in the
13634 conversion. */
13635 if (real_isfinite (&rr)
13636 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13638 REAL_VALUE_TYPE rmode;
13640 real_convert (&rmode, TYPE_MODE (type), &rr);
13641 /* Proceed iff the specified mode can hold the value. */
13642 if (real_identical (&rmode, &rr))
13643 return build_real (type, rmode);
13646 return NULL_TREE;
13649 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13650 number and no overflow/underflow occurred. INEXACT is true if M
13651 was not exactly calculated. TYPE is the tree type for the result.
13652 This function assumes that you cleared the MPFR flags and then
13653 calculated M to see if anything subsequently set a flag prior to
13654 entering this function. Return NULL_TREE if any checks fail, if
13655 FORCE_CONVERT is true, then bypass the checks. */
13657 static tree
13658 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13660 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13661 overflow/underflow occurred. If -frounding-math, proceed iff the
13662 result of calling FUNC was exact. */
13663 if (force_convert
13664 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13665 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13666 && (!flag_rounding_math || !inexact)))
13668 REAL_VALUE_TYPE re, im;
13670 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13671 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13672 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13673 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13674 but the mpft_t is not, then we underflowed in the
13675 conversion. */
13676 if (force_convert
13677 || (real_isfinite (&re) && real_isfinite (&im)
13678 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13679 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13681 REAL_VALUE_TYPE re_mode, im_mode;
13683 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13684 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13685 /* Proceed iff the specified mode can hold the value. */
13686 if (force_convert
13687 || (real_identical (&re_mode, &re)
13688 && real_identical (&im_mode, &im)))
13689 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13690 build_real (TREE_TYPE (type), im_mode));
13693 return NULL_TREE;
13696 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13697 FUNC on it and return the resulting value as a tree with type TYPE.
13698 If MIN and/or MAX are not NULL, then the supplied ARG must be
13699 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13700 acceptable values, otherwise they are not. The mpfr precision is
13701 set to the precision of TYPE. We assume that function FUNC returns
13702 zero if the result could be calculated exactly within the requested
13703 precision. */
13705 static tree
13706 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13707 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13708 bool inclusive)
13710 tree result = NULL_TREE;
13712 STRIP_NOPS (arg);
13714 /* To proceed, MPFR must exactly represent the target floating point
13715 format, which only happens when the target base equals two. */
13716 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13717 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13719 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13721 if (real_isfinite (ra)
13722 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13723 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13725 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13726 const int prec = fmt->p;
13727 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13728 int inexact;
13729 mpfr_t m;
13731 mpfr_init2 (m, prec);
13732 mpfr_from_real (m, ra, GMP_RNDN);
13733 mpfr_clear_flags ();
13734 inexact = func (m, m, rnd);
13735 result = do_mpfr_ckconv (m, type, inexact);
13736 mpfr_clear (m);
13740 return result;
13743 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13744 FUNC on it and return the resulting value as a tree with type TYPE.
13745 The mpfr precision is set to the precision of TYPE. We assume that
13746 function FUNC returns zero if the result could be calculated
13747 exactly within the requested precision. */
13749 static tree
13750 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13751 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13753 tree result = NULL_TREE;
13755 STRIP_NOPS (arg1);
13756 STRIP_NOPS (arg2);
13758 /* To proceed, MPFR must exactly represent the target floating point
13759 format, which only happens when the target base equals two. */
13760 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13761 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13762 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13764 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13765 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13767 if (real_isfinite (ra1) && real_isfinite (ra2))
13769 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13770 const int prec = fmt->p;
13771 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13772 int inexact;
13773 mpfr_t m1, m2;
13775 mpfr_inits2 (prec, m1, m2, NULL);
13776 mpfr_from_real (m1, ra1, GMP_RNDN);
13777 mpfr_from_real (m2, ra2, GMP_RNDN);
13778 mpfr_clear_flags ();
13779 inexact = func (m1, m1, m2, rnd);
13780 result = do_mpfr_ckconv (m1, type, inexact);
13781 mpfr_clears (m1, m2, NULL);
13785 return result;
13788 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13789 FUNC on it and return the resulting value as a tree with type TYPE.
13790 The mpfr precision is set to the precision of TYPE. We assume that
13791 function FUNC returns zero if the result could be calculated
13792 exactly within the requested precision. */
13794 static tree
13795 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13796 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13798 tree result = NULL_TREE;
13800 STRIP_NOPS (arg1);
13801 STRIP_NOPS (arg2);
13802 STRIP_NOPS (arg3);
13804 /* To proceed, MPFR must exactly represent the target floating point
13805 format, which only happens when the target base equals two. */
13806 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13807 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13808 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13809 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13811 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13812 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13813 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13815 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13817 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13818 const int prec = fmt->p;
13819 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13820 int inexact;
13821 mpfr_t m1, m2, m3;
13823 mpfr_inits2 (prec, m1, m2, m3, NULL);
13824 mpfr_from_real (m1, ra1, GMP_RNDN);
13825 mpfr_from_real (m2, ra2, GMP_RNDN);
13826 mpfr_from_real (m3, ra3, GMP_RNDN);
13827 mpfr_clear_flags ();
13828 inexact = func (m1, m1, m2, m3, rnd);
13829 result = do_mpfr_ckconv (m1, type, inexact);
13830 mpfr_clears (m1, m2, m3, NULL);
13834 return result;
13837 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13838 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13839 If ARG_SINP and ARG_COSP are NULL then the result is returned
13840 as a complex value.
13841 The type is taken from the type of ARG and is used for setting the
13842 precision of the calculation and results. */
13844 static tree
13845 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13847 tree const type = TREE_TYPE (arg);
13848 tree result = NULL_TREE;
13850 STRIP_NOPS (arg);
13852 /* To proceed, MPFR must exactly represent the target floating point
13853 format, which only happens when the target base equals two. */
13854 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13855 && TREE_CODE (arg) == REAL_CST
13856 && !TREE_OVERFLOW (arg))
13858 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13860 if (real_isfinite (ra))
13862 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13863 const int prec = fmt->p;
13864 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13865 tree result_s, result_c;
13866 int inexact;
13867 mpfr_t m, ms, mc;
13869 mpfr_inits2 (prec, m, ms, mc, NULL);
13870 mpfr_from_real (m, ra, GMP_RNDN);
13871 mpfr_clear_flags ();
13872 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13873 result_s = do_mpfr_ckconv (ms, type, inexact);
13874 result_c = do_mpfr_ckconv (mc, type, inexact);
13875 mpfr_clears (m, ms, mc, NULL);
13876 if (result_s && result_c)
13878 /* If we are to return in a complex value do so. */
13879 if (!arg_sinp && !arg_cosp)
13880 return build_complex (build_complex_type (type),
13881 result_c, result_s);
13883 /* Dereference the sin/cos pointer arguments. */
13884 arg_sinp = build_fold_indirect_ref (arg_sinp);
13885 arg_cosp = build_fold_indirect_ref (arg_cosp);
13886 /* Proceed if valid pointer type were passed in. */
13887 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13888 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13890 /* Set the values. */
13891 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13892 result_s);
13893 TREE_SIDE_EFFECTS (result_s) = 1;
13894 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13895 result_c);
13896 TREE_SIDE_EFFECTS (result_c) = 1;
13897 /* Combine the assignments into a compound expr. */
13898 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13899 result_s, result_c));
13904 return result;
13907 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13908 two-argument mpfr order N Bessel function FUNC on them and return
13909 the resulting value as a tree with type TYPE. The mpfr precision
13910 is set to the precision of TYPE. We assume that function FUNC
13911 returns zero if the result could be calculated exactly within the
13912 requested precision. */
13913 static tree
13914 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13915 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13916 const REAL_VALUE_TYPE *min, bool inclusive)
13918 tree result = NULL_TREE;
13920 STRIP_NOPS (arg1);
13921 STRIP_NOPS (arg2);
13923 /* To proceed, MPFR must exactly represent the target floating point
13924 format, which only happens when the target base equals two. */
13925 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13926 && host_integerp (arg1, 0)
13927 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13929 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13930 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13932 if (n == (long)n
13933 && real_isfinite (ra)
13934 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13936 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13937 const int prec = fmt->p;
13938 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13939 int inexact;
13940 mpfr_t m;
13942 mpfr_init2 (m, prec);
13943 mpfr_from_real (m, ra, GMP_RNDN);
13944 mpfr_clear_flags ();
13945 inexact = func (m, n, m, rnd);
13946 result = do_mpfr_ckconv (m, type, inexact);
13947 mpfr_clear (m);
13951 return result;
13954 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13955 the pointer *(ARG_QUO) and return the result. The type is taken
13956 from the type of ARG0 and is used for setting the precision of the
13957 calculation and results. */
13959 static tree
13960 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13962 tree const type = TREE_TYPE (arg0);
13963 tree result = NULL_TREE;
13965 STRIP_NOPS (arg0);
13966 STRIP_NOPS (arg1);
13968 /* To proceed, MPFR must exactly represent the target floating point
13969 format, which only happens when the target base equals two. */
13970 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13971 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13972 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13974 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13975 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13977 if (real_isfinite (ra0) && real_isfinite (ra1))
13979 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13980 const int prec = fmt->p;
13981 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13982 tree result_rem;
13983 long integer_quo;
13984 mpfr_t m0, m1;
13986 mpfr_inits2 (prec, m0, m1, NULL);
13987 mpfr_from_real (m0, ra0, GMP_RNDN);
13988 mpfr_from_real (m1, ra1, GMP_RNDN);
13989 mpfr_clear_flags ();
13990 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13991 /* Remquo is independent of the rounding mode, so pass
13992 inexact=0 to do_mpfr_ckconv(). */
13993 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13994 mpfr_clears (m0, m1, NULL);
13995 if (result_rem)
13997 /* MPFR calculates quo in the host's long so it may
13998 return more bits in quo than the target int can hold
13999 if sizeof(host long) > sizeof(target int). This can
14000 happen even for native compilers in LP64 mode. In
14001 these cases, modulo the quo value with the largest
14002 number that the target int can hold while leaving one
14003 bit for the sign. */
14004 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14005 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14007 /* Dereference the quo pointer argument. */
14008 arg_quo = build_fold_indirect_ref (arg_quo);
14009 /* Proceed iff a valid pointer type was passed in. */
14010 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14012 /* Set the value. */
14013 tree result_quo
14014 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14015 build_int_cst (TREE_TYPE (arg_quo),
14016 integer_quo));
14017 TREE_SIDE_EFFECTS (result_quo) = 1;
14018 /* Combine the quo assignment with the rem. */
14019 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14020 result_quo, result_rem));
14025 return result;
14028 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14029 resulting value as a tree with type TYPE. The mpfr precision is
14030 set to the precision of TYPE. We assume that this mpfr function
14031 returns zero if the result could be calculated exactly within the
14032 requested precision. In addition, the integer pointer represented
14033 by ARG_SG will be dereferenced and set to the appropriate signgam
14034 (-1,1) value. */
14036 static tree
14037 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14039 tree result = NULL_TREE;
14041 STRIP_NOPS (arg);
14043 /* To proceed, MPFR must exactly represent the target floating point
14044 format, which only happens when the target base equals two. Also
14045 verify ARG is a constant and that ARG_SG is an int pointer. */
14046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14047 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14048 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14049 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14051 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14053 /* In addition to NaN and Inf, the argument cannot be zero or a
14054 negative integer. */
14055 if (real_isfinite (ra)
14056 && ra->cl != rvc_zero
14057 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
14059 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14060 const int prec = fmt->p;
14061 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14062 int inexact, sg;
14063 mpfr_t m;
14064 tree result_lg;
14066 mpfr_init2 (m, prec);
14067 mpfr_from_real (m, ra, GMP_RNDN);
14068 mpfr_clear_flags ();
14069 inexact = mpfr_lgamma (m, &sg, m, rnd);
14070 result_lg = do_mpfr_ckconv (m, type, inexact);
14071 mpfr_clear (m);
14072 if (result_lg)
14074 tree result_sg;
14076 /* Dereference the arg_sg pointer argument. */
14077 arg_sg = build_fold_indirect_ref (arg_sg);
14078 /* Assign the signgam value into *arg_sg. */
14079 result_sg = fold_build2 (MODIFY_EXPR,
14080 TREE_TYPE (arg_sg), arg_sg,
14081 build_int_cst (TREE_TYPE (arg_sg), sg));
14082 TREE_SIDE_EFFECTS (result_sg) = 1;
14083 /* Combine the signgam assignment with the lgamma result. */
14084 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14085 result_sg, result_lg));
14090 return result;
14093 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14094 function FUNC on it and return the resulting value as a tree with
14095 type TYPE. The mpfr precision is set to the precision of TYPE. We
14096 assume that function FUNC returns zero if the result could be
14097 calculated exactly within the requested precision. */
14099 static tree
14100 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14102 tree result = NULL_TREE;
14104 STRIP_NOPS (arg);
14106 /* To proceed, MPFR must exactly represent the target floating point
14107 format, which only happens when the target base equals two. */
14108 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14109 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14110 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14112 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14113 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14115 if (real_isfinite (re) && real_isfinite (im))
14117 const struct real_format *const fmt =
14118 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14119 const int prec = fmt->p;
14120 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14121 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14122 int inexact;
14123 mpc_t m;
14125 mpc_init2 (m, prec);
14126 mpfr_from_real (mpc_realref(m), re, rnd);
14127 mpfr_from_real (mpc_imagref(m), im, rnd);
14128 mpfr_clear_flags ();
14129 inexact = func (m, m, crnd);
14130 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14131 mpc_clear (m);
14135 return result;
14138 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14139 mpc function FUNC on it and return the resulting value as a tree
14140 with type TYPE. The mpfr precision is set to the precision of
14141 TYPE. We assume that function FUNC returns zero if the result
14142 could be calculated exactly within the requested precision. If
14143 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14144 in the arguments and/or results. */
14146 tree
14147 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14148 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14150 tree result = NULL_TREE;
14152 STRIP_NOPS (arg0);
14153 STRIP_NOPS (arg1);
14155 /* To proceed, MPFR must exactly represent the target floating point
14156 format, which only happens when the target base equals two. */
14157 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14158 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14159 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14161 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14163 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14164 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14165 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14166 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14168 if (do_nonfinite
14169 || (real_isfinite (re0) && real_isfinite (im0)
14170 && real_isfinite (re1) && real_isfinite (im1)))
14172 const struct real_format *const fmt =
14173 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14174 const int prec = fmt->p;
14175 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14176 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14177 int inexact;
14178 mpc_t m0, m1;
14180 mpc_init2 (m0, prec);
14181 mpc_init2 (m1, prec);
14182 mpfr_from_real (mpc_realref(m0), re0, rnd);
14183 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14184 mpfr_from_real (mpc_realref(m1), re1, rnd);
14185 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14186 mpfr_clear_flags ();
14187 inexact = func (m0, m0, m1, crnd);
14188 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14189 mpc_clear (m0);
14190 mpc_clear (m1);
14194 return result;
14197 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14198 a normal call should be emitted rather than expanding the function
14199 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14201 static tree
14202 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14204 int nargs = gimple_call_num_args (stmt);
14206 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14207 (nargs > 0
14208 ? gimple_call_arg_ptr (stmt, 0)
14209 : &error_mark_node), fcode);
14212 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14213 a normal call should be emitted rather than expanding the function
14214 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14215 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14216 passed as second argument. */
14218 tree
14219 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14220 enum built_in_function fcode)
14222 int nargs = gimple_call_num_args (stmt);
14224 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14225 (nargs > 0
14226 ? gimple_call_arg_ptr (stmt, 0)
14227 : &error_mark_node), maxlen, fcode);
14230 /* Builtins with folding operations that operate on "..." arguments
14231 need special handling; we need to store the arguments in a convenient
14232 data structure before attempting any folding. Fortunately there are
14233 only a few builtins that fall into this category. FNDECL is the
14234 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14235 result of the function call is ignored. */
14237 static tree
14238 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14239 bool ignore ATTRIBUTE_UNUSED)
14241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14242 tree ret = NULL_TREE;
14244 switch (fcode)
14246 case BUILT_IN_SPRINTF_CHK:
14247 case BUILT_IN_VSPRINTF_CHK:
14248 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14249 break;
14251 case BUILT_IN_SNPRINTF_CHK:
14252 case BUILT_IN_VSNPRINTF_CHK:
14253 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14255 default:
14256 break;
14258 if (ret)
14260 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14261 TREE_NO_WARNING (ret) = 1;
14262 return ret;
14264 return NULL_TREE;
14267 /* A wrapper function for builtin folding that prevents warnings for
14268 "statement without effect" and the like, caused by removing the
14269 call node earlier than the warning is generated. */
14271 tree
14272 fold_call_stmt (gimple stmt, bool ignore)
14274 tree ret = NULL_TREE;
14275 tree fndecl = gimple_call_fndecl (stmt);
14276 location_t loc = gimple_location (stmt);
14277 if (fndecl
14278 && TREE_CODE (fndecl) == FUNCTION_DECL
14279 && DECL_BUILT_IN (fndecl)
14280 && !gimple_call_va_arg_pack_p (stmt))
14282 int nargs = gimple_call_num_args (stmt);
14283 tree *args = (nargs > 0
14284 ? gimple_call_arg_ptr (stmt, 0)
14285 : &error_mark_node);
14287 if (avoid_folding_inline_builtin (fndecl))
14288 return NULL_TREE;
14289 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14291 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14293 else
14295 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14296 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14297 if (!ret)
14298 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14299 if (ret)
14301 /* Propagate location information from original call to
14302 expansion of builtin. Otherwise things like
14303 maybe_emit_chk_warning, that operate on the expansion
14304 of a builtin, will use the wrong location information. */
14305 if (gimple_has_location (stmt))
14307 tree realret = ret;
14308 if (TREE_CODE (ret) == NOP_EXPR)
14309 realret = TREE_OPERAND (ret, 0);
14310 if (CAN_HAVE_LOCATION_P (realret)
14311 && !EXPR_HAS_LOCATION (realret))
14312 SET_EXPR_LOCATION (realret, loc);
14313 return realret;
14315 return ret;
14319 return NULL_TREE;
14322 /* Look up the function in builtin_decl that corresponds to DECL
14323 and set ASMSPEC as its user assembler name. DECL must be a
14324 function decl that declares a builtin. */
14326 void
14327 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14329 tree builtin;
14330 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14331 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14332 && asmspec != 0);
14334 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14335 set_user_assembler_name (builtin, asmspec);
14336 switch (DECL_FUNCTION_CODE (decl))
14338 case BUILT_IN_MEMCPY:
14339 init_block_move_fn (asmspec);
14340 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14341 break;
14342 case BUILT_IN_MEMSET:
14343 init_block_clear_fn (asmspec);
14344 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14345 break;
14346 case BUILT_IN_MEMMOVE:
14347 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14348 break;
14349 case BUILT_IN_MEMCMP:
14350 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14351 break;
14352 case BUILT_IN_ABORT:
14353 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14354 break;
14355 case BUILT_IN_FFS:
14356 if (INT_TYPE_SIZE < BITS_PER_WORD)
14358 set_user_assembler_libfunc ("ffs", asmspec);
14359 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14360 MODE_INT, 0), "ffs");
14362 break;
14363 default:
14364 break;
14368 /* Return true if DECL is a builtin that expands to a constant or similarly
14369 simple code. */
14370 bool
14371 is_simple_builtin (tree decl)
14373 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14374 switch (DECL_FUNCTION_CODE (decl))
14376 /* Builtins that expand to constants. */
14377 case BUILT_IN_CONSTANT_P:
14378 case BUILT_IN_EXPECT:
14379 case BUILT_IN_OBJECT_SIZE:
14380 case BUILT_IN_UNREACHABLE:
14381 /* Simple register moves or loads from stack. */
14382 case BUILT_IN_ASSUME_ALIGNED:
14383 case BUILT_IN_RETURN_ADDRESS:
14384 case BUILT_IN_EXTRACT_RETURN_ADDR:
14385 case BUILT_IN_FROB_RETURN_ADDR:
14386 case BUILT_IN_RETURN:
14387 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14388 case BUILT_IN_FRAME_ADDRESS:
14389 case BUILT_IN_VA_END:
14390 case BUILT_IN_STACK_SAVE:
14391 case BUILT_IN_STACK_RESTORE:
14392 /* Exception state returns or moves registers around. */
14393 case BUILT_IN_EH_FILTER:
14394 case BUILT_IN_EH_POINTER:
14395 case BUILT_IN_EH_COPY_VALUES:
14396 return true;
14398 default:
14399 return false;
14402 return false;
14405 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14406 most probably expanded inline into reasonably simple code. This is a
14407 superset of is_simple_builtin. */
14408 bool
14409 is_inexpensive_builtin (tree decl)
14411 if (!decl)
14412 return false;
14413 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14414 return true;
14415 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14416 switch (DECL_FUNCTION_CODE (decl))
14418 case BUILT_IN_ABS:
14419 case BUILT_IN_ALLOCA:
14420 case BUILT_IN_ALLOCA_WITH_ALIGN:
14421 case BUILT_IN_BSWAP16:
14422 case BUILT_IN_BSWAP32:
14423 case BUILT_IN_BSWAP64:
14424 case BUILT_IN_CLZ:
14425 case BUILT_IN_CLZIMAX:
14426 case BUILT_IN_CLZL:
14427 case BUILT_IN_CLZLL:
14428 case BUILT_IN_CTZ:
14429 case BUILT_IN_CTZIMAX:
14430 case BUILT_IN_CTZL:
14431 case BUILT_IN_CTZLL:
14432 case BUILT_IN_FFS:
14433 case BUILT_IN_FFSIMAX:
14434 case BUILT_IN_FFSL:
14435 case BUILT_IN_FFSLL:
14436 case BUILT_IN_IMAXABS:
14437 case BUILT_IN_FINITE:
14438 case BUILT_IN_FINITEF:
14439 case BUILT_IN_FINITEL:
14440 case BUILT_IN_FINITED32:
14441 case BUILT_IN_FINITED64:
14442 case BUILT_IN_FINITED128:
14443 case BUILT_IN_FPCLASSIFY:
14444 case BUILT_IN_ISFINITE:
14445 case BUILT_IN_ISINF_SIGN:
14446 case BUILT_IN_ISINF:
14447 case BUILT_IN_ISINFF:
14448 case BUILT_IN_ISINFL:
14449 case BUILT_IN_ISINFD32:
14450 case BUILT_IN_ISINFD64:
14451 case BUILT_IN_ISINFD128:
14452 case BUILT_IN_ISNAN:
14453 case BUILT_IN_ISNANF:
14454 case BUILT_IN_ISNANL:
14455 case BUILT_IN_ISNAND32:
14456 case BUILT_IN_ISNAND64:
14457 case BUILT_IN_ISNAND128:
14458 case BUILT_IN_ISNORMAL:
14459 case BUILT_IN_ISGREATER:
14460 case BUILT_IN_ISGREATEREQUAL:
14461 case BUILT_IN_ISLESS:
14462 case BUILT_IN_ISLESSEQUAL:
14463 case BUILT_IN_ISLESSGREATER:
14464 case BUILT_IN_ISUNORDERED:
14465 case BUILT_IN_VA_ARG_PACK:
14466 case BUILT_IN_VA_ARG_PACK_LEN:
14467 case BUILT_IN_VA_COPY:
14468 case BUILT_IN_TRAP:
14469 case BUILT_IN_SAVEREGS:
14470 case BUILT_IN_POPCOUNTL:
14471 case BUILT_IN_POPCOUNTLL:
14472 case BUILT_IN_POPCOUNTIMAX:
14473 case BUILT_IN_POPCOUNT:
14474 case BUILT_IN_PARITYL:
14475 case BUILT_IN_PARITYLL:
14476 case BUILT_IN_PARITYIMAX:
14477 case BUILT_IN_PARITY:
14478 case BUILT_IN_LABS:
14479 case BUILT_IN_LLABS:
14480 case BUILT_IN_PREFETCH:
14481 return true;
14483 default:
14484 return is_simple_builtin (decl);
14487 return false;