/* gcc/builtins.c — snapshot from official-gcc.git ("Daily bump" revision),
   gitweb blob 0825391a0d351acad410fa63ca4f5563b3983966.  */
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
64 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
66 struct target_builtins default_target_builtins;
67 #if SWITCHABLE_TARGET
68 struct target_builtins *this_target_builtins = &default_target_builtins;
69 #endif
71 /* Define the names of the builtin function types and codes. */
72 const char *const built_in_class_names[BUILT_IN_LAST]
73 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
75 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
76 const char * built_in_names[(int) END_BUILTINS] =
78 #include "builtins.def"
80 #undef DEF_BUILTIN
82 /* Setup an array of _DECL trees, make sure each element is
83 initialized to NULL_TREE. */
84 builtin_info_type builtin_info;
86 /* Non-zero if __builtin_constant_p should be folded right away. */
87 bool force_folding_builtin_constant_p;
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static void expand_builtin_update_setjmp_buf (rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strcmp (tree, rtx);
122 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
123 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree fold_builtin_nan (tree, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static bool integer_valued_real_p (tree);
150 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
151 static bool readonly_data_expr (tree);
152 static rtx expand_builtin_fabs (tree, rtx, rtx);
153 static rtx expand_builtin_signbit (tree, rtx);
154 static tree fold_builtin_sqrt (location_t, tree, tree);
155 static tree fold_builtin_cbrt (location_t, tree, tree);
156 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_cos (location_t, tree, tree, tree);
159 static tree fold_builtin_cosh (location_t, tree, tree, tree);
160 static tree fold_builtin_tan (tree, tree);
161 static tree fold_builtin_trunc (location_t, tree, tree);
162 static tree fold_builtin_floor (location_t, tree, tree);
163 static tree fold_builtin_ceil (location_t, tree, tree);
164 static tree fold_builtin_round (location_t, tree, tree);
165 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
166 static tree fold_builtin_bitop (tree, tree);
167 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
168 static tree fold_builtin_strchr (location_t, tree, tree, tree);
169 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
171 static tree fold_builtin_strcmp (location_t, tree, tree);
172 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
173 static tree fold_builtin_signbit (location_t, tree, tree);
174 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
175 static tree fold_builtin_isascii (location_t, tree);
176 static tree fold_builtin_toascii (location_t, tree);
177 static tree fold_builtin_isdigit (location_t, tree);
178 static tree fold_builtin_fabs (location_t, tree, tree);
179 static tree fold_builtin_abs (location_t, tree, tree);
180 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 enum tree_code);
182 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
183 static tree fold_builtin_0 (location_t, tree, bool);
184 static tree fold_builtin_1 (location_t, tree, tree, bool);
185 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
186 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
187 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
188 static tree fold_builtin_varargs (location_t, tree, tree, bool);
190 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
191 static tree fold_builtin_strstr (location_t, tree, tree, tree);
192 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
197 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
207 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
208 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
209 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
210 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
211 enum built_in_function);
212 static bool init_target_chars (void);
214 static unsigned HOST_WIDE_INT target_newline;
215 static unsigned HOST_WIDE_INT target_percent;
216 static unsigned HOST_WIDE_INT target_c;
217 static unsigned HOST_WIDE_INT target_s;
218 static char target_percent_c[3];
219 static char target_percent_s[3];
220 static char target_percent_s_newline[4];
221 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_arg2 (tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_arg3 (tree, tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_sincos (tree, tree, tree);
228 static tree do_mpfr_bessel_n (tree, tree, tree,
229 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
230 const REAL_VALUE_TYPE *, bool);
231 static tree do_mpfr_remquo (tree, tree, tree);
232 static tree do_mpfr_lgamma_r (tree, tree, tree);
233 static void expand_builtin_sync_synchronize (void);
235 /* Return true if NAME starts with __builtin_ or __sync_. */
237 static bool
238 is_builtin_name (const char *name)
240 if (strncmp (name, "__builtin_", 10) == 0)
241 return true;
242 if (strncmp (name, "__sync_", 7) == 0)
243 return true;
244 if (strncmp (name, "__atomic_", 9) == 0)
245 return true;
246 if (flag_cilkplus
247 && (!strcmp (name, "__cilkrts_detach")
248 || !strcmp (name, "__cilkrts_pop_frame")))
249 return true;
250 return false;
254 /* Return true if DECL is a function symbol representing a built-in. */
256 bool
257 is_builtin_fn (tree decl)
259 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
262 /* By default we assume that c99 functions are present at the runtime,
263 but sincos is not. */
264 bool
265 default_libc_has_function (enum function_class fn_class)
267 if (fn_class == function_c94
268 || fn_class == function_c99_misc
269 || fn_class == function_c99_math_complex)
270 return true;
272 return false;
275 bool
276 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
278 return true;
281 bool
282 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
284 return false;
287 /* Return true if NODE should be considered for inline expansion regardless
288 of the optimization level. This means whenever a function is invoked with
289 its "internal" name, which normally contains the prefix "__builtin". */
291 static bool
292 called_as_built_in (tree node)
294 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
295 we want the name used to call the function, not the name it
296 will have. */
297 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
298 return is_builtin_name (name);
301 /* Compute values M and N such that M divides (address of EXP - N) and such
302 that N < M. If these numbers can be determined, store M in alignp and N in
303 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
304 *alignp and any bit-offset to *bitposp.
306 Note that the address (and thus the alignment) computed here is based
307 on the address to which a symbol resolves, whereas DECL_ALIGN is based
308 on the address at which an object is actually located. These two
309 addresses are not always the same. For example, on ARM targets,
310 the address &foo of a Thumb function foo() has the lowest bit set,
311 whereas foo() itself starts on an even address.
313 If ADDR_P is true we are taking the address of the memory reference EXP
314 and thus cannot rely on the access taking place. */
316 static bool
317 get_object_alignment_2 (tree exp, unsigned int *alignp,
318 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
320 HOST_WIDE_INT bitsize, bitpos;
321 tree offset;
322 enum machine_mode mode;
323 int unsignedp, volatilep;
324 unsigned int align = BITS_PER_UNIT;
325 bool known_alignment = false;
327 /* Get the innermost object and the constant (bitpos) and possibly
328 variable (offset) offset of the access. */
329 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
330 &mode, &unsignedp, &volatilep, true);
332 /* Extract alignment information from the innermost object and
333 possibly adjust bitpos and offset. */
334 if (TREE_CODE (exp) == FUNCTION_DECL)
336 /* Function addresses can encode extra information besides their
337 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
338 allows the low bit to be used as a virtual bit, we know
339 that the address itself must be at least 2-byte aligned. */
340 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
341 align = 2 * BITS_PER_UNIT;
343 else if (TREE_CODE (exp) == LABEL_DECL)
345 else if (TREE_CODE (exp) == CONST_DECL)
347 /* The alignment of a CONST_DECL is determined by its initializer. */
348 exp = DECL_INITIAL (exp);
349 align = TYPE_ALIGN (TREE_TYPE (exp));
350 #ifdef CONSTANT_ALIGNMENT
351 if (CONSTANT_CLASS_P (exp))
352 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
353 #endif
354 known_alignment = true;
356 else if (DECL_P (exp))
358 align = DECL_ALIGN (exp);
359 known_alignment = true;
361 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
363 align = TYPE_ALIGN (TREE_TYPE (exp));
365 else if (TREE_CODE (exp) == INDIRECT_REF
366 || TREE_CODE (exp) == MEM_REF
367 || TREE_CODE (exp) == TARGET_MEM_REF)
369 tree addr = TREE_OPERAND (exp, 0);
370 unsigned ptr_align;
371 unsigned HOST_WIDE_INT ptr_bitpos;
372 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
374 /* If the address is explicitely aligned, handle that. */
375 if (TREE_CODE (addr) == BIT_AND_EXPR
376 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
378 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
379 ptr_bitmask *= BITS_PER_UNIT;
380 align = ptr_bitmask & -ptr_bitmask;
381 addr = TREE_OPERAND (addr, 0);
384 known_alignment
385 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
386 align = MAX (ptr_align, align);
388 /* Re-apply explicit alignment to the bitpos. */
389 ptr_bitpos &= ptr_bitmask;
391 /* The alignment of the pointer operand in a TARGET_MEM_REF
392 has to take the variable offset parts into account. */
393 if (TREE_CODE (exp) == TARGET_MEM_REF)
395 if (TMR_INDEX (exp))
397 unsigned HOST_WIDE_INT step = 1;
398 if (TMR_STEP (exp))
399 step = TREE_INT_CST_LOW (TMR_STEP (exp));
400 align = MIN (align, (step & -step) * BITS_PER_UNIT);
402 if (TMR_INDEX2 (exp))
403 align = BITS_PER_UNIT;
404 known_alignment = false;
407 /* When EXP is an actual memory reference then we can use
408 TYPE_ALIGN of a pointer indirection to derive alignment.
409 Do so only if get_pointer_alignment_1 did not reveal absolute
410 alignment knowledge and if using that alignment would
411 improve the situation. */
412 if (!addr_p && !known_alignment
413 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
414 align = TYPE_ALIGN (TREE_TYPE (exp));
415 else
417 /* Else adjust bitpos accordingly. */
418 bitpos += ptr_bitpos;
419 if (TREE_CODE (exp) == MEM_REF
420 || TREE_CODE (exp) == TARGET_MEM_REF)
421 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
424 else if (TREE_CODE (exp) == STRING_CST)
426 /* STRING_CST are the only constant objects we allow to be not
427 wrapped inside a CONST_DECL. */
428 align = TYPE_ALIGN (TREE_TYPE (exp));
429 #ifdef CONSTANT_ALIGNMENT
430 if (CONSTANT_CLASS_P (exp))
431 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
432 #endif
433 known_alignment = true;
436 /* If there is a non-constant offset part extract the maximum
437 alignment that can prevail. */
438 if (offset)
440 unsigned int trailing_zeros = tree_ctz (offset);
441 if (trailing_zeros < HOST_BITS_PER_INT)
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
449 *alignp = align;
450 *bitposp = bitpos & (*alignp - 1);
451 return known_alignment;
454 /* For a memory reference expression EXP compute values M and N such that M
455 divides (&EXP - N) and such that N < M. If these numbers can be determined,
456 store M in alignp and N in *BITPOSP and return true. Otherwise return false
457 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
459 bool
460 get_object_alignment_1 (tree exp, unsigned int *alignp,
461 unsigned HOST_WIDE_INT *bitposp)
463 return get_object_alignment_2 (exp, alignp, bitposp, false);
466 /* Return the alignment in bits of EXP, an object. */
468 unsigned int
469 get_object_alignment (tree exp)
471 unsigned HOST_WIDE_INT bitpos = 0;
472 unsigned int align;
474 get_object_alignment_1 (exp, &align, &bitpos);
476 /* align and bitpos now specify known low bits of the pointer.
477 ptr & (align - 1) == bitpos. */
479 if (bitpos != 0)
480 align = (bitpos & -bitpos);
481 return align;
484 /* For a pointer valued expression EXP compute values M and N such that M
485 divides (EXP - N) and such that N < M. If these numbers can be determined,
486 store M in alignp and N in *BITPOSP and return true. Return false if
487 the results are just a conservative approximation.
489 If EXP is not a pointer, false is returned too. */
491 bool
492 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
493 unsigned HOST_WIDE_INT *bitposp)
495 STRIP_NOPS (exp);
497 if (TREE_CODE (exp) == ADDR_EXPR)
498 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
499 alignp, bitposp, true);
500 else if (TREE_CODE (exp) == SSA_NAME
501 && POINTER_TYPE_P (TREE_TYPE (exp)))
503 unsigned int ptr_align, ptr_misalign;
504 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
506 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
508 *bitposp = ptr_misalign * BITS_PER_UNIT;
509 *alignp = ptr_align * BITS_PER_UNIT;
510 /* We cannot really tell whether this result is an approximation. */
511 return true;
513 else
515 *bitposp = 0;
516 *alignp = BITS_PER_UNIT;
517 return false;
520 else if (TREE_CODE (exp) == INTEGER_CST)
522 *alignp = BIGGEST_ALIGNMENT;
523 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
524 & (BIGGEST_ALIGNMENT - 1));
525 return true;
528 *bitposp = 0;
529 *alignp = BITS_PER_UNIT;
530 return false;
533 /* Return the alignment in bits of EXP, a pointer valued expression.
534 The alignment returned is, by default, the alignment of the thing that
535 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
537 Otherwise, look at the expression to see if we can do better, i.e., if the
538 expression is actually pointing at an object whose alignment is tighter. */
540 unsigned int
541 get_pointer_alignment (tree exp)
543 unsigned HOST_WIDE_INT bitpos = 0;
544 unsigned int align;
546 get_pointer_alignment_1 (exp, &align, &bitpos);
548 /* align and bitpos now specify known low bits of the pointer.
549 ptr & (align - 1) == bitpos. */
551 if (bitpos != 0)
552 align = (bitpos & -bitpos);
554 return align;
557 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
558 way, because it could contain a zero byte in the middle.
559 TREE_STRING_LENGTH is the size of the character array, not the string.
561 ONLY_VALUE should be nonzero if the result is not going to be emitted
562 into the instruction stream and zero if it is going to be expanded.
563 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
564 is returned, otherwise NULL, since
565 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
566 evaluate the side-effects.
568 The value returned is of type `ssizetype'.
570 Unfortunately, string_constant can't access the values of const char
571 arrays with initializers, so neither can we do so here. */
573 tree
574 c_strlen (tree src, int only_value)
576 tree offset_node;
577 HOST_WIDE_INT offset;
578 int max;
579 const char *ptr;
580 location_t loc;
582 STRIP_NOPS (src);
583 if (TREE_CODE (src) == COND_EXPR
584 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586 tree len1, len2;
588 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
589 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
590 if (tree_int_cst_equal (len1, len2))
591 return len1;
594 if (TREE_CODE (src) == COMPOUND_EXPR
595 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
596 return c_strlen (TREE_OPERAND (src, 1), only_value);
598 loc = EXPR_LOC_OR_LOC (src, input_location);
600 src = string_constant (src, &offset_node);
601 if (src == 0)
602 return NULL_TREE;
604 max = TREE_STRING_LENGTH (src) - 1;
605 ptr = TREE_STRING_POINTER (src);
607 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
609 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
610 compute the offset to the following null if we don't know where to
611 start searching for it. */
612 int i;
614 for (i = 0; i < max; i++)
615 if (ptr[i] == 0)
616 return NULL_TREE;
618 /* We don't know the starting offset, but we do know that the string
619 has no internal zero bytes. We can assume that the offset falls
620 within the bounds of the string; otherwise, the programmer deserves
621 what he gets. Subtract the offset from the length of the string,
622 and return that. This would perhaps not be valid if we were dealing
623 with named arrays in addition to literal string constants. */
625 return size_diffop_loc (loc, size_int (max), offset_node);
628 /* We have a known offset into the string. Start searching there for
629 a null character if we can represent it as a single HOST_WIDE_INT. */
630 if (offset_node == 0)
631 offset = 0;
632 else if (! tree_fits_shwi_p (offset_node))
633 offset = -1;
634 else
635 offset = tree_to_shwi (offset_node);
637 /* If the offset is known to be out of bounds, warn, and call strlen at
638 runtime. */
639 if (offset < 0 || offset > max)
641 /* Suppress multiple warnings for propagated constant strings. */
642 if (! TREE_NO_WARNING (src))
644 warning_at (loc, 0, "offset outside bounds of constant string");
645 TREE_NO_WARNING (src) = 1;
647 return NULL_TREE;
650 /* Use strlen to search for the first zero byte. Since any strings
651 constructed with build_string will have nulls appended, we win even
652 if we get handed something like (char[4])"abcd".
654 Since OFFSET is our starting index into the string, no further
655 calculation is needed. */
656 return ssize_int (strlen (ptr + offset));
659 /* Return a char pointer for a C string if it is a string constant
660 or sum of string constant and integer constant. */
662 static const char *
663 c_getstr (tree src)
665 tree offset_node;
667 src = string_constant (src, &offset_node);
668 if (src == 0)
669 return 0;
671 if (offset_node == 0)
672 return TREE_STRING_POINTER (src);
673 else if (!tree_fits_uhwi_p (offset_node)
674 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
675 return 0;
677 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
680 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
681 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
683 static rtx
684 c_readstr (const char *str, enum machine_mode mode)
686 HOST_WIDE_INT c[2];
687 HOST_WIDE_INT ch;
688 unsigned int i, j;
690 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
692 c[0] = 0;
693 c[1] = 0;
694 ch = 1;
695 for (i = 0; i < GET_MODE_SIZE (mode); i++)
697 j = i;
698 if (WORDS_BIG_ENDIAN)
699 j = GET_MODE_SIZE (mode) - i - 1;
700 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
701 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
702 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
703 j *= BITS_PER_UNIT;
704 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
706 if (ch)
707 ch = (unsigned char) str[i];
708 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
710 return immed_double_const (c[0], c[1], mode);
713 /* Cast a target constant CST to target CHAR and if that value fits into
714 host char type, return zero and put that value into variable pointed to by
715 P. */
717 static int
718 target_char_cast (tree cst, char *p)
720 unsigned HOST_WIDE_INT val, hostval;
722 if (TREE_CODE (cst) != INTEGER_CST
723 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
724 return 1;
726 val = TREE_INT_CST_LOW (cst);
727 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
728 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
730 hostval = val;
731 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
732 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
734 if (val != hostval)
735 return 1;
737 *p = hostval;
738 return 0;
741 /* Similar to save_expr, but assumes that arbitrary code is not executed
742 in between the multiple evaluations. In particular, we assume that a
743 non-addressable local variable will not be modified. */
745 static tree
746 builtin_save_expr (tree exp)
748 if (TREE_CODE (exp) == SSA_NAME
749 || (TREE_ADDRESSABLE (exp) == 0
750 && (TREE_CODE (exp) == PARM_DECL
751 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
752 return exp;
754 return save_expr (exp);
757 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
758 times to get the address of either a higher stack frame, or a return
759 address located within it (depending on FNDECL_CODE). */
761 static rtx
762 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
764 int i;
766 #ifdef INITIAL_FRAME_ADDRESS_RTX
767 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
768 #else
769 rtx tem;
771 /* For a zero count with __builtin_return_address, we don't care what
772 frame address we return, because target-specific definitions will
773 override us. Therefore frame pointer elimination is OK, and using
774 the soft frame pointer is OK.
776 For a nonzero count, or a zero count with __builtin_frame_address,
777 we require a stable offset from the current frame pointer to the
778 previous one, so we must use the hard frame pointer, and
779 we must disable frame pointer elimination. */
780 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
781 tem = frame_pointer_rtx;
782 else
784 tem = hard_frame_pointer_rtx;
786 /* Tell reload not to eliminate the frame pointer. */
787 crtl->accesses_prior_frames = 1;
789 #endif
791 /* Some machines need special handling before we can access
792 arbitrary frames. For example, on the SPARC, we must first flush
793 all register windows to the stack. */
794 #ifdef SETUP_FRAME_ADDRESSES
795 if (count > 0)
796 SETUP_FRAME_ADDRESSES ();
797 #endif
799 /* On the SPARC, the return address is not in the frame, it is in a
800 register. There is no way to access it off of the current frame
801 pointer, but it can be accessed off the previous frame pointer by
802 reading the value from the register window save area. */
803 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
804 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
805 count--;
806 #endif
808 /* Scan back COUNT frames to the specified frame. */
809 for (i = 0; i < count; i++)
811 /* Assume the dynamic chain pointer is in the word that the
812 frame address points to, unless otherwise specified. */
813 #ifdef DYNAMIC_CHAIN_ADDRESS
814 tem = DYNAMIC_CHAIN_ADDRESS (tem);
815 #endif
816 tem = memory_address (Pmode, tem);
817 tem = gen_frame_mem (Pmode, tem);
818 tem = copy_to_reg (tem);
821 /* For __builtin_frame_address, return what we've got. But, on
822 the SPARC for example, we may have to add a bias. */
823 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
824 #ifdef FRAME_ADDR_RTX
825 return FRAME_ADDR_RTX (tem);
826 #else
827 return tem;
828 #endif
830 /* For __builtin_return_address, get the return address from that frame. */
831 #ifdef RETURN_ADDR_RTX
832 tem = RETURN_ADDR_RTX (count, tem);
833 #else
834 tem = memory_address (Pmode,
835 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
836 tem = gen_frame_mem (Pmode, tem);
837 #endif
838 return tem;
841 /* Alias set used for setjmp buffer. */
842 static alias_set_type setjmp_alias_set = -1;
844 /* Construct the leading half of a __builtin_setjmp call. Control will
845 return to RECEIVER_LABEL. This is also called directly by the SJLJ
846 exception handling code. */
848 void
849 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
851 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
852 rtx stack_save;
853 rtx mem;
855 if (setjmp_alias_set == -1)
856 setjmp_alias_set = new_alias_set ();
858 buf_addr = convert_memory_address (Pmode, buf_addr);
860 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
862 /* We store the frame pointer and the address of receiver_label in
863 the buffer and use the rest of it for the stack save area, which
864 is machine-dependent. */
866 mem = gen_rtx_MEM (Pmode, buf_addr);
867 set_mem_alias_set (mem, setjmp_alias_set);
868 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
870 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
871 GET_MODE_SIZE (Pmode))),
872 set_mem_alias_set (mem, setjmp_alias_set);
874 emit_move_insn (validize_mem (mem),
875 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
877 stack_save = gen_rtx_MEM (sa_mode,
878 plus_constant (Pmode, buf_addr,
879 2 * GET_MODE_SIZE (Pmode)));
880 set_mem_alias_set (stack_save, setjmp_alias_set);
881 emit_stack_save (SAVE_NONLOCAL, &stack_save);
883 /* If there is further processing to do, do it. */
884 #ifdef HAVE_builtin_setjmp_setup
885 if (HAVE_builtin_setjmp_setup)
886 emit_insn (gen_builtin_setjmp_setup (buf_addr));
887 #endif
889 /* We have a nonlocal label. */
890 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.

   Control arrives here via an indirect jump, so the frame pointer and
   (if needed) the argument pointer must be restored to this function's
   values before any stacked variable is touched.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

  /* Let the target emit any machine-specific receiver code; fall back
     to the nonlocal-goto receiver pattern, or nothing at all.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   Buffer layout used here: word 0 is the saved frame pointer, word 1
   the resume label, and word 2 onward the saved stack pointer area.
   VALUE must be const1_rtx (the value __builtin_setjmp returns).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
1073 static inline bool
1074 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1076 return (iter->i < iter->n);
1079 /* This function validates the types of a function call argument list
1080 against a specified list of tree_codes. If the last specifier is a 0,
1081 that represents an ellipses, otherwise the last specifier must be a
1082 VOID_TYPE. */
1084 static bool
1085 validate_arglist (const_tree callexpr, ...)
1087 enum tree_code code;
1088 bool res = 0;
1089 va_list ap;
1090 const_call_expr_arg_iterator iter;
1091 const_tree arg;
1093 va_start (ap, callexpr);
1094 init_const_call_expr_arg_iterator (callexpr, &iter);
1098 code = (enum tree_code) va_arg (ap, int);
1099 switch (code)
1101 case 0:
1102 /* This signifies an ellipses, any further arguments are all ok. */
1103 res = true;
1104 goto end;
1105 case VOID_TYPE:
1106 /* This signifies an endlink, if no arguments remain, return
1107 true, otherwise return false. */
1108 res = !more_const_call_expr_args_p (&iter);
1109 goto end;
1110 default:
1111 /* If no parameters remain or the parameter's code does not
1112 match the specified code, return false. Otherwise continue
1113 checking any remaining arguments. */
1114 arg = next_const_call_expr_arg (&iter);
1115 if (!validate_arg (arg, code))
1116 goto end;
1117 break;
1120 while (1);
1122 /* We need gotos here since we can only have one VA_CLOSE in a
1123 function. */
1124 end: ;
1125 va_end (ap);
1127 return res;
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  EXP is the CALL_EXPR; returns
   const0_rtx on success, NULL_RTX if the argument list is malformed.
   The save area holds the frame pointer at offset 0 and the saved
   stack pointer at offset GET_MODE_SIZE (Pmode).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
1212 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1213 (not all will be used on all machines) that was passed to __builtin_setjmp.
1214 It updates the stack pointer in that block to correspond to the current
1215 stack pointer. */
1217 static void
1218 expand_builtin_update_setjmp_buf (rtx buf_addr)
1220 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1221 rtx stack_save
1222 = gen_rtx_MEM (sa_mode,
1223 memory_address
1224 (sa_mode,
1225 plus_constant (Pmode, buf_addr,
1226 2 * GET_MODE_SIZE (Pmode))));
1228 emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  EXP is the CALL_EXPR; emits either a target prefetch insn or
   (at most) the side effects of the address expression.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM is BLKmode with alias set 0
   (may alias anything), but carries MEM attributes derived from EXP
   when they can be determined.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1366 /* Built-in functions to perform an untyped call and return. */
1368 #define apply_args_mode \
1369 (this_target_builtins->x_apply_args_mode)
1370 #define apply_result_mode \
1371 (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The result is cached in a function-local
   static since the answer never changes for a given target.  Layout:
   arg pointer, optional struct-value address, then each argument register
   at its natural alignment.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            /* Round the running offset up to this register's alignment.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the value is
   computed once and cached; it covers every possible function-value
   register at its natural alignment.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            /* Round the running offset up to this register's alignment.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  The returned PARALLEL contains one SET per live
   result register, moving register to memory (save) or memory to
   register (restore).  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        /* Keep the offset aligned for this register's mode, mirroring
           the layout computed by apply_result_size.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the address
   (in a pseudo) of a stack block laid out as: arg pointer, optional
   struct-value address, then each incoming argument register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the address of the block
   built by __builtin_apply_args, and ARGSIZE the number of bytes of
   stack arguments to copy.  Returns the address (in ptr_mode) of a
   stack block holding the callee's result registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  RESULT is the address of the block built
   by expand_builtin_apply holding the callee's result registers; reload
   them and return directly to this function's caller.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.
   Map the TREE_CODE of TYPE onto the libgcc <typeclass.h> type_class
   enumeration; unknown codes map to no_type_class.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
1838 /* Expand a call EXP to __builtin_classify_type. */
1840 static rtx
1841 expand_builtin_classify_type (tree exp)
1843 if (call_expr_nargs (exp))
1844 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1845 return GEN_INT (no_type_class);
1848 /* This helper macro, meant to be used in mathfn_built_in below,
1849 determines which among a set of three builtin math functions is
1850 appropriate for a given type mode. The `F' and `L' cases are
1851 automatically generated from the `double' case. */
1852 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1853 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1854 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1855 fcodel = BUILT_IN_MATHFN##L ; break;
1856 /* Similar to above, but appends _R after any F/L suffix. */
1857 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1858 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1859 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1860 fcodel = BUILT_IN_MATHFN##L_R ; break;
1862 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1863 if available. If IMPLICIT is true use the implicit builtin declaration,
1864 otherwise use the explicit declaration. If we can't do the conversion,
1865 return zero. */
1867 static tree
1868 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1870 enum built_in_function fcode, fcodef, fcodel, fcode2;
1872 switch (fn)
1874 CASE_MATHFN (BUILT_IN_ACOS)
1875 CASE_MATHFN (BUILT_IN_ACOSH)
1876 CASE_MATHFN (BUILT_IN_ASIN)
1877 CASE_MATHFN (BUILT_IN_ASINH)
1878 CASE_MATHFN (BUILT_IN_ATAN)
1879 CASE_MATHFN (BUILT_IN_ATAN2)
1880 CASE_MATHFN (BUILT_IN_ATANH)
1881 CASE_MATHFN (BUILT_IN_CBRT)
1882 CASE_MATHFN (BUILT_IN_CEIL)
1883 CASE_MATHFN (BUILT_IN_CEXPI)
1884 CASE_MATHFN (BUILT_IN_COPYSIGN)
1885 CASE_MATHFN (BUILT_IN_COS)
1886 CASE_MATHFN (BUILT_IN_COSH)
1887 CASE_MATHFN (BUILT_IN_DREM)
1888 CASE_MATHFN (BUILT_IN_ERF)
1889 CASE_MATHFN (BUILT_IN_ERFC)
1890 CASE_MATHFN (BUILT_IN_EXP)
1891 CASE_MATHFN (BUILT_IN_EXP10)
1892 CASE_MATHFN (BUILT_IN_EXP2)
1893 CASE_MATHFN (BUILT_IN_EXPM1)
1894 CASE_MATHFN (BUILT_IN_FABS)
1895 CASE_MATHFN (BUILT_IN_FDIM)
1896 CASE_MATHFN (BUILT_IN_FLOOR)
1897 CASE_MATHFN (BUILT_IN_FMA)
1898 CASE_MATHFN (BUILT_IN_FMAX)
1899 CASE_MATHFN (BUILT_IN_FMIN)
1900 CASE_MATHFN (BUILT_IN_FMOD)
1901 CASE_MATHFN (BUILT_IN_FREXP)
1902 CASE_MATHFN (BUILT_IN_GAMMA)
1903 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1904 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1905 CASE_MATHFN (BUILT_IN_HYPOT)
1906 CASE_MATHFN (BUILT_IN_ILOGB)
1907 CASE_MATHFN (BUILT_IN_ICEIL)
1908 CASE_MATHFN (BUILT_IN_IFLOOR)
1909 CASE_MATHFN (BUILT_IN_INF)
1910 CASE_MATHFN (BUILT_IN_IRINT)
1911 CASE_MATHFN (BUILT_IN_IROUND)
1912 CASE_MATHFN (BUILT_IN_ISINF)
1913 CASE_MATHFN (BUILT_IN_J0)
1914 CASE_MATHFN (BUILT_IN_J1)
1915 CASE_MATHFN (BUILT_IN_JN)
1916 CASE_MATHFN (BUILT_IN_LCEIL)
1917 CASE_MATHFN (BUILT_IN_LDEXP)
1918 CASE_MATHFN (BUILT_IN_LFLOOR)
1919 CASE_MATHFN (BUILT_IN_LGAMMA)
1920 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1921 CASE_MATHFN (BUILT_IN_LLCEIL)
1922 CASE_MATHFN (BUILT_IN_LLFLOOR)
1923 CASE_MATHFN (BUILT_IN_LLRINT)
1924 CASE_MATHFN (BUILT_IN_LLROUND)
1925 CASE_MATHFN (BUILT_IN_LOG)
1926 CASE_MATHFN (BUILT_IN_LOG10)
1927 CASE_MATHFN (BUILT_IN_LOG1P)
1928 CASE_MATHFN (BUILT_IN_LOG2)
1929 CASE_MATHFN (BUILT_IN_LOGB)
1930 CASE_MATHFN (BUILT_IN_LRINT)
1931 CASE_MATHFN (BUILT_IN_LROUND)
1932 CASE_MATHFN (BUILT_IN_MODF)
1933 CASE_MATHFN (BUILT_IN_NAN)
1934 CASE_MATHFN (BUILT_IN_NANS)
1935 CASE_MATHFN (BUILT_IN_NEARBYINT)
1936 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1937 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1938 CASE_MATHFN (BUILT_IN_POW)
1939 CASE_MATHFN (BUILT_IN_POWI)
1940 CASE_MATHFN (BUILT_IN_POW10)
1941 CASE_MATHFN (BUILT_IN_REMAINDER)
1942 CASE_MATHFN (BUILT_IN_REMQUO)
1943 CASE_MATHFN (BUILT_IN_RINT)
1944 CASE_MATHFN (BUILT_IN_ROUND)
1945 CASE_MATHFN (BUILT_IN_SCALB)
1946 CASE_MATHFN (BUILT_IN_SCALBLN)
1947 CASE_MATHFN (BUILT_IN_SCALBN)
1948 CASE_MATHFN (BUILT_IN_SIGNBIT)
1949 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1950 CASE_MATHFN (BUILT_IN_SIN)
1951 CASE_MATHFN (BUILT_IN_SINCOS)
1952 CASE_MATHFN (BUILT_IN_SINH)
1953 CASE_MATHFN (BUILT_IN_SQRT)
1954 CASE_MATHFN (BUILT_IN_TAN)
1955 CASE_MATHFN (BUILT_IN_TANH)
1956 CASE_MATHFN (BUILT_IN_TGAMMA)
1957 CASE_MATHFN (BUILT_IN_TRUNC)
1958 CASE_MATHFN (BUILT_IN_Y0)
1959 CASE_MATHFN (BUILT_IN_Y1)
1960 CASE_MATHFN (BUILT_IN_YN)
1962 default:
1963 return NULL_TREE;
1966 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1967 fcode2 = fcode;
1968 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1969 fcode2 = fcodef;
1970 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1971 fcode2 = fcodel;
1972 else
1973 return NULL_TREE;
1975 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1976 return NULL_TREE;
1978 return builtin_decl_explicit (fcode2);
1981 /* Like mathfn_built_in_1(), but always use the implicit array. */
1983 tree
1984 mathfn_built_in (tree type, enum built_in_function fn)
1986 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1989 /* If errno must be maintained, expand the RTL to check if the result,
1990 TARGET, of a built-in function call, EXP, is NaN, and if so set
1991 errno to EDOM. */
1993 static void
1994 expand_errno_check (tree exp, rtx target)
1996 rtx lab = gen_label_rtx ();
1998 /* Test the result; if it is NaN, set errno=EDOM because
1999 the argument was not in the domain. */
2000 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2001 NULL_RTX, NULL_RTX, lab,
2002 /* The jump is very likely. */
2003 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2005 #ifdef TARGET_EDOM
2006 /* If this built-in doesn't throw an exception, set errno directly. */
2007 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2009 #ifdef GEN_ERRNO_RTX
2010 rtx errno_rtx = GEN_ERRNO_RTX;
2011 #else
2012 rtx errno_rtx
2013 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2014 #endif
2015 emit_move_insn (errno_rtx,
2016 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2017 emit_label (lab);
2018 return;
2020 #endif
2022 /* Make sure the library call isn't expanded as a tail call. */
2023 CALL_EXPR_TAILCALL (exp) = 0;
2025 /* We can't set errno=EDOM directly; let the library call do it.
2026 Pop the arguments right away in case the call gets deleted. */
2027 NO_DEFER_POP;
2028 expand_call (exp, target, 0);
2029 OK_DEFER_POP;
2030 emit_label (lab);
2033 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2034 Return NULL_RTX if a normal call should be emitted rather than expanding
2035 the function in-line. EXP is the expression that is a call to the builtin
2036 function; if convenient, the result should be placed in TARGET.
2037 SUBTARGET may be used as the target for computing one of EXP's operands. */
2039 static rtx
2040 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2042 optab builtin_optab;
2043 rtx op0, insns;
2044 tree fndecl = get_callee_fndecl (exp);
2045 enum machine_mode mode;
2046 bool errno_set = false;
2047 bool try_widening = false;
2048 tree arg;
2050 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2051 return NULL_RTX;
2053 arg = CALL_EXPR_ARG (exp, 0);
2055 switch (DECL_FUNCTION_CODE (fndecl))
2057 CASE_FLT_FN (BUILT_IN_SQRT):
2058 errno_set = ! tree_expr_nonnegative_p (arg);
2059 try_widening = true;
2060 builtin_optab = sqrt_optab;
2061 break;
2062 CASE_FLT_FN (BUILT_IN_EXP):
2063 errno_set = true; builtin_optab = exp_optab; break;
2064 CASE_FLT_FN (BUILT_IN_EXP10):
2065 CASE_FLT_FN (BUILT_IN_POW10):
2066 errno_set = true; builtin_optab = exp10_optab; break;
2067 CASE_FLT_FN (BUILT_IN_EXP2):
2068 errno_set = true; builtin_optab = exp2_optab; break;
2069 CASE_FLT_FN (BUILT_IN_EXPM1):
2070 errno_set = true; builtin_optab = expm1_optab; break;
2071 CASE_FLT_FN (BUILT_IN_LOGB):
2072 errno_set = true; builtin_optab = logb_optab; break;
2073 CASE_FLT_FN (BUILT_IN_LOG):
2074 errno_set = true; builtin_optab = log_optab; break;
2075 CASE_FLT_FN (BUILT_IN_LOG10):
2076 errno_set = true; builtin_optab = log10_optab; break;
2077 CASE_FLT_FN (BUILT_IN_LOG2):
2078 errno_set = true; builtin_optab = log2_optab; break;
2079 CASE_FLT_FN (BUILT_IN_LOG1P):
2080 errno_set = true; builtin_optab = log1p_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ASIN):
2082 builtin_optab = asin_optab; break;
2083 CASE_FLT_FN (BUILT_IN_ACOS):
2084 builtin_optab = acos_optab; break;
2085 CASE_FLT_FN (BUILT_IN_TAN):
2086 builtin_optab = tan_optab; break;
2087 CASE_FLT_FN (BUILT_IN_ATAN):
2088 builtin_optab = atan_optab; break;
2089 CASE_FLT_FN (BUILT_IN_FLOOR):
2090 builtin_optab = floor_optab; break;
2091 CASE_FLT_FN (BUILT_IN_CEIL):
2092 builtin_optab = ceil_optab; break;
2093 CASE_FLT_FN (BUILT_IN_TRUNC):
2094 builtin_optab = btrunc_optab; break;
2095 CASE_FLT_FN (BUILT_IN_ROUND):
2096 builtin_optab = round_optab; break;
2097 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2098 builtin_optab = nearbyint_optab;
2099 if (flag_trapping_math)
2100 break;
2101 /* Else fallthrough and expand as rint. */
2102 CASE_FLT_FN (BUILT_IN_RINT):
2103 builtin_optab = rint_optab; break;
2104 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2105 builtin_optab = significand_optab; break;
2106 default:
2107 gcc_unreachable ();
2110 /* Make a suitable register to place result in. */
2111 mode = TYPE_MODE (TREE_TYPE (exp));
2113 if (! flag_errno_math || ! HONOR_NANS (mode))
2114 errno_set = false;
2116 /* Before working hard, check whether the instruction is available, but try
2117 to widen the mode for specific operations. */
2118 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2119 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2120 && (!errno_set || !optimize_insn_for_size_p ()))
2122 rtx result = gen_reg_rtx (mode);
2124 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2125 need to expand the argument again. This way, we will not perform
2126 side-effects more the once. */
2127 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2129 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2131 start_sequence ();
2133 /* Compute into RESULT.
2134 Set RESULT to wherever the result comes back. */
2135 result = expand_unop (mode, builtin_optab, op0, result, 0);
2137 if (result != 0)
2139 if (errno_set)
2140 expand_errno_check (exp, result);
2142 /* Output the entire sequence. */
2143 insns = get_insns ();
2144 end_sequence ();
2145 emit_insn (insns);
2146 return result;
2149 /* If we were unable to expand via the builtin, stop the sequence
2150 (without outputting the insns) and call to the library function
2151 with the stabilized argument list. */
2152 end_sequence ();
2155 return expand_call (exp, target, target == const0_rtx);
2158 /* Expand a call to the builtin binary math functions (pow and atan2).
2159 Return NULL_RTX if a normal call should be emitted rather than expanding the
2160 function in-line. EXP is the expression that is a call to the builtin
2161 function; if convenient, the result should be placed in TARGET.
2162 SUBTARGET may be used as the target for computing one of EXP's
2163 operands. */
2165 static rtx
2166 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2168 optab builtin_optab;
2169 rtx op0, op1, insns, result;
2170 int op1_type = REAL_TYPE;
2171 tree fndecl = get_callee_fndecl (exp);
2172 tree arg0, arg1;
2173 enum machine_mode mode;
2174 bool errno_set = true;
2176 switch (DECL_FUNCTION_CODE (fndecl))
2178 CASE_FLT_FN (BUILT_IN_SCALBN):
2179 CASE_FLT_FN (BUILT_IN_SCALBLN):
2180 CASE_FLT_FN (BUILT_IN_LDEXP):
2181 op1_type = INTEGER_TYPE;
2182 default:
2183 break;
2186 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2187 return NULL_RTX;
2189 arg0 = CALL_EXPR_ARG (exp, 0);
2190 arg1 = CALL_EXPR_ARG (exp, 1);
2192 switch (DECL_FUNCTION_CODE (fndecl))
2194 CASE_FLT_FN (BUILT_IN_POW):
2195 builtin_optab = pow_optab; break;
2196 CASE_FLT_FN (BUILT_IN_ATAN2):
2197 builtin_optab = atan2_optab; break;
2198 CASE_FLT_FN (BUILT_IN_SCALB):
2199 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2200 return 0;
2201 builtin_optab = scalb_optab; break;
2202 CASE_FLT_FN (BUILT_IN_SCALBN):
2203 CASE_FLT_FN (BUILT_IN_SCALBLN):
2204 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2205 return 0;
2206 /* Fall through... */
2207 CASE_FLT_FN (BUILT_IN_LDEXP):
2208 builtin_optab = ldexp_optab; break;
2209 CASE_FLT_FN (BUILT_IN_FMOD):
2210 builtin_optab = fmod_optab; break;
2211 CASE_FLT_FN (BUILT_IN_REMAINDER):
2212 CASE_FLT_FN (BUILT_IN_DREM):
2213 builtin_optab = remainder_optab; break;
2214 default:
2215 gcc_unreachable ();
2218 /* Make a suitable register to place result in. */
2219 mode = TYPE_MODE (TREE_TYPE (exp));
2221 /* Before working hard, check whether the instruction is available. */
2222 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2223 return NULL_RTX;
2225 result = gen_reg_rtx (mode);
2227 if (! flag_errno_math || ! HONOR_NANS (mode))
2228 errno_set = false;
2230 if (errno_set && optimize_insn_for_size_p ())
2231 return 0;
2233 /* Always stabilize the argument list. */
2234 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2235 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2237 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2238 op1 = expand_normal (arg1);
2240 start_sequence ();
2242 /* Compute into RESULT.
2243 Set RESULT to wherever the result comes back. */
2244 result = expand_binop (mode, builtin_optab, op0, op1,
2245 result, 0, OPTAB_DIRECT);
2247 /* If we were unable to expand via the builtin, stop the sequence
2248 (without outputting the insns) and call to the library function
2249 with the stabilized argument list. */
2250 if (result == 0)
2252 end_sequence ();
2253 return expand_call (exp, target, target == const0_rtx);
2256 if (errno_set)
2257 expand_errno_check (exp, result);
2259 /* Output the entire sequence. */
2260 insns = get_insns ();
2261 end_sequence ();
2262 emit_insn (insns);
2264 return result;
2267 /* Expand a call to the builtin trinary math functions (fma).
2268 Return NULL_RTX if a normal call should be emitted rather than expanding the
2269 function in-line. EXP is the expression that is a call to the builtin
2270 function; if convenient, the result should be placed in TARGET.
2271 SUBTARGET may be used as the target for computing one of EXP's
2272 operands. */
2274 static rtx
2275 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2277 optab builtin_optab;
2278 rtx op0, op1, op2, insns, result;
2279 tree fndecl = get_callee_fndecl (exp);
2280 tree arg0, arg1, arg2;
2281 enum machine_mode mode;
2283 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2284 return NULL_RTX;
2286 arg0 = CALL_EXPR_ARG (exp, 0);
2287 arg1 = CALL_EXPR_ARG (exp, 1);
2288 arg2 = CALL_EXPR_ARG (exp, 2);
2290 switch (DECL_FUNCTION_CODE (fndecl))
2292 CASE_FLT_FN (BUILT_IN_FMA):
2293 builtin_optab = fma_optab; break;
2294 default:
2295 gcc_unreachable ();
2298 /* Make a suitable register to place result in. */
2299 mode = TYPE_MODE (TREE_TYPE (exp));
2301 /* Before working hard, check whether the instruction is available. */
2302 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2303 return NULL_RTX;
2305 result = gen_reg_rtx (mode);
2307 /* Always stabilize the argument list. */
2308 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2309 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2310 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2312 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2313 op1 = expand_normal (arg1);
2314 op2 = expand_normal (arg2);
2316 start_sequence ();
2318 /* Compute into RESULT.
2319 Set RESULT to wherever the result comes back. */
2320 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2321 result, 0);
2323 /* If we were unable to expand via the builtin, stop the sequence
2324 (without outputting the insns) and call to the library function
2325 with the stabilized argument list. */
2326 if (result == 0)
2328 end_sequence ();
2329 return expand_call (exp, target, target == const0_rtx);
2332 /* Output the entire sequence. */
2333 insns = get_insns ();
2334 end_sequence ();
2335 emit_insn (insns);
2337 return result;
2340 /* Expand a call to the builtin sin and cos math functions.
2341 Return NULL_RTX if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2343 function; if convenient, the result should be placed in TARGET.
2344 SUBTARGET may be used as the target for computing one of EXP's
2345 operands. */
2347 static rtx
2348 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2350 optab builtin_optab;
2351 rtx op0, insns;
2352 tree fndecl = get_callee_fndecl (exp);
2353 enum machine_mode mode;
2354 tree arg;
2356 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2357 return NULL_RTX;
2359 arg = CALL_EXPR_ARG (exp, 0);
2361 switch (DECL_FUNCTION_CODE (fndecl))
2363 CASE_FLT_FN (BUILT_IN_SIN):
2364 CASE_FLT_FN (BUILT_IN_COS):
2365 builtin_optab = sincos_optab; break;
2366 default:
2367 gcc_unreachable ();
2370 /* Make a suitable register to place result in. */
2371 mode = TYPE_MODE (TREE_TYPE (exp));
2373 /* Check if sincos insn is available, otherwise fallback
2374 to sin or cos insn. */
2375 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2376 switch (DECL_FUNCTION_CODE (fndecl))
2378 CASE_FLT_FN (BUILT_IN_SIN):
2379 builtin_optab = sin_optab; break;
2380 CASE_FLT_FN (BUILT_IN_COS):
2381 builtin_optab = cos_optab; break;
2382 default:
2383 gcc_unreachable ();
2386 /* Before working hard, check whether the instruction is available. */
2387 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2389 rtx result = gen_reg_rtx (mode);
2391 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2392 need to expand the argument again. This way, we will not perform
2393 side-effects more the once. */
2394 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2396 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2398 start_sequence ();
2400 /* Compute into RESULT.
2401 Set RESULT to wherever the result comes back. */
2402 if (builtin_optab == sincos_optab)
2404 int ok;
2406 switch (DECL_FUNCTION_CODE (fndecl))
2408 CASE_FLT_FN (BUILT_IN_SIN):
2409 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2410 break;
2411 CASE_FLT_FN (BUILT_IN_COS):
2412 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2413 break;
2414 default:
2415 gcc_unreachable ();
2417 gcc_assert (ok);
2419 else
2420 result = expand_unop (mode, builtin_optab, op0, result, 0);
2422 if (result != 0)
2424 /* Output the entire sequence. */
2425 insns = get_insns ();
2426 end_sequence ();
2427 emit_insn (insns);
2428 return result;
2431 /* If we were unable to expand via the builtin, stop the sequence
2432 (without outputting the insns) and call to the library function
2433 with the stabilized argument list. */
2434 end_sequence ();
2437 return expand_call (exp, target, target == const0_rtx);
2440 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2441 return an RTL instruction code that implements the functionality.
2442 If that isn't possible or available return CODE_FOR_nothing. */
2444 static enum insn_code
2445 interclass_mathfn_icode (tree arg, tree fndecl)
2447 bool errno_set = false;
2448 optab builtin_optab = unknown_optab;
2449 enum machine_mode mode;
2451 switch (DECL_FUNCTION_CODE (fndecl))
2453 CASE_FLT_FN (BUILT_IN_ILOGB):
2454 errno_set = true; builtin_optab = ilogb_optab; break;
2455 CASE_FLT_FN (BUILT_IN_ISINF):
2456 builtin_optab = isinf_optab; break;
2457 case BUILT_IN_ISNORMAL:
2458 case BUILT_IN_ISFINITE:
2459 CASE_FLT_FN (BUILT_IN_FINITE):
2460 case BUILT_IN_FINITED32:
2461 case BUILT_IN_FINITED64:
2462 case BUILT_IN_FINITED128:
2463 case BUILT_IN_ISINFD32:
2464 case BUILT_IN_ISINFD64:
2465 case BUILT_IN_ISINFD128:
2466 /* These builtins have no optabs (yet). */
2467 break;
2468 default:
2469 gcc_unreachable ();
2472 /* There's no easy way to detect the case we need to set EDOM. */
2473 if (flag_errno_math && errno_set)
2474 return CODE_FOR_nothing;
2476 /* Optab mode depends on the mode of the input argument. */
2477 mode = TYPE_MODE (TREE_TYPE (arg));
2479 if (builtin_optab)
2480 return optab_handler (builtin_optab, mode);
2481 return CODE_FOR_nothing;
2484 /* Expand a call to one of the builtin math functions that operate on
2485 floating point argument and output an integer result (ilogb, isinf,
2486 isnan, etc).
2487 Return 0 if a normal call should be emitted rather than expanding the
2488 function in-line. EXP is the expression that is a call to the builtin
2489 function; if convenient, the result should be placed in TARGET. */
2491 static rtx
2492 expand_builtin_interclass_mathfn (tree exp, rtx target)
2494 enum insn_code icode = CODE_FOR_nothing;
2495 rtx op0;
2496 tree fndecl = get_callee_fndecl (exp);
2497 enum machine_mode mode;
2498 tree arg;
2500 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2501 return NULL_RTX;
2503 arg = CALL_EXPR_ARG (exp, 0);
2504 icode = interclass_mathfn_icode (arg, fndecl);
2505 mode = TYPE_MODE (TREE_TYPE (arg));
2507 if (icode != CODE_FOR_nothing)
2509 struct expand_operand ops[1];
2510 rtx last = get_last_insn ();
2511 tree orig_arg = arg;
2513 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2514 need to expand the argument again. This way, we will not perform
2515 side-effects more the once. */
2516 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2518 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2520 if (mode != GET_MODE (op0))
2521 op0 = convert_to_mode (mode, op0, 0);
2523 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2524 if (maybe_legitimize_operands (icode, 0, 1, ops)
2525 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2526 return ops[0].value;
2528 delete_insns_since (last);
2529 CALL_EXPR_ARG (exp, 0) = orig_arg;
2532 return NULL_RTX;
2535 /* Expand a call to the builtin sincos math function.
2536 Return NULL_RTX if a normal call should be emitted rather than expanding the
2537 function in-line. EXP is the expression that is a call to the builtin
2538 function. */
2540 static rtx
2541 expand_builtin_sincos (tree exp)
2543 rtx op0, op1, op2, target1, target2;
2544 enum machine_mode mode;
2545 tree arg, sinp, cosp;
2546 int result;
2547 location_t loc = EXPR_LOCATION (exp);
2548 tree alias_type, alias_off;
2550 if (!validate_arglist (exp, REAL_TYPE,
2551 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2552 return NULL_RTX;
2554 arg = CALL_EXPR_ARG (exp, 0);
2555 sinp = CALL_EXPR_ARG (exp, 1);
2556 cosp = CALL_EXPR_ARG (exp, 2);
2558 /* Make a suitable register to place result in. */
2559 mode = TYPE_MODE (TREE_TYPE (arg));
2561 /* Check if sincos insn is available, otherwise emit the call. */
2562 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2563 return NULL_RTX;
2565 target1 = gen_reg_rtx (mode);
2566 target2 = gen_reg_rtx (mode);
2568 op0 = expand_normal (arg);
2569 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2570 alias_off = build_int_cst (alias_type, 0);
2571 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2572 sinp, alias_off));
2573 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2574 cosp, alias_off));
2576 /* Compute into target1 and target2.
2577 Set TARGET to wherever the result comes back. */
2578 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2579 gcc_assert (result);
2581 /* Move target1 and target2 to the memory locations indicated
2582 by op1 and op2. */
2583 emit_move_insn (op1, target1);
2584 emit_move_insn (op2, target2);
2586 return const0_rtx;
2589 /* Expand a call to the internal cexpi builtin to the sincos math function.
2590 EXP is the expression that is a call to the builtin function; if convenient,
2591 the result should be placed in TARGET. */
2593 static rtx
2594 expand_builtin_cexpi (tree exp, rtx target)
2596 tree fndecl = get_callee_fndecl (exp);
2597 tree arg, type;
2598 enum machine_mode mode;
2599 rtx op0, op1, op2;
2600 location_t loc = EXPR_LOCATION (exp);
2602 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2603 return NULL_RTX;
2605 arg = CALL_EXPR_ARG (exp, 0);
2606 type = TREE_TYPE (arg);
2607 mode = TYPE_MODE (TREE_TYPE (arg));
2609 /* Try expanding via a sincos optab, fall back to emitting a libcall
2610 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2611 is only generated from sincos, cexp or if we have either of them. */
2612 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2614 op1 = gen_reg_rtx (mode);
2615 op2 = gen_reg_rtx (mode);
2617 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2619 /* Compute into op1 and op2. */
2620 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2622 else if (targetm.libc_has_function (function_sincos))
2624 tree call, fn = NULL_TREE;
2625 tree top1, top2;
2626 rtx op1a, op2a;
2628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2629 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2631 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2632 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2633 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2634 else
2635 gcc_unreachable ();
2637 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2638 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2639 op1a = copy_addr_to_reg (XEXP (op1, 0));
2640 op2a = copy_addr_to_reg (XEXP (op2, 0));
2641 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2642 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2644 /* Make sure not to fold the sincos call again. */
2645 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2646 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2647 call, 3, arg, top1, top2));
2649 else
2651 tree call, fn = NULL_TREE, narg;
2652 tree ctype = build_complex_type (type);
2654 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2655 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2656 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2657 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2659 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2660 else
2661 gcc_unreachable ();
2663 /* If we don't have a decl for cexp create one. This is the
2664 friendliest fallback if the user calls __builtin_cexpi
2665 without full target C99 function support. */
2666 if (fn == NULL_TREE)
2668 tree fntype;
2669 const char *name = NULL;
2671 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2672 name = "cexpf";
2673 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2674 name = "cexp";
2675 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2676 name = "cexpl";
2678 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2679 fn = build_fn_decl (name, fntype);
2682 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2683 build_real (type, dconst0), arg);
2685 /* Make sure not to fold the cexp call again. */
2686 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2687 return expand_expr (build_call_nary (ctype, call, 1, narg),
2688 target, VOIDmode, EXPAND_NORMAL);
2691 /* Now build the proper return type. */
2692 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2693 make_tree (TREE_TYPE (arg), op2),
2694 make_tree (TREE_TYPE (arg), op1)),
2695 target, VOIDmode, EXPAND_NORMAL);
2698 /* Conveniently construct a function call expression. FNDECL names the
2699 function to be called, N is the number of arguments, and the "..."
2700 parameters are the argument expressions. Unlike build_call_exr
2701 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2703 static tree
2704 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2706 va_list ap;
2707 tree fntype = TREE_TYPE (fndecl);
2708 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2710 va_start (ap, n);
2711 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2712 va_end (ap);
2713 SET_EXPR_LOCATION (fn, loc);
2714 return fn;
2717 /* Expand a call to one of the builtin rounding functions gcc defines
2718 as an extension (lfloor and lceil). As these are gcc extensions we
2719 do not need to worry about setting errno to EDOM.
2720 If expanding via optab fails, lower expression to (int)(floor(x)).
2721 EXP is the expression that is a call to the builtin function;
2722 if convenient, the result should be placed in TARGET. */
2724 static rtx
2725 expand_builtin_int_roundingfn (tree exp, rtx target)
2727 convert_optab builtin_optab;
2728 rtx op0, insns, tmp;
2729 tree fndecl = get_callee_fndecl (exp);
2730 enum built_in_function fallback_fn;
2731 tree fallback_fndecl;
2732 enum machine_mode mode;
2733 tree arg;
2735 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2736 gcc_unreachable ();
2738 arg = CALL_EXPR_ARG (exp, 0);
2740 switch (DECL_FUNCTION_CODE (fndecl))
2742 CASE_FLT_FN (BUILT_IN_ICEIL):
2743 CASE_FLT_FN (BUILT_IN_LCEIL):
2744 CASE_FLT_FN (BUILT_IN_LLCEIL):
2745 builtin_optab = lceil_optab;
2746 fallback_fn = BUILT_IN_CEIL;
2747 break;
2749 CASE_FLT_FN (BUILT_IN_IFLOOR):
2750 CASE_FLT_FN (BUILT_IN_LFLOOR):
2751 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2752 builtin_optab = lfloor_optab;
2753 fallback_fn = BUILT_IN_FLOOR;
2754 break;
2756 default:
2757 gcc_unreachable ();
2760 /* Make a suitable register to place result in. */
2761 mode = TYPE_MODE (TREE_TYPE (exp));
2763 target = gen_reg_rtx (mode);
2765 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2766 need to expand the argument again. This way, we will not perform
2767 side-effects more the once. */
2768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2770 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2772 start_sequence ();
2774 /* Compute into TARGET. */
2775 if (expand_sfix_optab (target, op0, builtin_optab))
2777 /* Output the entire sequence. */
2778 insns = get_insns ();
2779 end_sequence ();
2780 emit_insn (insns);
2781 return target;
2784 /* If we were unable to expand via the builtin, stop the sequence
2785 (without outputting the insns). */
2786 end_sequence ();
2788 /* Fall back to floating point rounding optab. */
2789 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2791 /* For non-C99 targets we may end up without a fallback fndecl here
2792 if the user called __builtin_lfloor directly. In this case emit
2793 a call to the floor/ceil variants nevertheless. This should result
2794 in the best user experience for not full C99 targets. */
2795 if (fallback_fndecl == NULL_TREE)
2797 tree fntype;
2798 const char *name = NULL;
2800 switch (DECL_FUNCTION_CODE (fndecl))
2802 case BUILT_IN_ICEIL:
2803 case BUILT_IN_LCEIL:
2804 case BUILT_IN_LLCEIL:
2805 name = "ceil";
2806 break;
2807 case BUILT_IN_ICEILF:
2808 case BUILT_IN_LCEILF:
2809 case BUILT_IN_LLCEILF:
2810 name = "ceilf";
2811 break;
2812 case BUILT_IN_ICEILL:
2813 case BUILT_IN_LCEILL:
2814 case BUILT_IN_LLCEILL:
2815 name = "ceill";
2816 break;
2817 case BUILT_IN_IFLOOR:
2818 case BUILT_IN_LFLOOR:
2819 case BUILT_IN_LLFLOOR:
2820 name = "floor";
2821 break;
2822 case BUILT_IN_IFLOORF:
2823 case BUILT_IN_LFLOORF:
2824 case BUILT_IN_LLFLOORF:
2825 name = "floorf";
2826 break;
2827 case BUILT_IN_IFLOORL:
2828 case BUILT_IN_LFLOORL:
2829 case BUILT_IN_LLFLOORL:
2830 name = "floorl";
2831 break;
2832 default:
2833 gcc_unreachable ();
2836 fntype = build_function_type_list (TREE_TYPE (arg),
2837 TREE_TYPE (arg), NULL_TREE);
2838 fallback_fndecl = build_fn_decl (name, fntype);
2841 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2843 tmp = expand_normal (exp);
2844 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2846 /* Truncate the result of floating point optab to integer
2847 via expand_fix (). */
2848 target = gen_reg_rtx (mode);
2849 expand_fix (target, tmp, 0);
2851 return target;
2854 /* Expand a call to one of the builtin math functions doing integer
2855 conversion (lrint).
2856 Return 0 if a normal call should be emitted rather than expanding the
2857 function in-line. EXP is the expression that is a call to the builtin
2858 function; if convenient, the result should be placed in TARGET. */
2860 static rtx
2861 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2863 convert_optab builtin_optab;
2864 rtx op0, insns;
2865 tree fndecl = get_callee_fndecl (exp);
2866 tree arg;
2867 enum machine_mode mode;
2868 enum built_in_function fallback_fn = BUILT_IN_NONE;
2870 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2871 gcc_unreachable ();
2873 arg = CALL_EXPR_ARG (exp, 0);
/* Select the conversion optab.  The int-returning variants (irint,
   iround) additionally record a long-returning builtin to fall back on
   if the optab cannot expand the operation.  */
2875 switch (DECL_FUNCTION_CODE (fndecl))
2877 CASE_FLT_FN (BUILT_IN_IRINT):
2878 fallback_fn = BUILT_IN_LRINT;
2879 /* FALLTHRU */
2880 CASE_FLT_FN (BUILT_IN_LRINT):
2881 CASE_FLT_FN (BUILT_IN_LLRINT):
2882 builtin_optab = lrint_optab;
2883 break;
2885 CASE_FLT_FN (BUILT_IN_IROUND):
2886 fallback_fn = BUILT_IN_LROUND;
2887 /* FALLTHRU */
2888 CASE_FLT_FN (BUILT_IN_LROUND):
2889 CASE_FLT_FN (BUILT_IN_LLROUND):
2890 builtin_optab = lround_optab;
2891 break;
2893 default:
2894 gcc_unreachable ();
2897 /* There's no easy way to detect the case we need to set EDOM. */
2898 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2899 return NULL_RTX;
2901 /* Make a suitable register to place result in. */
2902 mode = TYPE_MODE (TREE_TYPE (exp));
2904 /* There's no easy way to detect the case we need to set EDOM. */
2905 if (!flag_errno_math)
2907 rtx result = gen_reg_rtx (mode);
2909 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2910 need to expand the argument again. This way, we will not perform
2911 side-effects more than once. */
2912 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2914 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
/* Collect the optab expansion in a pending sequence so it can be
   discarded if expand_sfix_optab fails.  */
2916 start_sequence ();
2918 if (expand_sfix_optab (result, op0, builtin_optab))
2920 /* Output the entire sequence. */
2921 insns = get_insns ();
2922 end_sequence ();
2923 emit_insn (insns);
2924 return result;
2927 /* If we were unable to expand via the builtin, stop the sequence
2928 (without outputting the insns) and call to the library function
2929 with the stabilized argument list. */
2930 end_sequence ();
2933 if (fallback_fn != BUILT_IN_NONE)
2935 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2936 targets, (int) round (x) should never be transformed into
2937 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2938 a call to lround in the hope that the target provides at least some
2939 C99 functions. This should result in the best user experience for
2940 not full C99 targets. */
2941 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2942 fallback_fn, 0);
2944 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2945 fallback_fndecl, 1, arg);
2947 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2948 target = maybe_emit_group_store (target, TREE_TYPE (exp));
/* The fallback returns a long-typed value; convert it to the mode of
   the original builtin's result type.  */
2949 return convert_to_mode (mode, target, 0);
/* No inline expansion and no fallback: emit an ordinary library call.  */
2952 return expand_call (exp, target, target == const0_rtx);
2955 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2956 a normal call should be emitted rather than expanding the function
2957 in-line. EXP is the expression that is a call to the builtin
2958 function; if convenient, the result should be placed in TARGET. */
2960 static rtx
2961 expand_builtin_powi (tree exp, rtx target)
2963 tree arg0, arg1;
2964 rtx op0, op1;
2965 enum machine_mode mode;
2966 enum machine_mode mode2;
2968 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2969 return NULL_RTX;
2971 arg0 = CALL_EXPR_ARG (exp, 0);
2972 arg1 = CALL_EXPR_ARG (exp, 1);
2973 mode = TYPE_MODE (TREE_TYPE (exp));
2975 /* Emit a libcall to libgcc. */
2977 /* Mode of the 2nd argument must match that of an int. */
2978 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2980 if (target == NULL_RTX)
2981 target = gen_reg_rtx (mode);
2983 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2984 if (GET_MODE (op0) != mode)
2985 op0 = convert_to_mode (mode, op0, 0);
2986 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2987 if (GET_MODE (op1) != mode2)
2988 op1 = convert_to_mode (mode2, op1, 0);
2990 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2991 target, LCT_CONST, mode, 2,
2992 op0, mode, op1, mode2);
2994 return target;
2997 /* Expand expression EXP which is a call to the strlen builtin. Return
2998 NULL_RTX if we failed the caller should emit a normal call, otherwise
2999 try to get the result in TARGET, if convenient. */
3001 static rtx
3002 expand_builtin_strlen (tree exp, rtx target,
3003 enum machine_mode target_mode)
3005 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3006 return NULL_RTX;
3007 else
3009 struct expand_operand ops[4];
3010 rtx pat;
3011 tree len;
3012 tree src = CALL_EXPR_ARG (exp, 0);
3013 rtx src_reg, before_strlen;
3014 enum machine_mode insn_mode = target_mode;
3015 enum insn_code icode = CODE_FOR_nothing;
3016 unsigned int align;
3018 /* If the length can be computed at compile-time, return it. */
3019 len = c_strlen (src, 0);
3020 if (len)
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 /* If the length can be computed at compile-time and is constant
3024 integer, but there are side-effects in src, evaluate
3025 src for side-effects, then return len.
3026 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3027 can be optimized into: i++; x = 3; */
3028 len = c_strlen (src, 1)3028;
3029 if (len && TREE_CODE (len) == INTEGER_CST)
3031 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3032 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3035 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3037 /* If SRC is not a pointer type, don't do this operation inline. */
3038 if (align == 0)
3039 return NULL_RTX;
/* Search TARGET_MODE and successively wider modes for one the target's
   strlen insn pattern supports.  */
3041 /* Bail out if we can't compute strlen in the right mode. */
3042 while (insn_mode != VOIDmode)
3044 icode = optab_handler (strlen_optab, insn_mode);
3045 if (icode != CODE_FOR_nothing)
3046 break;
3048 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3050 if (insn_mode == VOIDmode)
3051 return NULL_RTX;
3053 /* Make a place to hold the source address. We will not expand
3054 the actual source until we are sure that the expansion will
3055 not fail -- there are trees that cannot be expanded twice. */
3056 src_reg = gen_reg_rtx (Pmode);
3058 /* Mark the beginning of the strlen sequence so we can emit the
3059 source operand later. */
3060 before_strlen = get_last_insn ();
3062 create_output_operand (&ops[0], target, insn_mode);
3063 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3064 create_integer_operand (&ops[2], 0);
3065 create_integer_operand (&ops[3], align);
3066 if (!maybe_expand_insn (icode, 4, ops))
3067 return NULL_RTX;
3069 /* Now that we are assured of success, expand the source. */
3070 start_sequence ();
3071 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3072 if (pat != src_reg)
3074 #ifdef POINTERS_EXTEND_UNSIGNED
3075 if (GET_MODE (pat) != Pmode)
3076 pat = convert_to_mode (Pmode, pat,
3077 POINTERS_EXTEND_UNSIGNED);
3078 #endif
3079 emit_move_insn (src_reg, pat);
3081 pat = get_insns ();
3082 end_sequence ();
/* Splice the source-address computation in front of the strlen insns
   emitted above.  */
3084 if (before_strlen)
3085 emit_insn_after (pat, before_strlen);
3086 else
3087 emit_insn_before (pat, get_insns ());
3089 /* Return the value in the proper mode for this function. */
3090 if (GET_MODE (ops[0].value) == target_mode)
3091 target = ops[0].value;
3092 else if (target != 0)
3093 convert_move (target, ops[0].value, 0);
3094 else
3095 target = convert_to_mode (target_mode, ops[0].value, 0);
3097 return target;
3101 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3102 bytes from constant string DATA + OFFSET and return it as target
3103 constant. */
3105 static rtx
3106 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3107 enum machine_mode mode)
3109 const char *str = (const char *) data;
3111 gcc_assert (offset >= 0
3112 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3113 <= strlen (str) + 1));
3115 return c_readstr (str + offset, mode);
3118 /* LEN specify length of the block of memcpy/memset operation.
3119 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3120 In some cases we can make very likely guess on max size, then we
3121 set it into PROBABLE_MAX_SIZE. */
3123 static void
3124 determine_block_size (tree len, rtx len_rtx,
3125 unsigned HOST_WIDE_INT *min_size,
3126 unsigned HOST_WIDE_INT *max_size,
3127 unsigned HOST_WIDE_INT *probable_max_size)
/* A compile-time constant length pins all three outputs exactly.  */
3129 if (CONST_INT_P (len_rtx))
3131 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3132 return;
3134 else
3136 double_int min, max;
3137 enum value_range_type range_type = VR_UNDEFINED;
3139 /* Determine bounds from the type. */
3140 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3141 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3142 else
3143 *min_size = 0;
3144 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3145 *probable_max_size = *max_size
3146 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3147 else
3148 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
/* Refine the type-derived bounds with value-range information
   recorded on the SSA name, if any.  */
3150 if (TREE_CODE (len) == SSA_NAME)
3151 range_type = get_range_info (len, &min, &max);
3152 if (range_type == VR_RANGE)
3154 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3155 *min_size = min.to_uhwi ();
3156 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3157 *probable_max_size = *max_size = max.to_uhwi ();
3159 else if (range_type == VR_ANTI_RANGE)
3161 /* Anti range 0...N lets us determine minimal size to N+1. */
3162 if (min.is_zero ())
3164 if ((max + double_int_one).fits_uhwi ())
3165 *min_size = (max + double_int_one).to_uhwi ();
3167 /* Code like
3169 int n;
3170 if (n < 100)
3171 memcpy (a, b, n)
3173 Produce anti range allowing negative values of N. We still
3174 can use the information and make a guess that N is not negative.
*/
3176 else if (!max.ule (double_int_one.lshift (30))
3177 && min.fits_uhwi ())
3178 *probable_max_size = min.to_uhwi () - 1;
/* The computed maximum must be representable in LEN_RTX's mode.  */
3181 gcc_checking_assert (*max_size <=
3182 (unsigned HOST_WIDE_INT)
3183 GET_MODE_MASK (GET_MODE (len_rtx)));
3186 /* Expand a call EXP to the memcpy builtin.
3187 Return NULL_RTX if we failed, the caller should emit a normal call,
3188 otherwise try to get the result in TARGET, if convenient (and in
3189 mode MODE if that's convenient). */
3191 static rtx
3192 expand_builtin_memcpy (tree exp, rtx target)
3194 if (!validate_arglist (exp,
3195 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3196 return NULL_RTX;
3197 else
3199 tree dest = CALL_EXPR_ARG (exp, 0);
3200 tree src = CALL_EXPR_ARG (exp, 1);
3201 tree len = CALL_EXPR_ARG (exp, 2);
3202 const char *src_str;
3203 unsigned int src_align = get_pointer_alignment (src);
3204 unsigned int dest_align = get_pointer_alignment (dest);
3205 rtx dest_mem, src_mem, dest_addr, len_rtx;
3206 HOST_WIDE_INT expected_size = -1;
3207 unsigned int expected_align = 0;
3208 unsigned HOST_WIDE_INT min_size;
3209 unsigned HOST_WIDE_INT max_size;
3210 unsigned HOST_WIDE_INT probable_max_size;
3212 /* If DEST is not a pointer type, call the normal function. */
3213 if (dest_align == 0)
3214 return NULL_RTX;
3216 /* If either SRC is not a pointer type, don't do this
3217 operation in-line. */
3218 if (src_align == 0)
3219 return NULL_RTX;
/* Profile feedback may supply a better alignment/size estimate for
   this particular call site.  */
3221 if (currently_expanding_gimple_stmt)
3222 stringop_block_profile (currently_expanding_gimple_stmt,
3223 &expected_align, &expected_size)3223;
3225 if (expected_align < dest_align)
3226 expected_align = dest_align;
3227 dest_mem = get_memory_rtx (dest, len);
3228 set_mem_align (dest_mem, dest_align);
3229 len_rtx = expand_normal (len);
3230 determine_block_size (len, len_rtx, &min_size, &max_size,
3231 &probable_max_size);
3232 src_str = c_getstr (src);
3234 /* If SRC is a string constant and block move would be done
3235 by pieces, we can avoid loading the string from memory
3236 and only stored the computed constants. */
3237 if (src_str
3238 && CONST_INT_P (len_rtx)
3239 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3240 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3241 CONST_CAST (char *, src_str),
3242 dest_align, false))
3244 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3245 builtin_memcpy_read_str,
3246 CONST_CAST (char *, src_str),
3247 dest_align, false, 0)3247;
3248 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3249 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3250 return dest_mem;
3253 src_mem = get_memory_rtx (src, len);
3254 set_mem_align (src_mem, src_align);
3256 /* Copy word part most expediently. */
3257 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3258 CALL_EXPR_TAILCALL (exp)
3259 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3260 expected_align, expected_size,
3261 min_size, max_size, probable_max_size);
/* memcpy returns DEST; materialize that address if the block move
   did not already produce it.  */
3263 if (dest_addr == 0)
3265 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3266 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3268 return dest_addr;
3272 /* Expand a call EXP to the mempcpy builtin.
3273 Return NULL_RTX if we failed; the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). If ENDP is 0 return the
3276 destination pointer, if ENDP is 1 return the end pointer ala
3277 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3278 stpcpy. */
3280 static rtx
3281 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3283 if (!validate_arglist (exp,
3284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3285 return NULL_RTX;
3286 else
3288 tree dest = CALL_EXPR_ARG (exp, 0);
3289 tree src = CALL_EXPR_ARG (exp, 1);
3290 tree len = CALL_EXPR_ARG (exp, 2);
3291 return expand_builtin_mempcpy_args (dest, src, len,
3292 target, mode, /*endp=*/ 1);
3296 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3297 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3298 so that this can also be called without constructing an actual CALL_EXPR.
3299 The other arguments and return value are the same as for
3300 expand_builtin_mempcpy. */
3302 static rtx
3303 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3304 rtx target, enum machine_mode mode, int endp)
3306 /* If return value is ignored, transform mempcpy into memcpy. */
3307 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3309 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3310 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3311 dest, src, len);
3312 return expand_expr (result, target, mode, EXPAND_NORMAL);
3314 else
3316 const char *src_str;
3317 unsigned int src_align = get_pointer_alignment (src);
3318 unsigned int dest_align = get_pointer_alignment (dest);
3319 rtx dest_mem, src_mem, len_rtx;
3321 /* If either SRC or DEST is not a pointer type, don't do this
3322 operation in-line. */
3323 if (dest_align == 0 || src_align == 0)
3324 return NULL_RTX;
3326 /* If LEN is not constant, call the normal function. */
3327 if (! tree_fits_uhwi_p (len))
3328 return NULL_RTX;
3330 len_rtx = expand_normal (len);
3331 src_str = c_getstr (src);
3333 /* If SRC is a string constant and block move would be done
3334 by pieces, we can avoid loading the string from memory
3335 and only stored the computed constants. */
3336 if (src_str
3337 && CONST_INT_P (len_rtx)
3338 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3339 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3340 CONST_CAST (char *, src_str),
3341 dest_align, false))
3343 dest_mem = get_memory_rtx (dest, len);
3344 set_mem_align (dest_mem, dest_align);
/* ENDP selects whether store_by_pieces yields the start pointer,
   the end pointer, or end minus one.  */
3345 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3346 builtin_memcpy_read_str,
3347 CONST_CAST (char *, src_str),
3348 dest_align, false, endp);
3349 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3350 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3351 return dest_mem;
/* Otherwise try a piecewise move when the constant length and the
   common alignment permit it.  */
3354 if (CONST_INT_P (len_rtx)
3355 && can_move_by_pieces (INTVAL (len_rtx),
3356 MIN (dest_align, src_align)))
3358 dest_mem = get_memory_rtx (dest, len);
3359 set_mem_align (dest_mem, dest_align);
3360 src_mem = get_memory_rtx (src, len);
3361 set_mem_align (src_mem, src_align);
3362 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3363 MIN (dest_align, src_align), endp);
3364 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3365 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3366 return dest_mem;
3369 return NULL_RTX;
/* Provide fall-back definitions so the code below compiles on targets
   without a movstr insn pattern.  */
3373 #ifndef HAVE_movstr
3374 # define HAVE_movstr 0
3375 # define CODE_FOR_movstr CODE_FOR_nothing
3376 #endif
3378 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3379 we failed, the caller should emit a normal call, otherwise try to
3380 get the result in TARGET, if convenient. If ENDP is 0 return the
3381 destination pointer, if ENDP is 1 return the end pointer ala
3382 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3383 stpcpy. */
3385 static rtx
3386 expand_movstr (tree dest, tree src, rtx target, int endp)
3388 struct expand_operand ops[3];
3389 rtx dest_mem;
3390 rtx src_mem;
3392 if (!HAVE_movstr)
3393 return NULL_RTX;
3395 dest_mem = get_memory_rtx (dest, NULL);
3396 src_mem = get_memory_rtx (src, NULL);
/* For a strcpy-style result (ENDP == 0) the caller wants the original
   destination address back; pin it in a register up front.  */
3397 if (!endp)
3399 target = force_reg (Pmode, XEXP (dest_mem, 0));
3400 dest_mem = replace_equiv_address (dest_mem, target);
3403 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3404 create_fixed_operand (&ops[1], dest_mem);
3405 create_fixed_operand (&ops[2], src_mem);
3406 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3407 return NULL_RTX;
3409 if (endp && target != const0_rtx)
3411 target = ops[0].value;
3412 /* movstr is supposed to set end to the address of the NUL
3413 terminator. If the caller requested a mempcpy-like return value,
3414 adjust it. */
3415 if (endp == 1)
3417 rtx tem = plus_constant (GET_MODE (target),
3418 gen_lowpart (GET_MODE (target), target), 1);
3419 emit_move_insn (target, force_operand (tem, NULL_RTX));
3422 return target;
3425 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3426 NULL_RTX if we failed the caller should emit a normal call, otherwise
3427 try to get the result in TARGET, if convenient (and in mode MODE if that's
3428 convenient). */
3430 static rtx
3431 expand_builtin_strcpy (tree exp, rtx target)
3433 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3435 tree dest = CALL_EXPR_ARG (exp, 0);
3436 tree src = CALL_EXPR_ARG (exp, 1);
3437 return expand_builtin_strcpy_args (dest, src, target);
3439 return NULL_RTX;
3442 /* Helper function to do the actual work for expand_builtin_strcpy. The
3443 arguments to the builtin_strcpy call DEST and SRC are broken out
3444 so that this can also be called without constructing an actual CALL_EXPR.
3445 The other arguments and return value are the same as for
3446 expand_builtin_strcpy. */
3448 static rtx
3449 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3451 return expand_movstr (dest, src, target, /*endp=*/0);
3454 /* Expand a call EXP to the stpcpy builtin.
3455 Return NULL_RTX if we failed the caller should emit a normal call,
3456 otherwise try to get the result in TARGET, if convenient (and in
3457 mode MODE if that's convenient). */
3459 static rtx
3460 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3462 tree dst, src;
3463 location_t loc = EXPR_LOCATION (exp);
3465 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3466 return NULL_RTX;
3468 dst = CALL_EXPR_ARG (exp, 0);
3469 src = CALL_EXPR_ARG (exp, 1);
3471 /* If return value is ignored, transform stpcpy into strcpy. */
3472 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3474 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3475 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3476 return expand_expr (result, target, mode, EXPAND_NORMAL);
3478 else
3480 tree len, lenp1;
3481 rtx ret;
3483 /* Ensure we get an actual string whose length can be evaluated at
3484 compile-time, not an expression containing a string. This is
3485 because the latter will potentially produce pessimized code
3486 when used to produce the return value. */
3487 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3488 return expand_movstr (dst, src, target, /*endp=*/2);
/* With a known source length, stpcpy (d, s) is mempcpy (d, s,
   strlen (s) + 1) minus one; try that expansion first.  */
3490 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3491 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3492 target, mode, /*endp=*/2);
3494 if (ret)
3495 return ret;
/* Otherwise expand as strcpy and compute the end pointer by adding
   the constant length to the returned destination address.  */
3497 if (TREE_CODE (len) == INTEGER_CST)
3499 rtx len_rtx = expand_normal (len);
3501 if (CONST_INT_P (len_rtx))
3503 ret = expand_builtin_strcpy_args (dst, src, target);
3505 if (ret)
3507 if (! target)
3509 if (mode != VOIDmode)
3510 target = gen_reg_rtx (mode);
3511 else
3512 target = gen_reg_rtx (GET_MODE (ret));
3514 if (GET_MODE (target) != GET_MODE (ret))
3515 ret = gen_lowpart (GET_MODE (target), ret);
3517 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3518 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3519 gcc_assert (ret);
3521 return target;
/* Last resort: a movstr insn returning the NUL address (ENDP == 2).  */
3526 return expand_movstr (dst, src, target, /*endp=*/2);
3530 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3531 bytes from constant string DATA + OFFSET and return it as target
3532 constant. */
3535 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3536 enum machine_mode mode)
3538 const char *str = (const char *) data;
/* Past the end of the string strncpy pads with zeros, so reads beyond
   the terminating NUL yield a zero constant.  */
3540 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3541 return const0_rtx;
3543 return c_readstr (str + offset, mode);
3546 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3547 NULL_RTX if we failed the caller should emit a normal call. */
3549 static rtx
3550 expand_builtin_strncpy (tree exp, rtx target)
3552 location_t loc = EXPR_LOCATION (exp);
3554 if (validate_arglist (exp,
3555 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3557 tree dest = CALL_EXPR_ARG (exp, 0);
3558 tree src = CALL_EXPR_ARG (exp, 1);
3559 tree len = CALL_EXPR_ARG (exp, 2);
3560 tree slen = c_strlen (src, 1);
3562 /* We must be passed a constant len and src parameter. */
3563 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3564 return NULL_RTX;
/* SLEN becomes strlen (src) + 1, i.e. the number of bytes the copy
   actually takes from SRC including the terminating NUL.  */
3566 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3568 /* We're required to pad with trailing zeros if the requested
3569 len is greater than strlen(s2)+1. In that case try to
3570 use store_by_pieces, if it fails, punt. */
3571 if (tree_int_cst_lt (slen, len))
3573 unsigned int dest_align = get_pointer_alignment (dest);
3574 const char *p = c_getstr (src);
3575 rtx dest_mem;
3577 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3578 || !can_store_by_pieces (tree_to_uhwi (len),
3579 builtin_strncpy_read_str,
3580 CONST_CAST (char *, p),
3581 dest_align, false))
3582 return NULL_RTX;
/* builtin_strncpy_read_str supplies zero constants past the end of
   P, giving the required zero padding.  */
3584 dest_mem = get_memory_rtx (dest, len);
3585 store_by_pieces (dest_mem, tree_to_uhwi (len),
3586 builtin_strncpy_read_str,
3587 CONST_CAST (char *, p), dest_align, false, 0);
3588 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3589 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3590 return dest_mem;
3593 return NULL_RTX;
3596 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3597 bytes from constant string DATA + OFFSET and return it as target
3598 constant. */
3601 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3602 enum machine_mode mode)
3604 const char *c = (const char *) data;
/* DATA points at the single fill byte; replicate it across a buffer of
   MODE's width and read it back as an RTL constant.  */
3605 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3607 memset (p, *c, GET_MODE_SIZE (mode));
3609 return c_readstr (p, mode);
3612 /* Callback routine for store_by_pieces. Return the RTL of a register
3613 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3614 char value given in the RTL register data. For example, if mode is
3615 4 bytes wide, return the RTL for 0x01010101*data. */
3617 static rtx
3618 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3619 enum machine_mode mode)
3621 rtx target, coeff;
3622 size_t size;
3623 char *p;
3625 size = GET_MODE_SIZE (mode);
3626 if (size == 1)
3627 return (rtx) data;
3629 p = XALLOCAVEC (char, size);
3630 memset (p, 1, size);
3631 coeff = c_readstr (p, mode);
3633 target = convert_to_mode (mode, (rtx) data, 1);
3634 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3635 return force_reg (mode, target);
3638 /* Expand expression EXP, which is a call to the memset builtin. Return
3639 NULL_RTX if we failed the caller should emit a normal call, otherwise
3640 try to get the result in TARGET, if convenient (and in mode MODE if that's
3641 convenient). */
3643 static rtx
3644 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3646 if (!validate_arglist (exp,
3647 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3648 return NULL_RTX;
3649 else
3651 tree dest = CALL_EXPR_ARG (exp, 0);
3652 tree val = CALL_EXPR_ARG (exp, 1);
3653 tree len = CALL_EXPR_ARG (exp, 2);
3654 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3658 /* Helper function to do the actual work for expand_builtin_memset. The
3659 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3660 so that this can also be called without constructing an actual CALL_EXPR.
3661 The other arguments and return value are the same as for
3662 expand_builtin_memset. */
3664 static rtx
3665 expand_builtin_memset_args (tree dest, tree val, tree len,
3666 rtx target, enum machine_mode mode, tree orig_exp)
3668 tree fndecl, fn;
3669 enum built_in_function fcode;
3670 enum machine_mode val_mode;
3671 char c;
3672 unsigned int dest_align;
3673 rtx dest_mem, dest_addr, len_rtx;
3674 HOST_WIDE_INT expected_size = -1;
3675 unsigned int expected_align = 0;
3676 unsigned HOST_WIDE_INT min_size;
3677 unsigned HOST_WIDE_INT max_size;
3678 unsigned HOST_WIDE_INT probable_max_size;
3680 dest_align = get_pointer_alignment (dest);
3682 /* If DEST is not a pointer type, don't do this operation in-line. */
3683 if (dest_align == 0)
3684 return NULL_RTX;
/* Profile feedback may refine the alignment/size expectation for this
   call site.  */
3686 if (currently_expanding_gimple_stmt)
3687 stringop_block_profile (currently_expanding_gimple_stmt,
3688 &expected_align, &expected_size);
3690 if (expected_align < dest_align)
3691 expected_align = dest_align;
3693 /* If the LEN parameter is zero, return DEST. */
3694 if (integer_zerop (len))
3696 /* Evaluate and ignore VAL in case it has side-effects. */
3697 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3698 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3701 /* Stabilize the arguments in case we fail. */
3702 dest = builtin_save_expr (dest);
3703 val = builtin_save_expr (val);
3704 len = builtin_save_expr (len);
3706 len_rtx = expand_normal (len);
3707 determine_block_size (len, len_rtx, &min_size, &max_size,
3708 &probable_max_size);
3709 dest_mem = get_memory_rtx (dest, len);
3710 val_mode = TYPE_MODE (unsigned_char_type_node);
/* Non-constant fill value: replicate it at run time.  */
3712 if (TREE_CODE (val) != INTEGER_CST)
3714 rtx val_rtx;
3716 val_rtx = expand_normal (val);
3717 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3719 /* Assume that we can memset by pieces if we can store
3720 * the coefficients by pieces (in the required modes).
3721 * We can't pass builtin_memset_gen_str as that emits RTL. */
3722 c = 1;
3723 if (tree_fits_uhwi_p (len)
3724 && can_store_by_pieces (tree_to_uhwi (len),
3725 builtin_memset_read_str, &c, dest_align,
3726 true))
3728 val_rtx = force_reg (val_mode, val_rtx);
3729 store_by_pieces (dest_mem, tree_to_uhwi (len),
3730 builtin_memset_gen_str, val_rtx, dest_align,
3731 true, 0);
3733 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3734 dest_align, expected_align,
3735 expected_size, min_size, max_size,
3736 probable_max_size))
3737 goto do_libcall;
3739 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3740 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3741 return dest_mem;
/* Constant fill value: fold it to a host char, or punt to a libcall if
   it does not fit.  */
3744 if (target_char_cast (val, &c))
3745 goto do_libcall;
/* Nonzero constant byte: store by pieces or via the setmem pattern.  */
3747 if (c)
3749 if (tree_fits_uhwi_p (len)
3750 && can_store_by_pieces (tree_to_uhwi (len),
3751 builtin_memset_read_str, &c, dest_align,
3752 true))
3753 store_by_pieces (dest_mem, tree_to_uhwi (len),
3754 builtin_memset_read_str, &c, dest_align, true, 0);
3755 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3756 gen_int_mode (c, val_mode),
3757 dest_align, expected_align,
3758 expected_size, min_size, max_size,
3759 probable_max_size))
3760 goto do_libcall;
3762 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3763 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3764 return dest_mem;
/* Zero fill value: use the block-clear expander.  */
3767 set_mem_align (dest_mem, dest_align);
3768 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3769 CALL_EXPR_TAILCALL (orig_exp)
3770 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3771 expected_align, expected_size,
3772 min_size, max_size,
3773 probable_max_size);
3775 if (dest_addr == 0)
3777 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3778 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3781 return dest_addr;
/* Inline expansion failed: rebuild a call to the original builtin
   (memset or bzero) on the stabilized arguments and emit it.  */
3783 do_libcall:
3784 fndecl = get_callee_fndecl (orig_exp);
3785 fcode = DECL_FUNCTION_CODE (fndecl);
3786 if (fcode == BUILT_IN_MEMSET)
3787 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3788 dest, val, len);
3789 else if (fcode == BUILT_IN_BZERO)
3790 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3791 dest, len);
3792 else
3793 gcc_unreachable ();
3794 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3795 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3796 return expand_call (fn, target, target == const0_rtx);
3799 /* Expand expression EXP, which is a call to the bzero builtin. Return
3800 NULL_RTX if we failed the caller should emit a normal call. */
3802 static rtx
3803 expand_builtin_bzero (tree exp)
3805 tree dest, size;
3806 location_t loc = EXPR_LOCATION (exp);
3808 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3809 return NULL_RTX;
3811 dest = CALL_EXPR_ARG (exp, 0);
3812 size = CALL_EXPR_ARG (exp, 1);
3814 /* New argument list transforming bzero(ptr x, int y) to
3815 memset(ptr x, int 0, size_t y). This is done this way
3816 so that if it isn't expanded inline, we fallback to
3817 calling bzero instead of memset. */
3819 return expand_builtin_memset_args (dest, integer_zero_node,
3820 fold_convert_loc (loc,
3821 size_type_node, size),
3822 const0_rtx, VOIDmode, exp);
3825 /* Expand expression EXP, which is a call to the memcmp built-in function.
3826 Return NULL_RTX if we failed and the caller should emit a normal call,
3827 otherwise try to get the result in TARGET, if convenient (and in mode
3828 MODE, if that's convenient). */
3830 static rtx
3831 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3832 ATTRIBUTE_UNUSED enum machine_mode mode)
3834 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3836 if (!validate_arglist (exp,
3837 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3838 return NULL_RTX;
3840 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3841 implementing memcmp because it will stop if it encounters two
3842 zero bytes. */
3843 #if defined HAVE_cmpmemsi
3845 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3846 rtx result;
3847 rtx insn;
3848 tree arg1 = CALL_EXPR_ARG (exp, 0);
3849 tree arg2 = CALL_EXPR_ARG (exp, 1);
3850 tree len = CALL_EXPR_ARG (exp, 2);
3852 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3853 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3854 enum machine_mode insn_mode;
3856 if (HAVE_cmpmemsi)
3857 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3858 else
3859 return NULL_RTX;
3861 /* If we don't have POINTER_TYPE, call the function. */
3862 if (arg1_align == 0 || arg2_align == 0)
3863 return NULL_RTX;
3865 /* Make a place to write the result of the instruction. */
3866 result = target;
3867 if (! (result != 0
3868 && REG_P (result) && GET_MODE (result) == insn_mode
3869 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3870 result = gen_reg_rtx (insn_mode);
3872 arg1_rtx = get_memory_rtx (arg1, len);
3873 arg2_rtx = get_memory_rtx (arg2, len);
3874 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3876 /* Set MEM_SIZE as appropriate. */
3877 if (CONST_INT_P (arg3_rtx))
3879 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3880 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3883 if (HAVE_cmpmemsi)
3884 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3885 GEN_INT (MIN (arg1_align, arg2_align)));
3886 else
3887 gcc_unreachable ();
/* If the insn pattern declined to expand, fall back to an explicit
   libcall to memcmp with the already-expanded operands.  */
3889 if (insn)
3890 emit_insn (insn);
3891 else
3892 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3893 TYPE_MODE (integer_type_node), 3,
3894 XEXP (arg1_rtx, 0), Pmode,
3895 XEXP (arg2_rtx, 0), Pmode,
3896 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3897 TYPE_UNSIGNED (sizetype)),
3898 TYPE_MODE (sizetype));
3900 /* Return the value in the proper mode for this function. */
3901 mode = TYPE_MODE (TREE_TYPE (exp));
3902 if (GET_MODE (result) == mode)
3903 return result;
3904 else if (target != 0)
3906 convert_move (target, result, 0);
3907 return target;
3909 else
3910 return convert_to_mode (mode, result, 0);
3912 #endif /* HAVE_cmpmemsi. */
3914 return NULL_RTX;
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */
static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  An alignment
	 of zero means the argument's alignment in bytes could not be
	 determined at all, so fall back to a library call.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  A
	 SAVE_EXPR guarantees each argument is evaluated at most once
	 even though it is referenced again in the libcall fallback.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.
	     Reuse TARGET only if it is already a suitable pseudo of
	     the mode the insn produces.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Add one to each length to cover the terminating NUL.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */
static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Add one to each known length to cover the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  Zero
	 alignment means the argument's byte alignment is unknown.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4179 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4180 if that's convenient. */
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache the result so later calls in this function reuse it.  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4216 /* Expand a call to __builtin_next_arg. */
4218 static rtx
4219 expand_builtin_next_arg (void)
4221 /* Checking arguments is already done in fold_builtin_next_arg
4222 that must be called before this function. */
4223 return expand_binop (ptr_mode, add_optab,
4224 crtl->args.internal_arg_pointer,
4225 crtl->args.arg_offset_rtx,
4226 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4229 /* Make it easier for the backends by protecting the valist argument
4230 from multiple evaluations. */
static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* An rvalue with no side effects can be used directly.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-dereference the stabilized address so callers see an
	 object of the canonical va_list type.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4279 /* The "standard" definition of va_list is void*. */
tree
std_build_builtin_va_list (void)
{
  /* The "standard" ABI represents va_list as a plain void pointer.  */
  return ptr_type_node;
}
4287 /* The "standard" abi va_list is va_list_type_node. */
tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  /* The per-function ABI va_list does not depend on FNDECL here.  */
  return va_list_type_node;
}
4295 /* The "standard" type of va_list is va_list_type_node. */
tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: the argument may be a reference
     or a pointer to the actual va_list object.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* TYPE is canonical iff it matches va_list_type_node's main variant.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4330 /* The "standard" implementation of va_start: just assign `nextarg' to
4331 the variable. */
void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a writable lvalue and store NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4340 /* Expand EXP, a call to __builtin_va_start. */
static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  /* va_start takes the va_list and the last named parameter.  */
  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
     return means an error was reported, so expand to nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start if it has a hook; otherwise use
     the standard void* implementation.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4369 /* Expand EXP, a call to __builtin_va_end. */
static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  va_end itself needs no code on any target here.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4384 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4385 builtin rather than just as an assignment in stdarg.h because of the
4386 nastiness of array-type va_list types. */
static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination needs an lvalue; the source only a value.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4436 /* Expand a call to one of the builtin functions __builtin_frame_address or
4437 __builtin_return_address. */
static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy a non-register, non-constant
	 address into a register before handing it back.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
4483 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4484 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4485 is the same as for allocate_dynamic_stack_space. */
static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  /* __builtin_alloca_with_align carries the alignment as a second
     integer argument; plain alloca does not.  */
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
4520 /* Expand a call to bswap builtin in EXP.
4521 Return NULL_RTX if a normal call should be emitted rather than expanding the
4522 function in-line. If convenient, the result should be placed in TARGET.
4523 SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Reuse SUBTARGET for the operand only when its mode matches.  */
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
4550 /* Expand a call to a unary builtin in EXP.
4551 Return NULL_RTX if a normal call should be emitted rather than expanding the
4552 function in-line. If convenient, the result should be placed in TARGET.
4553 SUBTARGET may be used as the target for computing one of EXP's operands. */
static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument, reusing SUBTARGET only if its mode matches
     the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The final flag is
     UNOPTAB_HANDLE-style "trapv"/safety argument: clrsb is the one op
     here that must not be expanded in the widened mode.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4579 /* Expand a call to __builtin_expect. We just return our argument
4580 as the builtin_expect semantic should've been already executed by
4581 tree branch prediction pass. */
4583 static rtx
4584 expand_builtin_expect (tree exp, rtx target)
4586 tree arg;
4588 if (call_expr_nargs (exp) < 2)
4589 return const0_rtx;
4590 arg = CALL_EXPR_ARG (exp, 0);
4592 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4593 /* When guessing was done, the hints should be already stripped away. */
4594 gcc_assert (!flag_guess_branch_prob
4595 || optimize == 0 || seen_error ());
4596 return target;
4599 /* Expand a call to __builtin_assume_aligned. We just return our first
4600 argument as the builtin_assume_aligned semantic should've been already
4601 executed by CCP. */
4603 static rtx
4604 expand_builtin_assume_aligned (tree exp, rtx target)
4606 if (call_expr_nargs (exp) < 2)
4607 return const0_rtx;
4608 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4609 EXPAND_NORMAL);
4610 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4611 && (call_expr_nargs (exp) < 3
4612 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4613 return target;
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    /* No trap insn on this target: fall back to calling abort().  */
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  /* Control never continues past a trap.  */
  emit_barrier ();
}
4635 /* Expand a call to __builtin_unreachable. We do nothing except emit
4636 a barrier saying that control flow will not pass here.
4638 It is the responsibility of the program being compiled to ensure
4639 that control flow does never reach __builtin_unreachable. */
static void
expand_builtin_unreachable (void)
{
  /* Emit only a barrier: control flow must never reach this point.  */
  emit_barrier ();
}
4646 /* Expand EXP, a call to fabs, fabsf or fabsl.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding
4648 the function inline. If convenient, the result should be placed
4649 in TARGET. SUBTARGET may be used as the target for computing
4650 the operand. */
static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Note: this intentionally writes the SAVE_EXPR back into the
     CALL_EXPR so a later re-expansion of EXP sees the stabilized
     argument and does not evaluate it twice.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */
4674 static rtx
4675 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4677 rtx op0, op1;
4678 tree arg;
4680 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4681 return NULL_RTX;
4683 arg = CALL_EXPR_ARG (exp, 0);
4684 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4686 arg = CALL_EXPR_ARG (exp, 1);
4687 op1 = expand_normal (arg);
4689 return expand_copysign (op0, op1, target);
4692 /* Create a new constant string literal and return a char* pointer to it.
4693 The STRING_CST value is the LEN characters at STR. */
4694 tree
4695 build_string_literal (int len, const char *str)
4697 tree t, elem, index, type;
4699 t = build_string (len, str);
4700 elem = build_type_variant (char_type_node, 1, 0);
4701 index = build_index_type (size_int (len - 1));
4702 type = build_array_type (elem, index);
4703 TREE_TYPE (t) = type;
4704 TREE_CONSTANT (t) = 1;
4705 TREE_READONLY (t) = 1;
4706 TREE_STATIC (t) = 1;
4708 type = build_pointer_type (elem);
4709 t = build1 (ADDR_EXPR, type,
4710 build4 (ARRAY_REF, elem,
4711 t, integer_zero_node, NULL_TREE, NULL_TREE));
4712 return t;
4715 /* Expand a call to __builtin___clear_cache. */
static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* Even if the insn predicate rejected the operands, the builtin
     expands to nothing — the insn exists, so libgcc is not needed.  */
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4764 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary:
     tramp = (tramp + align - 1) & -align.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
/* Expand a call to __builtin_init_trampoline (or the heap variant when
   ONSTACK is false): fill in the trampoline at arg 0 so it calls the
   nested function at arg 1 with static chain arg 2.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that an executable-stack trampoline exists and warn,
	 since this affects stack executability.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4846 static rtx
4847 expand_builtin_adjust_trampoline (tree exp)
4849 rtx tramp;
4851 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4852 return NULL_RTX;
4854 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4855 tramp = round_trampoline_addr (tramp);
4856 if (targetm.calls.trampoline_adjust_address)
4857 tramp = targetm.calls.trampoline_adjust_address (tramp);
4859 return tramp;
4862 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4863 function. The function first checks whether the back end provides
4864 an insn to implement signbit for the respective mode. If not, it
4865 checks whether the floating point format of the value is such that
4866 the sign bit can be extracted. If that is not the case, the
4867 function returns NULL_RTX to indicate that a normal call should be
4868 emitted rather than expanding the function in-line. EXP is the
4869 expression that is a call to the builtin function; if convenient,
4870 the result should be placed in TARGET. */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard anything it emitted.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as an integer of the
	 corresponding mode.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_zero.set_bit (bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4973 /* Expand fork or exec calls. TARGET is the desired target of the
4974 call. EXP is the call. FN is the
4975 identificator of the actual function. IGNORE is nonzero if the
4976 value is to be ignored. */
4978 static rtx
4979 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4981 tree id, decl;
4982 tree call;
4984 /* If we are not profiling, just call the function. */
4985 if (!profile_arc_flag)
4986 return NULL_RTX;
4988 /* Otherwise call the wrapper. This should be equivalent for the rest of
4989 compiler, so the code does not diverge, and the wrapper may run the
4990 code necessary for keeping the profiling sane. */
4992 switch (DECL_FUNCTION_CODE (fn))
4994 case BUILT_IN_FORK:
4995 id = get_identifier ("__gcov_fork");
4996 break;
4998 case BUILT_IN_EXECL:
4999 id = get_identifier ("__gcov_execl");
5000 break;
5002 case BUILT_IN_EXECV:
5003 id = get_identifier ("__gcov_execv");
5004 break;
5006 case BUILT_IN_EXECLP:
5007 id = get_identifier ("__gcov_execlp");
5008 break;
5010 case BUILT_IN_EXECLE:
5011 id = get_identifier ("__gcov_execle");
5012 break;
5014 case BUILT_IN_EXECVP:
5015 id = get_identifier ("__gcov_execvp");
5016 break;
5018 case BUILT_IN_EXECVE:
5019 id = get_identifier ("__gcov_execve");
5020 break;
5022 default:
5023 gcc_unreachable ();
5026 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5027 FUNCTION_DECL, id, TREE_TYPE (fn));
5028 DECL_EXTERNAL (decl) = 1;
5029 TREE_PUBLIC (decl) = 1;
5030 DECL_ARTIFICIAL (decl) = 1;
5031 TREE_NOTHROW (decl) = 1;
5032 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5033 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5034 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5035 return expand_call (call, target, ignore);
5040 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5041 the pointer in these functions is void*, the tree optimizers may remove
5042 casts. The mode computed in expand_builtin isn't reliable either, due
5043 to __sync_bool_compare_and_swap.
5045 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5046 group of builtins. This gives us log2 of the mode size. */
5048 static inline enum machine_mode
5049 get_builtin_sync_mode (int fcode_diff)
5051 /* The size is not negotiable, so ask not to get BLKmode in return
5052 if the target indicates that a smaller size would be better. */
5053 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5056 /* Expand the memory expression LOC and return the appropriate memory operand
5057 for the builtin_sync operations. */
5059 static rtx
5060 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5062 rtx addr, mem;
5064 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5065 addr = convert_memory_address (Pmode, addr);
5067 /* Note that we explicitly do not want any alias information for this
5068 memory, so that we kill all other live memories. Otherwise we don't
5069 satisfy the full barrier semantics of the intrinsic. */
5070 mem = validize_mem (gen_rtx_MEM (mode, addr));
5072 /* The alignment needs to be at least according to that of the mode. */
5073 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5074 get_pointer_alignment (loc)));
5075 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5076 MEM_VOLATILE_P (mem) = 1;
5078 return mem;
5081 /* Make sure an argument is in the right mode.
5082 EXP is the tree argument.
5083 MODE is the mode it should be in. */
5085 static rtx
5086 expand_expr_force_mode (tree exp, enum machine_mode mode)
5088 rtx val;
5089 enum machine_mode old_mode;
5091 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5092 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5093 of CONST_INTs, where we know the old_mode only from the call argument. */
5095 old_mode = GET_MODE (val);
5096 if (old_mode == VOIDmode)
5097 old_mode = TYPE_MODE (TREE_TYPE (exp));
5098 val = convert_modes (mode, old_mode, val, 1);
5099 return val;
5103 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5104 EXP is the CALL_EXPR. CODE is the rtx code
5105 that corresponds to the arithmetic or logical operation from the name;
5106 an exception here is that NOT actually means NAND. TARGET is an optional
5107 place for us to store the results; AFTER is true if this is the
5108 fetch_and_xxx form. */
5110 static rtx
5111 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5112 enum rtx_code code, bool after,
5113 rtx target)
5115 rtx val, mem;
5116 location_t loc = EXPR_LOCATION (exp);
5118 if (code == NOT && warn_sync_nand)
5120 tree fndecl = get_callee_fndecl (exp);
5121 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5123 static bool warned_f_a_n, warned_n_a_f;
5125 switch (fcode)
5127 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5128 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5129 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5130 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5131 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5132 if (warned_f_a_n)
5133 break;
5135 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5136 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5137 warned_f_a_n = true;
5138 break;
5140 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5141 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5142 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5143 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5144 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5145 if (warned_n_a_f)
5146 break;
5148 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5149 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5150 warned_n_a_f = true;
5151 break;
5153 default:
5154 gcc_unreachable ();
5158 /* Expand the operands. */
5159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5160 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5162 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5163 after);
5166 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5167 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5168 true if this is the boolean form. TARGET is a place for us to store the
5169 results; this is NOT optional if IS_BOOL is true. */
5171 static rtx
5172 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5173 bool is_bool, rtx target)
5175 rtx old_val, new_val, mem;
5176 rtx *pbool, *poval;
5178 /* Expand the operands. */
5179 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5180 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5181 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5183 pbool = poval = NULL;
5184 if (target != const0_rtx)
5186 if (is_bool)
5187 pbool = &target;
5188 else
5189 poval = &target;
5191 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5192 false, MEMMODEL_SEQ_CST,
5193 MEMMODEL_SEQ_CST))
5194 return NULL_RTX;
5196 return target;
5199 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5200 general form is actually an atomic exchange, and some targets only
5201 support a reduced form with the second argument being a constant 1.
5202 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5203 the results. */
5205 static rtx
5206 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5207 rtx target)
5209 rtx val, mem;
5211 /* Expand the operands. */
5212 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5213 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5215 return expand_sync_lock_test_and_set (target, mem, val);
5218 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5220 static void
5221 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5223 rtx mem;
5225 /* Expand the operands. */
5226 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5231 /* Given an integer representing an ``enum memmodel'', verify its
5232 correctness and return the memory model enum. */
5234 static enum memmodel
5235 get_memmodel (tree exp)
5237 rtx op;
5238 unsigned HOST_WIDE_INT val;
5240 /* If the parameter is not a constant, it's a run time value so we'll just
5241 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5242 if (TREE_CODE (exp) != INTEGER_CST)
5243 return MEMMODEL_SEQ_CST;
5245 op = expand_normal (exp);
5247 val = INTVAL (op);
5248 if (targetm.memmodel_check)
5249 val = targetm.memmodel_check (val);
5250 else if (val & ~MEMMODEL_MASK)
5252 warning (OPT_Winvalid_memory_model,
5253 "Unknown architecture specifier in memory model to builtin.");
5254 return MEMMODEL_SEQ_CST;
5257 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5259 warning (OPT_Winvalid_memory_model,
5260 "invalid memory model argument to builtin");
5261 return MEMMODEL_SEQ_CST;
5264 return (enum memmodel) val;
5267 /* Expand the __atomic_exchange intrinsic:
5268 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5269 EXP is the CALL_EXPR.
5270 TARGET is an optional place for us to store the results. */
5272 static rtx
5273 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5275 rtx val, mem;
5276 enum memmodel model;
5278 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5279 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5281 error ("invalid memory model for %<__atomic_exchange%>");
5282 return NULL_RTX;
5285 if (!flag_inline_atomics)
5286 return NULL_RTX;
5288 /* Expand the operands. */
5289 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5290 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5292 return expand_atomic_exchange (target, mem, val, model);
5295 /* Expand the __atomic_compare_exchange intrinsic:
5296 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5297 TYPE desired, BOOL weak,
5298 enum memmodel success,
5299 enum memmodel failure)
5300 EXP is the CALL_EXPR.
5301 TARGET is an optional place for us to store the results. */
5303 static rtx
5304 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5305 rtx target)
5307 rtx expect, desired, mem, oldval, label;
5308 enum memmodel success, failure;
5309 tree weak;
5310 bool is_weak;
5312 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5313 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5315 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5316 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5318 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5319 return NULL_RTX;
5322 if (failure > success)
5324 error ("failure memory model cannot be stronger than success "
5325 "memory model for %<__atomic_compare_exchange%>");
5326 return NULL_RTX;
5329 if (!flag_inline_atomics)
5330 return NULL_RTX;
5332 /* Expand the operands. */
5333 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5335 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5336 expect = convert_memory_address (Pmode, expect);
5337 expect = gen_rtx_MEM (mode, expect);
5338 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5340 weak = CALL_EXPR_ARG (exp, 3);
5341 is_weak = false;
5342 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5343 is_weak = true;
5345 if (target == const0_rtx)
5346 target = NULL;
5348 /* Lest the rtl backend create a race condition with an imporoper store
5349 to memory, always create a new pseudo for OLDVAL. */
5350 oldval = NULL;
5352 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5353 is_weak, success, failure))
5354 return NULL_RTX;
5356 /* Conditionally store back to EXPECT, lest we create a race condition
5357 with an improper store to memory. */
5358 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5359 the normal case where EXPECT is totally private, i.e. a register. At
5360 which point the store can be unconditional. */
5361 label = gen_label_rtx ();
5362 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5363 emit_move_insn (expect, oldval);
5364 emit_label (label);
5366 return target;
5369 /* Expand the __atomic_load intrinsic:
5370 TYPE __atomic_load (TYPE *object, enum memmodel)
5371 EXP is the CALL_EXPR.
5372 TARGET is an optional place for us to store the results. */
5374 static rtx
5375 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5377 rtx mem;
5378 enum memmodel model;
5380 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5381 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5382 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5384 error ("invalid memory model for %<__atomic_load%>");
5385 return NULL_RTX;
5388 if (!flag_inline_atomics)
5389 return NULL_RTX;
5391 /* Expand the operand. */
5392 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5394 return expand_atomic_load (target, mem, model);
5398 /* Expand the __atomic_store intrinsic:
5399 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5400 EXP is the CALL_EXPR.
5401 TARGET is an optional place for us to store the results. */
5403 static rtx
5404 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5406 rtx mem, val;
5407 enum memmodel model;
5409 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5410 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5411 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5412 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5414 error ("invalid memory model for %<__atomic_store%>");
5415 return NULL_RTX;
5418 if (!flag_inline_atomics)
5419 return NULL_RTX;
5421 /* Expand the operands. */
5422 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5423 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5425 return expand_atomic_store (mem, val, model, false);
5428 /* Expand the __atomic_fetch_XXX intrinsic:
5429 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5430 EXP is the CALL_EXPR.
5431 TARGET is an optional place for us to store the results.
5432 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5433 FETCH_AFTER is true if returning the result of the operation.
5434 FETCH_AFTER is false if returning the value before the operation.
5435 IGNORE is true if the result is not used.
5436 EXT_CALL is the correct builtin for an external call if this cannot be
5437 resolved to an instruction sequence. */
5439 static rtx
5440 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5441 enum rtx_code code, bool fetch_after,
5442 bool ignore, enum built_in_function ext_call)
5444 rtx val, mem, ret;
5445 enum memmodel model;
5446 tree fndecl;
5447 tree addr;
5449 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5451 /* Expand the operands. */
5452 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5453 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5455 /* Only try generating instructions if inlining is turned on. */
5456 if (flag_inline_atomics)
5458 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5459 if (ret)
5460 return ret;
5463 /* Return if a different routine isn't needed for the library call. */
5464 if (ext_call == BUILT_IN_NONE)
5465 return NULL_RTX;
5467 /* Change the call to the specified function. */
5468 fndecl = get_callee_fndecl (exp);
5469 addr = CALL_EXPR_FN (exp);
5470 STRIP_NOPS (addr);
5472 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5473 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5475 /* Expand the call here so we can emit trailing code. */
5476 ret = expand_call (exp, target, ignore);
5478 /* Replace the original function just in case it matters. */
5479 TREE_OPERAND (addr, 0) = fndecl;
5481 /* Then issue the arithmetic correction to return the right result. */
5482 if (!ignore)
5484 if (code == NOT)
5486 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5487 OPTAB_LIB_WIDEN);
5488 ret = expand_simple_unop (mode, NOT, ret, target, true);
5490 else
5491 ret = expand_simple_binop (mode, code, ret, val, target, true,
5492 OPTAB_LIB_WIDEN);
5494 return ret;
5498 #ifndef HAVE_atomic_clear
5499 # define HAVE_atomic_clear 0
5500 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5501 #endif
5503 /* Expand an atomic clear operation.
5504 void _atomic_clear (BOOL *obj, enum memmodel)
5505 EXP is the call expression. */
5507 static rtx
5508 expand_builtin_atomic_clear (tree exp)
5510 enum machine_mode mode;
5511 rtx mem, ret;
5512 enum memmodel model;
5514 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5515 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5518 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5519 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5521 error ("invalid memory model for %<__atomic_store%>");
5522 return const0_rtx;
5525 if (HAVE_atomic_clear)
5527 emit_insn (gen_atomic_clear (mem, model));
5528 return const0_rtx;
5531 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5532 Failing that, a store is issued by __atomic_store. The only way this can
5533 fail is if the bool type is larger than a word size. Unlikely, but
5534 handle it anyway for completeness. Assume a single threaded model since
5535 there is no atomic support in this case, and no barriers are required. */
5536 ret = expand_atomic_store (mem, const0_rtx, model, true);
5537 if (!ret)
5538 emit_move_insn (mem, const0_rtx);
5539 return const0_rtx;
5542 /* Expand an atomic test_and_set operation.
5543 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5544 EXP is the call expression. */
5546 static rtx
5547 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5549 rtx mem;
5550 enum memmodel model;
5551 enum machine_mode mode;
5553 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5554 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5555 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5557 return expand_atomic_test_and_set (target, mem, model);
5561 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5562 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5564 static tree
5565 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5567 int size;
5568 enum machine_mode mode;
5569 unsigned int mode_align, type_align;
5571 if (TREE_CODE (arg0) != INTEGER_CST)
5572 return NULL_TREE;
5574 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5575 mode = mode_for_size (size, MODE_INT, 0);
5576 mode_align = GET_MODE_ALIGNMENT (mode);
5578 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5579 type_align = mode_align;
5580 else
5582 tree ttype = TREE_TYPE (arg1);
5584 /* This function is usually invoked and folded immediately by the front
5585 end before anything else has a chance to look at it. The pointer
5586 parameter at this point is usually cast to a void *, so check for that
5587 and look past the cast. */
5588 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5589 && VOID_TYPE_P (TREE_TYPE (ttype)))
5590 arg1 = TREE_OPERAND (arg1, 0);
5592 ttype = TREE_TYPE (arg1);
5593 gcc_assert (POINTER_TYPE_P (ttype));
5595 /* Get the underlying type of the object. */
5596 ttype = TREE_TYPE (ttype);
5597 type_align = TYPE_ALIGN (ttype);
5600 /* If the object has smaller alignment, the the lock free routines cannot
5601 be used. */
5602 if (type_align < mode_align)
5603 return boolean_false_node;
5605 /* Check if a compare_and_swap pattern exists for the mode which represents
5606 the required size. The pattern is not allowed to fail, so the existence
5607 of the pattern indicates support is present. */
5608 if (can_compare_and_swap_p (mode, true))
5609 return boolean_true_node;
5610 else
5611 return boolean_false_node;
5614 /* Return true if the parameters to call EXP represent an object which will
5615 always generate lock free instructions. The first argument represents the
5616 size of the object, and the second parameter is a pointer to the object
5617 itself. If NULL is passed for the object, then the result is based on
5618 typical alignment for an object of the specified size. Otherwise return
5619 false. */
5621 static rtx
5622 expand_builtin_atomic_always_lock_free (tree exp)
5624 tree size;
5625 tree arg0 = CALL_EXPR_ARG (exp, 0);
5626 tree arg1 = CALL_EXPR_ARG (exp, 1);
5628 if (TREE_CODE (arg0) != INTEGER_CST)
5630 error ("non-constant argument 1 to __atomic_always_lock_free");
5631 return const0_rtx;
5634 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5635 if (size == boolean_true_node)
5636 return const1_rtx;
5637 return const0_rtx;
5640 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5641 is lock free on this architecture. */
5643 static tree
5644 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5646 if (!flag_inline_atomics)
5647 return NULL_TREE;
5649 /* If it isn't always lock free, don't generate a result. */
5650 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5651 return boolean_true_node;
5653 return NULL_TREE;
5656 /* Return true if the parameters to call EXP represent an object which will
5657 always generate lock free instructions. The first argument represents the
5658 size of the object, and the second parameter is a pointer to the object
5659 itself. If NULL is passed for the object, then the result is based on
5660 typical alignment for an object of the specified size. Otherwise return
5661 NULL*/
5663 static rtx
5664 expand_builtin_atomic_is_lock_free (tree exp)
5666 tree size;
5667 tree arg0 = CALL_EXPR_ARG (exp, 0);
5668 tree arg1 = CALL_EXPR_ARG (exp, 1);
5670 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5672 error ("non-integer argument 1 to __atomic_is_lock_free");
5673 return NULL_RTX;
5676 if (!flag_inline_atomics)
5677 return NULL_RTX;
5679 /* If the value is known at compile time, return the RTX for it. */
5680 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5681 if (size == boolean_true_node)
5682 return const1_rtx;
5684 return NULL_RTX;
5687 /* Expand the __atomic_thread_fence intrinsic:
5688 void __atomic_thread_fence (enum memmodel)
5689 EXP is the CALL_EXPR. */
5691 static void
5692 expand_builtin_atomic_thread_fence (tree exp)
5694 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5695 expand_mem_thread_fence (model);
5698 /* Expand the __atomic_signal_fence intrinsic:
5699 void __atomic_signal_fence (enum memmodel)
5700 EXP is the CALL_EXPR. */
5702 static void
5703 expand_builtin_atomic_signal_fence (tree exp)
5705 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5706 expand_mem_signal_fence (model);
5709 /* Expand the __sync_synchronize intrinsic. */
5711 static void
5712 expand_builtin_sync_synchronize (void)
5714 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5717 static rtx
5718 expand_builtin_thread_pointer (tree exp, rtx target)
5720 enum insn_code icode;
5721 if (!validate_arglist (exp, VOID_TYPE))
5722 return const0_rtx;
5723 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5724 if (icode != CODE_FOR_nothing)
5726 struct expand_operand op;
5727 /* If the target is not sutitable then create a new target. */
5728 if (target == NULL_RTX
5729 || !REG_P (target)
5730 || GET_MODE (target) != Pmode)
5731 target = gen_reg_rtx (Pmode);
5732 create_output_operand (&op, target, Pmode);
5733 expand_insn (icode, 1, &op);
5734 return target;
5736 error ("__builtin_thread_pointer is not supported on this target");
5737 return const0_rtx;
5740 static void
5741 expand_builtin_set_thread_pointer (tree exp)
5743 enum insn_code icode;
5744 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5745 return;
5746 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5747 if (icode != CODE_FOR_nothing)
5749 struct expand_operand op;
5750 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5751 Pmode, EXPAND_NORMAL);
5752 create_input_operand (&op, val, Pmode);
5753 expand_insn (icode, 1, &op);
5754 return;
5756 error ("__builtin_set_thread_pointer is not supported on this target");
5760 /* Emit code to restore the current value of stack. */
5762 static void
5763 expand_stack_restore (tree var)
5765 rtx prev, sa = expand_normal (var);
5767 sa = convert_memory_address (Pmode, sa);
5769 prev = get_last_insn ();
5770 emit_stack_restore (SAVE_BLOCK, sa);
5771 fixup_args_size_notes (prev, get_last_insn (), 0);
5775 /* Emit code to save the current value of stack. */
5777 static rtx
5778 expand_stack_save (void)
5780 rtx ret = NULL_RTX;
5782 do_pending_stack_adjust ();
5783 emit_stack_save (SAVE_BLOCK, &ret);
5784 return ret;
5787 /* Expand an expression EXP that calls a built-in function,
5788 with result going to TARGET if that's convenient
5789 (and in mode MODE if that's convenient).
5790 SUBTARGET may be used as the target for computing one of EXP's operands.
5791 IGNORE is nonzero if the value is to be ignored. */
5794 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5795 int ignore)
5797 tree fndecl = get_callee_fndecl (exp);
5798 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5799 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5800 int flags;
5802 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5803 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5805 /* When not optimizing, generate calls to library functions for a certain
5806 set of builtins. */
5807 if (!optimize
5808 && !called_as_built_in (fndecl)
5809 && fcode != BUILT_IN_FORK
5810 && fcode != BUILT_IN_EXECL
5811 && fcode != BUILT_IN_EXECV
5812 && fcode != BUILT_IN_EXECLP
5813 && fcode != BUILT_IN_EXECLE
5814 && fcode != BUILT_IN_EXECVP
5815 && fcode != BUILT_IN_EXECVE
5816 && fcode != BUILT_IN_ALLOCA
5817 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5818 && fcode != BUILT_IN_FREE)
5819 return expand_call (exp, target, ignore);
5821 /* The built-in function expanders test for target == const0_rtx
5822 to determine whether the function's result will be ignored. */
5823 if (ignore)
5824 target = const0_rtx;
5826 /* If the result of a pure or const built-in function is ignored, and
5827 none of its arguments are volatile, we can avoid expanding the
5828 built-in call and just evaluate the arguments for side-effects. */
5829 if (target == const0_rtx
5830 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5831 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5833 bool volatilep = false;
5834 tree arg;
5835 call_expr_arg_iterator iter;
5837 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5838 if (TREE_THIS_VOLATILE (arg))
5840 volatilep = true;
5841 break;
5844 if (! volatilep)
5846 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5847 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5848 return const0_rtx;
5852 switch (fcode)
5854 CASE_FLT_FN (BUILT_IN_FABS):
5855 case BUILT_IN_FABSD32:
5856 case BUILT_IN_FABSD64:
5857 case BUILT_IN_FABSD128:
5858 target = expand_builtin_fabs (exp, target, subtarget);
5859 if (target)
5860 return target;
5861 break;
5863 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5864 target = expand_builtin_copysign (exp, target, subtarget);
5865 if (target)
5866 return target;
5867 break;
5869 /* Just do a normal library call if we were unable to fold
5870 the values. */
5871 CASE_FLT_FN (BUILT_IN_CABS):
5872 break;
5874 CASE_FLT_FN (BUILT_IN_EXP):
5875 CASE_FLT_FN (BUILT_IN_EXP10):
5876 CASE_FLT_FN (BUILT_IN_POW10):
5877 CASE_FLT_FN (BUILT_IN_EXP2):
5878 CASE_FLT_FN (BUILT_IN_EXPM1):
5879 CASE_FLT_FN (BUILT_IN_LOGB):
5880 CASE_FLT_FN (BUILT_IN_LOG):
5881 CASE_FLT_FN (BUILT_IN_LOG10):
5882 CASE_FLT_FN (BUILT_IN_LOG2):
5883 CASE_FLT_FN (BUILT_IN_LOG1P):
5884 CASE_FLT_FN (BUILT_IN_TAN):
5885 CASE_FLT_FN (BUILT_IN_ASIN):
5886 CASE_FLT_FN (BUILT_IN_ACOS):
5887 CASE_FLT_FN (BUILT_IN_ATAN):
5888 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5889 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5890 because of possible accuracy problems. */
5891 if (! flag_unsafe_math_optimizations)
5892 break;
5893 CASE_FLT_FN (BUILT_IN_SQRT):
5894 CASE_FLT_FN (BUILT_IN_FLOOR):
5895 CASE_FLT_FN (BUILT_IN_CEIL):
5896 CASE_FLT_FN (BUILT_IN_TRUNC):
5897 CASE_FLT_FN (BUILT_IN_ROUND):
5898 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5899 CASE_FLT_FN (BUILT_IN_RINT):
5900 target = expand_builtin_mathfn (exp, target, subtarget);
5901 if (target)
5902 return target;
5903 break;
5905 CASE_FLT_FN (BUILT_IN_FMA):
5906 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5907 if (target)
5908 return target;
5909 break;
5911 CASE_FLT_FN (BUILT_IN_ILOGB):
5912 if (! flag_unsafe_math_optimizations)
5913 break;
5914 CASE_FLT_FN (BUILT_IN_ISINF):
5915 CASE_FLT_FN (BUILT_IN_FINITE):
5916 case BUILT_IN_ISFINITE:
5917 case BUILT_IN_ISNORMAL:
5918 target = expand_builtin_interclass_mathfn (exp, target);
5919 if (target)
5920 return target;
5921 break;
5923 CASE_FLT_FN (BUILT_IN_ICEIL):
5924 CASE_FLT_FN (BUILT_IN_LCEIL):
5925 CASE_FLT_FN (BUILT_IN_LLCEIL):
5926 CASE_FLT_FN (BUILT_IN_LFLOOR):
5927 CASE_FLT_FN (BUILT_IN_IFLOOR):
5928 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5929 target = expand_builtin_int_roundingfn (exp, target);
5930 if (target)
5931 return target;
5932 break;
5934 CASE_FLT_FN (BUILT_IN_IRINT):
5935 CASE_FLT_FN (BUILT_IN_LRINT):
5936 CASE_FLT_FN (BUILT_IN_LLRINT):
5937 CASE_FLT_FN (BUILT_IN_IROUND):
5938 CASE_FLT_FN (BUILT_IN_LROUND):
5939 CASE_FLT_FN (BUILT_IN_LLROUND):
5940 target = expand_builtin_int_roundingfn_2 (exp, target);
5941 if (target)
5942 return target;
5943 break;
5945 CASE_FLT_FN (BUILT_IN_POWI):
5946 target = expand_builtin_powi (exp, target);
5947 if (target)
5948 return target;
5949 break;
5951 CASE_FLT_FN (BUILT_IN_ATAN2):
5952 CASE_FLT_FN (BUILT_IN_LDEXP):
5953 CASE_FLT_FN (BUILT_IN_SCALB):
5954 CASE_FLT_FN (BUILT_IN_SCALBN):
5955 CASE_FLT_FN (BUILT_IN_SCALBLN):
5956 if (! flag_unsafe_math_optimizations)
5957 break;
5959 CASE_FLT_FN (BUILT_IN_FMOD):
5960 CASE_FLT_FN (BUILT_IN_REMAINDER):
5961 CASE_FLT_FN (BUILT_IN_DREM):
5962 CASE_FLT_FN (BUILT_IN_POW):
5963 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5964 if (target)
5965 return target;
5966 break;
5968 CASE_FLT_FN (BUILT_IN_CEXPI):
5969 target = expand_builtin_cexpi (exp, target);
5970 gcc_assert (target);
5971 return target;
5973 CASE_FLT_FN (BUILT_IN_SIN):
5974 CASE_FLT_FN (BUILT_IN_COS):
5975 if (! flag_unsafe_math_optimizations)
5976 break;
5977 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5978 if (target)
5979 return target;
5980 break;
5982 CASE_FLT_FN (BUILT_IN_SINCOS):
5983 if (! flag_unsafe_math_optimizations)
5984 break;
5985 target = expand_builtin_sincos (exp);
5986 if (target)
5987 return target;
5988 break;
5990 case BUILT_IN_APPLY_ARGS:
5991 return expand_builtin_apply_args ();
5993 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5994 FUNCTION with a copy of the parameters described by
5995 ARGUMENTS, and ARGSIZE. It returns a block of memory
5996 allocated on the stack into which is stored all the registers
5997 that might possibly be used for returning the result of a
5998 function. ARGUMENTS is the value returned by
5999 __builtin_apply_args. ARGSIZE is the number of bytes of
6000 arguments that must be copied. ??? How should this value be
6001 computed? We'll also need a safe worst case value for varargs
6002 functions. */
6003 case BUILT_IN_APPLY:
6004 if (!validate_arglist (exp, POINTER_TYPE,
6005 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6006 && !validate_arglist (exp, REFERENCE_TYPE,
6007 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6008 return const0_rtx;
6009 else
6011 rtx ops[3];
6013 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6014 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6015 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6017 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6020 /* __builtin_return (RESULT) causes the function to return the
6021 value described by RESULT. RESULT is address of the block of
6022 memory returned by __builtin_apply. */
6023 case BUILT_IN_RETURN:
6024 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6025 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6026 return const0_rtx;
6028 case BUILT_IN_SAVEREGS:
6029 return expand_builtin_saveregs ();
6031 case BUILT_IN_VA_ARG_PACK:
6032 /* All valid uses of __builtin_va_arg_pack () are removed during
6033 inlining. */
6034 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6035 return const0_rtx;
6037 case BUILT_IN_VA_ARG_PACK_LEN:
6038 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6039 inlining. */
6040 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6041 return const0_rtx;
6043 /* Return the address of the first anonymous stack arg. */
6044 case BUILT_IN_NEXT_ARG:
6045 if (fold_builtin_next_arg (exp, false))
6046 return const0_rtx;
6047 return expand_builtin_next_arg ();
6049 case BUILT_IN_CLEAR_CACHE:
6050 target = expand_builtin___clear_cache (exp);
6051 if (target)
6052 return target;
6053 break;
6055 case BUILT_IN_CLASSIFY_TYPE:
6056 return expand_builtin_classify_type (exp);
6058 case BUILT_IN_CONSTANT_P:
6059 return const0_rtx;
6061 case BUILT_IN_FRAME_ADDRESS:
6062 case BUILT_IN_RETURN_ADDRESS:
6063 return expand_builtin_frame_address (fndecl, exp);
6065 /* Returns the address of the area where the structure is returned.
6066 0 otherwise. */
6067 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6068 if (call_expr_nargs (exp) != 0
6069 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6070 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6071 return const0_rtx;
6072 else
6073 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6075 case BUILT_IN_ALLOCA:
6076 case BUILT_IN_ALLOCA_WITH_ALIGN:
6077 /* If the allocation stems from the declaration of a variable-sized
6078 object, it cannot accumulate. */
6079 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6080 if (target)
6081 return target;
6082 break;
6084 case BUILT_IN_STACK_SAVE:
6085 return expand_stack_save ();
6087 case BUILT_IN_STACK_RESTORE:
6088 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6089 return const0_rtx;
6091 case BUILT_IN_BSWAP16:
6092 case BUILT_IN_BSWAP32:
6093 case BUILT_IN_BSWAP64:
6094 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6095 if (target)
6096 return target;
6097 break;
6099 CASE_INT_FN (BUILT_IN_FFS):
6100 target = expand_builtin_unop (target_mode, exp, target,
6101 subtarget, ffs_optab);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_INT_FN (BUILT_IN_CLZ):
6107 target = expand_builtin_unop (target_mode, exp, target,
6108 subtarget, clz_optab);
6109 if (target)
6110 return target;
6111 break;
6113 CASE_INT_FN (BUILT_IN_CTZ):
6114 target = expand_builtin_unop (target_mode, exp, target,
6115 subtarget, ctz_optab);
6116 if (target)
6117 return target;
6118 break;
6120 CASE_INT_FN (BUILT_IN_CLRSB):
6121 target = expand_builtin_unop (target_mode, exp, target,
6122 subtarget, clrsb_optab);
6123 if (target)
6124 return target;
6125 break;
6127 CASE_INT_FN (BUILT_IN_POPCOUNT):
6128 target = expand_builtin_unop (target_mode, exp, target,
6129 subtarget, popcount_optab);
6130 if (target)
6131 return target;
6132 break;
6134 CASE_INT_FN (BUILT_IN_PARITY):
6135 target = expand_builtin_unop (target_mode, exp, target,
6136 subtarget, parity_optab);
6137 if (target)
6138 return target;
6139 break;
6141 case BUILT_IN_STRLEN:
6142 target = expand_builtin_strlen (exp, target, target_mode);
6143 if (target)
6144 return target;
6145 break;
6147 case BUILT_IN_STRCPY:
6148 target = expand_builtin_strcpy (exp, target);
6149 if (target)
6150 return target;
6151 break;
6153 case BUILT_IN_STRNCPY:
6154 target = expand_builtin_strncpy (exp, target);
6155 if (target)
6156 return target;
6157 break;
6159 case BUILT_IN_STPCPY:
6160 target = expand_builtin_stpcpy (exp, target, mode);
6161 if (target)
6162 return target;
6163 break;
6165 case BUILT_IN_MEMCPY:
6166 target = expand_builtin_memcpy (exp, target);
6167 if (target)
6168 return target;
6169 break;
6171 case BUILT_IN_MEMPCPY:
6172 target = expand_builtin_mempcpy (exp, target, mode);
6173 if (target)
6174 return target;
6175 break;
6177 case BUILT_IN_MEMSET:
6178 target = expand_builtin_memset (exp, target, mode);
6179 if (target)
6180 return target;
6181 break;
6183 case BUILT_IN_BZERO:
6184 target = expand_builtin_bzero (exp);
6185 if (target)
6186 return target;
6187 break;
6189 case BUILT_IN_STRCMP:
6190 target = expand_builtin_strcmp (exp, target);
6191 if (target)
6192 return target;
6193 break;
6195 case BUILT_IN_STRNCMP:
6196 target = expand_builtin_strncmp (exp, target, mode);
6197 if (target)
6198 return target;
6199 break;
6201 case BUILT_IN_BCMP:
6202 case BUILT_IN_MEMCMP:
6203 target = expand_builtin_memcmp (exp, target, mode);
6204 if (target)
6205 return target;
6206 break;
6208 case BUILT_IN_SETJMP:
6209 /* This should have been lowered to the builtins below. */
6210 gcc_unreachable ();
6212 case BUILT_IN_SETJMP_SETUP:
6213 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6214 and the receiver label. */
6215 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6217 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6218 VOIDmode, EXPAND_NORMAL);
6219 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6220 rtx label_r = label_rtx (label);
6222 /* This is copied from the handling of non-local gotos. */
6223 expand_builtin_setjmp_setup (buf_addr, label_r);
6224 nonlocal_goto_handler_labels
6225 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6226 nonlocal_goto_handler_labels);
6227 /* ??? Do not let expand_label treat us as such since we would
6228 not want to be both on the list of non-local labels and on
6229 the list of forced labels. */
6230 FORCED_LABEL (label) = 0;
6231 return const0_rtx;
6233 break;
6235 case BUILT_IN_SETJMP_RECEIVER:
6236 /* __builtin_setjmp_receiver is passed the receiver label. */
6237 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6239 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6240 rtx label_r = label_rtx (label);
6242 expand_builtin_setjmp_receiver (label_r);
6243 return const0_rtx;
6245 break;
6247 /* __builtin_longjmp is passed a pointer to an array of five words.
6248 It's similar to the C library longjmp function but works with
6249 __builtin_setjmp above. */
6250 case BUILT_IN_LONGJMP:
6251 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6253 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6254 VOIDmode, EXPAND_NORMAL);
6255 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6257 if (value != const1_rtx)
6259 error ("%<__builtin_longjmp%> second argument must be 1");
6260 return const0_rtx;
6263 expand_builtin_longjmp (buf_addr, value);
6264 return const0_rtx;
6266 break;
6268 case BUILT_IN_NONLOCAL_GOTO:
6269 target = expand_builtin_nonlocal_goto (exp);
6270 if (target)
6271 return target;
6272 break;
6274 /* This updates the setjmp buffer that is its argument with the value
6275 of the current stack pointer. */
6276 case BUILT_IN_UPDATE_SETJMP_BUF:
6277 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6279 rtx buf_addr
6280 = expand_normal (CALL_EXPR_ARG (exp, 0));
6282 expand_builtin_update_setjmp_buf (buf_addr);
6283 return const0_rtx;
6285 break;
6287 case BUILT_IN_TRAP:
6288 expand_builtin_trap ();
6289 return const0_rtx;
6291 case BUILT_IN_UNREACHABLE:
6292 expand_builtin_unreachable ();
6293 return const0_rtx;
6295 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6296 case BUILT_IN_SIGNBITD32:
6297 case BUILT_IN_SIGNBITD64:
6298 case BUILT_IN_SIGNBITD128:
6299 target = expand_builtin_signbit (exp, target);
6300 if (target)
6301 return target;
6302 break;
6304 /* Various hooks for the DWARF 2 __throw routine. */
6305 case BUILT_IN_UNWIND_INIT:
6306 expand_builtin_unwind_init ();
6307 return const0_rtx;
6308 case BUILT_IN_DWARF_CFA:
6309 return virtual_cfa_rtx;
6310 #ifdef DWARF2_UNWIND_INFO
6311 case BUILT_IN_DWARF_SP_COLUMN:
6312 return expand_builtin_dwarf_sp_column ();
6313 case BUILT_IN_INIT_DWARF_REG_SIZES:
6314 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6315 return const0_rtx;
6316 #endif
6317 case BUILT_IN_FROB_RETURN_ADDR:
6318 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6319 case BUILT_IN_EXTRACT_RETURN_ADDR:
6320 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6321 case BUILT_IN_EH_RETURN:
6322 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6323 CALL_EXPR_ARG (exp, 1));
6324 return const0_rtx;
6325 #ifdef EH_RETURN_DATA_REGNO
6326 case BUILT_IN_EH_RETURN_DATA_REGNO:
6327 return expand_builtin_eh_return_data_regno (exp);
6328 #endif
6329 case BUILT_IN_EXTEND_POINTER:
6330 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6331 case BUILT_IN_EH_POINTER:
6332 return expand_builtin_eh_pointer (exp);
6333 case BUILT_IN_EH_FILTER:
6334 return expand_builtin_eh_filter (exp);
6335 case BUILT_IN_EH_COPY_VALUES:
6336 return expand_builtin_eh_copy_values (exp);
6338 case BUILT_IN_VA_START:
6339 return expand_builtin_va_start (exp);
6340 case BUILT_IN_VA_END:
6341 return expand_builtin_va_end (exp);
6342 case BUILT_IN_VA_COPY:
6343 return expand_builtin_va_copy (exp);
6344 case BUILT_IN_EXPECT:
6345 return expand_builtin_expect (exp, target);
6346 case BUILT_IN_ASSUME_ALIGNED:
6347 return expand_builtin_assume_aligned (exp, target);
6348 case BUILT_IN_PREFETCH:
6349 expand_builtin_prefetch (exp);
6350 return const0_rtx;
6352 case BUILT_IN_INIT_TRAMPOLINE:
6353 return expand_builtin_init_trampoline (exp, true);
6354 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6355 return expand_builtin_init_trampoline (exp, false);
6356 case BUILT_IN_ADJUST_TRAMPOLINE:
6357 return expand_builtin_adjust_trampoline (exp);
6359 case BUILT_IN_FORK:
6360 case BUILT_IN_EXECL:
6361 case BUILT_IN_EXECV:
6362 case BUILT_IN_EXECLP:
6363 case BUILT_IN_EXECLE:
6364 case BUILT_IN_EXECVP:
6365 case BUILT_IN_EXECVE:
6366 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6367 if (target)
6368 return target;
6369 break;
6371 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6372 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6373 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6374 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6375 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6376 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6377 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6378 if (target)
6379 return target;
6380 break;
6382 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6383 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6384 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6385 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6386 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6387 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6388 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6389 if (target)
6390 return target;
6391 break;
6393 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6394 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6395 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6396 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6397 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6398 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6399 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6400 if (target)
6401 return target;
6402 break;
6404 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6405 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6406 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6407 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6408 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6409 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6410 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6411 if (target)
6412 return target;
6413 break;
6415 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6416 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6417 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6418 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6419 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6420 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6421 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6422 if (target)
6423 return target;
6424 break;
6426 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6427 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6428 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6429 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6430 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6431 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6432 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6433 if (target)
6434 return target;
6435 break;
6437 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6438 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6439 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6440 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6441 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6442 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6443 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6449 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6450 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6451 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6452 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6454 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6455 if (target)
6456 return target;
6457 break;
6459 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6460 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6461 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6462 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6463 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6465 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6466 if (target)
6467 return target;
6468 break;
6470 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6471 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6472 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6473 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6474 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6476 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6482 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6483 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6484 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6485 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6487 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6488 if (target)
6489 return target;
6490 break;
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6493 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6494 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6495 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6496 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6497 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6498 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6499 if (target)
6500 return target;
6501 break;
6503 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6504 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6505 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6506 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6507 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6508 if (mode == VOIDmode)
6509 mode = TYPE_MODE (boolean_type_node);
6510 if (!target || !register_operand (target, mode))
6511 target = gen_reg_rtx (mode);
6513 mode = get_builtin_sync_mode
6514 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6515 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6516 if (target)
6517 return target;
6518 break;
6520 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6521 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6522 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6523 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6524 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6525 mode = get_builtin_sync_mode
6526 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6527 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6528 if (target)
6529 return target;
6530 break;
6532 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6533 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6534 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6535 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6536 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6538 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6539 if (target)
6540 return target;
6541 break;
6543 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6544 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6545 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6546 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6547 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6549 expand_builtin_sync_lock_release (mode, exp);
6550 return const0_rtx;
6552 case BUILT_IN_SYNC_SYNCHRONIZE:
6553 expand_builtin_sync_synchronize ();
6554 return const0_rtx;
6556 case BUILT_IN_ATOMIC_EXCHANGE_1:
6557 case BUILT_IN_ATOMIC_EXCHANGE_2:
6558 case BUILT_IN_ATOMIC_EXCHANGE_4:
6559 case BUILT_IN_ATOMIC_EXCHANGE_8:
6560 case BUILT_IN_ATOMIC_EXCHANGE_16:
6561 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6562 target = expand_builtin_atomic_exchange (mode, exp, target);
6563 if (target)
6564 return target;
6565 break;
6567 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6568 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6569 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6570 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6571 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6573 unsigned int nargs, z;
6574 vec<tree, va_gc> *vec;
6576 mode =
6577 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6578 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6579 if (target)
6580 return target;
6582 /* If this is turned into an external library call, the weak parameter
6583 must be dropped to match the expected parameter list. */
6584 nargs = call_expr_nargs (exp);
6585 vec_alloc (vec, nargs - 1);
6586 for (z = 0; z < 3; z++)
6587 vec->quick_push (CALL_EXPR_ARG (exp, z));
6588 /* Skip the boolean weak parameter. */
6589 for (z = 4; z < 6; z++)
6590 vec->quick_push (CALL_EXPR_ARG (exp, z));
6591 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6592 break;
6595 case BUILT_IN_ATOMIC_LOAD_1:
6596 case BUILT_IN_ATOMIC_LOAD_2:
6597 case BUILT_IN_ATOMIC_LOAD_4:
6598 case BUILT_IN_ATOMIC_LOAD_8:
6599 case BUILT_IN_ATOMIC_LOAD_16:
6600 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6601 target = expand_builtin_atomic_load (mode, exp, target);
6602 if (target)
6603 return target;
6604 break;
6606 case BUILT_IN_ATOMIC_STORE_1:
6607 case BUILT_IN_ATOMIC_STORE_2:
6608 case BUILT_IN_ATOMIC_STORE_4:
6609 case BUILT_IN_ATOMIC_STORE_8:
6610 case BUILT_IN_ATOMIC_STORE_16:
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6612 target = expand_builtin_atomic_store (mode, exp);
6613 if (target)
6614 return const0_rtx;
6615 break;
6617 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6618 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6619 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6620 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6621 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6623 enum built_in_function lib;
6624 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6625 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6626 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6627 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6628 ignore, lib);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6634 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6635 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6636 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6637 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6639 enum built_in_function lib;
6640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6641 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6642 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6643 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6644 ignore, lib);
6645 if (target)
6646 return target;
6647 break;
6649 case BUILT_IN_ATOMIC_AND_FETCH_1:
6650 case BUILT_IN_ATOMIC_AND_FETCH_2:
6651 case BUILT_IN_ATOMIC_AND_FETCH_4:
6652 case BUILT_IN_ATOMIC_AND_FETCH_8:
6653 case BUILT_IN_ATOMIC_AND_FETCH_16:
6655 enum built_in_function lib;
6656 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6657 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6658 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6659 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6660 ignore, lib);
6661 if (target)
6662 return target;
6663 break;
6665 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6666 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6667 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6668 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6669 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6671 enum built_in_function lib;
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6673 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6674 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6675 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6676 ignore, lib);
6677 if (target)
6678 return target;
6679 break;
6681 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6682 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6683 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6684 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6685 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6687 enum built_in_function lib;
6688 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6689 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6690 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6691 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6692 ignore, lib);
6693 if (target)
6694 return target;
6695 break;
6697 case BUILT_IN_ATOMIC_OR_FETCH_1:
6698 case BUILT_IN_ATOMIC_OR_FETCH_2:
6699 case BUILT_IN_ATOMIC_OR_FETCH_4:
6700 case BUILT_IN_ATOMIC_OR_FETCH_8:
6701 case BUILT_IN_ATOMIC_OR_FETCH_16:
6703 enum built_in_function lib;
6704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6705 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6706 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6708 ignore, lib);
6709 if (target)
6710 return target;
6711 break;
6713 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6714 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6715 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6716 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6717 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6725 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6726 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6727 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6728 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6729 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6737 case BUILT_IN_ATOMIC_FETCH_AND_1:
6738 case BUILT_IN_ATOMIC_FETCH_AND_2:
6739 case BUILT_IN_ATOMIC_FETCH_AND_4:
6740 case BUILT_IN_ATOMIC_FETCH_AND_8:
6741 case BUILT_IN_ATOMIC_FETCH_AND_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6749 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6750 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6751 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6752 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6753 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6756 ignore, BUILT_IN_NONE);
6757 if (target)
6758 return target;
6759 break;
6761 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6762 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6763 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6764 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6765 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6768 ignore, BUILT_IN_NONE);
6769 if (target)
6770 return target;
6771 break;
6773 case BUILT_IN_ATOMIC_FETCH_OR_1:
6774 case BUILT_IN_ATOMIC_FETCH_OR_2:
6775 case BUILT_IN_ATOMIC_FETCH_OR_4:
6776 case BUILT_IN_ATOMIC_FETCH_OR_8:
6777 case BUILT_IN_ATOMIC_FETCH_OR_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6780 ignore, BUILT_IN_NONE);
6781 if (target)
6782 return target;
6783 break;
6785 case BUILT_IN_ATOMIC_TEST_AND_SET:
6786 return expand_builtin_atomic_test_and_set (exp, target);
6788 case BUILT_IN_ATOMIC_CLEAR:
6789 return expand_builtin_atomic_clear (exp);
6791 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6792 return expand_builtin_atomic_always_lock_free (exp);
6794 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6795 target = expand_builtin_atomic_is_lock_free (exp);
6796 if (target)
6797 return target;
6798 break;
6800 case BUILT_IN_ATOMIC_THREAD_FENCE:
6801 expand_builtin_atomic_thread_fence (exp);
6802 return const0_rtx;
6804 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6805 expand_builtin_atomic_signal_fence (exp);
6806 return const0_rtx;
6808 case BUILT_IN_OBJECT_SIZE:
6809 return expand_builtin_object_size (exp);
6811 case BUILT_IN_MEMCPY_CHK:
6812 case BUILT_IN_MEMPCPY_CHK:
6813 case BUILT_IN_MEMMOVE_CHK:
6814 case BUILT_IN_MEMSET_CHK:
6815 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6816 if (target)
6817 return target;
6818 break;
6820 case BUILT_IN_STRCPY_CHK:
6821 case BUILT_IN_STPCPY_CHK:
6822 case BUILT_IN_STRNCPY_CHK:
6823 case BUILT_IN_STPNCPY_CHK:
6824 case BUILT_IN_STRCAT_CHK:
6825 case BUILT_IN_STRNCAT_CHK:
6826 case BUILT_IN_SNPRINTF_CHK:
6827 case BUILT_IN_VSNPRINTF_CHK:
6828 maybe_emit_chk_warning (exp, fcode);
6829 break;
6831 case BUILT_IN_SPRINTF_CHK:
6832 case BUILT_IN_VSPRINTF_CHK:
6833 maybe_emit_sprintf_chk_warning (exp, fcode);
6834 break;
6836 case BUILT_IN_FREE:
6837 if (warn_free_nonheap_object)
6838 maybe_emit_free_warning (exp);
6839 break;
6841 case BUILT_IN_THREAD_POINTER:
6842 return expand_builtin_thread_pointer (exp, target);
6844 case BUILT_IN_SET_THREAD_POINTER:
6845 expand_builtin_set_thread_pointer (exp);
6846 return const0_rtx;
6848 case BUILT_IN_CILK_DETACH:
6849 expand_builtin_cilk_detach (exp);
6850 return const0_rtx;
6852 case BUILT_IN_CILK_POP_FRAME:
6853 expand_builtin_cilk_pop_frame (exp);
6854 return const0_rtx;
6856 default: /* just do library call, if unknown builtin */
6857 break;
6860 /* The switch statement above can drop through to cause the function
6861 to be called normally. */
6862 return expand_call (exp, target, ignore);
6865 /* Determine whether a tree node represents a call to a built-in
6866 function. If the tree T is a call to a built-in function with
6867 the right number of arguments of the appropriate types, return
6868 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6869 Otherwise the return value is END_BUILTINS. */
6871 enum built_in_function
6872 builtin_mathfn_code (const_tree t)
6874 const_tree fndecl, arg, parmlist;
6875 const_tree argtype, parmtype;
6876 const_call_expr_arg_iterator iter;
6878 if (TREE_CODE (t) != CALL_EXPR
6879 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6880 return END_BUILTINS;
6882 fndecl = get_callee_fndecl (t);
6883 if (fndecl == NULL_TREE
6884 || TREE_CODE (fndecl) != FUNCTION_DECL
6885 || ! DECL_BUILT_IN (fndecl)
6886 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6887 return END_BUILTINS;
6889 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6890 init_const_call_expr_arg_iterator (t, &iter);
6891 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6893 /* If a function doesn't take a variable number of arguments,
6894 the last element in the list will have type `void'. */
6895 parmtype = TREE_VALUE (parmlist);
6896 if (VOID_TYPE_P (parmtype))
6898 if (more_const_call_expr_args_p (&iter))
6899 return END_BUILTINS;
6900 return DECL_FUNCTION_CODE (fndecl);
6903 if (! more_const_call_expr_args_p (&iter))
6904 return END_BUILTINS;
6906 arg = next_const_call_expr_arg (&iter);
6907 argtype = TREE_TYPE (arg);
6909 if (SCALAR_FLOAT_TYPE_P (parmtype))
6911 if (! SCALAR_FLOAT_TYPE_P (argtype))
6912 return END_BUILTINS;
6914 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6916 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6917 return END_BUILTINS;
6919 else if (POINTER_TYPE_P (parmtype))
6921 if (! POINTER_TYPE_P (argtype))
6922 return END_BUILTINS;
6924 else if (INTEGRAL_TYPE_P (parmtype))
6926 if (! INTEGRAL_TYPE_P (argtype))
6927 return END_BUILTINS;
6929 else
6930 return END_BUILTINS;
6933 /* Variable-length argument list. */
6934 return DECL_FUNCTION_CODE (fndecl);
6937 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6938 evaluate to a constant. */
6940 static tree
6941 fold_builtin_constant_p (tree arg)
6943 /* We return 1 for a numeric type that's known to be a constant
6944 value at compile-time or for an aggregate type that's a
6945 literal constant. */
6946 STRIP_NOPS (arg);
6948 /* If we know this is a constant, emit the constant of one. */
6949 if (CONSTANT_CLASS_P (arg)
6950 || (TREE_CODE (arg) == CONSTRUCTOR
6951 && TREE_CONSTANT (arg)))
6952 return integer_one_node;
6953 if (TREE_CODE (arg) == ADDR_EXPR)
6955 tree op = TREE_OPERAND (arg, 0);
6956 if (TREE_CODE (op) == STRING_CST
6957 || (TREE_CODE (op) == ARRAY_REF
6958 && integer_zerop (TREE_OPERAND (op, 1))
6959 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6960 return integer_one_node;
6963 /* If this expression has side effects, show we don't know it to be a
6964 constant. Likewise if it's a pointer or aggregate type since in
6965 those case we only want literals, since those are only optimized
6966 when generating RTL, not later.
6967 And finally, if we are compiling an initializer, not code, we
6968 need to return a definite result now; there's not going to be any
6969 more optimization done. */
6970 if (TREE_SIDE_EFFECTS (arg)
6971 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6972 || POINTER_TYPE_P (TREE_TYPE (arg))
6973 || cfun == 0
6974 || folding_initializer
6975 || force_folding_builtin_constant_p)
6976 return integer_zero_node;
6978 return NULL_TREE;
6981 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6982 return it as a truthvalue. */
6984 static tree
6985 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6986 tree predictor)
6988 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6990 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6991 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6992 ret_type = TREE_TYPE (TREE_TYPE (fn));
6993 pred_type = TREE_VALUE (arg_types);
6994 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6996 pred = fold_convert_loc (loc, pred_type, pred);
6997 expected = fold_convert_loc (loc, expected_type, expected);
6998 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6999 predictor);
7001 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7002 build_int_cst (ret_type, 0));
7005 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7006 NULL_TREE if no simplification is possible. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line, and brace-only and lone-keyword lines were
   dropped (in particular the `do` of the do/while below line 7064).
   Restore the upstream text before compiling.  */
7008 tree
7009 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7011 tree inner, fndecl, inner_arg0;
7012 enum tree_code code;
7014 /* Distribute the expected value over short-circuiting operators.
7015 See through the cast from truthvalue_type_node to long. */
7016 inner_arg0 = arg0;
7017 while (TREE_CODE (inner_arg0) == NOP_EXPR
7018 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7019 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7020 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7022 /* If this is a builtin_expect within a builtin_expect keep the
7023 inner one. See through a comparison against a constant. It
7024 might have been added to create a thruthvalue. */
7025 inner = inner_arg0;
7027 if (COMPARISON_CLASS_P (inner)
7028 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7029 inner = TREE_OPERAND (inner, 0);
7031 if (TREE_CODE (inner) == CALL_EXPR
7032 && (fndecl = get_callee_fndecl (inner))
7033 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7034 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7035 return arg0;
7037 inner = inner_arg0;
7038 code = TREE_CODE (inner);
7039 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7041 tree op0 = TREE_OPERAND (inner, 0);
7042 tree op1 = TREE_OPERAND (inner, 1);
/* ARG2 (the optional predictor argument) is forwarded to both halves
   of the short-circuit operator.  */
7044 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7045 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7046 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7048 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7051 /* If the argument isn't invariant then there's nothing else we can do. */
7052 if (!TREE_CONSTANT (inner_arg0))
7053 return NULL_TREE;
7055 /* If we expect that a comparison against the argument will fold to
7056 a constant return the constant. In practice, this means a true
7057 constant or the address of a non-weak symbol. */
7058 inner = inner_arg0;
7059 STRIP_NOPS (inner);
7060 if (TREE_CODE (inner) == ADDR_EXPR)
/* Walk down to the base object of the address; upstream this is a
   do/while loop whose `do` line was lost in extraction.  */
7064 inner = TREE_OPERAND (inner, 0);
7066 while (TREE_CODE (inner) == COMPONENT_REF
7067 || TREE_CODE (inner) == ARRAY_REF);
7068 if ((TREE_CODE (inner) == VAR_DECL
7069 || TREE_CODE (inner) == FUNCTION_DECL)
7070 && DECL_WEAK (inner))
7071 return NULL_TREE;
7074 /* Otherwise, ARG0 already has the proper type for the return value. */
7075 return arg0;
7078 /* Fold a call to __builtin_classify_type with argument ARG. */
7080 static tree
7081 fold_builtin_classify_type (tree arg)
7083 if (arg == 0)
7084 return build_int_cst (integer_type_node, no_type_class);
7086 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7089 /* Fold a call to __builtin_strlen with argument ARG. */
7091 static tree
7092 fold_builtin_strlen (location_t loc, tree type, tree arg)
7094 if (!validate_arg (arg, POINTER_TYPE))
7095 return NULL_TREE;
7096 else
7098 tree len = c_strlen (arg, 0);
7100 if (len)
7101 return fold_convert_loc (loc, type, len);
7103 return NULL_TREE;
7107 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7109 static tree
7110 fold_builtin_inf (location_t loc, tree type, int warn)
7112 REAL_VALUE_TYPE real;
7114 /* __builtin_inff is intended to be usable to define INFINITY on all
7115 targets. If an infinity is not available, INFINITY expands "to a
7116 positive constant of type float that overflows at translation
7117 time", footnote "In this case, using INFINITY will violate the
7118 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7119 Thus we pedwarn to ensure this constraint violation is
7120 diagnosed. */
7121 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7122 pedwarn (loc, 0, "target format does not support infinity");
7124 real_inf (&real);
7125 return build_real (type, real);
7128 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7130 static tree
7131 fold_builtin_nan (tree arg, tree type, int quiet)
7133 REAL_VALUE_TYPE real;
7134 const char *str;
7136 if (!validate_arg (arg, POINTER_TYPE))
7137 return NULL_TREE;
7138 str = c_getstr (arg);
7139 if (!str)
7140 return NULL_TREE;
7142 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7143 return NULL_TREE;
7145 return build_real (type, real);
7148 /* Return true if the floating point expression T has an integer value.
7149 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7151 static bool
7152 integer_valued_real_p (tree t)
7154 switch (TREE_CODE (t))
7156 case FLOAT_EXPR:
7157 return true;
7159 case ABS_EXPR:
7160 case SAVE_EXPR:
7161 return integer_valued_real_p (TREE_OPERAND (t, 0));
7163 case COMPOUND_EXPR:
7164 case MODIFY_EXPR:
7165 case BIND_EXPR:
/* For these codes the value of the expression is operand 1.  */
7166 return integer_valued_real_p (TREE_OPERAND (t, 1));
7168 case PLUS_EXPR:
7169 case MINUS_EXPR:
7170 case MULT_EXPR:
7171 case MIN_EXPR:
7172 case MAX_EXPR:
7173 return integer_valued_real_p (TREE_OPERAND (t, 0))
7174 && integer_valued_real_p (TREE_OPERAND (t, 1));
7176 case COND_EXPR:
/* Both arms of the conditional must be integer valued.  */
7177 return integer_valued_real_p (TREE_OPERAND (t, 1))
7178 && integer_valued_real_p (TREE_OPERAND (t, 2));
7180 case REAL_CST:
7181 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7183 case NOP_EXPR:
7185 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7186 if (TREE_CODE (type) == INTEGER_TYPE)
7187 return true;
7188 if (TREE_CODE (type) == REAL_TYPE)
7189 return integer_valued_real_p (TREE_OPERAND (t, 0));
7190 break;
7193 case CALL_EXPR:
/* Integer-rounding math builtins always produce integer values;
   fmin/fmax do iff both operands are integer valued.  */
7194 switch (builtin_mathfn_code (t))
7196 CASE_FLT_FN (BUILT_IN_CEIL):
7197 CASE_FLT_FN (BUILT_IN_FLOOR):
7198 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7199 CASE_FLT_FN (BUILT_IN_RINT):
7200 CASE_FLT_FN (BUILT_IN_ROUND):
7201 CASE_FLT_FN (BUILT_IN_TRUNC):
7202 return true;
7204 CASE_FLT_FN (BUILT_IN_FMIN):
7205 CASE_FLT_FN (BUILT_IN_FMAX):
7206 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7207 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7209 default:
7210 break;
7212 break;
7214 default:
7215 break;
7217 return false;
7220 /* FNDECL is assumed to be a builtin where truncation can be propagated
7221 across (for instance floor((double)f) == (double)floorf (f).
7222 Do the transformation for a call with argument ARG. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7224 static tree
7225 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7229 if (!validate_arg (arg, REAL_TYPE))
7230 return NULL_TREE;
7232 /* Integer rounding functions are idempotent. */
7233 if (fcode == builtin_mathfn_code (arg))
7234 return arg;
7236 /* If argument is already integer valued, and we don't need to worry
7237 about setting errno, there's no need to perform rounding. */
7238 if (! flag_errno_math && integer_valued_real_p (arg))
7239 return arg;
7241 if (optimize)
7243 tree arg0 = strip_float_extensions (arg);
7244 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7245 tree newtype = TREE_TYPE (arg0);
7246 tree decl;
/* When the argument was widened from a narrower float type, perform
   the operation in that narrower type and widen the result.  */
7248 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7249 && (decl = mathfn_built_in (newtype, fcode)))
7250 return fold_convert_loc (loc, ftype,
7251 build_call_expr_loc (loc, decl, 1,
7252 fold_convert_loc (loc,
7253 newtype,
7254 arg0)));
7256 return NULL_TREE;
7259 /* FNDECL is assumed to be builtin which can narrow the FP type of
7260 the argument, for instance lround((double)f) -> lroundf (f).
7261 Do the transformation for a call with argument ARG. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7263 static tree
7264 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7266 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7268 if (!validate_arg (arg, REAL_TYPE))
7269 return NULL_TREE;
7271 /* If argument is already integer valued, and we don't need to worry
7272 about setting errno, there's no need to perform rounding. */
7273 if (! flag_errno_math && integer_valued_real_p (arg))
7274 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7275 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7277 if (optimize)
7279 tree ftype = TREE_TYPE (arg);
7280 tree arg0 = strip_float_extensions (arg);
7281 tree newtype = TREE_TYPE (arg0);
7282 tree decl;
/* Narrow the call when the argument was widened from a narrower
   float type, e.g. lround ((double)f) -> lroundf (f).  */
7284 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7285 && (decl = mathfn_built_in (newtype, fcode)))
7286 return build_call_expr_loc (loc, decl, 1,
7287 fold_convert_loc (loc, newtype, arg0));
7290 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7291 sizeof (int) == sizeof (long). */
7292 if (TYPE_PRECISION (integer_type_node)
7293 == TYPE_PRECISION (long_integer_type_node))
7295 tree newfn = NULL_TREE;
7296 switch (fcode)
7298 CASE_FLT_FN (BUILT_IN_ICEIL):
7299 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7300 break;
7302 CASE_FLT_FN (BUILT_IN_IFLOOR):
7303 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7304 break;
7306 CASE_FLT_FN (BUILT_IN_IROUND):
7307 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7308 break;
7310 CASE_FLT_FN (BUILT_IN_IRINT):
7311 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7312 break;
7314 default:
7315 break;
7318 if (newfn)
7320 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7321 return fold_convert_loc (loc,
7322 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7326 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7327 sizeof (long long) == sizeof (long). */
7328 if (TYPE_PRECISION (long_long_integer_type_node)
7329 == TYPE_PRECISION (long_integer_type_node))
7331 tree newfn = NULL_TREE;
7332 switch (fcode)
7334 CASE_FLT_FN (BUILT_IN_LLCEIL):
7335 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7336 break;
7338 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7339 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7340 break;
7342 CASE_FLT_FN (BUILT_IN_LLROUND):
7343 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7344 break;
7346 CASE_FLT_FN (BUILT_IN_LLRINT):
7347 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7348 break;
7350 default:
7351 break;
7354 if (newfn)
7356 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7357 return fold_convert_loc (loc,
7358 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7362 return NULL_TREE;
7365 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7366 return type. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7368 static tree
7369 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7371 tree res;
7373 if (!validate_arg (arg, COMPLEX_TYPE)
7374 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7375 return NULL_TREE;
7377 /* Calculate the result when the argument is a constant. */
7378 if (TREE_CODE (arg) == COMPLEX_CST
7379 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7380 type, mpfr_hypot)))
7381 return res;
7383 if (TREE_CODE (arg) == COMPLEX_EXPR)
7385 tree real = TREE_OPERAND (arg, 0);
7386 tree imag = TREE_OPERAND (arg, 1);
7388 /* If either part is zero, cabs is fabs of the other. */
7389 if (real_zerop (real))
7390 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7391 if (real_zerop (imag))
7392 return fold_build1_loc (loc, ABS_EXPR, type, real);
7394 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7395 if (flag_unsafe_math_optimizations
7396 && operand_equal_p (real, imag, OEP_PURE_SAME))
7398 const REAL_VALUE_TYPE sqrt2_trunc
7399 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7400 STRIP_NOPS (real);
7401 return fold_build2_loc (loc, MULT_EXPR, type,
7402 fold_build1_loc (loc, ABS_EXPR, type, real),
7403 build_real (type, sqrt2_trunc));
7407 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7408 if (TREE_CODE (arg) == NEGATE_EXPR
7409 || TREE_CODE (arg) == CONJ_EXPR)
7410 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7412 /* Don't do this when optimizing for size. */
7413 if (flag_unsafe_math_optimizations
7414 && optimize && optimize_function_for_speed_p (cfun))
7416 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7418 if (sqrtfn != NULL_TREE)
7420 tree rpart, ipart, result;
/* Save the argument and its parts so side effects run only once
   in the expansion sqrt (r*r + i*i).  */
7422 arg = builtin_save_expr (arg);
7424 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7425 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7427 rpart = builtin_save_expr (rpart);
7428 ipart = builtin_save_expr (ipart);
7430 result = fold_build2_loc (loc, PLUS_EXPR, type,
7431 fold_build2_loc (loc, MULT_EXPR, type,
7432 rpart, rpart),
7433 fold_build2_loc (loc, MULT_EXPR, type,
7434 ipart, ipart));
7436 return build_call_expr_loc (loc, sqrtfn, 1, result);
7440 return NULL_TREE;
7443 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7444 complex tree type of the result. If NEG is true, the imaginary
7445 zero is negative. */
7447 static tree
7448 build_complex_cproj (tree type, bool neg)
7450 REAL_VALUE_TYPE rinf, rzero = dconst0;
7452 real_inf (&rinf);
7453 rzero.sign = neg;
7454 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7455 build_real (TREE_TYPE (type), rzero));
7458 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7459 return type. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7461 static tree
7462 fold_builtin_cproj (location_t loc, tree arg, tree type)
7464 if (!validate_arg (arg, COMPLEX_TYPE)
7465 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7466 return NULL_TREE;
7468 /* If there are no infinities, return arg. */
7469 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7470 return non_lvalue_loc (loc, arg);
7472 /* Calculate the result when the argument is a constant. */
7473 if (TREE_CODE (arg) == COMPLEX_CST)
7475 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7476 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* cproj maps any complex value with an infinite part to (inf + 0i),
   copying the imaginary part's sign onto the zero.  */
7478 if (real_isinf (real) || real_isinf (imag))
7479 return build_complex_cproj (type, imag->sign);
7480 else
7481 return arg;
7483 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7485 tree real = TREE_OPERAND (arg, 0);
7486 tree imag = TREE_OPERAND (arg, 1);
7488 STRIP_NOPS (real);
7489 STRIP_NOPS (imag);
7491 /* If the real part is inf and the imag part is known to be
7492 nonnegative, return (inf + 0i). Remember side-effects are
7493 possible in the imag part. */
7494 if (TREE_CODE (real) == REAL_CST
7495 && real_isinf (TREE_REAL_CST_PTR (real))
7496 && tree_expr_nonnegative_p (imag))
7497 return omit_one_operand_loc (loc, type,
7498 build_complex_cproj (type, false),
7499 arg);
7501 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7502 Remember side-effects are possible in the real part. */
7503 if (TREE_CODE (imag) == REAL_CST
7504 && real_isinf (TREE_REAL_CST_PTR (imag)))
7505 return
7506 omit_one_operand_loc (loc, type,
7507 build_complex_cproj (type, TREE_REAL_CST_PTR
7508 (imag)->sign), arg);
7511 return NULL_TREE;
7514 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7515 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7517 static tree
7518 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7521 enum built_in_function fcode;
7522 tree res;
7524 if (!validate_arg (arg, REAL_TYPE))
7525 return NULL_TREE;
7527 /* Calculate the result when the argument is a constant. */
7528 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7529 return res;
7531 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7532 fcode = builtin_mathfn_code (arg);
7533 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7535 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7536 arg = fold_build2_loc (loc, MULT_EXPR, type,
7537 CALL_EXPR_ARG (arg, 0),
7538 build_real (type, dconsthalf));
7539 return build_call_expr_loc (loc, expfn, 1, arg);
7542 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7543 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7545 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7547 if (powfn)
7549 tree arg0 = CALL_EXPR_ARG (arg, 0);
7550 tree tree_root;
7551 /* The inner root was either sqrt or cbrt. */
7552 /* This was a conditional expression but it triggered a bug
7553 in Sun C 5.5. */
7554 REAL_VALUE_TYPE dconstroot;
7555 if (BUILTIN_SQRT_P (fcode))
7556 dconstroot = dconsthalf;
7557 else
7558 dconstroot = dconst_third ();
/* Halving the exponent of the root's fraction accounts for the
   outer sqrt: (1/N) / 2 == 1 / (2*N).  */
7560 /* Adjust for the outer root. */
7561 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7562 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7563 tree_root = build_real (type, dconstroot);
7564 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7568 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7569 if (flag_unsafe_math_optimizations
7570 && (fcode == BUILT_IN_POW
7571 || fcode == BUILT_IN_POWF
7572 || fcode == BUILT_IN_POWL))
7574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7575 tree arg0 = CALL_EXPR_ARG (arg, 0);
7576 tree arg1 = CALL_EXPR_ARG (arg, 1);
7577 tree narg1;
7578 if (!tree_expr_nonnegative_p (arg0))
7579 arg0 = build1 (ABS_EXPR, type, arg0);
7580 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7581 build_real (type, dconsthalf));
7582 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7585 return NULL_TREE;
7588 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7589 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7591 static tree
7592 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7594 const enum built_in_function fcode = builtin_mathfn_code (arg);
7595 tree res;
7597 if (!validate_arg (arg, REAL_TYPE))
7598 return NULL_TREE;
7600 /* Calculate the result when the argument is a constant. */
7601 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7602 return res;
7604 if (flag_unsafe_math_optimizations)
7606 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7607 if (BUILTIN_EXPONENT_P (fcode))
7609 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7610 const REAL_VALUE_TYPE third_trunc =
7611 real_value_truncate (TYPE_MODE (type), dconst_third ());
7612 arg = fold_build2_loc (loc, MULT_EXPR, type,
7613 CALL_EXPR_ARG (arg, 0),
7614 build_real (type, third_trunc));
7615 return build_call_expr_loc (loc, expfn, 1, arg);
7618 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7619 if (BUILTIN_SQRT_P (fcode))
7621 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7623 if (powfn)
7625 tree arg0 = CALL_EXPR_ARG (arg, 0);
7626 tree tree_root;
/* 1/6 is obtained as (1/3) with its binary exponent lowered by
   one, i.e. (1/3)/2.  */
7627 REAL_VALUE_TYPE dconstroot = dconst_third ();
7629 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7630 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7631 tree_root = build_real (type, dconstroot);
7632 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7636 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7637 if (BUILTIN_CBRT_P (fcode))
7639 tree arg0 = CALL_EXPR_ARG (arg, 0);
7640 if (tree_expr_nonnegative_p (arg0))
7642 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7644 if (powfn)
7646 tree tree_root;
7647 REAL_VALUE_TYPE dconstroot;
7649 real_arithmetic (&dconstroot, MULT_EXPR,
7650 dconst_third_ptr (), dconst_third_ptr ());
7651 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7652 tree_root = build_real (type, dconstroot);
7653 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7658 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7659 if (fcode == BUILT_IN_POW
7660 || fcode == BUILT_IN_POWF
7661 || fcode == BUILT_IN_POWL)
7663 tree arg00 = CALL_EXPR_ARG (arg, 0);
7664 tree arg01 = CALL_EXPR_ARG (arg, 1);
7665 if (tree_expr_nonnegative_p (arg00))
7667 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7668 const REAL_VALUE_TYPE dconstroot
7669 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7670 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7671 build_real (type, dconstroot));
7672 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7676 return NULL_TREE;
7679 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7680 TYPE is the type of the return value. Return NULL_TREE if no
7681 simplification can be made. */
7683 static tree
7684 fold_builtin_cos (location_t loc,
7685 tree arg, tree type, tree fndecl)
7687 tree res, narg;
7689 if (!validate_arg (arg, REAL_TYPE))
7690 return NULL_TREE;
7692 /* Calculate the result when the argument is a constant. */
7693 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7694 return res;
7696 /* Optimize cos(-x) into cos (x). */
7697 if ((narg = fold_strip_sign_ops (arg)))
7698 return build_call_expr_loc (loc, fndecl, 1, narg);
7700 return NULL_TREE;
7703 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7704 Return NULL_TREE if no simplification can be made. */
7706 static tree
7707 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7709 if (validate_arg (arg, REAL_TYPE))
7711 tree res, narg;
7713 /* Calculate the result when the argument is a constant. */
7714 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7715 return res;
7717 /* Optimize cosh(-x) into cosh (x). */
7718 if ((narg = fold_strip_sign_ops (arg)))
7719 return build_call_expr_loc (loc, fndecl, 1, narg);
7722 return NULL_TREE;
7725 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7726 argument ARG. TYPE is the type of the return value. Return
7727 NULL_TREE if no simplification can be made. */
7729 static tree
7730 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7731 bool hyper)
7733 if (validate_arg (arg, COMPLEX_TYPE)
7734 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7736 tree tmp;
7738 /* Calculate the result when the argument is a constant. */
7739 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7740 return tmp;
7742 /* Optimize fn(-x) into fn(x). */
7743 if ((tmp = fold_strip_sign_ops (arg)))
7744 return build_call_expr_loc (loc, fndecl, 1, tmp);
7747 return NULL_TREE;
7750 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7751 Return NULL_TREE if no simplification can be made. */
7753 static tree
7754 fold_builtin_tan (tree arg, tree type)
7756 enum built_in_function fcode;
7757 tree res;
7759 if (!validate_arg (arg, REAL_TYPE))
7760 return NULL_TREE;
7762 /* Calculate the result when the argument is a constant. */
7763 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7764 return res;
7766 /* Optimize tan(atan(x)) = x. */
7767 fcode = builtin_mathfn_code (arg);
7768 if (flag_unsafe_math_optimizations
7769 && (fcode == BUILT_IN_ATAN
7770 || fcode == BUILT_IN_ATANF
7771 || fcode == BUILT_IN_ATANL))
7772 return CALL_EXPR_ARG (arg, 0);
7774 return NULL_TREE;
7777 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7778 NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7780 static tree
7781 fold_builtin_sincos (location_t loc,
7782 tree arg0, tree arg1, tree arg2)
7784 tree type;
7785 tree res, fn, call;
7787 if (!validate_arg (arg0, REAL_TYPE)
7788 || !validate_arg (arg1, POINTER_TYPE)
7789 || !validate_arg (arg2, POINTER_TYPE))
7790 return NULL_TREE;
7792 type = TREE_TYPE (arg0);
7794 /* Calculate the result when the argument is a constant. */
7795 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7796 return res;
7798 /* Canonicalize sincos to cexpi. */
7799 if (!targetm.libc_has_function (function_c99_math_complex))
7800 return NULL_TREE;
7801 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7802 if (!fn)
7803 return NULL_TREE;
/* Save the cexpi call so it is evaluated once, then store its
   imaginary part through ARG1 (sin) and real part through ARG2 (cos).  */
7805 call = build_call_expr_loc (loc, fn, 1, arg0);
7806 call = builtin_save_expr (call);
7808 return build2 (COMPOUND_EXPR, void_type_node,
7809 build2 (MODIFY_EXPR, void_type_node,
7810 build_fold_indirect_ref_loc (loc, arg1),
7811 build1 (IMAGPART_EXPR, type, call)),
7812 build2 (MODIFY_EXPR, void_type_node,
7813 build_fold_indirect_ref_loc (loc, arg2),
7814 build1 (REALPART_EXPR, type, call)));
7817 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7818 NULL_TREE if no simplification can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
7820 static tree
7821 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7823 tree rtype;
7824 tree realp, imagp, ifn;
7825 tree res;
7827 if (!validate_arg (arg0, COMPLEX_TYPE)
7828 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7829 return NULL_TREE;
7831 /* Calculate the result when the argument is a constant. */
7832 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7833 return res;
7835 rtype = TREE_TYPE (TREE_TYPE (arg0));
7837 /* In case we can figure out the real part of arg0 and it is constant zero
7838 fold to cexpi. */
7839 if (!targetm.libc_has_function (function_c99_math_complex))
7840 return NULL_TREE;
7841 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7842 if (!ifn)
7843 return NULL_TREE;
/* cexp (0 + yi) == cexpi (y).  */
7845 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7846 && real_zerop (realp))
7848 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7849 return build_call_expr_loc (loc, ifn, 1, narg);
7852 /* In case we can easily decompose real and imaginary parts split cexp
7853 to exp (r) * cexpi (i). */
7854 if (flag_unsafe_math_optimizations
7855 && realp)
7857 tree rfn, rcall, icall;
7859 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7860 if (!rfn)
7861 return NULL_TREE;
7863 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7864 if (!imagp)
7865 return NULL_TREE;
/* Save both calls so each is evaluated exactly once in the
   (exp(r)*Re(cexpi(i)), exp(r)*Im(cexpi(i))) expansion.  */
7867 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7868 icall = builtin_save_expr (icall);
7869 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7870 rcall = builtin_save_expr (rcall);
7871 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7872 fold_build2_loc (loc, MULT_EXPR, rtype,
7873 rcall,
7874 fold_build1_loc (loc, REALPART_EXPR,
7875 rtype, icall)),
7876 fold_build2_loc (loc, MULT_EXPR, rtype,
7877 rcall,
7878 fold_build1_loc (loc, IMAGPART_EXPR,
7879 rtype, icall)));
7882 return NULL_TREE;
7885 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7886 Return NULL_TREE if no simplification can be made. */
7888 static tree
7889 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7891 if (!validate_arg (arg, REAL_TYPE))
7892 return NULL_TREE;
7894 /* Optimize trunc of constant value. */
7895 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7897 REAL_VALUE_TYPE r, x;
7898 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7900 x = TREE_REAL_CST (arg);
7901 real_trunc (&r, TYPE_MODE (type), &x);
7902 return build_real (type, r);
7905 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7908 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7909 Return NULL_TREE if no simplification can be made. */
7911 static tree
7912 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7914 if (!validate_arg (arg, REAL_TYPE))
7915 return NULL_TREE;
7917 /* Optimize floor of constant value. */
7918 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7920 REAL_VALUE_TYPE x;
7922 x = TREE_REAL_CST (arg);
7923 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7925 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7926 REAL_VALUE_TYPE r;
7928 real_floor (&r, TYPE_MODE (type), &x);
7929 return build_real (type, r);
7933 /* Fold floor (x) where x is nonnegative to trunc (x). */
7934 if (tree_expr_nonnegative_p (arg))
7936 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7937 if (truncfn)
7938 return build_call_expr_loc (loc, truncfn, 1, arg);
7941 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7944 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7945 Return NULL_TREE if no simplification can be made. */
7947 static tree
7948 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7950 if (!validate_arg (arg, REAL_TYPE))
7951 return NULL_TREE;
7953 /* Optimize ceil of constant value. */
7954 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7956 REAL_VALUE_TYPE x;
7958 x = TREE_REAL_CST (arg);
7959 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7961 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7962 REAL_VALUE_TYPE r;
7964 real_ceil (&r, TYPE_MODE (type), &x);
7965 return build_real (type, r);
7969 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7972 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7973 Return NULL_TREE if no simplification can be made. */
7975 static tree
7976 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7978 if (!validate_arg (arg, REAL_TYPE))
7979 return NULL_TREE;
7981 /* Optimize round of constant value. */
7982 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7984 REAL_VALUE_TYPE x;
7986 x = TREE_REAL_CST (arg);
7987 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7989 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7990 REAL_VALUE_TYPE r;
7992 real_round (&r, TYPE_MODE (type), &x);
7993 return build_real (type, r);
7997 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8000 /* Fold function call to builtin lround, lroundf or lroundl (or the
8001 corresponding long long versions) and other rounding functions. ARG
8002 is the argument to the call. Return NULL_TREE if no simplification
8003 can be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  */
8005 static tree
8006 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8008 if (!validate_arg (arg, REAL_TYPE))
8009 return NULL_TREE;
8011 /* Optimize lround of constant value. */
8012 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8014 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8016 if (real_isfinite (&x))
8018 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8019 tree ftype = TREE_TYPE (arg);
8020 double_int val;
8021 REAL_VALUE_TYPE r;
/* Round in the FP type first, then convert to the integer
   result type, folding only when the value fits.  */
8023 switch (DECL_FUNCTION_CODE (fndecl))
8025 CASE_FLT_FN (BUILT_IN_IFLOOR):
8026 CASE_FLT_FN (BUILT_IN_LFLOOR):
8027 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8028 real_floor (&r, TYPE_MODE (ftype), &x);
8029 break;
8031 CASE_FLT_FN (BUILT_IN_ICEIL):
8032 CASE_FLT_FN (BUILT_IN_LCEIL):
8033 CASE_FLT_FN (BUILT_IN_LLCEIL):
8034 real_ceil (&r, TYPE_MODE (ftype), &x);
8035 break;
8037 CASE_FLT_FN (BUILT_IN_IROUND):
8038 CASE_FLT_FN (BUILT_IN_LROUND):
8039 CASE_FLT_FN (BUILT_IN_LLROUND):
8040 real_round (&r, TYPE_MODE (ftype), &x);
8041 break;
8043 default:
8044 gcc_unreachable ();
8047 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8048 if (double_int_fits_to_tree_p (itype, val))
8049 return double_int_to_tree (itype, val);
8053 switch (DECL_FUNCTION_CODE (fndecl))
8055 CASE_FLT_FN (BUILT_IN_LFLOOR):
8056 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8057 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8058 if (tree_expr_nonnegative_p (arg))
8059 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8060 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8061 break;
8062 default:;
8065 return fold_fixed_mathfn (loc, fndecl, arg);
8068 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8069 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8070 the argument to the call. Return NULL_TREE if no simplification can
8071 be made. */
/* NOTE(review): web extraction of gcc/builtins.c -- upstream line numbers
   are fused onto every line and brace-only lines were dropped.  Restore
   the upstream text before compiling.  The constant is handled as a
   HI/LO pair of HOST_WIDE_INTs (pre-wide-int GCC).  */
8073 static tree
8074 fold_builtin_bitop (tree fndecl, tree arg)
8076 if (!validate_arg (arg, INTEGER_TYPE))
8077 return NULL_TREE;
8079 /* Optimize for constant argument. */
8080 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8082 HOST_WIDE_INT hi, width, result;
8083 unsigned HOST_WIDE_INT lo;
8084 tree type;
8086 type = TREE_TYPE (arg);
8087 width = TYPE_PRECISION (type);
8088 lo = TREE_INT_CST_LOW (arg);
8090 /* Clear all the bits that are beyond the type's precision. */
8091 if (width > HOST_BITS_PER_WIDE_INT)
8093 hi = TREE_INT_CST_HIGH (arg);
8094 if (width < HOST_BITS_PER_DOUBLE_INT)
8095 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8097 else
8099 hi = 0;
8100 if (width < HOST_BITS_PER_WIDE_INT)
8101 lo &= ~(HOST_WIDE_INT_M1U << width);
8104 switch (DECL_FUNCTION_CODE (fndecl))
8106 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: one plus the index of the least significant set bit,
   or zero when the argument is zero.  */
8107 if (lo != 0)
8108 result = ffs_hwi (lo);
8109 else if (hi != 0)
8110 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8111 else
8112 result = 0;
8113 break;
8115 CASE_INT_FN (BUILT_IN_CLZ):
8116 if (hi != 0)
8117 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8118 else if (lo != 0)
8119 result = width - floor_log2 (lo) - 1;
/* At zero, fold only when the target defines a value there.  */
8120 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8121 result = width;
8122 break;
8124 CASE_INT_FN (BUILT_IN_CTZ):
8125 if (lo != 0)
8126 result = ctz_hwi (lo);
8127 else if (hi != 0)
8128 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8129 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8130 result = width;
8131 break;
8133 CASE_INT_FN (BUILT_IN_CLRSB):
8134 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8135 return NULL_TREE;
/* For a negative value, count redundant sign bits on the
   complemented value.  */
8136 if (width > HOST_BITS_PER_WIDE_INT
8137 && (hi & ((unsigned HOST_WIDE_INT) 1
8138 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8140 hi = ~hi & ~(HOST_WIDE_INT_M1U
8141 << (width - HOST_BITS_PER_WIDE_INT - 1));
8142 lo = ~lo;
8144 else if (width <= HOST_BITS_PER_WIDE_INT
8145 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8146 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8147 if (hi != 0)
8148 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8149 else if (lo != 0)
8150 result = width - floor_log2 (lo) - 2;
8151 else
8152 result = width - 1;
8153 break;
8155 CASE_INT_FN (BUILT_IN_POPCOUNT):
8156 result = 0;
8157 while (lo)
8158 result++, lo &= lo - 1;
8159 while (hi)
8160 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8161 break;
8163 CASE_INT_FN (BUILT_IN_PARITY):
/* Parity is the popcount reduced modulo two.  */
8164 result = 0;
8165 while (lo)
8166 result++, lo &= lo - 1;
8167 while (hi)
8168 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8169 result &= 1;
8170 break;
8172 default:
8173 gcc_unreachable ();
8176 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8179 return NULL_TREE;
8182 /* Fold function call to builtin_bswap and the short, long and long long
8183 variants. Return NULL_TREE if no simplification can be made. */
8184 static tree
8185 fold_builtin_bswap (tree fndecl, tree arg)
8187 if (! validate_arg (arg, INTEGER_TYPE))
8188 return NULL_TREE;
8190 /* Optimize constant value. */
8191 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
/* The constant is a double_int: LO/HI are the low and high host
   words of the source value, R_LO/R_HI accumulate the
   byte-reversed result.  */
8193 HOST_WIDE_INT hi, width, r_hi = 0;
8194 unsigned HOST_WIDE_INT lo, r_lo = 0;
8195 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197 width = TYPE_PRECISION (type);
8198 lo = TREE_INT_CST_LOW (arg);
8199 hi = TREE_INT_CST_HIGH (arg);
8201 switch (DECL_FUNCTION_CODE (fndecl))
8203 case BUILT_IN_BSWAP16:
8204 case BUILT_IN_BSWAP32:
8205 case BUILT_IN_BSWAP64:
8207 int s;
/* Move the byte at source bit position S to the mirrored
   destination position D = width - s - 8, reading from LO or HI
   and writing to R_LO or R_HI depending on which host word each
   position falls in.  */
8209 for (s = 0; s < width; s += 8)
8211 int d = width - s - 8;
8212 unsigned HOST_WIDE_INT byte;
8214 if (s < HOST_BITS_PER_WIDE_INT)
8215 byte = (lo >> s) & 0xff;
8216 else
8217 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8219 if (d < HOST_BITS_PER_WIDE_INT)
8220 r_lo |= byte << d;
8221 else
8222 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8226 break;
8228 default:
8229 gcc_unreachable ();
/* A result narrower than one host word fits entirely in R_LO;
   otherwise build a two-word constant.  */
8232 if (width < HOST_BITS_PER_WIDE_INT)
8233 return build_int_cst (type, r_lo);
8234 else
8235 return build_int_cst_wide (type, r_lo, r_hi);
8238 return NULL_TREE;
8241 /* A subroutine of fold_builtin to fold the various logarithmic
8242 functions. Return NULL_TREE if no simplification can be made.
8243 FUNC is the corresponding MPFR logarithm function. */
8245 static tree
8246 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8247 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8249 if (validate_arg (arg, REAL_TYPE))
8251 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8252 tree res;
/* FUNC doubles as the identification of which logarithm (log,
   log2 or log10) is being folded; FCODE identifies what builtin,
   if any, produced ARG.  */
8253 const enum built_in_function fcode = builtin_mathfn_code (arg);
8255 /* Calculate the result when the argument is a constant. */
/* The &dconst0/false pair presumably restricts constant folding to
   arguments strictly greater than zero (the log domain) — confirm
   against do_mpfr_arg1.  */
8256 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8257 return res;
8259 /* Special case, optimize logN(expN(x)) = x. */
8260 if (flag_unsafe_math_optimizations
8261 && ((func == mpfr_log
8262 && (fcode == BUILT_IN_EXP
8263 || fcode == BUILT_IN_EXPF
8264 || fcode == BUILT_IN_EXPL))
8265 || (func == mpfr_log2
8266 && (fcode == BUILT_IN_EXP2
8267 || fcode == BUILT_IN_EXP2F
8268 || fcode == BUILT_IN_EXP2L))
8269 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8270 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8272 /* Optimize logN(func()) for various exponential functions. We
8273 want to determine the value "x" and the power "exponent" in
8274 order to transform logN(x**exponent) into exponent*logN(x). */
8275 if (flag_unsafe_math_optimizations)
8277 tree exponent = 0, x = 0;
8279 switch (fcode)
8281 CASE_FLT_FN (BUILT_IN_EXP):
8282 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8283 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8284 dconst_e ()));
8285 exponent = CALL_EXPR_ARG (arg, 0);
8286 break;
8287 CASE_FLT_FN (BUILT_IN_EXP2):
8288 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8289 x = build_real (type, dconst2);
8290 exponent = CALL_EXPR_ARG (arg, 0);
8291 break;
8292 CASE_FLT_FN (BUILT_IN_EXP10):
8293 CASE_FLT_FN (BUILT_IN_POW10):
8294 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8296 REAL_VALUE_TYPE dconst10;
8297 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8298 x = build_real (type, dconst10);
8300 exponent = CALL_EXPR_ARG (arg, 0);
8301 break;
8302 CASE_FLT_FN (BUILT_IN_SQRT):
8303 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8304 x = CALL_EXPR_ARG (arg, 0);
8305 exponent = build_real (type, dconsthalf);
8306 break;
8307 CASE_FLT_FN (BUILT_IN_CBRT):
8308 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8309 x = CALL_EXPR_ARG (arg, 0);
8310 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8311 dconst_third ()));
8312 break;
8313 CASE_FLT_FN (BUILT_IN_POW):
8314 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8315 x = CALL_EXPR_ARG (arg, 0);
8316 exponent = CALL_EXPR_ARG (arg, 1);
8317 break;
8318 default:
8319 break;
8322 /* Now perform the optimization. */
8323 if (x && exponent)
8325 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8326 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8331 return NULL_TREE;
8334 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8335 NULL_TREE if no simplification can be made. */
8337 static tree
8338 fold_builtin_hypot (location_t loc, tree fndecl,
8339 tree arg0, tree arg1, tree type)
8341 tree res, narg0, narg1;
8343 if (!validate_arg (arg0, REAL_TYPE)
8344 || !validate_arg (arg1, REAL_TYPE))
8345 return NULL_TREE;
8347 /* Calculate the result when the argument is a constant. */
8348 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8349 return res;
8351 /* If either argument to hypot has a negate or abs, strip that off.
8352 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8353 narg0 = fold_strip_sign_ops (arg0);
8354 narg1 = fold_strip_sign_ops (arg1);
8355 if (narg0 || narg1)
8357 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8358 narg1 ? narg1 : arg1);
8361 /* If either argument is zero, hypot is fabs of the other. */
8362 if (real_zerop (arg0))
8363 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8364 else if (real_zerop (arg1))
8365 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8367 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8368 if (flag_unsafe_math_optimizations
8369 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8371 const REAL_VALUE_TYPE sqrt2_trunc
8372 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8373 return fold_build2_loc (loc, MULT_EXPR, type,
8374 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8375 build_real (type, sqrt2_trunc));
8378 return NULL_TREE;
8382 /* Fold a builtin function call to pow, powf, or powl. Return
8383 NULL_TREE if no simplification can be made. */
8384 static tree
8385 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8387 tree res;
8389 if (!validate_arg (arg0, REAL_TYPE)
8390 || !validate_arg (arg1, REAL_TYPE))
8391 return NULL_TREE;
8393 /* Calculate the result when the argument is a constant. */
8394 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8395 return res;
8397 /* Optimize pow(1.0,y) = 1.0. */
/* omit_one_operand still evaluates ARG1 for its side effects.  */
8398 if (real_onep (arg0))
8399 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8401 if (TREE_CODE (arg1) == REAL_CST
8402 && !TREE_OVERFLOW (arg1))
8404 REAL_VALUE_TYPE cint;
8405 REAL_VALUE_TYPE c;
8406 HOST_WIDE_INT n;
8408 c = TREE_REAL_CST (arg1);
8410 /* Optimize pow(x,0.0) = 1.0. */
8411 if (REAL_VALUES_EQUAL (c, dconst0))
8412 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8413 arg0);
8415 /* Optimize pow(x,1.0) = x. */
8416 if (REAL_VALUES_EQUAL (c, dconst1))
8417 return arg0;
8419 /* Optimize pow(x,-1.0) = 1.0/x. */
8420 if (REAL_VALUES_EQUAL (c, dconstm1))
8421 return fold_build2_loc (loc, RDIV_EXPR, type,
8422 build_real (type, dconst1), arg0);
8424 /* Optimize pow(x,0.5) = sqrt(x). */
/* Only under unsafe math: sqrt differs from pow for -0.0/-Inf.  */
8425 if (flag_unsafe_math_optimizations
8426 && REAL_VALUES_EQUAL (c, dconsthalf))
8428 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8430 if (sqrtfn != NULL_TREE)
8431 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8434 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8435 if (flag_unsafe_math_optimizations)
/* Compare against 1/3 truncated to TYPE's precision, since that is
   the constant the user's source would have produced.  */
8437 const REAL_VALUE_TYPE dconstroot
8438 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8440 if (REAL_VALUES_EQUAL (c, dconstroot))
8442 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8443 if (cbrtfn != NULL_TREE)
8444 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8448 /* Check for an integer exponent. */
/* Round-trip C through a HOST_WIDE_INT; the exponent is an exact
   integer iff the round-trip reproduces it.  */
8449 n = real_to_integer (&c);
8450 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8451 if (real_identical (&c, &cint))
8453 /* Attempt to evaluate pow at compile-time, unless this should
8454 raise an exception. */
/* pow(0, negative) may raise divide-by-zero / set errno, so it is
   only folded when neither -ftrapping-math nor -fmath-errno is in
   effect, or the base is provably nonzero.  */
8455 if (TREE_CODE (arg0) == REAL_CST
8456 && !TREE_OVERFLOW (arg0)
8457 && (n > 0
8458 || (!flag_trapping_math && !flag_errno_math)
8459 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8461 REAL_VALUE_TYPE x;
8462 bool inexact;
8464 x = TREE_REAL_CST (arg0);
8465 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
/* An inexact compile-time result is only acceptable when unsafe
   math optimizations permit changing the rounding.  */
8466 if (flag_unsafe_math_optimizations || !inexact)
8467 return build_real (type, x);
8470 /* Strip sign ops from even integer powers. */
/* x**even == (-x)**even == fabs(x)**even, so sign operations on
   the base are redundant.  */
8471 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8473 tree narg0 = fold_strip_sign_ops (arg0);
8474 if (narg0)
8475 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8480 if (flag_unsafe_math_optimizations)
8482 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8484 /* Optimize pow(expN(x),y) = expN(x*y). */
8485 if (BUILTIN_EXPONENT_P (fcode))
8487 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8488 tree arg = CALL_EXPR_ARG (arg0, 0);
8489 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8490 return build_call_expr_loc (loc, expfn, 1, arg);
8493 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8494 if (BUILTIN_SQRT_P (fcode))
8496 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8497 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8498 build_real (type, dconsthalf));
8499 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8502 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8503 if (BUILTIN_CBRT_P (fcode))
8505 tree arg = CALL_EXPR_ARG (arg0, 0);
8506 if (tree_expr_nonnegative_p (arg))
8508 const REAL_VALUE_TYPE dconstroot
8509 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8510 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8511 build_real (type, dconstroot));
8512 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8516 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8517 if (fcode == BUILT_IN_POW
8518 || fcode == BUILT_IN_POWF
8519 || fcode == BUILT_IN_POWL)
8521 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8522 if (tree_expr_nonnegative_p (arg00))
8524 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8525 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8526 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8531 return NULL_TREE;
8534 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8535 Return NULL_TREE if no simplification can be made. */
8536 static tree
8537 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8538 tree arg0, tree arg1, tree type)
8540 if (!validate_arg (arg0, REAL_TYPE)
8541 || !validate_arg (arg1, INTEGER_TYPE))
8542 return NULL_TREE;
8544 /* Optimize pow(1.0,y) = 1.0. */
8545 if (real_onep (arg0))
8546 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8548 if (tree_fits_shwi_p (arg1))
8550 HOST_WIDE_INT c = tree_to_shwi (arg1);
8552 /* Evaluate powi at compile-time. */
8553 if (TREE_CODE (arg0) == REAL_CST
8554 && !TREE_OVERFLOW (arg0))
8556 REAL_VALUE_TYPE x;
8557 x = TREE_REAL_CST (arg0);
8558 real_powi (&x, TYPE_MODE (type), &x, c);
8559 return build_real (type, x);
8562 /* Optimize pow(x,0) = 1.0. */
8563 if (c == 0)
8564 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8565 arg0);
8567 /* Optimize pow(x,1) = x. */
8568 if (c == 1)
8569 return arg0;
8571 /* Optimize pow(x,-1) = 1.0/x. */
8572 if (c == -1)
8573 return fold_build2_loc (loc, RDIV_EXPR, type,
8574 build_real (type, dconst1), arg0);
8577 return NULL_TREE;
8580 /* A subroutine of fold_builtin to fold the various exponent
8581 functions. Return NULL_TREE if no simplification can be made.
8582 FUNC is the corresponding MPFR exponent function. */
8584 static tree
8585 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8586 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8588 if (validate_arg (arg, REAL_TYPE))
8590 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8591 tree res;
8593 /* Calculate the result when the argument is a constant. */
8594 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8595 return res;
8597 /* Optimize expN(logN(x)) = x. */
8598 if (flag_unsafe_math_optimizations)
8600 const enum built_in_function fcode = builtin_mathfn_code (arg);
8602 if ((func == mpfr_exp
8603 && (fcode == BUILT_IN_LOG
8604 || fcode == BUILT_IN_LOGF
8605 || fcode == BUILT_IN_LOGL))
8606 || (func == mpfr_exp2
8607 && (fcode == BUILT_IN_LOG2
8608 || fcode == BUILT_IN_LOG2F
8609 || fcode == BUILT_IN_LOG2L))
8610 || (func == mpfr_exp10
8611 && (fcode == BUILT_IN_LOG10
8612 || fcode == BUILT_IN_LOG10F
8613 || fcode == BUILT_IN_LOG10L)))
8614 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8618 return NULL_TREE;
8621 /* Return true if VAR is a VAR_DECL or a component thereof. */
8623 static bool
8624 var_decl_component_p (tree var)
8626 tree inner = var;
8627 while (handled_component_p (inner))
8628 inner = TREE_OPERAND (inner, 0);
8629 return SSA_VAR_P (inner);
8632 /* Fold function call to builtin memset. Return
8633 NULL_TREE if no simplification can be made. */
8635 static tree
8636 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8637 tree type, bool ignore)
8639 tree var, ret, etype;
8640 unsigned HOST_WIDE_INT length, cval;
8642 if (! validate_arg (dest, POINTER_TYPE)
8643 || ! validate_arg (c, INTEGER_TYPE)
8644 || ! validate_arg (len, INTEGER_TYPE))
8645 return NULL_TREE;
8647 if (! tree_fits_uhwi_p (len))
8648 return NULL_TREE;
8650 /* If the LEN parameter is zero, return DEST. */
8651 if (integer_zerop (len))
8652 return omit_one_operand_loc (loc, type, dest, c);
8654 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8655 return NULL_TREE;
/* From here on we try to rewrite the memset as a single store to the
   object DEST points to, which requires DEST to be the address of a
   non-volatile variable (component).  */
8657 var = dest;
8658 STRIP_NOPS (var);
8659 if (TREE_CODE (var) != ADDR_EXPR)
8660 return NULL_TREE;
8662 var = TREE_OPERAND (var, 0);
8663 if (TREE_THIS_VOLATILE (var))
8664 return NULL_TREE;
8666 etype = TREE_TYPE (var);
8667 if (TREE_CODE (etype) == ARRAY_TYPE)
8668 etype = TREE_TYPE (etype);
8670 if (!INTEGRAL_TYPE_P (etype)
8671 && !POINTER_TYPE_P (etype))
8672 return NULL_TREE;
8674 if (! var_decl_component_p (var))
8675 return NULL_TREE;
/* The single store is only equivalent when LEN covers exactly one
   element of ETYPE and DEST is sufficiently aligned for it.  */
8677 length = tree_to_uhwi (len);
8678 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8679 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8680 return NULL_TREE;
8682 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8683 return NULL_TREE;
8685 if (integer_zerop (c))
8686 cval = 0;
8687 else
/* Splat the low byte of C across CVAL.  */
8689 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8690 return NULL_TREE;
8692 cval = TREE_INT_CST_LOW (c);
8693 cval &= 0xff;
8694 cval |= cval << 8;
8695 cval |= cval << 16;
/* Written as two shifts so the code stays well-defined when
   HOST_WIDE_INT is only 32 bits wide (a single shift by 32 would
   be undefined behavior).  */
8696 cval |= (cval << 31) << 1;
8699 ret = build_int_cst_type (etype, cval);
8700 var = build_fold_indirect_ref_loc (loc,
8701 fold_convert_loc (loc,
8702 build_pointer_type (etype),
8703 dest));
8704 ret = build2 (MODIFY_EXPR, etype, var, ret);
8705 if (ignore)
8706 return ret;
/* memset returns DEST; keep the store as a side effect.  */
8708 return omit_one_operand_loc (loc, type, dest, ret);
8711 /* Fold function call to builtin memset. Return
8712 NULL_TREE if no simplification can be made. */
8714 static tree
8715 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8717 if (! validate_arg (dest, POINTER_TYPE)
8718 || ! validate_arg (size, INTEGER_TYPE))
8719 return NULL_TREE;
8721 if (!ignore)
8722 return NULL_TREE;
8724 /* New argument list transforming bzero(ptr x, int y) to
8725 memset(ptr x, int 0, size_t y). This is done this way
8726 so that if it isn't expanded inline, we fallback to
8727 calling bzero instead of memset. */
8729 return fold_builtin_memset (loc, dest, integer_zero_node,
8730 fold_convert_loc (loc, size_type_node, size),
8731 void_type_node, ignore);
8734 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8735 NULL_TREE if no simplification can be made.
8736 If ENDP is 0, return DEST (like memcpy).
8737 If ENDP is 1, return DEST+LEN (like mempcpy).
8738 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8739 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8740 (memmove). */
8742 static tree
8743 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8744 tree len, tree type, bool ignore, int endp)
8746 tree destvar, srcvar, expr;
8748 if (! validate_arg (dest, POINTER_TYPE)
8749 || ! validate_arg (src, POINTER_TYPE)
8750 || ! validate_arg (len, INTEGER_TYPE))
8751 return NULL_TREE;
8753 /* If the LEN parameter is zero, return DEST. */
8754 if (integer_zerop (len))
8755 return omit_one_operand_loc (loc, type, dest, src);
8757 /* If SRC and DEST are the same (and not volatile), return
8758 DEST{,+LEN,+LEN-1}. */
8759 if (operand_equal_p (src, dest, 0))
8760 expr = len;
8761 else
8763 tree srctype, desttype;
8764 unsigned int src_align, dest_align;
8765 tree off0;
/* memmove (ENDP == 3): all we do here is prove that SRC and DEST
   cannot overlap (or that a plain copy is otherwise safe) and then
   degrade the call to memcpy; the copy itself is not folded.  */
8767 if (endp == 3)
8769 src_align = get_pointer_alignment (src);
8770 dest_align = get_pointer_alignment (dest);
8772 /* Both DEST and SRC must be pointer types.
8773 ??? This is what old code did. Is the testing for pointer types
8774 really mandatory?
8776 If either SRC is readonly or length is 1, we can use memcpy. */
8777 if (!dest_align || !src_align)
8778 return NULL_TREE;
8779 if (readonly_data_expr (src)
8780 || (tree_fits_uhwi_p (len)
8781 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8782 >= tree_to_uhwi (len))))
8784 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8785 if (!fn)
8786 return NULL_TREE;
8787 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8790 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8791 if (TREE_CODE (src) == ADDR_EXPR
8792 && TREE_CODE (dest) == ADDR_EXPR)
8794 tree src_base, dest_base, fn;
8795 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8796 HOST_WIDE_INT size = -1;
8797 HOST_WIDE_INT maxsize = -1;
8799 srcvar = TREE_OPERAND (src, 0);
8800 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8801 &size, &maxsize);
8802 destvar = TREE_OPERAND (dest, 0);
8803 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8804 &size, &maxsize);
/* Use the constant copy length as the extent when known;
   otherwise -1 makes ranges_overlap_p conservative.  */
8805 if (tree_fits_uhwi_p (len))
8806 maxsize = tree_to_uhwi (len);
8807 else
8808 maxsize = -1;
8809 src_offset /= BITS_PER_UNIT;
8810 dest_offset /= BITS_PER_UNIT;
8811 if (SSA_VAR_P (src_base)
8812 && SSA_VAR_P (dest_base))
8814 if (operand_equal_p (src_base, dest_base, 0)
8815 && ranges_overlap_p (src_offset, maxsize,
8816 dest_offset, maxsize))
8817 return NULL_TREE;
8819 else if (TREE_CODE (src_base) == MEM_REF
8820 && TREE_CODE (dest_base) == MEM_REF)
/* Same underlying pointer: fold both MEM_REF offsets into the
   byte offsets and redo the overlap test.  */
8822 double_int off;
8823 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8824 TREE_OPERAND (dest_base, 0), 0))
8825 return NULL_TREE;
8826 off = mem_ref_offset (src_base) +
8827 double_int::from_shwi (src_offset);
8828 if (!off.fits_shwi ())
8829 return NULL_TREE;
8830 src_offset = off.low;
8831 off = mem_ref_offset (dest_base) +
8832 double_int::from_shwi (dest_offset);
8833 if (!off.fits_shwi ())
8834 return NULL_TREE;
8835 dest_offset = off.low;
8836 if (ranges_overlap_p (src_offset, maxsize,
8837 dest_offset, maxsize))
8838 return NULL_TREE;
8840 else
8841 return NULL_TREE;
8843 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8844 if (!fn)
8845 return NULL_TREE;
8846 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8849 /* If the destination and source do not alias optimize into
8850 memcpy as well. */
8851 if ((is_gimple_min_invariant (dest)
8852 || TREE_CODE (dest) == SSA_NAME)
8853 && (is_gimple_min_invariant (src)
8854 || TREE_CODE (src) == SSA_NAME))
8856 ao_ref destr, srcr;
8857 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8858 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8859 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8861 tree fn;
8862 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8863 if (!fn)
8864 return NULL_TREE;
8865 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8869 return NULL_TREE;
/* Non-memmove path: try to fold the copy itself into a single
   MODIFY_EXPR (one load/store), which needs a constant length.  */
8872 if (!tree_fits_shwi_p (len))
8873 return NULL_TREE;
8874 /* FIXME:
8875 This logic lose for arguments like (type *)malloc (sizeof (type)),
8876 since we strip the casts of up to VOID return value from malloc.
8877 Perhaps we ought to inherit type from non-VOID argument here? */
8878 STRIP_NOPS (src);
8879 STRIP_NOPS (dest);
8880 if (!POINTER_TYPE_P (TREE_TYPE (src))
8881 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8882 return NULL_TREE;
8883 /* In the following try to find a type that is most natural to be
8884 used for the memcpy source and destination and that allows
8885 the most optimization when memcpy is turned into a plain assignment
8886 using that type. In theory we could always use a char[len] type
8887 but that only gains us that the destination and source possibly
8888 no longer will have their address taken. */
8889 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8890 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8892 tree tem = TREE_OPERAND (src, 0);
8893 STRIP_NOPS (tem);
8894 if (tem != TREE_OPERAND (src, 0))
8895 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8897 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8899 tree tem = TREE_OPERAND (dest, 0);
8900 STRIP_NOPS (tem);
8901 if (tem != TREE_OPERAND (dest, 0))
8902 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* For an array whose size does not match LEN, copy using the
   element type instead.  */
8904 srctype = TREE_TYPE (TREE_TYPE (src));
8905 if (TREE_CODE (srctype) == ARRAY_TYPE
8906 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8908 srctype = TREE_TYPE (srctype);
8909 STRIP_NOPS (src);
8910 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8912 desttype = TREE_TYPE (TREE_TYPE (dest));
8913 if (TREE_CODE (desttype) == ARRAY_TYPE
8914 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8916 desttype = TREE_TYPE (desttype);
8917 STRIP_NOPS (dest);
8918 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8920 if (TREE_ADDRESSABLE (srctype)
8921 || TREE_ADDRESSABLE (desttype))
8922 return NULL_TREE;
8924 /* Make sure we are not copying using a floating-point mode or
8925 a type whose size possibly does not match its precision. */
8926 if (FLOAT_MODE_P (TYPE_MODE (desttype))
8927 || TREE_CODE (desttype) == BOOLEAN_TYPE
8928 || TREE_CODE (desttype) == ENUMERAL_TYPE)
8930 /* A more suitable int_mode_for_mode would return a vector
8931 integer mode for a vector float mode or a integer complex
8932 mode for a float complex mode if there isn't a regular
8933 integer mode covering the mode of desttype. */
8934 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype));
8935 if (mode == BLKmode)
8936 desttype = NULL_TREE;
8937 else
8938 desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8941 if (FLOAT_MODE_P (TYPE_MODE (srctype))
8942 || TREE_CODE (srctype) == BOOLEAN_TYPE
8943 || TREE_CODE (srctype) == ENUMERAL_TYPE)
8945 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype));
8946 if (mode == BLKmode)
8947 srctype = NULL_TREE;
8948 else
8949 srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
/* Fall back to the other side's type when one was rejected above.  */
8952 if (!srctype)
8953 srctype = desttype;
8954 if (!desttype)
8955 desttype = srctype;
8956 if (!srctype)
8957 return NULL_TREE;
8959 src_align = get_pointer_alignment (src);
8960 dest_align = get_pointer_alignment (dest);
8961 if (dest_align < TYPE_ALIGN (desttype)
8962 || src_align < TYPE_ALIGN (srctype))
8963 return NULL_TREE;
/* DEST is also used to form the return value below; protect it from
   double evaluation unless the result is ignored.  */
8965 if (!ignore)
8966 dest = builtin_save_expr (dest);
8968 /* Build accesses at offset zero with a ref-all character type. */
8969 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8970 ptr_mode, true), 0);
8972 destvar = dest;
8973 STRIP_NOPS (destvar);
8974 if (TREE_CODE (destvar) == ADDR_EXPR
8975 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8977 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8978 else
8979 destvar = NULL_TREE;
8981 srcvar = src;
8982 STRIP_NOPS (srcvar);
8983 if (TREE_CODE (srcvar) == ADDR_EXPR
8984 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8985 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8987 if (!destvar
8988 || src_align >= TYPE_ALIGN (desttype))
8989 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8990 srcvar, off0);
8991 else if (!STRICT_ALIGNMENT)
/* Under-aligned source on a target that tolerates it: access
   through a reduced-alignment variant of the type.  */
8993 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8994 src_align);
8995 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8997 else
8998 srcvar = NULL_TREE;
9000 else
9001 srcvar = NULL_TREE;
9003 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9004 return NULL_TREE;
/* If only one side folded to a variable access, access the other
   side through a MEM_REF of the folded side's type.  */
9006 if (srcvar == NULL_TREE)
9008 STRIP_NOPS (src);
9009 if (src_align >= TYPE_ALIGN (desttype))
9010 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9011 else
9013 if (STRICT_ALIGNMENT)
9014 return NULL_TREE;
9015 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9016 src_align);
9017 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9020 else if (destvar == NULL_TREE)
9022 STRIP_NOPS (dest);
9023 if (dest_align >= TYPE_ALIGN (srctype))
9024 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9025 else
9027 if (STRICT_ALIGNMENT)
9028 return NULL_TREE;
9029 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9030 dest_align);
9031 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9035 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
/* Build the return value according to ENDP (see the function
   comment); EXPR carries the assignment — or LEN when SRC == DEST —
   that must still be evaluated for its side effects.  */
9038 if (ignore)
9039 return expr;
9041 if (endp == 0 || endp == 3)
9042 return omit_one_operand_loc (loc, type, dest, expr);
9044 if (expr == len)
9045 expr = NULL_TREE;
9047 if (endp == 2)
9048 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9049 ssize_int (1));
9051 dest = fold_build_pointer_plus_loc (loc, dest, len);
9052 dest = fold_convert_loc (loc, type, dest);
9053 if (expr)
9054 dest = omit_one_operand_loc (loc, type, dest, expr);
9055 return dest;
9058 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9059 If LEN is not NULL, it represents the length of the string to be
9060 copied. Return NULL_TREE if no simplification can be made. */
9062 tree
9063 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9065 tree fn;
9067 if (!validate_arg (dest, POINTER_TYPE)
9068 || !validate_arg (src, POINTER_TYPE))
9069 return NULL_TREE;
9071 /* If SRC and DEST are the same (and not volatile), return DEST. */
9072 if (operand_equal_p (src, dest, 0))
9073 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9075 if (optimize_function_for_size_p (cfun))
9076 return NULL_TREE;
9078 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9079 if (!fn)
9080 return NULL_TREE;
9082 if (!len)
9084 len = c_strlen (src, 1);
9085 if (! len || TREE_SIDE_EFFECTS (len))
9086 return NULL_TREE;
9089 len = fold_convert_loc (loc, size_type_node, len);
9090 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9091 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9092 build_call_expr_loc (loc, fn, 3, dest, src, len));
9095 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9096 Return NULL_TREE if no simplification can be made. */
9098 static tree
9099 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9101 tree fn, len, lenp1, call, type;
9103 if (!validate_arg (dest, POINTER_TYPE)
9104 || !validate_arg (src, POINTER_TYPE))
9105 return NULL_TREE;
9107 len = c_strlen (src, 1);
9108 if (!len
9109 || TREE_CODE (len) != INTEGER_CST)
9110 return NULL_TREE;
9112 if (optimize_function_for_size_p (cfun)
9113 /* If length is zero it's small enough. */
9114 && !integer_zerop (len))
9115 return NULL_TREE;
9117 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9118 if (!fn)
9119 return NULL_TREE;
9121 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9122 fold_convert_loc (loc, size_type_node, len),
9123 build_int_cst (size_type_node, 1));
9124 /* We use dest twice in building our expression. Save it from
9125 multiple expansions. */
9126 dest = builtin_save_expr (dest);
9127 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9129 type = TREE_TYPE (TREE_TYPE (fndecl));
9130 dest = fold_build_pointer_plus_loc (loc, dest, len);
9131 dest = fold_convert_loc (loc, type, dest);
9132 dest = omit_one_operand_loc (loc, type, dest, call);
9133 return dest;
9136 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9137 If SLEN is not NULL, it represents the length of the source string.
9138 Return NULL_TREE if no simplification can be made. */
9140 tree
9141 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9142 tree src, tree len, tree slen)
9144 tree fn;
9146 if (!validate_arg (dest, POINTER_TYPE)
9147 || !validate_arg (src, POINTER_TYPE)
9148 || !validate_arg (len, INTEGER_TYPE))
9149 return NULL_TREE;
9151 /* If the LEN parameter is zero, return DEST. */
9152 if (integer_zerop (len))
9153 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9155 /* We can't compare slen with len as constants below if len is not a
9156 constant. */
9157 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9158 return NULL_TREE;
9160 if (!slen)
9161 slen = c_strlen (src, 1);
9163 /* Now, we must be passed a constant src ptr parameter. */
9164 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9165 return NULL_TREE;
9167 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9169 /* We do not support simplification of this case, though we do
9170 support it when expanding trees into RTL. */
9171 /* FIXME: generate a call to __builtin_memset. */
9172 if (tree_int_cst_lt (slen, len))
9173 return NULL_TREE;
9175 /* OK transform into builtin memcpy. */
9176 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9177 if (!fn)
9178 return NULL_TREE;
9180 len = fold_convert_loc (loc, size_type_node, len);
9181 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9182 build_call_expr_loc (loc, fn, 3, dest, src, len));
9185 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9186 arguments to the call, and TYPE is its return type.
9187 Return NULL_TREE if no simplification can be made. */
9189 static tree
9190 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9192 if (!validate_arg (arg1, POINTER_TYPE)
9193 || !validate_arg (arg2, INTEGER_TYPE)
9194 || !validate_arg (len, INTEGER_TYPE))
9195 return NULL_TREE;
9196 else
9198 const char *p1;
 /* We can only fold when the searched character and the length are
 compile-time constants. */
9200 if (TREE_CODE (arg2) != INTEGER_CST
9201 || !tree_fits_uhwi_p (len))
9202 return NULL_TREE;
 /* c_getstr returns the constant NUL-terminated string ARG1 points to,
 or NULL if ARG1 is not a string literal. */
9204 p1 = c_getstr (arg1);
 /* Only search within the string (including its terminating NUL) so the
 host memchr below never reads past the constant. */
9205 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9207 char c;
9208 const char *r;
9209 tree tem;
 /* Convert ARG2 to a host character in the target character set;
 bail out if it does not fit in a target char. */
9211 if (target_char_cast (arg2, &c))
9212 return NULL_TREE;
9214 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
 /* Not found: memchr returns a null pointer of ARG1's type. */
9216 if (r == NULL)
9217 return build_int_cst (TREE_TYPE (arg1), 0);
 /* Found: return ARG1 advanced by the match offset, converted to the
 call's return type. */
9219 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9220 return fold_convert_loc (loc, type, tem);
9222 return NULL_TREE;
9226 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9227 Return NULL_TREE if no simplification can be made. */
9229 static tree
9230 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9232 const char *p1, *p2;
9234 if (!validate_arg (arg1, POINTER_TYPE)
9235 || !validate_arg (arg2, POINTER_TYPE)
9236 || !validate_arg (len, INTEGER_TYPE))
9237 return NULL_TREE;
9239 /* If the LEN parameter is zero, return zero. The omit_* helpers keep
9240 ARG1 and ARG2 in the result so their side effects are preserved. */
9240 if (integer_zerop (len))
9241 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9242 arg1, arg2);
9244 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9245 if (operand_equal_p (arg1, arg2, 0))
9246 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
 /* Fetch the constant strings, if any, behind the two pointers. */
9248 p1 = c_getstr (arg1);
9249 p2 = c_getstr (arg2);
9251 /* If all arguments are constant, and the value of len is not greater
9252 than the lengths of arg1 and arg2, evaluate at compile-time. */
9253 if (tree_fits_uhwi_p (len) && p1 && p2
9254 && compare_tree_int (len, strlen (p1) + 1) <= 0
9255 && compare_tree_int (len, strlen (p2) + 1) <= 0)
 /* Normalize the host memcmp result to -1/0/1; the C standard only
 requires a sign, so this is a valid constant result. */
9257 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9259 if (r > 0)
9260 return integer_one_node;
9261 else if (r < 0)
9262 return integer_minus_one_node;
9263 else
9264 return integer_zero_node;
9267 /* If len parameter is one, return an expression corresponding to
9268 (*(const unsigned char*)arg1 - (const unsigned char*)arg2).
 The unsigned char accesses match memcmp's required semantics. */
9269 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9271 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0)
9272 tree cst_uchar_ptr_node
9273 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9275 tree ind1
9276 = fold_convert_loc (loc, integer_type_node,
9277 build1 (INDIRECT_REF, cst_uchar_node,
9278 fold_convert_loc (loc,
9279 cst_uchar_ptr_node,
9280 arg1)));
9281 tree ind2
9282 = fold_convert_loc (loc, integer_type_node,
9283 build1 (INDIRECT_REF, cst_uchar_node,
9284 fold_convert_loc (loc,
9285 cst_uchar_ptr_node,
9286 arg2)));
9287 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9290 return NULL_TREE;
9293 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9294 Return NULL_TREE if no simplification can be made. */
9296 static tree
9297 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9299 const char *p1, *p2;
9301 if (!validate_arg (arg1, POINTER_TYPE)
9302 || !validate_arg (arg2, POINTER_TYPE))
9303 return NULL_TREE;
9305 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9306 if (operand_equal_p (arg1, arg2, 0))
9307 return integer_zero_node;
 /* Fetch the constant strings, if any, behind the two pointers. */
9309 p1 = c_getstr (arg1);
9310 p2 = c_getstr (arg2);
 /* Both strings constant: evaluate at compile time. Only the sign of
 the result is specified, so -1/0/1 is a valid normalization. */
9312 if (p1 && p2)
9314 const int i = strcmp (p1, p2);
9315 if (i < 0)
9316 return integer_minus_one_node;
9317 else if (i > 0)
9318 return integer_one_node;
9319 else
9320 return integer_zero_node;
9323 /* If the second arg is "", return *(const unsigned char*)arg1.
 strcmp compares as unsigned char, hence the qualified uchar type. */
9324 if (p2 && *p2 == '\0')
9326 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9327 tree cst_uchar_ptr_node
9328 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9330 return fold_convert_loc (loc, integer_type_node,
9331 build1 (INDIRECT_REF, cst_uchar_node,
9332 fold_convert_loc (loc,
9333 cst_uchar_ptr_node,
9334 arg1)));
9337 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9338 if (p1 && *p1 == '\0')
9340 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9341 tree cst_uchar_ptr_node
9342 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9344 tree temp
9345 = fold_convert_loc (loc, integer_type_node,
9346 build1 (INDIRECT_REF, cst_uchar_node,
9347 fold_convert_loc (loc,
9348 cst_uchar_ptr_node,
9349 arg2)));
9350 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9353 return NULL_TREE;
9356 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9357 Return NULL_TREE if no simplification can be made. */
9359 static tree
9360 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9362 const char *p1, *p2;
9364 if (!validate_arg (arg1, POINTER_TYPE)
9365 || !validate_arg (arg2, POINTER_TYPE)
9366 || !validate_arg (len, INTEGER_TYPE))
9367 return NULL_TREE;
9369 /* If the LEN parameter is zero, return zero. Keep ARG1/ARG2 in the
 result so any side effects they carry are preserved. */
9370 if (integer_zerop (len))
9371 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9372 arg1, arg2);
9374 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9375 if (operand_equal_p (arg1, arg2, 0))
9376 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
 /* Fetch the constant strings, if any, behind the two pointers. */
9378 p1 = c_getstr (arg1);
9379 p2 = c_getstr (arg2);
 /* All three arguments constant: evaluate at compile time; the host
 strncmp stops at the NUL so no over-read can occur. Only the sign
 of the result is specified, hence the -1/0/1 normalization. */
9381 if (tree_fits_uhwi_p (len) && p1 && p2)
9383 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9384 if (i > 0)
9385 return integer_one_node;
9386 else if (i < 0)
9387 return integer_minus_one_node;
9388 else
9389 return integer_zero_node;
9392 /* If the second arg is "", and the length is greater than zero,
9393 return *(const unsigned char*)arg1. */
9394 if (p2 && *p2 == '\0'
9395 && TREE_CODE (len) == INTEGER_CST
9396 && tree_int_cst_sgn (len) == 1)
9398 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9399 tree cst_uchar_ptr_node
9400 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9402 return fold_convert_loc (loc, integer_type_node,
9403 build1 (INDIRECT_REF, cst_uchar_node,
9404 fold_convert_loc (loc,
9405 cst_uchar_ptr_node,
9406 arg1)));
9409 /* If the first arg is "", and the length is greater than zero,
9410 return -*(const unsigned char*)arg2. */
9411 if (p1 && *p1 == '\0'
9412 && TREE_CODE (len) == INTEGER_CST
9413 && tree_int_cst_sgn (len) == 1)
9415 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9416 tree cst_uchar_ptr_node
9417 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9419 tree temp = fold_convert_loc (loc, integer_type_node,
9420 build1 (INDIRECT_REF, cst_uchar_node,
9421 fold_convert_loc (loc,
9422 cst_uchar_ptr_node,
9423 arg2)));
9424 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9427 /* If len parameter is one, return an expression corresponding to
9428 (*(const unsigned char*)arg1 - (const unsigned char*)arg2).
 Comparing a single byte as unsigned char matches strncmp. */
9429 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9431 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9432 tree cst_uchar_ptr_node
9433 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9435 tree ind1 = fold_convert_loc (loc, integer_type_node,
9436 build1 (INDIRECT_REF, cst_uchar_node,
9437 fold_convert_loc (loc,
9438 cst_uchar_ptr_node,
9439 arg1)));
9440 tree ind2 = fold_convert_loc (loc, integer_type_node,
9441 build1 (INDIRECT_REF, cst_uchar_node,
9442 fold_convert_loc (loc,
9443 cst_uchar_ptr_node,
9444 arg2)));
9445 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9448 return NULL_TREE;
9451 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9452 ARG. Return NULL_TREE if no simplification can be made. TYPE is the
 call's return type; signbit only promises zero/nonzero, so folding to
 one/zero constants is conforming. */
9454 static tree
9455 fold_builtin_signbit (location_t loc, tree arg, tree type)
9457 if (!validate_arg (arg, REAL_TYPE))
9458 return NULL_TREE;
9460 /* If ARG is a compile-time constant, determine the result. */
9461 if (TREE_CODE (arg) == REAL_CST
9462 && !TREE_OVERFLOW (arg))
9464 REAL_VALUE_TYPE c;
9466 c = TREE_REAL_CST (arg);
9467 return (REAL_VALUE_NEGATIVE (c)
9468 ? build_one_cst (type)
9469 : build_zero_cst (type));
9472 /* If ARG is non-negative, the result is always zero. Keep ARG in the
 result so its side effects are evaluated. */
9473 if (tree_expr_nonnegative_p (arg))
9474 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9476 /* If ARG's format doesn't have signed zeros, return "arg < 0.0".
 With signed zeros this would be wrong for -0.0, whose sign bit is
 set although -0.0 < 0.0 is false. */
9477 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9478 return fold_convert (type,
9479 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9480 build_real (TREE_TYPE (arg), dconst0)));
9482 return NULL_TREE;
9485 /* Fold function call to builtin copysign, copysignf or copysignl with
9486 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9487 be made. FNDECL is the copysign decl, used to rebuild the call when
 only ARG1 can be simplified; TYPE is the call's return type. */
9489 static tree
9490 fold_builtin_copysign (location_t loc, tree fndecl,
9491 tree arg1, tree arg2, tree type)
9493 tree tem;
9495 if (!validate_arg (arg1, REAL_TYPE)
9496 || !validate_arg (arg2, REAL_TYPE))
9497 return NULL_TREE;
9499 /* copysign(X,X) is X. */
9500 if (operand_equal_p (arg1, arg2, 0))
9501 return fold_convert_loc (loc, type, arg1);
9503 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9504 if (TREE_CODE (arg1) == REAL_CST
9505 && TREE_CODE (arg2) == REAL_CST
9506 && !TREE_OVERFLOW (arg1)
9507 && !TREE_OVERFLOW (arg2))
9509 REAL_VALUE_TYPE c1, c2;
9511 c1 = TREE_REAL_CST (arg1);
9512 c2 = TREE_REAL_CST (arg2);
9513 /* c1.sign := c2.sign. */
9514 real_copysign (&c1, &c2);
9515 return build_real (type, c1);
9518 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9519 Remember to evaluate Y for side-effects. */
9520 if (tree_expr_nonnegative_p (arg2))
9521 return omit_one_operand_loc (loc, type,
9522 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9523 arg2);
9525 /* Strip sign changing operations for the first argument, since
 copysign overwrites ARG1's sign anyway; rebuild the call if any
 such operation was removed. */
9526 tem = fold_strip_sign_ops (arg1);
9527 if (tem)
9528 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9530 return NULL_TREE;
9533 /* Fold a call to builtin isascii with argument ARG. Returns the folded
 expression, or NULL_TREE if ARG is not an integer. */
9535 static tree
9536 fold_builtin_isascii (location_t loc, tree arg)
9538 if (!validate_arg (arg, INTEGER_TYPE))
9539 return NULL_TREE;
9540 else
9542 /* Transform isascii(c) -> ((c & ~0x7f) == 0), i.e. true iff no bit
 above the low seven is set. */
9543 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9544 build_int_cst (integer_type_node,
9545 ~ (unsigned HOST_WIDE_INT) 0x7f));
9546 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9547 arg, integer_zero_node);
9551 /* Fold a call to builtin toascii with argument ARG. Returns the folded
 expression, or NULL_TREE if ARG is not an integer. */
9553 static tree
9554 fold_builtin_toascii (location_t loc, tree arg)
9556 if (!validate_arg (arg, INTEGER_TYPE))
9557 return NULL_TREE;
9559 /* Transform toascii(c) -> (c & 0x7f): keep only the low seven bits. */
9560 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9561 build_int_cst (integer_type_node, 0x7f));
9564 /* Fold a call to builtin isdigit with argument ARG. Returns the folded
 expression, or NULL_TREE if no simplification is possible. */
9566 static tree
9567 fold_builtin_isdigit (location_t loc, tree arg)
9569 if (!validate_arg (arg, INTEGER_TYPE))
9570 return NULL_TREE;
9571 else
9573 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. The unsigned
 subtraction wraps values below '0' to large numbers, so a single
 comparison covers both range bounds. */
9574 /* According to the C standard, isdigit is unaffected by locale.
9575 However, it definitely is affected by the target character set. */
9576 unsigned HOST_WIDE_INT target_digit0
9577 = lang_hooks.to_target_charset ('0');
 /* A zero result means the target charset is unknown; punt. */
9579 if (target_digit0 == 0)
9580 return NULL_TREE;
9582 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9583 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9584 build_int_cst (unsigned_type_node, target_digit0));
9585 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9586 build_int_cst (unsigned_type_node, 9));
9590 /* Fold a call to fabs, fabsf or fabsl with argument ARG. TYPE is the
 call's return type. Constants are folded directly; otherwise the
 call becomes an ABS_EXPR. */
9592 static tree
9593 fold_builtin_fabs (location_t loc, tree arg, tree type)
9595 if (!validate_arg (arg, REAL_TYPE))
9596 return NULL_TREE;
9598 arg = fold_convert_loc (loc, type, arg);
9599 if (TREE_CODE (arg) == REAL_CST)
9600 return fold_abs_const (arg, type);
9601 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9604 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. TYPE is
 the call's return type. Mirrors fold_builtin_fabs for integers. */
9606 static tree
9607 fold_builtin_abs (location_t loc, tree arg, tree type)
9609 if (!validate_arg (arg, INTEGER_TYPE))
9610 return NULL_TREE;
9612 arg = fold_convert_loc (loc, type, arg);
9613 if (TREE_CODE (arg) == INTEGER_CST)
9614 return fold_abs_const (arg, type);
9615 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9618 /* Fold a fma operation with arguments ARG[012]. Only all-constant
 operands are folded, by evaluating the fused multiply-add with MPFR
 at the precision of TYPE; otherwise NULL_TREE is returned. */
9620 tree
9621 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9622 tree type, tree arg0, tree arg1, tree arg2)
9624 if (TREE_CODE (arg0) == REAL_CST
9625 && TREE_CODE (arg1) == REAL_CST
9626 && TREE_CODE (arg2) == REAL_CST)
9627 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9629 return NULL_TREE;
9632 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. Constant
 arguments are evaluated via fold_fma; otherwise the call is lowered
 to FMA_EXPR only when the target has a native fma instruction. */
9634 static tree
9635 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9637 if (validate_arg (arg0, REAL_TYPE)
9638 && validate_arg (arg1, REAL_TYPE)
9639 && validate_arg (arg2, REAL_TYPE))
9641 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9642 if (tem)
9643 return tem;
9645 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9646 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9647 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9649 return NULL_TREE;
9652 /* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
 TYPE is the call's return type; MAX is true for fmax, false for fmin.
 Return NULL_TREE if no simplification can be made. */
9654 static tree
9655 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9656 tree type, bool max)
9658 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9660 /* Calculate the result when the argument is a constant. */
9661 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9663 if (res)
9664 return res;
9666 /* If either argument is NaN, return the other one. Avoid the
9667 transformation if we get (and honor) a signalling NaN. Using
9668 omit_one_operand() ensures we create a non-lvalue. */
9669 if (TREE_CODE (arg0) == REAL_CST
9670 && real_isnan (&TREE_REAL_CST (arg0))
9671 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9672 || ! TREE_REAL_CST (arg0).signalling))
9673 return omit_one_operand_loc (loc, type, arg1, arg0);
9674 if (TREE_CODE (arg1) == REAL_CST
9675 && real_isnan (&TREE_REAL_CST (arg1))
9676 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9677 || ! TREE_REAL_CST (arg1).signalling))
9678 return omit_one_operand_loc (loc, type, arg0, arg1);
9680 /* Transform fmin/fmax(x,x) -> x. OEP_PURE_SAME allows the operands
 to be the same pure expression, not just the same object. */
9681 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9682 return omit_one_operand_loc (loc, type, arg0, arg1);
9684 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9685 functions to return the numeric arg if the other one is NaN.
9686 These tree codes don't honor that, so only transform if
9687 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9688 handled, so we don't have to worry about it either. */
9689 if (flag_finite_math_only)
9690 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9691 fold_convert_loc (loc, type, arg0),
9692 fold_convert_loc (loc, type, arg1));
9694 return NULL_TREE;
9697 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). ARG is the complex
 argument, TYPE the real return type. Returns NULL_TREE when ARG is
 not complex-real or no atan2 builtin is available. */
9699 static tree
9700 fold_builtin_carg (location_t loc, tree arg, tree type)
9702 if (validate_arg (arg, COMPLEX_TYPE)
9703 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9705 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9707 if (atan2_fn)
 /* builtin_save_expr prevents ARG from being evaluated twice when
 its real and imaginary parts are both extracted. */
9709 tree new_arg = builtin_save_expr (arg);
9710 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9711 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9712 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9716 return NULL_TREE;
9719 /* Fold a call to builtin logb/ilogb with argument ARG. RETTYPE is the
 call's return type: REAL_TYPE for logb, integer for ilogb. Only
 constant arguments are folded. */
9721 static tree
9722 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9724 if (! validate_arg (arg, REAL_TYPE))
9725 return NULL_TREE;
9727 STRIP_NOPS (arg);
9729 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9731 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9733 switch (value->cl)
9735 case rvc_nan:
9736 case rvc_inf:
9737 /* If arg is Inf or NaN and we're logb, return it. */
9738 if (TREE_CODE (rettype) == REAL_TYPE)
9740 /* For logb(-Inf) we have to return +Inf. */
9741 if (real_isinf (value) && real_isneg (value))
9743 REAL_VALUE_TYPE tem;
9744 real_inf (&tem);
9745 return build_real (rettype, tem);
9747 return fold_convert_loc (loc, rettype, arg);
9749 /* Fall through... */
9750 case rvc_zero:
9751 /* Zero may set errno and/or raise an exception for logb, also
9752 for ilogb we don't know FP_ILOGB0. */
9753 return NULL_TREE;
9754 case rvc_normal:
9755 /* For normal numbers, proceed iff radix == 2. In GCC,
9756 normalized significands are in the range [0.5, 1.0). We
9757 want the exponent as if they were [1.0, 2.0) so get the
9758 exponent and subtract 1. */
9759 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9760 return fold_convert_loc (loc, rettype,
9761 build_int_cst (integer_type_node,
9762 REAL_EXP (value)-1));
9763 break;
9767 return NULL_TREE;
9770 /* Fold a call to builtin significand with argument ARG, if radix == 2.
 RETTYPE is the call's return type. Only constant arguments are
 folded; the result has the same significand as ARG with exponent 1,
 i.e. a value in [1.0, 2.0). */
9772 static tree
9773 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9775 if (! validate_arg (arg, REAL_TYPE))
9776 return NULL_TREE;
9778 STRIP_NOPS (arg);
9780 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9782 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9784 switch (value->cl)
9786 case rvc_zero:
9787 case rvc_nan:
9788 case rvc_inf:
9789 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9790 return fold_convert_loc (loc, rettype, arg);
9791 case rvc_normal:
9792 /* For normal numbers, proceed iff radix == 2. */
9793 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9795 REAL_VALUE_TYPE result = *value;
9796 /* In GCC, normalized significands are in the range [0.5,
9797 1.0). We want them to be [1.0, 2.0) so set the
9798 exponent to 1. */
9799 SET_REAL_EXP (&result, 1);
9800 return build_real (rettype, result);
9802 break;
9806 return NULL_TREE;
9809 /* Fold a call to builtin frexp, we can assume the base is 2. ARG0 is
 the value, ARG1 the int* out-parameter for the exponent, RETTYPE the
 call's return type. Only constant ARG0 is folded; the result is a
 COMPOUND_EXPR that stores the exponent then yields the fraction. */
9811 static tree
9812 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9814 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9815 return NULL_TREE;
9817 STRIP_NOPS (arg0);
9819 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9820 return NULL_TREE;
9822 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9824 /* Proceed if a valid pointer type was passed in. */
9825 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9827 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9828 tree frac, exp;
9830 switch (value->cl)
9832 case rvc_zero:
9833 /* For +-0, return (*exp = 0, +-0). */
9834 exp = integer_zero_node;
9835 frac = arg0;
9836 break;
9837 case rvc_nan:
9838 case rvc_inf:
9839 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9840 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9841 case rvc_normal:
9843 /* Since the frexp function always expects base 2, and in
9844 GCC normalized significands are already in the range
9845 [0.5, 1.0), we have exactly what frexp wants. */
9846 REAL_VALUE_TYPE frac_rvt = *value;
9847 SET_REAL_EXP (&frac_rvt, 0);
9848 frac = build_real (rettype, frac_rvt);
9849 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9851 break;
9852 default:
9853 gcc_unreachable ();
9856 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9857 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9858 TREE_SIDE_EFFECTS (arg1) = 1;
9859 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9862 return NULL_TREE;
9865 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9866 then we can assume the base is two. If it's false, then we have to
9867 check the mode of the TYPE parameter in certain cases. ARG0 is the
 value, ARG1 the integer exponent adjustment, TYPE the return type. */
9869 static tree
9870 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9871 tree type, bool ldexp)
9873 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9875 STRIP_NOPS (arg0);
9876 STRIP_NOPS (arg1);
9878 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9879 if (real_zerop (arg0) || integer_zerop (arg1)
9880 || (TREE_CODE (arg0) == REAL_CST
9881 && !real_isfinite (&TREE_REAL_CST (arg0))))
9882 return omit_one_operand_loc (loc, type, arg0, arg1);
9884 /* If both arguments are constant, then try to evaluate it.
 For scalbn/scalbln this is only valid when TYPE's radix is 2. */
9885 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9886 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9887 && tree_fits_shwi_p (arg1))
9889 /* Bound the maximum adjustment to twice the range of the
9890 mode's valid exponents. Use abs to ensure the range is
9891 positive as a sanity check. */
9892 const long max_exp_adj = 2 *
9893 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9894 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9896 /* Get the user-requested adjustment. */
9897 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9899 /* The requested adjustment must be inside this range. This
9900 is a preliminary cap to avoid things like overflow, we
9901 may still fail to compute the result for other reasons. */
9902 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9904 REAL_VALUE_TYPE initial_result;
9906 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9908 /* Ensure we didn't overflow. */
9909 if (! real_isinf (&initial_result))
9911 const REAL_VALUE_TYPE trunc_result
9912 = real_value_truncate (TYPE_MODE (type), initial_result);
9914 /* Only proceed if the target mode can hold the
9915 resulting value. */
9916 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9917 return build_real (type, trunc_result);
9923 return NULL_TREE;
9926 /* Fold a call to builtin modf. ARG0 is the value, ARG1 the pointer
 out-parameter for the integral part, RETTYPE the call's return type.
 Only constant ARG0 is folded; the result is a COMPOUND_EXPR that
 stores the truncated part then yields the fractional part. */
9928 static tree
9929 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9931 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9932 return NULL_TREE;
9934 STRIP_NOPS (arg0);
9936 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9937 return NULL_TREE;
9939 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9941 /* Proceed if a valid pointer type was passed in. */
9942 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9944 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9945 REAL_VALUE_TYPE trunc, frac;
9947 switch (value->cl)
9949 case rvc_nan:
9950 case rvc_zero:
9951 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9952 trunc = frac = *value;
9953 break;
9954 case rvc_inf:
9955 /* For +-Inf, return (*arg1 = arg0, +-0). */
9956 frac = dconst0;
9957 frac.sign = value->sign;
9958 trunc = *value;
9959 break;
9960 case rvc_normal:
9961 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9962 real_trunc (&trunc, VOIDmode, value);
9963 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9964 /* If the original number was negative and already
9965 integral, then the fractional part is -0.0. */
9966 if (value->sign && frac.cl == rvc_zero)
9967 frac.sign = value->sign;
9968 break;
9971 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9972 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9973 build_real (rettype, trunc));
9974 TREE_SIDE_EFFECTS (arg1) = 1;
9975 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9976 build_real (rettype, frac));
9979 return NULL_TREE;
9982 /* Given a location LOC, an interclass builtin function decl FNDECL
9983 and its single argument ARG, return a folded expression computing
9984 the same, or NULL_TREE if we either couldn't or didn't want to fold
9985 (the latter happen if there's an RTL instruction available). */
9987 static tree
9988 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9990 enum machine_mode mode;
9992 if (!validate_arg (arg, REAL_TYPE))
9993 return NULL_TREE;
 /* Prefer a direct RTL expansion when the target provides one. */
9995 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9996 return NULL_TREE;
9998 mode = TYPE_MODE (TREE_TYPE (arg));
10000 /* If there is no optab, try generic code. The classifications are
 expressed through unordered comparisons against the mode's extreme
 finite values so that NaN inputs fall out naturally. */
10001 switch (DECL_FUNCTION_CODE (fndecl))
10003 tree result;
10005 CASE_FLT_FN (BUILT_IN_ISINF):
10007 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10008 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10009 tree const type = TREE_TYPE (arg);
10010 REAL_VALUE_TYPE r;
10011 char buf[128];
 /* get_max_float writes the mode's largest finite value as a
 hex-float string into BUF. */
10013 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10014 real_from_string (&r, buf);
10015 result = build_call_expr (isgr_fn, 2,
10016 fold_build1_loc (loc, ABS_EXPR, type, arg),
10017 build_real (type, r));
10018 return result;
10020 CASE_FLT_FN (BUILT_IN_FINITE):
10021 case BUILT_IN_ISFINITE:
10023 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10024 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10025 tree const type = TREE_TYPE (arg);
10026 REAL_VALUE_TYPE r;
10027 char buf[128];
10029 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10030 real_from_string (&r, buf);
10031 result = build_call_expr (isle_fn, 2,
10032 fold_build1_loc (loc, ABS_EXPR, type, arg),
10033 build_real (type, r));
10034 /*result = fold_build2_loc (loc, UNGT_EXPR,
10035 TREE_TYPE (TREE_TYPE (fndecl)),
10036 fold_build1_loc (loc, ABS_EXPR, type, arg),
10037 build_real (type, r));
10038 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10039 TREE_TYPE (TREE_TYPE (fndecl)),
10040 result);*/
10041 return result;
10043 case BUILT_IN_ISNORMAL:
10045 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10046 islessequal(fabs(x),DBL_MAX). */
10047 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10048 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10049 tree const type = TREE_TYPE (arg);
10050 REAL_VALUE_TYPE rmax, rmin;
10051 char buf[128];
10053 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10054 real_from_string (&rmax, buf);
 /* 0x1p(emin-1) is the mode's smallest positive normal value. */
10055 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10056 real_from_string (&rmin, buf);
 /* Save fabs(x) so the two comparisons evaluate ARG only once. */
10057 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10058 result = build_call_expr (isle_fn, 2, arg,
10059 build_real (type, rmax));
10060 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10061 build_call_expr (isge_fn, 2, arg,
10062 build_real (type, rmin)));
10063 return result;
10065 default:
10066 break;
10069 return NULL_TREE;
10072 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10073 ARG is the argument for the call. FNDECL supplies the return type and
 BUILTIN_INDEX selects which classification to fold. */
10075 static tree
10076 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10078 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10079 REAL_VALUE_TYPE r;
10081 if (!validate_arg (arg, REAL_TYPE))
10082 return NULL_TREE;
10084 switch (builtin_index)
10086 case BUILT_IN_ISINF:
 /* Modes without infinities can never compare equal to Inf. */
10087 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10090 if (TREE_CODE (arg) == REAL_CST)
10092 r = TREE_REAL_CST (arg);
10093 if (real_isinf (&r))
10094 return real_compare (GT_EXPR, &r, &dconst0)
10095 ? integer_one_node : integer_minus_one_node;
10096 else
10097 return integer_zero_node;
10100 return NULL_TREE;
10102 case BUILT_IN_ISINF_SIGN:
10104 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10105 /* In a boolean context, GCC will fold the inner COND_EXPR to
10106 1. So e.g. "if (isinf_sign(x))" would be folded to just
10107 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10108 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10109 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10110 tree tmp = NULL_TREE;
 /* ARG is used by both helper calls; evaluate it only once. */
10112 arg = builtin_save_expr (arg);
10114 if (signbit_fn && isinf_fn)
10116 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10117 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
 /* Normalize both helper results to 0/1 before nesting them in
 the COND_EXPRs below. */
10119 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10120 signbit_call, integer_zero_node);
10121 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10122 isinf_call, integer_zero_node);
10124 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10125 integer_minus_one_node, integer_one_node);
10126 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10127 isinf_call, tmp,
10128 integer_zero_node);
10131 return tmp;
10134 case BUILT_IN_ISFINITE:
 /* With neither NaNs nor infinities every value is finite. */
10135 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10136 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10137 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10139 if (TREE_CODE (arg) == REAL_CST)
10141 r = TREE_REAL_CST (arg);
10142 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10145 return NULL_TREE;
10147 case BUILT_IN_ISNAN:
10148 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10149 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10151 if (TREE_CODE (arg) == REAL_CST)
10153 r = TREE_REAL_CST (arg);
10154 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10157 /* isnan(x) -> x unordered with itself; true exactly for NaN.
 Save ARG so it is evaluated only once. */
10157 arg = builtin_save_expr (arg);
10158 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10160 default:
10161 gcc_unreachable ();
10165 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10166 This builtin will generate code to return the appropriate floating
10167 point classification depending on the value of the floating point
10168 number passed in. The possible return values must be supplied as
10169 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10170 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10171 one floating point argument which is "type generic". */
10173 static tree
10174 fold_builtin_fpclassify (location_t loc, tree exp)
10176 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10177 arg, type, res, tmp;
10178 enum machine_mode mode;
10179 REAL_VALUE_TYPE r;
10180 char buf[128];
10182 /* Verify the required arguments in the original call. */
10183 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10184 INTEGER_TYPE, INTEGER_TYPE,
10185 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10186 return NULL_TREE;
10188 fp_nan = CALL_EXPR_ARG (exp, 0);
10189 fp_infinite = CALL_EXPR_ARG (exp, 1);
10190 fp_normal = CALL_EXPR_ARG (exp, 2);
10191 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10192 fp_zero = CALL_EXPR_ARG (exp, 4);
10193 arg = CALL_EXPR_ARG (exp, 5);
10194 type = TREE_TYPE (arg);
10195 mode = TYPE_MODE (type);
 /* Work on fabs(arg), saved so it is evaluated only once across the
 chain of comparisons built below. */
10196 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10198 /* fpclassify(x) ->
10199 isnan(x) ? FP_NAN :
10200 (fabs(x) == Inf ? FP_INFINITE :
10201 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10202 (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
 The chain is built innermost-first below. */
10204 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10205 build_real (type, dconst0));
10206 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10207 tmp, fp_zero, fp_subnormal);
 /* 0x1p(emin-1) is the mode's smallest positive normal value. */
10209 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10210 real_from_string (&r, buf);
10211 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10212 arg, build_real (type, r));
10213 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
 /* Only test for Inf/NaN when the mode honors them; otherwise those
 branches are dead and can be omitted. */
10215 if (HONOR_INFINITIES (mode))
10217 real_inf (&r);
10218 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10219 build_real (type, r));
10220 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10221 fp_infinite, res);
10224 if (HONOR_NANS (mode))
10226 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10227 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10230 return res;
10233 /* Fold a call to an unordered comparison function such as
10234 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10235 being called and ARG0 and ARG1 are the arguments for the call.
10236 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10237 the opposite of the desired result. UNORDERED_CODE is used
10238 for modes that can hold NaNs and ORDERED_CODE is used for
10239 the rest. */
10241 static tree
10242 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10243 enum tree_code unordered_code,
10244 enum tree_code ordered_code)
10246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10247 enum tree_code code;
10248 tree type0, type1;
10249 enum tree_code code0, code1;
10250 tree cmp_type = NULL_TREE;
10252 type0 = TREE_TYPE (arg0);
10253 type1 = TREE_TYPE (arg1);
10255 code0 = TREE_CODE (type0);
10256 code1 = TREE_CODE (type1);
 /* Pick a common comparison type: the wider real type, or the real
 type when mixing real with integer. */
10258 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10259 /* Choose the wider of two real types. */
10260 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10261 ? type0 : type1;
10262 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10263 cmp_type = type0;
10264 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10265 cmp_type = type1;
10267 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10268 arg1 = fold_convert_loc (loc, cmp_type, arg1);
 /* __builtin_isunordered is the one caller that wants the unordered
 result itself rather than its negation. */
10270 if (unordered_code == UNORDERED_EXPR)
 /* Without NaNs no pair of values is unordered: fold to zero while
 preserving the operands' side effects. */
10272 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10273 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10274 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
 /* The codes compute the OPPOSITE of the desired result, so negate. */
10277 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10278 : ordered_code;
10279 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10280 fold_build2_loc (loc, code, type, arg0, arg1));
10283 /* Fold a call to built-in function FNDECL with 0 arguments.
10284 IGNORE is true if the result of the function call is ignored. This
10285 function returns NULL_TREE if no simplification was possible. */
10287 static tree
10288 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10290 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10292 switch (fcode)
10294 CASE_FLT_FN (BUILT_IN_INF):
10295 case BUILT_IN_INFD32:
10296 case BUILT_IN_INFD64:
10297 case BUILT_IN_INFD128:
10298 return fold_builtin_inf (loc, type, true);
10300 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10301 return fold_builtin_inf (loc, type, false);
10303 case BUILT_IN_CLASSIFY_TYPE:
10304 return fold_builtin_classify_type (NULL_TREE);
10306 case BUILT_IN_UNREACHABLE:
10307 if (flag_sanitize & SANITIZE_UNREACHABLE
10308 && (current_function_decl == NULL
10309 || !lookup_attribute ("no_sanitize_undefined",
10310 DECL_ATTRIBUTES (current_function_decl))))
10311 return ubsan_instrument_unreachable (loc);
10312 break;
10314 default:
10315 break;
10317 return NULL_TREE;
10320 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10321 IGNORE is true if the result of the function call is ignored. This
10322 function returns NULL_TREE if no simplification was possible. */
10324 static tree
10325 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10327 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10328 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10329 switch (fcode)
10331 case BUILT_IN_CONSTANT_P:
10333 tree val = fold_builtin_constant_p (arg0);
10335 /* Gimplification will pull the CALL_EXPR for the builtin out of
10336 an if condition. When not optimizing, we'll not CSE it back.
10337 To avoid link error types of regressions, return false now. */
10338 if (!val && !optimize)
10339 val = integer_zero_node;
10341 return val;
10344 case BUILT_IN_CLASSIFY_TYPE:
10345 return fold_builtin_classify_type (arg0);
10347 case BUILT_IN_STRLEN:
10348 return fold_builtin_strlen (loc, type, arg0);
10350 CASE_FLT_FN (BUILT_IN_FABS):
10351 case BUILT_IN_FABSD32:
10352 case BUILT_IN_FABSD64:
10353 case BUILT_IN_FABSD128:
10354 return fold_builtin_fabs (loc, arg0, type);
10356 case BUILT_IN_ABS:
10357 case BUILT_IN_LABS:
10358 case BUILT_IN_LLABS:
10359 case BUILT_IN_IMAXABS:
10360 return fold_builtin_abs (loc, arg0, type);
10362 CASE_FLT_FN (BUILT_IN_CONJ):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10366 break;
10368 CASE_FLT_FN (BUILT_IN_CREAL):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10372 break;
10374 CASE_FLT_FN (BUILT_IN_CIMAG):
10375 if (validate_arg (arg0, COMPLEX_TYPE)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10377 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10378 break;
10380 CASE_FLT_FN (BUILT_IN_CCOS):
10381 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10383 CASE_FLT_FN (BUILT_IN_CCOSH):
10384 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10386 CASE_FLT_FN (BUILT_IN_CPROJ):
10387 return fold_builtin_cproj (loc, arg0, type);
10389 CASE_FLT_FN (BUILT_IN_CSIN):
10390 if (validate_arg (arg0, COMPLEX_TYPE)
10391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10392 return do_mpc_arg1 (arg0, type, mpc_sin);
10393 break;
10395 CASE_FLT_FN (BUILT_IN_CSINH):
10396 if (validate_arg (arg0, COMPLEX_TYPE)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10398 return do_mpc_arg1 (arg0, type, mpc_sinh);
10399 break;
10401 CASE_FLT_FN (BUILT_IN_CTAN):
10402 if (validate_arg (arg0, COMPLEX_TYPE)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10404 return do_mpc_arg1 (arg0, type, mpc_tan);
10405 break;
10407 CASE_FLT_FN (BUILT_IN_CTANH):
10408 if (validate_arg (arg0, COMPLEX_TYPE)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10410 return do_mpc_arg1 (arg0, type, mpc_tanh);
10411 break;
10413 CASE_FLT_FN (BUILT_IN_CLOG):
10414 if (validate_arg (arg0, COMPLEX_TYPE)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10416 return do_mpc_arg1 (arg0, type, mpc_log);
10417 break;
10419 CASE_FLT_FN (BUILT_IN_CSQRT):
10420 if (validate_arg (arg0, COMPLEX_TYPE)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10422 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10423 break;
10425 CASE_FLT_FN (BUILT_IN_CASIN):
10426 if (validate_arg (arg0, COMPLEX_TYPE)
10427 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10428 return do_mpc_arg1 (arg0, type, mpc_asin);
10429 break;
10431 CASE_FLT_FN (BUILT_IN_CACOS):
10432 if (validate_arg (arg0, COMPLEX_TYPE)
10433 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10434 return do_mpc_arg1 (arg0, type, mpc_acos);
10435 break;
10437 CASE_FLT_FN (BUILT_IN_CATAN):
10438 if (validate_arg (arg0, COMPLEX_TYPE)
10439 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10440 return do_mpc_arg1 (arg0, type, mpc_atan);
10441 break;
10443 CASE_FLT_FN (BUILT_IN_CASINH):
10444 if (validate_arg (arg0, COMPLEX_TYPE)
10445 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10446 return do_mpc_arg1 (arg0, type, mpc_asinh);
10447 break;
10449 CASE_FLT_FN (BUILT_IN_CACOSH):
10450 if (validate_arg (arg0, COMPLEX_TYPE)
10451 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10452 return do_mpc_arg1 (arg0, type, mpc_acosh);
10453 break;
10455 CASE_FLT_FN (BUILT_IN_CATANH):
10456 if (validate_arg (arg0, COMPLEX_TYPE)
10457 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10458 return do_mpc_arg1 (arg0, type, mpc_atanh);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_CABS):
10462 return fold_builtin_cabs (loc, arg0, type, fndecl);
10464 CASE_FLT_FN (BUILT_IN_CARG):
10465 return fold_builtin_carg (loc, arg0, type);
10467 CASE_FLT_FN (BUILT_IN_SQRT):
10468 return fold_builtin_sqrt (loc, arg0, type);
10470 CASE_FLT_FN (BUILT_IN_CBRT):
10471 return fold_builtin_cbrt (loc, arg0, type);
10473 CASE_FLT_FN (BUILT_IN_ASIN):
10474 if (validate_arg (arg0, REAL_TYPE))
10475 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10476 &dconstm1, &dconst1, true);
10477 break;
10479 CASE_FLT_FN (BUILT_IN_ACOS):
10480 if (validate_arg (arg0, REAL_TYPE))
10481 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10482 &dconstm1, &dconst1, true);
10483 break;
10485 CASE_FLT_FN (BUILT_IN_ATAN):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10488 break;
10490 CASE_FLT_FN (BUILT_IN_ASINH):
10491 if (validate_arg (arg0, REAL_TYPE))
10492 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10493 break;
10495 CASE_FLT_FN (BUILT_IN_ACOSH):
10496 if (validate_arg (arg0, REAL_TYPE))
10497 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10498 &dconst1, NULL, true);
10499 break;
10501 CASE_FLT_FN (BUILT_IN_ATANH):
10502 if (validate_arg (arg0, REAL_TYPE))
10503 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10504 &dconstm1, &dconst1, false);
10505 break;
10507 CASE_FLT_FN (BUILT_IN_SIN):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10510 break;
10512 CASE_FLT_FN (BUILT_IN_COS):
10513 return fold_builtin_cos (loc, arg0, type, fndecl);
10515 CASE_FLT_FN (BUILT_IN_TAN):
10516 return fold_builtin_tan (arg0, type);
10518 CASE_FLT_FN (BUILT_IN_CEXP):
10519 return fold_builtin_cexp (loc, arg0, type);
10521 CASE_FLT_FN (BUILT_IN_CEXPI):
10522 if (validate_arg (arg0, REAL_TYPE))
10523 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10524 break;
10526 CASE_FLT_FN (BUILT_IN_SINH):
10527 if (validate_arg (arg0, REAL_TYPE))
10528 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10529 break;
10531 CASE_FLT_FN (BUILT_IN_COSH):
10532 return fold_builtin_cosh (loc, arg0, type, fndecl);
10534 CASE_FLT_FN (BUILT_IN_TANH):
10535 if (validate_arg (arg0, REAL_TYPE))
10536 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10537 break;
10539 CASE_FLT_FN (BUILT_IN_ERF):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10542 break;
10544 CASE_FLT_FN (BUILT_IN_ERFC):
10545 if (validate_arg (arg0, REAL_TYPE))
10546 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10547 break;
10549 CASE_FLT_FN (BUILT_IN_TGAMMA):
10550 if (validate_arg (arg0, REAL_TYPE))
10551 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10552 break;
10554 CASE_FLT_FN (BUILT_IN_EXP):
10555 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10557 CASE_FLT_FN (BUILT_IN_EXP2):
10558 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10560 CASE_FLT_FN (BUILT_IN_EXP10):
10561 CASE_FLT_FN (BUILT_IN_POW10):
10562 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10564 CASE_FLT_FN (BUILT_IN_EXPM1):
10565 if (validate_arg (arg0, REAL_TYPE))
10566 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10567 break;
10569 CASE_FLT_FN (BUILT_IN_LOG):
10570 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10572 CASE_FLT_FN (BUILT_IN_LOG2):
10573 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10575 CASE_FLT_FN (BUILT_IN_LOG10):
10576 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10578 CASE_FLT_FN (BUILT_IN_LOG1P):
10579 if (validate_arg (arg0, REAL_TYPE))
10580 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10581 &dconstm1, NULL, false);
10582 break;
10584 CASE_FLT_FN (BUILT_IN_J0):
10585 if (validate_arg (arg0, REAL_TYPE))
10586 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10587 NULL, NULL, 0);
10588 break;
10590 CASE_FLT_FN (BUILT_IN_J1):
10591 if (validate_arg (arg0, REAL_TYPE))
10592 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10593 NULL, NULL, 0);
10594 break;
10596 CASE_FLT_FN (BUILT_IN_Y0):
10597 if (validate_arg (arg0, REAL_TYPE))
10598 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10599 &dconst0, NULL, false);
10600 break;
10602 CASE_FLT_FN (BUILT_IN_Y1):
10603 if (validate_arg (arg0, REAL_TYPE))
10604 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10605 &dconst0, NULL, false);
10606 break;
10608 CASE_FLT_FN (BUILT_IN_NAN):
10609 case BUILT_IN_NAND32:
10610 case BUILT_IN_NAND64:
10611 case BUILT_IN_NAND128:
10612 return fold_builtin_nan (arg0, type, true);
10614 CASE_FLT_FN (BUILT_IN_NANS):
10615 return fold_builtin_nan (arg0, type, false);
10617 CASE_FLT_FN (BUILT_IN_FLOOR):
10618 return fold_builtin_floor (loc, fndecl, arg0);
10620 CASE_FLT_FN (BUILT_IN_CEIL):
10621 return fold_builtin_ceil (loc, fndecl, arg0);
10623 CASE_FLT_FN (BUILT_IN_TRUNC):
10624 return fold_builtin_trunc (loc, fndecl, arg0);
10626 CASE_FLT_FN (BUILT_IN_ROUND):
10627 return fold_builtin_round (loc, fndecl, arg0);
10629 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10630 CASE_FLT_FN (BUILT_IN_RINT):
10631 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10633 CASE_FLT_FN (BUILT_IN_ICEIL):
10634 CASE_FLT_FN (BUILT_IN_LCEIL):
10635 CASE_FLT_FN (BUILT_IN_LLCEIL):
10636 CASE_FLT_FN (BUILT_IN_LFLOOR):
10637 CASE_FLT_FN (BUILT_IN_IFLOOR):
10638 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10639 CASE_FLT_FN (BUILT_IN_IROUND):
10640 CASE_FLT_FN (BUILT_IN_LROUND):
10641 CASE_FLT_FN (BUILT_IN_LLROUND):
10642 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10644 CASE_FLT_FN (BUILT_IN_IRINT):
10645 CASE_FLT_FN (BUILT_IN_LRINT):
10646 CASE_FLT_FN (BUILT_IN_LLRINT):
10647 return fold_fixed_mathfn (loc, fndecl, arg0);
10649 case BUILT_IN_BSWAP16:
10650 case BUILT_IN_BSWAP32:
10651 case BUILT_IN_BSWAP64:
10652 return fold_builtin_bswap (fndecl, arg0);
10654 CASE_INT_FN (BUILT_IN_FFS):
10655 CASE_INT_FN (BUILT_IN_CLZ):
10656 CASE_INT_FN (BUILT_IN_CTZ):
10657 CASE_INT_FN (BUILT_IN_CLRSB):
10658 CASE_INT_FN (BUILT_IN_POPCOUNT):
10659 CASE_INT_FN (BUILT_IN_PARITY):
10660 return fold_builtin_bitop (fndecl, arg0);
10662 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10663 return fold_builtin_signbit (loc, arg0, type);
10665 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10666 return fold_builtin_significand (loc, arg0, type);
10668 CASE_FLT_FN (BUILT_IN_ILOGB):
10669 CASE_FLT_FN (BUILT_IN_LOGB):
10670 return fold_builtin_logb (loc, arg0, type);
10672 case BUILT_IN_ISASCII:
10673 return fold_builtin_isascii (loc, arg0);
10675 case BUILT_IN_TOASCII:
10676 return fold_builtin_toascii (loc, arg0);
10678 case BUILT_IN_ISDIGIT:
10679 return fold_builtin_isdigit (loc, arg0);
10681 CASE_FLT_FN (BUILT_IN_FINITE):
10682 case BUILT_IN_FINITED32:
10683 case BUILT_IN_FINITED64:
10684 case BUILT_IN_FINITED128:
10685 case BUILT_IN_ISFINITE:
10687 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10688 if (ret)
10689 return ret;
10690 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10693 CASE_FLT_FN (BUILT_IN_ISINF):
10694 case BUILT_IN_ISINFD32:
10695 case BUILT_IN_ISINFD64:
10696 case BUILT_IN_ISINFD128:
10698 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10699 if (ret)
10700 return ret;
10701 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10704 case BUILT_IN_ISNORMAL:
10705 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10707 case BUILT_IN_ISINF_SIGN:
10708 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10710 CASE_FLT_FN (BUILT_IN_ISNAN):
10711 case BUILT_IN_ISNAND32:
10712 case BUILT_IN_ISNAND64:
10713 case BUILT_IN_ISNAND128:
10714 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10716 case BUILT_IN_PRINTF:
10717 case BUILT_IN_PRINTF_UNLOCKED:
10718 case BUILT_IN_VPRINTF:
10719 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10721 case BUILT_IN_FREE:
10722 if (integer_zerop (arg0))
10723 return build_empty_stmt (loc);
10724 break;
10726 default:
10727 break;
10730 return NULL_TREE;
10734 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10735 IGNORE is true if the result of the function call is ignored. This
10736 function returns NULL_TREE if no simplification was possible. */
10738 static tree
10739 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10741 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10742 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10744 switch (fcode)
10746 CASE_FLT_FN (BUILT_IN_JN):
10747 if (validate_arg (arg0, INTEGER_TYPE)
10748 && validate_arg (arg1, REAL_TYPE))
10749 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10750 break;
10752 CASE_FLT_FN (BUILT_IN_YN):
10753 if (validate_arg (arg0, INTEGER_TYPE)
10754 && validate_arg (arg1, REAL_TYPE))
10755 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10756 &dconst0, false);
10757 break;
10759 CASE_FLT_FN (BUILT_IN_DREM):
10760 CASE_FLT_FN (BUILT_IN_REMAINDER):
10761 if (validate_arg (arg0, REAL_TYPE)
10762 && validate_arg (arg1, REAL_TYPE))
10763 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10764 break;
10766 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10767 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10768 if (validate_arg (arg0, REAL_TYPE)
10769 && validate_arg (arg1, POINTER_TYPE))
10770 return do_mpfr_lgamma_r (arg0, arg1, type);
10771 break;
10773 CASE_FLT_FN (BUILT_IN_ATAN2):
10774 if (validate_arg (arg0, REAL_TYPE)
10775 && validate_arg (arg1, REAL_TYPE))
10776 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10777 break;
10779 CASE_FLT_FN (BUILT_IN_FDIM):
10780 if (validate_arg (arg0, REAL_TYPE)
10781 && validate_arg (arg1, REAL_TYPE))
10782 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10783 break;
10785 CASE_FLT_FN (BUILT_IN_HYPOT):
10786 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10788 CASE_FLT_FN (BUILT_IN_CPOW):
10789 if (validate_arg (arg0, COMPLEX_TYPE)
10790 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10791 && validate_arg (arg1, COMPLEX_TYPE)
10792 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10793 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10794 break;
10796 CASE_FLT_FN (BUILT_IN_LDEXP):
10797 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10798 CASE_FLT_FN (BUILT_IN_SCALBN):
10799 CASE_FLT_FN (BUILT_IN_SCALBLN):
10800 return fold_builtin_load_exponent (loc, arg0, arg1,
10801 type, /*ldexp=*/false);
10803 CASE_FLT_FN (BUILT_IN_FREXP):
10804 return fold_builtin_frexp (loc, arg0, arg1, type);
10806 CASE_FLT_FN (BUILT_IN_MODF):
10807 return fold_builtin_modf (loc, arg0, arg1, type);
10809 case BUILT_IN_BZERO:
10810 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10812 case BUILT_IN_FPUTS:
10813 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10815 case BUILT_IN_FPUTS_UNLOCKED:
10816 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10818 case BUILT_IN_STRSTR:
10819 return fold_builtin_strstr (loc, arg0, arg1, type);
10821 case BUILT_IN_STRCAT:
10822 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10824 case BUILT_IN_STRSPN:
10825 return fold_builtin_strspn (loc, arg0, arg1);
10827 case BUILT_IN_STRCSPN:
10828 return fold_builtin_strcspn (loc, arg0, arg1);
10830 case BUILT_IN_STRCHR:
10831 case BUILT_IN_INDEX:
10832 return fold_builtin_strchr (loc, arg0, arg1, type);
10834 case BUILT_IN_STRRCHR:
10835 case BUILT_IN_RINDEX:
10836 return fold_builtin_strrchr (loc, arg0, arg1, type);
10838 case BUILT_IN_STRCPY:
10839 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10841 case BUILT_IN_STPCPY:
10842 if (ignore)
10844 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10845 if (!fn)
10846 break;
10848 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10850 else
10851 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10852 break;
10854 case BUILT_IN_STRCMP:
10855 return fold_builtin_strcmp (loc, arg0, arg1);
10857 case BUILT_IN_STRPBRK:
10858 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10860 case BUILT_IN_EXPECT:
10861 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10863 CASE_FLT_FN (BUILT_IN_POW):
10864 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10866 CASE_FLT_FN (BUILT_IN_POWI):
10867 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10869 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10870 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10872 CASE_FLT_FN (BUILT_IN_FMIN):
10873 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10875 CASE_FLT_FN (BUILT_IN_FMAX):
10876 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10878 case BUILT_IN_ISGREATER:
10879 return fold_builtin_unordered_cmp (loc, fndecl,
10880 arg0, arg1, UNLE_EXPR, LE_EXPR);
10881 case BUILT_IN_ISGREATEREQUAL:
10882 return fold_builtin_unordered_cmp (loc, fndecl,
10883 arg0, arg1, UNLT_EXPR, LT_EXPR);
10884 case BUILT_IN_ISLESS:
10885 return fold_builtin_unordered_cmp (loc, fndecl,
10886 arg0, arg1, UNGE_EXPR, GE_EXPR);
10887 case BUILT_IN_ISLESSEQUAL:
10888 return fold_builtin_unordered_cmp (loc, fndecl,
10889 arg0, arg1, UNGT_EXPR, GT_EXPR);
10890 case BUILT_IN_ISLESSGREATER:
10891 return fold_builtin_unordered_cmp (loc, fndecl,
10892 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10893 case BUILT_IN_ISUNORDERED:
10894 return fold_builtin_unordered_cmp (loc, fndecl,
10895 arg0, arg1, UNORDERED_EXPR,
10896 NOP_EXPR);
10898 /* We do the folding for va_start in the expander. */
10899 case BUILT_IN_VA_START:
10900 break;
10902 case BUILT_IN_SPRINTF:
10903 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10905 case BUILT_IN_OBJECT_SIZE:
10906 return fold_builtin_object_size (arg0, arg1);
10908 case BUILT_IN_PRINTF:
10909 case BUILT_IN_PRINTF_UNLOCKED:
10910 case BUILT_IN_VPRINTF:
10911 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10913 case BUILT_IN_PRINTF_CHK:
10914 case BUILT_IN_VPRINTF_CHK:
10915 if (!validate_arg (arg0, INTEGER_TYPE)
10916 || TREE_SIDE_EFFECTS (arg0))
10917 return NULL_TREE;
10918 else
10919 return fold_builtin_printf (loc, fndecl,
10920 arg1, NULL_TREE, ignore, fcode);
10921 break;
10923 case BUILT_IN_FPRINTF:
10924 case BUILT_IN_FPRINTF_UNLOCKED:
10925 case BUILT_IN_VFPRINTF:
10926 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10927 ignore, fcode);
10929 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10930 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10932 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10933 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10935 default:
10936 break;
10938 return NULL_TREE;
10941 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10942 and ARG2. IGNORE is true if the result of the function call is ignored.
10943 This function returns NULL_TREE if no simplification was possible. */
10945 static tree
10946 fold_builtin_3 (location_t loc, tree fndecl,
10947 tree arg0, tree arg1, tree arg2, bool ignore)
10949 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10950 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10951 switch (fcode)
10954 CASE_FLT_FN (BUILT_IN_SINCOS):
10955 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10957 CASE_FLT_FN (BUILT_IN_FMA):
10958 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10959 break;
10961 CASE_FLT_FN (BUILT_IN_REMQUO):
10962 if (validate_arg (arg0, REAL_TYPE)
10963 && validate_arg (arg1, REAL_TYPE)
10964 && validate_arg (arg2, POINTER_TYPE))
10965 return do_mpfr_remquo (arg0, arg1, arg2);
10966 break;
10968 case BUILT_IN_MEMSET:
10969 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10971 case BUILT_IN_BCOPY:
10972 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10973 void_type_node, true, /*endp=*/3);
10975 case BUILT_IN_MEMCPY:
10976 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10977 type, ignore, /*endp=*/0);
10979 case BUILT_IN_MEMPCPY:
10980 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10981 type, ignore, /*endp=*/1);
10983 case BUILT_IN_MEMMOVE:
10984 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10985 type, ignore, /*endp=*/3);
10987 case BUILT_IN_STRNCAT:
10988 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10990 case BUILT_IN_STRNCPY:
10991 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10993 case BUILT_IN_STRNCMP:
10994 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10996 case BUILT_IN_MEMCHR:
10997 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10999 case BUILT_IN_BCMP:
11000 case BUILT_IN_MEMCMP:
11001 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
11003 case BUILT_IN_SPRINTF:
11004 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11006 case BUILT_IN_SNPRINTF:
11007 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11009 case BUILT_IN_STRCPY_CHK:
11010 case BUILT_IN_STPCPY_CHK:
11011 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11012 ignore, fcode);
11014 case BUILT_IN_STRCAT_CHK:
11015 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11017 case BUILT_IN_PRINTF_CHK:
11018 case BUILT_IN_VPRINTF_CHK:
11019 if (!validate_arg (arg0, INTEGER_TYPE)
11020 || TREE_SIDE_EFFECTS (arg0))
11021 return NULL_TREE;
11022 else
11023 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11024 break;
11026 case BUILT_IN_FPRINTF:
11027 case BUILT_IN_FPRINTF_UNLOCKED:
11028 case BUILT_IN_VFPRINTF:
11029 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11030 ignore, fcode);
11032 case BUILT_IN_FPRINTF_CHK:
11033 case BUILT_IN_VFPRINTF_CHK:
11034 if (!validate_arg (arg1, INTEGER_TYPE)
11035 || TREE_SIDE_EFFECTS (arg1))
11036 return NULL_TREE;
11037 else
11038 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11039 ignore, fcode);
11041 case BUILT_IN_EXPECT:
11042 return fold_builtin_expect (loc, arg0, arg1, arg2);
11044 default:
11045 break;
11047 return NULL_TREE;
11050 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11051 ARG2, and ARG3. IGNORE is true if the result of the function call is
11052 ignored. This function returns NULL_TREE if no simplification was
11053 possible. */
11055 static tree
11056 fold_builtin_4 (location_t loc, tree fndecl,
11057 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11059 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11061 switch (fcode)
11063 case BUILT_IN_MEMCPY_CHK:
11064 case BUILT_IN_MEMPCPY_CHK:
11065 case BUILT_IN_MEMMOVE_CHK:
11066 case BUILT_IN_MEMSET_CHK:
11067 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11068 NULL_TREE, ignore,
11069 DECL_FUNCTION_CODE (fndecl));
11071 case BUILT_IN_STRNCPY_CHK:
11072 case BUILT_IN_STPNCPY_CHK:
11073 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11074 ignore, fcode);
11076 case BUILT_IN_STRNCAT_CHK:
11077 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11079 case BUILT_IN_SNPRINTF:
11080 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11082 case BUILT_IN_FPRINTF_CHK:
11083 case BUILT_IN_VFPRINTF_CHK:
11084 if (!validate_arg (arg1, INTEGER_TYPE)
11085 || TREE_SIDE_EFFECTS (arg1))
11086 return NULL_TREE;
11087 else
11088 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11089 ignore, fcode);
11090 break;
11092 default:
11093 break;
11095 return NULL_TREE;
11098 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11099 arguments, where NARGS <= 4. IGNORE is true if the result of the
11100 function call is ignored. This function returns NULL_TREE if no
11101 simplification was possible. Note that this only folds builtins with
11102 fixed argument patterns. Foldings that do varargs-to-varargs
11103 transformations, or that match calls with more than 4 arguments,
11104 need to be handled with fold_builtin_varargs instead. */
11106 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11108 static tree
11109 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11111 tree ret = NULL_TREE;
11113 switch (nargs)
11115 case 0:
11116 ret = fold_builtin_0 (loc, fndecl, ignore);
11117 break;
11118 case 1:
11119 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11120 break;
11121 case 2:
11122 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11123 break;
11124 case 3:
11125 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11126 break;
11127 case 4:
11128 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11129 ignore);
11130 break;
11131 default:
11132 break;
11134 if (ret)
11136 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11137 SET_EXPR_LOCATION (ret, loc);
11138 TREE_NO_WARNING (ret) = 1;
11139 return ret;
11141 return NULL_TREE;
11144 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11145 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11146 of arguments in ARGS to be omitted. OLDNARGS is the number of
11147 elements in ARGS. */
11149 static tree
11150 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11151 int skip, tree fndecl, int n, va_list newargs)
11153 int nargs = oldnargs - skip + n;
11154 tree *buffer;
11156 if (n > 0)
11158 int i, j;
11160 buffer = XALLOCAVEC (tree, nargs);
11161 for (i = 0; i < n; i++)
11162 buffer[i] = va_arg (newargs, tree);
11163 for (j = skip; j < oldnargs; j++, i++)
11164 buffer[i] = args[j];
11166 else
11167 buffer = args + skip;
11169 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11172 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11173 list ARGS along with N new arguments specified as the "..."
11174 parameters. SKIP is the number of arguments in ARGS to be omitted.
11175 OLDNARGS is the number of elements in ARGS. */
11177 static tree
11178 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11179 int skip, tree fndecl, int n, ...)
11181 va_list ap;
11182 tree t;
11184 va_start (ap, n);
11185 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11186 va_end (ap);
11188 return t;
11191 /* Return true if FNDECL shouldn't be folded right now.
11192 If a built-in function has an inline attribute always_inline
11193 wrapper, defer folding it after always_inline functions have
11194 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11195 might not be performed. */
11197 bool
11198 avoid_folding_inline_builtin (tree fndecl)
11200 return (DECL_DECLARED_INLINE_P (fndecl)
11201 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11202 && cfun
11203 && !cfun->always_inline_functions_inlined
11204 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11207 /* A wrapper function for builtin folding that prevents warnings for
11208 "statement without effect" and the like, caused by removing the
11209 call node earlier than the warning is generated. */
11211 tree
11212 fold_call_expr (location_t loc, tree exp, bool ignore)
11214 tree ret = NULL_TREE;
11215 tree fndecl = get_callee_fndecl (exp);
11216 if (fndecl
11217 && TREE_CODE (fndecl) == FUNCTION_DECL
11218 && DECL_BUILT_IN (fndecl)
11219 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11220 yet. Defer folding until we see all the arguments
11221 (after inlining). */
11222 && !CALL_EXPR_VA_ARG_PACK (exp))
11224 int nargs = call_expr_nargs (exp);
11226 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11227 instead last argument is __builtin_va_arg_pack (). Defer folding
11228 even in that case, until arguments are finalized. */
11229 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11231 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11232 if (fndecl2
11233 && TREE_CODE (fndecl2) == FUNCTION_DECL
11234 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11235 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11236 return NULL_TREE;
11239 if (avoid_folding_inline_builtin (fndecl))
11240 return NULL_TREE;
11242 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11243 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11244 CALL_EXPR_ARGP (exp), ignore);
11245 else
11247 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11249 tree *args = CALL_EXPR_ARGP (exp);
11250 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11252 if (!ret)
11253 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11254 if (ret)
11255 return ret;
11258 return NULL_TREE;
11261 /* Conveniently construct a function call expression. FNDECL names the
11262 function to be called and N arguments are passed in the array
11263 ARGARRAY. */
11265 tree
11266 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11268 tree fntype = TREE_TYPE (fndecl);
11269 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11271 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11274 /* Conveniently construct a function call expression. FNDECL names the
11275 function to be called and the arguments are passed in the vector
11276 VEC. */
11278 tree
11279 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11281 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11282 vec_safe_address (vec));
11286 /* Conveniently construct a function call expression. FNDECL names the
11287 function to be called, N is the number of arguments, and the "..."
11288 parameters are the argument expressions. */
11290 tree
11291 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11293 va_list ap;
11294 tree *argarray = XALLOCAVEC (tree, n);
11295 int i;
11297 va_start (ap, n);
11298 for (i = 0; i < n; i++)
11299 argarray[i] = va_arg (ap, tree);
11300 va_end (ap);
11301 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11304 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11305 varargs macros aren't supported by all bootstrap compilers. */
11307 tree
11308 build_call_expr (tree fndecl, int n, ...)
11310 va_list ap;
11311 tree *argarray = XALLOCAVEC (tree, n);
11312 int i;
11314 va_start (ap, n);
11315 for (i = 0; i < n; i++)
11316 argarray[i] = va_arg (ap, tree);
11317 va_end (ap);
11318 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11321 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11322 N arguments are passed in the array ARGARRAY. */
11324 tree
11325 fold_builtin_call_array (location_t loc, tree type,
11326 tree fn,
11327 int n,
11328 tree *argarray)
11330 tree ret = NULL_TREE;
11331 tree exp;
11333 if (TREE_CODE (fn) == ADDR_EXPR)
11335 tree fndecl = TREE_OPERAND (fn, 0);
11336 if (TREE_CODE (fndecl) == FUNCTION_DECL
11337 && DECL_BUILT_IN (fndecl))
11339 /* If last argument is __builtin_va_arg_pack (), arguments to this
11340 function are not finalized yet. Defer folding until they are. */
11341 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11343 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11344 if (fndecl2
11345 && TREE_CODE (fndecl2) == FUNCTION_DECL
11346 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11347 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11348 return build_call_array_loc (loc, type, fn, n, argarray);
11350 if (avoid_folding_inline_builtin (fndecl))
11351 return build_call_array_loc (loc, type, fn, n, argarray);
11352 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11354 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11355 if (ret)
11356 return ret;
11358 return build_call_array_loc (loc, type, fn, n, argarray);
11360 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11362 /* First try the transformations that don't require consing up
11363 an exp. */
11364 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11365 if (ret)
11366 return ret;
11369 /* If we got this far, we need to build an exp. */
11370 exp = build_call_array_loc (loc, type, fn, n, argarray);
11371 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11372 return ret ? ret : exp;
11376 return build_call_array_loc (loc, type, fn, n, argarray);
11379 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11380 along with N new arguments specified as the "..." parameters. SKIP
11381 is the number of arguments in EXP to be omitted. This function is used
11382 to do varargs-to-varargs transformations. */
11384 static tree
11385 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11387 va_list ap;
11388 tree t;
11390 va_start (ap, n);
11391 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11392 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11393 va_end (ap);
11395 return t;
11398 /* Validate a single argument ARG against a tree code CODE representing
11399 a type. */
11401 static bool
11402 validate_arg (const_tree arg, enum tree_code code)
11404 if (!arg)
11405 return false;
11406 else if (code == POINTER_TYPE)
11407 return POINTER_TYPE_P (TREE_TYPE (arg));
11408 else if (code == INTEGER_TYPE)
11409 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11410 return code == TREE_CODE (TREE_TYPE (arg));
11413 /* This function validates the types of a function call argument list
11414 against a specified list of tree_codes. If the last specifier is a 0,
11415 that represents an ellipses, otherwise the last specifier must be a
11416 VOID_TYPE.
11418 This is the GIMPLE version of validate_arglist. Eventually we want to
11419 completely convert builtins.c to work from GIMPLEs and the tree based
11420 validate_arglist will then be removed. */
11422 bool
11423 validate_gimple_arglist (const_gimple call, ...)
11425 enum tree_code code;
11426 bool res = 0;
11427 va_list ap;
11428 const_tree arg;
11429 size_t i;
11431 va_start (ap, call);
11432 i = 0;
11436 code = (enum tree_code) va_arg (ap, int);
11437 switch (code)
11439 case 0:
11440 /* This signifies an ellipses, any further arguments are all ok. */
11441 res = true;
11442 goto end;
11443 case VOID_TYPE:
11444 /* This signifies an endlink, if no arguments remain, return
11445 true, otherwise return false. */
11446 res = (i == gimple_call_num_args (call));
11447 goto end;
11448 default:
11449 /* If no parameters remain or the parameter's code does not
11450 match the specified code, return false. Otherwise continue
11451 checking any remaining arguments. */
11452 arg = gimple_call_arg (call, i++);
11453 if (!validate_arg (arg, code))
11454 goto end;
11455 break;
11458 while (1);
11460 /* We need gotos here since we can only have one VA_CLOSE in a
11461 function. */
11462 end: ;
11463 va_end (ap);
11465 return res;
11468 /* Default target-specific builtin expander that does nothing. */
11471 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11472 rtx target ATTRIBUTE_UNUSED,
11473 rtx subtarget ATTRIBUTE_UNUSED,
11474 enum machine_mode mode ATTRIBUTE_UNUSED,
11475 int ignore ATTRIBUTE_UNUSED)
11477 return NULL_RTX;
11480 /* Returns true is EXP represents data that would potentially reside
11481 in a readonly section. */
11483 static bool
11484 readonly_data_expr (tree exp)
11486 STRIP_NOPS (exp);
11488 if (TREE_CODE (exp) != ADDR_EXPR)
11489 return false;
11491 exp = get_base_address (TREE_OPERAND (exp, 0));
11492 if (!exp)
11493 return false;
11495 /* Make sure we call decl_readonly_section only for trees it
11496 can handle (since it returns true for everything it doesn't
11497 understand). */
11498 if (TREE_CODE (exp) == STRING_CST
11499 || TREE_CODE (exp) == CONSTRUCTOR
11500 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11501 return decl_readonly_section (exp, 0);
11502 else
11503 return false;
11506 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11507 to the call, and TYPE is its return type.
11509 Return NULL_TREE if no simplification was possible, otherwise return the
11510 simplified form of the call as a tree.
11512 The simplified form may be a constant or other expression which
11513 computes the same value, but in a more efficient manner (including
11514 calls to other builtin functions).
11516 The call may contain arguments which need to be evaluated, but
11517 which are not useful to determine the result of the call. In
11518 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11519 COMPOUND_EXPR will be an argument which must be evaluated.
11520 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11521 COMPOUND_EXPR in the chain will contain the tree for the simplified
11522 form of the builtin function call. */
11524 static tree
11525 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11527 if (!validate_arg (s1, POINTER_TYPE)
11528 || !validate_arg (s2, POINTER_TYPE))
11529 return NULL_TREE;
11530 else
11532 tree fn;
11533 const char *p1, *p2;
11535 p2 = c_getstr (s2);
11536 if (p2 == NULL)
11537 return NULL_TREE;
11539 p1 = c_getstr (s1);
11540 if (p1 != NULL)
11542 const char *r = strstr (p1, p2);
11543 tree tem;
11545 if (r == NULL)
11546 return build_int_cst (TREE_TYPE (s1), 0);
11548 /* Return an offset into the constant string argument. */
11549 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11550 return fold_convert_loc (loc, type, tem);
11553 /* The argument is const char *, and the result is char *, so we need
11554 a type conversion here to avoid a warning. */
11555 if (p2[0] == '\0')
11556 return fold_convert_loc (loc, type, s1);
11558 if (p2[1] != '\0')
11559 return NULL_TREE;
11561 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11562 if (!fn)
11563 return NULL_TREE;
11565 /* New argument list transforming strstr(s1, s2) to
11566 strchr(s1, s2[0]). */
11567 return build_call_expr_loc (loc, fn, 2, s1,
11568 build_int_cst (integer_type_node, p2[0]));
11572 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11573 the call, and TYPE is its return type.
11575 Return NULL_TREE if no simplification was possible, otherwise return the
11576 simplified form of the call as a tree.
11578 The simplified form may be a constant or other expression which
11579 computes the same value, but in a more efficient manner (including
11580 calls to other builtin functions).
11582 The call may contain arguments which need to be evaluated, but
11583 which are not useful to determine the result of the call. In
11584 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11585 COMPOUND_EXPR will be an argument which must be evaluated.
11586 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11587 COMPOUND_EXPR in the chain will contain the tree for the simplified
11588 form of the builtin function call. */
11590 static tree
11591 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11593 if (!validate_arg (s1, POINTER_TYPE)
11594 || !validate_arg (s2, INTEGER_TYPE))
11595 return NULL_TREE;
11596 else
11598 const char *p1;
11600 if (TREE_CODE (s2) != INTEGER_CST)
11601 return NULL_TREE;
11603 p1 = c_getstr (s1);
11604 if (p1 != NULL)
11606 char c;
11607 const char *r;
11608 tree tem;
11610 if (target_char_cast (s2, &c))
11611 return NULL_TREE;
11613 r = strchr (p1, c);
11615 if (r == NULL)
11616 return build_int_cst (TREE_TYPE (s1), 0);
11618 /* Return an offset into the constant string argument. */
11619 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11620 return fold_convert_loc (loc, type, tem);
11622 return NULL_TREE;
11626 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11627 the call, and TYPE is its return type.
11629 Return NULL_TREE if no simplification was possible, otherwise return the
11630 simplified form of the call as a tree.
11632 The simplified form may be a constant or other expression which
11633 computes the same value, but in a more efficient manner (including
11634 calls to other builtin functions).
11636 The call may contain arguments which need to be evaluated, but
11637 which are not useful to determine the result of the call. In
11638 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11639 COMPOUND_EXPR will be an argument which must be evaluated.
11640 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11641 COMPOUND_EXPR in the chain will contain the tree for the simplified
11642 form of the builtin function call. */
11644 static tree
11645 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11647 if (!validate_arg (s1, POINTER_TYPE)
11648 || !validate_arg (s2, INTEGER_TYPE))
11649 return NULL_TREE;
11650 else
11652 tree fn;
11653 const char *p1;
11655 if (TREE_CODE (s2) != INTEGER_CST)
11656 return NULL_TREE;
11658 p1 = c_getstr (s1);
11659 if (p1 != NULL)
11661 char c;
11662 const char *r;
11663 tree tem;
11665 if (target_char_cast (s2, &c))
11666 return NULL_TREE;
11668 r = strrchr (p1, c);
11670 if (r == NULL)
11671 return build_int_cst (TREE_TYPE (s1), 0);
11673 /* Return an offset into the constant string argument. */
11674 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11675 return fold_convert_loc (loc, type, tem);
11678 if (! integer_zerop (s2))
11679 return NULL_TREE;
11681 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11682 if (!fn)
11683 return NULL_TREE;
11685 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11686 return build_call_expr_loc (loc, fn, 2, s1, s2);
11690 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11691 to the call, and TYPE is its return type.
11693 Return NULL_TREE if no simplification was possible, otherwise return the
11694 simplified form of the call as a tree.
11696 The simplified form may be a constant or other expression which
11697 computes the same value, but in a more efficient manner (including
11698 calls to other builtin functions).
11700 The call may contain arguments which need to be evaluated, but
11701 which are not useful to determine the result of the call. In
11702 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11703 COMPOUND_EXPR will be an argument which must be evaluated.
11704 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11705 COMPOUND_EXPR in the chain will contain the tree for the simplified
11706 form of the builtin function call. */
11708 static tree
11709 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11711 if (!validate_arg (s1, POINTER_TYPE)
11712 || !validate_arg (s2, POINTER_TYPE))
11713 return NULL_TREE;
11714 else
11716 tree fn;
11717 const char *p1, *p2;
11719 p2 = c_getstr (s2);
11720 if (p2 == NULL)
11721 return NULL_TREE;
11723 p1 = c_getstr (s1);
11724 if (p1 != NULL)
11726 const char *r = strpbrk (p1, p2);
11727 tree tem;
11729 if (r == NULL)
11730 return build_int_cst (TREE_TYPE (s1), 0);
11732 /* Return an offset into the constant string argument. */
11733 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11734 return fold_convert_loc (loc, type, tem);
11737 if (p2[0] == '\0')
11738 /* strpbrk(x, "") == NULL.
11739 Evaluate and ignore s1 in case it had side-effects. */
11740 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11742 if (p2[1] != '\0')
11743 return NULL_TREE; /* Really call strpbrk. */
11745 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11746 if (!fn)
11747 return NULL_TREE;
11749 /* New argument list transforming strpbrk(s1, s2) to
11750 strchr(s1, s2[0]). */
11751 return build_call_expr_loc (loc, fn, 2, s1,
11752 build_int_cst (integer_type_node, p2[0]));
11756 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11757 to the call.
11759 Return NULL_TREE if no simplification was possible, otherwise return the
11760 simplified form of the call as a tree.
11762 The simplified form may be a constant or other expression which
11763 computes the same value, but in a more efficient manner (including
11764 calls to other builtin functions).
11766 The call may contain arguments which need to be evaluated, but
11767 which are not useful to determine the result of the call. In
11768 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11769 COMPOUND_EXPR will be an argument which must be evaluated.
11770 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11771 COMPOUND_EXPR in the chain will contain the tree for the simplified
11772 form of the builtin function call. */
11774 tree
11775 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11776 tree len)
11778 if (!validate_arg (dst, POINTER_TYPE)
11779 || !validate_arg (src, POINTER_TYPE))
11780 return NULL_TREE;
11781 else
11783 const char *p = c_getstr (src);
11785 /* If the string length is zero, return the dst parameter. */
11786 if (p && *p == '\0')
11787 return dst;
11789 if (optimize_insn_for_speed_p ())
11791 /* See if we can store by pieces into (dst + strlen(dst)). */
11792 tree newdst, call;
11793 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11794 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11796 if (!strlen_fn || !memcpy_fn)
11797 return NULL_TREE;
11799 /* If the length of the source string isn't computable don't
11800 split strcat into strlen and memcpy. */
11801 if (! len)
11802 len = c_strlen (src, 1);
11803 if (! len || TREE_SIDE_EFFECTS (len))
11804 return NULL_TREE;
11806 /* Stabilize the argument list. */
11807 dst = builtin_save_expr (dst);
11809 /* Create strlen (dst). */
11810 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11811 /* Create (dst p+ strlen (dst)). */
11813 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11814 newdst = builtin_save_expr (newdst);
11816 len = fold_convert_loc (loc, size_type_node, len);
11817 len = size_binop_loc (loc, PLUS_EXPR, len,
11818 build_int_cst (size_type_node, 1));
11820 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11821 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11823 return NULL_TREE;
11827 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11828 arguments to the call.
11830 Return NULL_TREE if no simplification was possible, otherwise return the
11831 simplified form of the call as a tree.
11833 The simplified form may be a constant or other expression which
11834 computes the same value, but in a more efficient manner (including
11835 calls to other builtin functions).
11837 The call may contain arguments which need to be evaluated, but
11838 which are not useful to determine the result of the call. In
11839 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11840 COMPOUND_EXPR will be an argument which must be evaluated.
11841 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11842 COMPOUND_EXPR in the chain will contain the tree for the simplified
11843 form of the builtin function call. */
11845 static tree
11846 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11848 if (!validate_arg (dst, POINTER_TYPE)
11849 || !validate_arg (src, POINTER_TYPE)
11850 || !validate_arg (len, INTEGER_TYPE))
11851 return NULL_TREE;
11852 else
11854 const char *p = c_getstr (src);
11856 /* If the requested length is zero, or the src parameter string
11857 length is zero, return the dst parameter. */
11858 if (integer_zerop (len) || (p && *p == '\0'))
11859 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11861 /* If the requested len is greater than or equal to the string
11862 length, call strcat. */
11863 if (TREE_CODE (len) == INTEGER_CST && p
11864 && compare_tree_int (len, strlen (p)) >= 0)
11866 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11868 /* If the replacement _DECL isn't initialized, don't do the
11869 transformation. */
11870 if (!fn)
11871 return NULL_TREE;
11873 return build_call_expr_loc (loc, fn, 2, dst, src);
11875 return NULL_TREE;
11879 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11880 to the call.
11882 Return NULL_TREE if no simplification was possible, otherwise return the
11883 simplified form of the call as a tree.
11885 The simplified form may be a constant or other expression which
11886 computes the same value, but in a more efficient manner (including
11887 calls to other builtin functions).
11889 The call may contain arguments which need to be evaluated, but
11890 which are not useful to determine the result of the call. In
11891 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11892 COMPOUND_EXPR will be an argument which must be evaluated.
11893 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11894 COMPOUND_EXPR in the chain will contain the tree for the simplified
11895 form of the builtin function call. */
11897 static tree
11898 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11900 if (!validate_arg (s1, POINTER_TYPE)
11901 || !validate_arg (s2, POINTER_TYPE))
11902 return NULL_TREE;
11903 else
11905 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11907 /* If both arguments are constants, evaluate at compile-time. */
11908 if (p1 && p2)
11910 const size_t r = strspn (p1, p2);
11911 return build_int_cst (size_type_node, r);
11914 /* If either argument is "", return NULL_TREE. */
11915 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11916 /* Evaluate and ignore both arguments in case either one has
11917 side-effects. */
11918 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11919 s1, s2);
11920 return NULL_TREE;
11924 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11925 to the call.
11927 Return NULL_TREE if no simplification was possible, otherwise return the
11928 simplified form of the call as a tree.
11930 The simplified form may be a constant or other expression which
11931 computes the same value, but in a more efficient manner (including
11932 calls to other builtin functions).
11934 The call may contain arguments which need to be evaluated, but
11935 which are not useful to determine the result of the call. In
11936 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11937 COMPOUND_EXPR will be an argument which must be evaluated.
11938 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11939 COMPOUND_EXPR in the chain will contain the tree for the simplified
11940 form of the builtin function call. */
11942 static tree
11943 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11945 if (!validate_arg (s1, POINTER_TYPE)
11946 || !validate_arg (s2, POINTER_TYPE))
11947 return NULL_TREE;
11948 else
11950 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11952 /* If both arguments are constants, evaluate at compile-time. */
11953 if (p1 && p2)
11955 const size_t r = strcspn (p1, p2);
11956 return build_int_cst (size_type_node, r);
11959 /* If the first argument is "", return NULL_TREE. */
11960 if (p1 && *p1 == '\0')
11962 /* Evaluate and ignore argument s2 in case it has
11963 side-effects. */
11964 return omit_one_operand_loc (loc, size_type_node,
11965 size_zero_node, s2);
11968 /* If the second argument is "", return __builtin_strlen(s1). */
11969 if (p2 && *p2 == '\0')
11971 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11973 /* If the replacement _DECL isn't initialized, don't do the
11974 transformation. */
11975 if (!fn)
11976 return NULL_TREE;
11978 return build_call_expr_loc (loc, fn, 1, s1);
11980 return NULL_TREE;
11984 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11985 to the call. IGNORE is true if the value returned
11986 by the builtin will be ignored. UNLOCKED is true is true if this
11987 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11988 the known length of the string. Return NULL_TREE if no simplification
11989 was possible. */
11991 tree
11992 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11993 bool ignore, bool unlocked, tree len)
11995 /* If we're using an unlocked function, assume the other unlocked
11996 functions exist explicitly. */
11997 tree const fn_fputc = (unlocked
11998 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11999 : builtin_decl_implicit (BUILT_IN_FPUTC));
12000 tree const fn_fwrite = (unlocked
12001 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12002 : builtin_decl_implicit (BUILT_IN_FWRITE));
12004 /* If the return value is used, don't do the transformation. */
12005 if (!ignore)
12006 return NULL_TREE;
12008 /* Verify the arguments in the original call. */
12009 if (!validate_arg (arg0, POINTER_TYPE)
12010 || !validate_arg (arg1, POINTER_TYPE))
12011 return NULL_TREE;
12013 if (! len)
12014 len = c_strlen (arg0, 0);
12016 /* Get the length of the string passed to fputs. If the length
12017 can't be determined, punt. */
12018 if (!len
12019 || TREE_CODE (len) != INTEGER_CST)
12020 return NULL_TREE;
12022 switch (compare_tree_int (len, 1))
12024 case -1: /* length is 0, delete the call entirely . */
12025 return omit_one_operand_loc (loc, integer_type_node,
12026 integer_zero_node, arg1);;
12028 case 0: /* length is 1, call fputc. */
12030 const char *p = c_getstr (arg0);
12032 if (p != NULL)
12034 if (fn_fputc)
12035 return build_call_expr_loc (loc, fn_fputc, 2,
12036 build_int_cst
12037 (integer_type_node, p[0]), arg1);
12038 else
12039 return NULL_TREE;
12042 /* FALLTHROUGH */
12043 case 1: /* length is greater than 1, call fwrite. */
12045 /* If optimizing for size keep fputs. */
12046 if (optimize_function_for_size_p (cfun))
12047 return NULL_TREE;
12048 /* New argument list transforming fputs(string, stream) to
12049 fwrite(string, 1, len, stream). */
12050 if (fn_fwrite)
12051 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12052 size_one_node, len, arg1);
12053 else
12054 return NULL_TREE;
12056 default:
12057 gcc_unreachable ();
12059 return NULL_TREE;
12062 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12063 produced. False otherwise. This is done so that we don't output the error
12064 or warning twice or three times. */
12066 bool
12067 fold_builtin_next_arg (tree exp, bool va_start_p)
12069 tree fntype = TREE_TYPE (current_function_decl);
12070 int nargs = call_expr_nargs (exp);
12071 tree arg;
12072 /* There is good chance the current input_location points inside the
12073 definition of the va_start macro (perhaps on the token for
12074 builtin) in a system header, so warnings will not be emitted.
12075 Use the location in real source code. */
12076 source_location current_location =
12077 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12078 NULL);
12080 if (!stdarg_p (fntype))
12082 error ("%<va_start%> used in function with fixed args");
12083 return true;
12086 if (va_start_p)
12088 if (va_start_p && (nargs != 2))
12090 error ("wrong number of arguments to function %<va_start%>");
12091 return true;
12093 arg = CALL_EXPR_ARG (exp, 1);
12095 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12096 when we checked the arguments and if needed issued a warning. */
12097 else
12099 if (nargs == 0)
12101 /* Evidently an out of date version of <stdarg.h>; can't validate
12102 va_start's second argument, but can still work as intended. */
12103 warning_at (current_location,
12104 OPT_Wvarargs,
12105 "%<__builtin_next_arg%> called without an argument");
12106 return true;
12108 else if (nargs > 1)
12110 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12111 return true;
12113 arg = CALL_EXPR_ARG (exp, 0);
12116 if (TREE_CODE (arg) == SSA_NAME)
12117 arg = SSA_NAME_VAR (arg);
12119 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12120 or __builtin_next_arg (0) the first time we see it, after checking
12121 the arguments and if needed issuing a warning. */
12122 if (!integer_zerop (arg))
12124 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12126 /* Strip off all nops for the sake of the comparison. This
12127 is not quite the same as STRIP_NOPS. It does more.
12128 We must also strip off INDIRECT_EXPR for C++ reference
12129 parameters. */
12130 while (CONVERT_EXPR_P (arg)
12131 || TREE_CODE (arg) == INDIRECT_REF)
12132 arg = TREE_OPERAND (arg, 0);
12133 if (arg != last_parm)
12135 /* FIXME: Sometimes with the tree optimizers we can get the
12136 not the last argument even though the user used the last
12137 argument. We just warn and set the arg to be the last
12138 argument so that we will get wrong-code because of
12139 it. */
12140 warning_at (current_location,
12141 OPT_Wvarargs,
12142 "second parameter of %<va_start%> not last named argument");
12145 /* Undefined by C99 7.15.1.4p4 (va_start):
12146 "If the parameter parmN is declared with the register storage
12147 class, with a function or array type, or with a type that is
12148 not compatible with the type that results after application of
12149 the default argument promotions, the behavior is undefined."
12151 else if (DECL_REGISTER (arg))
12153 warning_at (current_location,
12154 OPT_Wvarargs,
12155 "undefined behaviour when second parameter of "
12156 "%<va_start%> is declared with %<register%> storage");
12159 /* We want to verify the second parameter just once before the tree
12160 optimizers are run and then avoid keeping it in the tree,
12161 as otherwise we could warn even for correct code like:
12162 void foo (int i, ...)
12163 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12164 if (va_start_p)
12165 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12166 else
12167 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12169 return false;
12173 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12174 ORIG may be null if this is a 2-argument call. We don't attempt to
12175 simplify calls with more than 3 arguments.
12177 Return NULL_TREE if no simplification was possible, otherwise return the
12178 simplified form of the call as a tree. If IGNORED is true, it means that
12179 the caller does not use the returned value of the function. */
12181 static tree
12182 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12183 tree orig, int ignored)
12185 tree call, retval;
12186 const char *fmt_str = NULL;
12188 /* Verify the required arguments in the original call. We deal with two
12189 types of sprintf() calls: 'sprintf (str, fmt)' and
12190 'sprintf (dest, "%s", orig)'. */
12191 if (!validate_arg (dest, POINTER_TYPE)
12192 || !validate_arg (fmt, POINTER_TYPE))
12193 return NULL_TREE;
12194 if (orig && !validate_arg (orig, POINTER_TYPE))
12195 return NULL_TREE;
12197 /* Check whether the format is a literal string constant. */
12198 fmt_str = c_getstr (fmt);
12199 if (fmt_str == NULL)
12200 return NULL_TREE;
12202 call = NULL_TREE;
12203 retval = NULL_TREE;
12205 if (!init_target_chars ())
12206 return NULL_TREE;
12208 /* If the format doesn't contain % args or %%, use strcpy. */
12209 if (strchr (fmt_str, target_percent) == NULL)
12211 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12213 if (!fn)
12214 return NULL_TREE;
12216 /* Don't optimize sprintf (buf, "abc", ptr++). */
12217 if (orig)
12218 return NULL_TREE;
12220 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12221 'format' is known to contain no % formats. */
12222 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12223 if (!ignored)
12224 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12227 /* If the format is "%s", use strcpy if the result isn't used. */
12228 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12230 tree fn;
12231 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12233 if (!fn)
12234 return NULL_TREE;
12236 /* Don't crash on sprintf (str1, "%s"). */
12237 if (!orig)
12238 return NULL_TREE;
12240 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12241 if (!ignored)
12243 retval = c_strlen (orig, 1);
12244 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12245 return NULL_TREE;
12247 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12250 if (call && retval)
12252 retval = fold_convert_loc
12253 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12254 retval);
12255 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12257 else
12258 return call;
12261 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12262 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12263 attempt to simplify calls with more than 4 arguments.
12265 Return NULL_TREE if no simplification was possible, otherwise return the
12266 simplified form of the call as a tree. If IGNORED is true, it means that
12267 the caller does not use the returned value of the function. */
12269 static tree
12270 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12271 tree orig, int ignored)
12273 tree call, retval;
12274 const char *fmt_str = NULL;
12275 unsigned HOST_WIDE_INT destlen;
12277 /* Verify the required arguments in the original call. We deal with two
12278 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12279 'snprintf (dest, cst, "%s", orig)'. */
12280 if (!validate_arg (dest, POINTER_TYPE)
12281 || !validate_arg (destsize, INTEGER_TYPE)
12282 || !validate_arg (fmt, POINTER_TYPE))
12283 return NULL_TREE;
12284 if (orig && !validate_arg (orig, POINTER_TYPE))
12285 return NULL_TREE;
12287 if (!tree_fits_uhwi_p (destsize))
12288 return NULL_TREE;
12290 /* Check whether the format is a literal string constant. */
12291 fmt_str = c_getstr (fmt);
12292 if (fmt_str == NULL)
12293 return NULL_TREE;
12295 call = NULL_TREE;
12296 retval = NULL_TREE;
12298 if (!init_target_chars ())
12299 return NULL_TREE;
12301 destlen = tree_to_uhwi (destsize);
12303 /* If the format doesn't contain % args or %%, use strcpy. */
12304 if (strchr (fmt_str, target_percent) == NULL)
12306 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12307 size_t len = strlen (fmt_str);
12309 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12310 if (orig)
12311 return NULL_TREE;
12313 /* We could expand this as
12314 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12315 or to
12316 memcpy (str, fmt_with_nul_at_cstm1, cst);
12317 but in the former case that might increase code size
12318 and in the latter case grow .rodata section too much.
12319 So punt for now. */
12320 if (len >= destlen)
12321 return NULL_TREE;
12323 if (!fn)
12324 return NULL_TREE;
12326 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12327 'format' is known to contain no % formats and
12328 strlen (fmt) < cst. */
12329 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12331 if (!ignored)
12332 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12335 /* If the format is "%s", use strcpy if the result isn't used. */
12336 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12338 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12339 unsigned HOST_WIDE_INT origlen;
12341 /* Don't crash on snprintf (str1, cst, "%s"). */
12342 if (!orig)
12343 return NULL_TREE;
12345 retval = c_strlen (orig, 1);
12346 if (!retval || !tree_fits_uhwi_p (retval))
12347 return NULL_TREE;
12349 origlen = tree_to_uhwi (retval);
12350 /* We could expand this as
12351 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12352 or to
12353 memcpy (str1, str2_with_nul_at_cstm1, cst);
12354 but in the former case that might increase code size
12355 and in the latter case grow .rodata section too much.
12356 So punt for now. */
12357 if (origlen >= destlen)
12358 return NULL_TREE;
12360 /* Convert snprintf (str1, cst, "%s", str2) into
12361 strcpy (str1, str2) if strlen (str2) < cst. */
12362 if (!fn)
12363 return NULL_TREE;
12365 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12367 if (ignored)
12368 retval = NULL_TREE;
12371 if (call && retval)
12373 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12374 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12375 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12377 else
12378 return call;
12381 /* Expand a call EXP to __builtin_object_size. */
12384 expand_builtin_object_size (tree exp)
12386 tree ost;
12387 int object_size_type;
12388 tree fndecl = get_callee_fndecl (exp);
12390 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12392 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12393 exp, fndecl);
12394 expand_builtin_trap ();
12395 return const0_rtx;
12398 ost = CALL_EXPR_ARG (exp, 1);
12399 STRIP_NOPS (ost);
12401 if (TREE_CODE (ost) != INTEGER_CST
12402 || tree_int_cst_sgn (ost) < 0
12403 || compare_tree_int (ost, 3) > 0)
12405 error ("%Klast argument of %D is not integer constant between 0 and 3",
12406 exp, fndecl);
12407 expand_builtin_trap ();
12408 return const0_rtx;
12411 object_size_type = tree_to_shwi (ost);
12413 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12416 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12417 FCODE is the BUILT_IN_* to use.
12418 Return NULL_RTX if we failed; the caller should emit a normal call,
12419 otherwise try to get the result in TARGET, if convenient (and in
12420 mode MODE if that's convenient). */
12422 static rtx
12423 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12424 enum built_in_function fcode)
12426 tree dest, src, len, size;
12428 if (!validate_arglist (exp,
12429 POINTER_TYPE,
12430 fcode == BUILT_IN_MEMSET_CHK
12431 ? INTEGER_TYPE : POINTER_TYPE,
12432 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12433 return NULL_RTX;
12435 dest = CALL_EXPR_ARG (exp, 0);
12436 src = CALL_EXPR_ARG (exp, 1);
12437 len = CALL_EXPR_ARG (exp, 2);
12438 size = CALL_EXPR_ARG (exp, 3);
12440 if (! tree_fits_uhwi_p (size))
12441 return NULL_RTX;
12443 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12445 tree fn;
12447 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12449 warning_at (tree_nonartificial_location (exp),
12450 0, "%Kcall to %D will always overflow destination buffer",
12451 exp, get_callee_fndecl (exp));
12452 return NULL_RTX;
12455 fn = NULL_TREE;
12456 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12457 mem{cpy,pcpy,move,set} is available. */
12458 switch (fcode)
12460 case BUILT_IN_MEMCPY_CHK:
12461 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12462 break;
12463 case BUILT_IN_MEMPCPY_CHK:
12464 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12465 break;
12466 case BUILT_IN_MEMMOVE_CHK:
12467 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12468 break;
12469 case BUILT_IN_MEMSET_CHK:
12470 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12471 break;
12472 default:
12473 break;
12476 if (! fn)
12477 return NULL_RTX;
12479 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12480 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12481 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12482 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12484 else if (fcode == BUILT_IN_MEMSET_CHK)
12485 return NULL_RTX;
12486 else
12488 unsigned int dest_align = get_pointer_alignment (dest);
12490 /* If DEST is not a pointer type, call the normal function. */
12491 if (dest_align == 0)
12492 return NULL_RTX;
12494 /* If SRC and DEST are the same (and not volatile), do nothing. */
12495 if (operand_equal_p (src, dest, 0))
12497 tree expr;
12499 if (fcode != BUILT_IN_MEMPCPY_CHK)
12501 /* Evaluate and ignore LEN in case it has side-effects. */
12502 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12503 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12506 expr = fold_build_pointer_plus (dest, len);
12507 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12510 /* __memmove_chk special case. */
12511 if (fcode == BUILT_IN_MEMMOVE_CHK)
12513 unsigned int src_align = get_pointer_alignment (src);
12515 if (src_align == 0)
12516 return NULL_RTX;
12518 /* If src is categorized for a readonly section we can use
12519 normal __memcpy_chk. */
12520 if (readonly_data_expr (src))
12522 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12523 if (!fn)
12524 return NULL_RTX;
12525 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12526 dest, src, len, size);
12527 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12528 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12529 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12532 return NULL_RTX;
12536 /* Emit warning if a buffer overflow is detected at compile time. */
12538 static void
12539 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12541 int is_strlen = 0;
12542 tree len, size;
12543 location_t loc = tree_nonartificial_location (exp);
12545 switch (fcode)
12547 case BUILT_IN_STRCPY_CHK:
12548 case BUILT_IN_STPCPY_CHK:
12549 /* For __strcat_chk the warning will be emitted only if overflowing
12550 by at least strlen (dest) + 1 bytes. */
12551 case BUILT_IN_STRCAT_CHK:
12552 len = CALL_EXPR_ARG (exp, 1);
12553 size = CALL_EXPR_ARG (exp, 2);
12554 is_strlen = 1;
12555 break;
12556 case BUILT_IN_STRNCAT_CHK:
12557 case BUILT_IN_STRNCPY_CHK:
12558 case BUILT_IN_STPNCPY_CHK:
12559 len = CALL_EXPR_ARG (exp, 2);
12560 size = CALL_EXPR_ARG (exp, 3);
12561 break;
12562 case BUILT_IN_SNPRINTF_CHK:
12563 case BUILT_IN_VSNPRINTF_CHK:
12564 len = CALL_EXPR_ARG (exp, 1);
12565 size = CALL_EXPR_ARG (exp, 3);
12566 break;
12567 default:
12568 gcc_unreachable ();
12571 if (!len || !size)
12572 return;
12574 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12575 return;
12577 if (is_strlen)
12579 len = c_strlen (len, 1);
12580 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12581 return;
12583 else if (fcode == BUILT_IN_STRNCAT_CHK)
12585 tree src = CALL_EXPR_ARG (exp, 1);
12586 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12587 return;
12588 src = c_strlen (src, 1);
12589 if (! src || ! tree_fits_uhwi_p (src))
12591 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12592 exp, get_callee_fndecl (exp));
12593 return;
12595 else if (tree_int_cst_lt (src, size))
12596 return;
12598 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12599 return;
12601 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12602 exp, get_callee_fndecl (exp));
12605 /* Emit warning if a buffer overflow is detected at compile time
12606 in __sprintf_chk/__vsprintf_chk calls. */
12608 static void
12609 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12611 tree size, len, fmt;
12612 const char *fmt_str;
12613 int nargs = call_expr_nargs (exp);
12615 /* Verify the required arguments in the original call. */
12617 if (nargs < 4)
12618 return;
12619 size = CALL_EXPR_ARG (exp, 2);
12620 fmt = CALL_EXPR_ARG (exp, 3);
12622 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12623 return;
12625 /* Check whether the format is a literal string constant. */
12626 fmt_str = c_getstr (fmt);
12627 if (fmt_str == NULL)
12628 return;
12630 if (!init_target_chars ())
12631 return;
12633 /* If the format doesn't contain % args or %%, we know its size. */
12634 if (strchr (fmt_str, target_percent) == 0)
12635 len = build_int_cstu (size_type_node, strlen (fmt_str));
12636 /* If the format is "%s" and first ... argument is a string literal,
12637 we know it too. */
12638 else if (fcode == BUILT_IN_SPRINTF_CHK
12639 && strcmp (fmt_str, target_percent_s) == 0)
12641 tree arg;
12643 if (nargs < 5)
12644 return;
12645 arg = CALL_EXPR_ARG (exp, 4);
12646 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12647 return;
12649 len = c_strlen (arg, 1);
12650 if (!len || ! tree_fits_uhwi_p (len))
12651 return;
12653 else
12654 return;
12656 if (! tree_int_cst_lt (len, size))
12657 warning_at (tree_nonartificial_location (exp),
12658 0, "%Kcall to %D will always overflow destination buffer",
12659 exp, get_callee_fndecl (exp));
12662 /* Emit warning if a free is called with address of a variable. */
12664 static void
12665 maybe_emit_free_warning (tree exp)
12667 tree arg = CALL_EXPR_ARG (exp, 0);
12669 STRIP_NOPS (arg);
12670 if (TREE_CODE (arg) != ADDR_EXPR)
12671 return;
12673 arg = get_base_address (TREE_OPERAND (arg, 0));
12674 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12675 return;
12677 if (SSA_VAR_P (arg))
12678 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12679 "%Kattempt to free a non-heap object %qD", exp, arg);
12680 else
12681 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12682 "%Kattempt to free a non-heap object", exp);
12685 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12686 if possible. */
12688 tree
12689 fold_builtin_object_size (tree ptr, tree ost)
12691 unsigned HOST_WIDE_INT bytes;
12692 int object_size_type;
12694 if (!validate_arg (ptr, POINTER_TYPE)
12695 || !validate_arg (ost, INTEGER_TYPE))
12696 return NULL_TREE;
12698 STRIP_NOPS (ost);
12700 if (TREE_CODE (ost) != INTEGER_CST
12701 || tree_int_cst_sgn (ost) < 0
12702 || compare_tree_int (ost, 3) > 0)
12703 return NULL_TREE;
12705 object_size_type = tree_to_shwi (ost);
12707 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12708 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12709 and (size_t) 0 for types 2 and 3. */
12710 if (TREE_SIDE_EFFECTS (ptr))
12711 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12713 if (TREE_CODE (ptr) == ADDR_EXPR)
12715 bytes = compute_builtin_object_size (ptr, object_size_type);
12716 if (double_int_fits_to_tree_p (size_type_node,
12717 double_int::from_uhwi (bytes)))
12718 return build_int_cstu (size_type_node, bytes);
12720 else if (TREE_CODE (ptr) == SSA_NAME)
12722 /* If object size is not known yet, delay folding until
12723 later. Maybe subsequent passes will help determining
12724 it. */
12725 bytes = compute_builtin_object_size (ptr, object_size_type);
12726 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12727 && double_int_fits_to_tree_p (size_type_node,
12728 double_int::from_uhwi (bytes)))
12729 return build_int_cstu (size_type_node, bytes);
12732 return NULL_TREE;
12735 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12736 DEST, SRC, LEN, and SIZE are the arguments to the call.
12737 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12738 code of the builtin. If MAXLEN is not NULL, it is maximum length
12739 passed as third argument. */
12741 tree
12742 fold_builtin_memory_chk (location_t loc, tree fndecl,
12743 tree dest, tree src, tree len, tree size,
12744 tree maxlen, bool ignore,
12745 enum built_in_function fcode)
12747 tree fn;
12749 if (!validate_arg (dest, POINTER_TYPE)
12750 || !validate_arg (src,
12751 (fcode == BUILT_IN_MEMSET_CHK
12752 ? INTEGER_TYPE : POINTER_TYPE))
12753 || !validate_arg (len, INTEGER_TYPE)
12754 || !validate_arg (size, INTEGER_TYPE))
12755 return NULL_TREE;
12757 /* If SRC and DEST are the same (and not volatile), return DEST
12758 (resp. DEST+LEN for __mempcpy_chk). */
12759 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12761 if (fcode != BUILT_IN_MEMPCPY_CHK)
12762 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12763 dest, len);
12764 else
12766 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12767 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12771 if (! tree_fits_uhwi_p (size))
12772 return NULL_TREE;
12774 if (! integer_all_onesp (size))
12776 if (! tree_fits_uhwi_p (len))
12778 /* If LEN is not constant, try MAXLEN too.
12779 For MAXLEN only allow optimizing into non-_ocs function
12780 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12781 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12783 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12785 /* (void) __mempcpy_chk () can be optimized into
12786 (void) __memcpy_chk (). */
12787 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12788 if (!fn)
12789 return NULL_TREE;
12791 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12793 return NULL_TREE;
12796 else
12797 maxlen = len;
12799 if (tree_int_cst_lt (size, maxlen))
12800 return NULL_TREE;
12803 fn = NULL_TREE;
12804 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12805 mem{cpy,pcpy,move,set} is available. */
12806 switch (fcode)
12808 case BUILT_IN_MEMCPY_CHK:
12809 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12810 break;
12811 case BUILT_IN_MEMPCPY_CHK:
12812 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12813 break;
12814 case BUILT_IN_MEMMOVE_CHK:
12815 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12816 break;
12817 case BUILT_IN_MEMSET_CHK:
12818 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12819 break;
12820 default:
12821 break;
12824 if (!fn)
12825 return NULL_TREE;
12827 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12830 /* Fold a call to the __st[rp]cpy_chk builtin.
12831 DEST, SRC, and SIZE are the arguments to the call.
12832 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12833 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12834 strings passed as second argument. */
12836 tree
12837 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12838 tree src, tree size,
12839 tree maxlen, bool ignore,
12840 enum built_in_function fcode)
12842 tree len, fn;
12844 if (!validate_arg (dest, POINTER_TYPE)
12845 || !validate_arg (src, POINTER_TYPE)
12846 || !validate_arg (size, INTEGER_TYPE))
12847 return NULL_TREE;
12849 /* If SRC and DEST are the same (and not volatile), return DEST. */
12850 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12851 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12853 if (! tree_fits_uhwi_p (size))
12854 return NULL_TREE;
12856 if (! integer_all_onesp (size))
12858 len = c_strlen (src, 1);
12859 if (! len || ! tree_fits_uhwi_p (len))
12861 /* If LEN is not constant, try MAXLEN too.
12862 For MAXLEN only allow optimizing into non-_ocs function
12863 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12864 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12866 if (fcode == BUILT_IN_STPCPY_CHK)
12868 if (! ignore)
12869 return NULL_TREE;
12871 /* If return value of __stpcpy_chk is ignored,
12872 optimize into __strcpy_chk. */
12873 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12874 if (!fn)
12875 return NULL_TREE;
12877 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12880 if (! len || TREE_SIDE_EFFECTS (len))
12881 return NULL_TREE;
12883 /* If c_strlen returned something, but not a constant,
12884 transform __strcpy_chk into __memcpy_chk. */
12885 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12886 if (!fn)
12887 return NULL_TREE;
12889 len = fold_convert_loc (loc, size_type_node, len);
12890 len = size_binop_loc (loc, PLUS_EXPR, len,
12891 build_int_cst (size_type_node, 1));
12892 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12893 build_call_expr_loc (loc, fn, 4,
12894 dest, src, len, size));
12897 else
12898 maxlen = len;
12900 if (! tree_int_cst_lt (maxlen, size))
12901 return NULL_TREE;
12904 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12905 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12906 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12907 if (!fn)
12908 return NULL_TREE;
12910 return build_call_expr_loc (loc, fn, 2, dest, src);
12913 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12914 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12915 length passed as third argument. IGNORE is true if return value can be
12916 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12918 tree
12919 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12920 tree len, tree size, tree maxlen, bool ignore,
12921 enum built_in_function fcode)
12923 tree fn;
12925 if (!validate_arg (dest, POINTER_TYPE)
12926 || !validate_arg (src, POINTER_TYPE)
12927 || !validate_arg (len, INTEGER_TYPE)
12928 || !validate_arg (size, INTEGER_TYPE))
12929 return NULL_TREE;
12931 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12933 /* If return value of __stpncpy_chk is ignored,
12934 optimize into __strncpy_chk. */
12935 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12936 if (fn)
12937 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12940 if (! tree_fits_uhwi_p (size))
12941 return NULL_TREE;
12943 if (! integer_all_onesp (size))
12945 if (! tree_fits_uhwi_p (len))
12947 /* If LEN is not constant, try MAXLEN too.
12948 For MAXLEN only allow optimizing into non-_ocs function
12949 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12950 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12951 return NULL_TREE;
12953 else
12954 maxlen = len;
12956 if (tree_int_cst_lt (size, maxlen))
12957 return NULL_TREE;
12960 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12961 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12962 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12963 if (!fn)
12964 return NULL_TREE;
12966 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12969 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12970 are the arguments to the call. */
12972 static tree
12973 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12974 tree src, tree size)
12976 tree fn;
12977 const char *p;
12979 if (!validate_arg (dest, POINTER_TYPE)
12980 || !validate_arg (src, POINTER_TYPE)
12981 || !validate_arg (size, INTEGER_TYPE))
12982 return NULL_TREE;
12984 p = c_getstr (src);
12985 /* If the SRC parameter is "", return DEST. */
12986 if (p && *p == '\0')
12987 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12989 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12990 return NULL_TREE;
12992 /* If __builtin_strcat_chk is used, assume strcat is available. */
12993 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12994 if (!fn)
12995 return NULL_TREE;
12997 return build_call_expr_loc (loc, fn, 2, dest, src);
13000 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13001 LEN, and SIZE. */
13003 static tree
13004 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13005 tree dest, tree src, tree len, tree size)
13007 tree fn;
13008 const char *p;
13010 if (!validate_arg (dest, POINTER_TYPE)
13011 || !validate_arg (src, POINTER_TYPE)
13012 || !validate_arg (size, INTEGER_TYPE)
13013 || !validate_arg (size, INTEGER_TYPE))
13014 return NULL_TREE;
13016 p = c_getstr (src);
13017 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13018 if (p && *p == '\0')
13019 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13020 else if (integer_zerop (len))
13021 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13023 if (! tree_fits_uhwi_p (size))
13024 return NULL_TREE;
13026 if (! integer_all_onesp (size))
13028 tree src_len = c_strlen (src, 1);
13029 if (src_len
13030 && tree_fits_uhwi_p (src_len)
13031 && tree_fits_uhwi_p (len)
13032 && ! tree_int_cst_lt (len, src_len))
13034 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13035 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13036 if (!fn)
13037 return NULL_TREE;
13039 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13041 return NULL_TREE;
13044 /* If __builtin_strncat_chk is used, assume strncat is available. */
13045 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13046 if (!fn)
13047 return NULL_TREE;
13049 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13052 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13053 Return NULL_TREE if a normal call should be emitted rather than
13054 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13055 or BUILT_IN_VSPRINTF_CHK. */
13057 static tree
13058 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13059 enum built_in_function fcode)
13061 tree dest, size, len, fn, fmt, flag;
13062 const char *fmt_str;
13064 /* Verify the required arguments in the original call. */
13065 if (nargs < 4)
13066 return NULL_TREE;
13067 dest = args[0];
13068 if (!validate_arg (dest, POINTER_TYPE))
13069 return NULL_TREE;
13070 flag = args[1];
13071 if (!validate_arg (flag, INTEGER_TYPE))
13072 return NULL_TREE;
13073 size = args[2];
13074 if (!validate_arg (size, INTEGER_TYPE))
13075 return NULL_TREE;
13076 fmt = args[3];
13077 if (!validate_arg (fmt, POINTER_TYPE))
13078 return NULL_TREE;
13080 if (! tree_fits_uhwi_p (size))
13081 return NULL_TREE;
13083 len = NULL_TREE;
13085 if (!init_target_chars ())
13086 return NULL_TREE;
13088 /* Check whether the format is a literal string constant. */
13089 fmt_str = c_getstr (fmt);
13090 if (fmt_str != NULL)
13092 /* If the format doesn't contain % args or %%, we know the size. */
13093 if (strchr (fmt_str, target_percent) == 0)
13095 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13096 len = build_int_cstu (size_type_node, strlen (fmt_str));
13098 /* If the format is "%s" and first ... argument is a string literal,
13099 we know the size too. */
13100 else if (fcode == BUILT_IN_SPRINTF_CHK
13101 && strcmp (fmt_str, target_percent_s) == 0)
13103 tree arg;
13105 if (nargs == 5)
13107 arg = args[4];
13108 if (validate_arg (arg, POINTER_TYPE))
13110 len = c_strlen (arg, 1);
13111 if (! len || ! tree_fits_uhwi_p (len))
13112 len = NULL_TREE;
13118 if (! integer_all_onesp (size))
13120 if (! len || ! tree_int_cst_lt (len, size))
13121 return NULL_TREE;
13124 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13125 or if format doesn't contain % chars or is "%s". */
13126 if (! integer_zerop (flag))
13128 if (fmt_str == NULL)
13129 return NULL_TREE;
13130 if (strchr (fmt_str, target_percent) != NULL
13131 && strcmp (fmt_str, target_percent_s))
13132 return NULL_TREE;
13135 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13136 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13137 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13138 if (!fn)
13139 return NULL_TREE;
13141 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13144 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13145 a normal call should be emitted rather than expanding the function
13146 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13148 static tree
13149 fold_builtin_sprintf_chk (location_t loc, tree exp,
13150 enum built_in_function fcode)
13152 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13153 CALL_EXPR_ARGP (exp), fcode);
13156 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13157 NULL_TREE if a normal call should be emitted rather than expanding
13158 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13159 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13160 passed as second argument. */
13162 static tree
13163 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13164 tree maxlen, enum built_in_function fcode)
13166 tree dest, size, len, fn, fmt, flag;
13167 const char *fmt_str;
13169 /* Verify the required arguments in the original call. */
13170 if (nargs < 5)
13171 return NULL_TREE;
13172 dest = args[0];
13173 if (!validate_arg (dest, POINTER_TYPE))
13174 return NULL_TREE;
13175 len = args[1];
13176 if (!validate_arg (len, INTEGER_TYPE))
13177 return NULL_TREE;
13178 flag = args[2];
13179 if (!validate_arg (flag, INTEGER_TYPE))
13180 return NULL_TREE;
13181 size = args[3];
13182 if (!validate_arg (size, INTEGER_TYPE))
13183 return NULL_TREE;
13184 fmt = args[4];
13185 if (!validate_arg (fmt, POINTER_TYPE))
13186 return NULL_TREE;
13188 if (! tree_fits_uhwi_p (size))
13189 return NULL_TREE;
13191 if (! integer_all_onesp (size))
13193 if (! tree_fits_uhwi_p (len))
13195 /* If LEN is not constant, try MAXLEN too.
13196 For MAXLEN only allow optimizing into non-_ocs function
13197 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13198 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13199 return NULL_TREE;
13201 else
13202 maxlen = len;
13204 if (tree_int_cst_lt (size, maxlen))
13205 return NULL_TREE;
13208 if (!init_target_chars ())
13209 return NULL_TREE;
13211 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13212 or if format doesn't contain % chars or is "%s". */
13213 if (! integer_zerop (flag))
13215 fmt_str = c_getstr (fmt);
13216 if (fmt_str == NULL)
13217 return NULL_TREE;
13218 if (strchr (fmt_str, target_percent) != NULL
13219 && strcmp (fmt_str, target_percent_s))
13220 return NULL_TREE;
13223 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13224 available. */
13225 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13226 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13227 if (!fn)
13228 return NULL_TREE;
13230 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13233 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13234 a normal call should be emitted rather than expanding the function
13235 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13236 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13237 passed as second argument. */
13239 static tree
13240 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13241 enum built_in_function fcode)
13243 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13244 CALL_EXPR_ARGP (exp), maxlen, fcode);
13247 /* Builtins with folding operations that operate on "..." arguments
13248 need special handling; we need to store the arguments in a convenient
13249 data structure before attempting any folding. Fortunately there are
13250 only a few builtins that fall into this category. FNDECL is the
13251 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13252 result of the function call is ignored. */
13254 static tree
13255 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13256 bool ignore ATTRIBUTE_UNUSED)
13258 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13259 tree ret = NULL_TREE;
13261 switch (fcode)
13263 case BUILT_IN_SPRINTF_CHK:
13264 case BUILT_IN_VSPRINTF_CHK:
13265 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13266 break;
13268 case BUILT_IN_SNPRINTF_CHK:
13269 case BUILT_IN_VSNPRINTF_CHK:
13270 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13271 break;
13273 case BUILT_IN_FPCLASSIFY:
13274 ret = fold_builtin_fpclassify (loc, exp);
13275 break;
13277 default:
13278 break;
13280 if (ret)
13282 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13283 SET_EXPR_LOCATION (ret, loc);
13284 TREE_NO_WARNING (ret) = 1;
13285 return ret;
13287 return NULL_TREE;
13290 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13291 FMT and ARG are the arguments to the call; we don't fold cases with
13292 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13294 Return NULL_TREE if no simplification was possible, otherwise return the
13295 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13296 code of the function to be simplified. */
13298 static tree
13299 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13300 tree arg, bool ignore,
13301 enum built_in_function fcode)
13303 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13304 const char *fmt_str = NULL;
13306 /* If the return value is used, don't do the transformation. */
13307 if (! ignore)
13308 return NULL_TREE;
13310 /* Verify the required arguments in the original call. */
13311 if (!validate_arg (fmt, POINTER_TYPE))
13312 return NULL_TREE;
13314 /* Check whether the format is a literal string constant. */
13315 fmt_str = c_getstr (fmt);
13316 if (fmt_str == NULL)
13317 return NULL_TREE;
13319 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13321 /* If we're using an unlocked function, assume the other
13322 unlocked functions exist explicitly. */
13323 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13324 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13326 else
13328 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13329 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13332 if (!init_target_chars ())
13333 return NULL_TREE;
13335 if (strcmp (fmt_str, target_percent_s) == 0
13336 || strchr (fmt_str, target_percent) == NULL)
13338 const char *str;
13340 if (strcmp (fmt_str, target_percent_s) == 0)
13342 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13343 return NULL_TREE;
13345 if (!arg || !validate_arg (arg, POINTER_TYPE))
13346 return NULL_TREE;
13348 str = c_getstr (arg);
13349 if (str == NULL)
13350 return NULL_TREE;
13352 else
13354 /* The format specifier doesn't contain any '%' characters. */
13355 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13356 && arg)
13357 return NULL_TREE;
13358 str = fmt_str;
13361 /* If the string was "", printf does nothing. */
13362 if (str[0] == '\0')
13363 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13365 /* If the string has length of 1, call putchar. */
13366 if (str[1] == '\0')
13368 /* Given printf("c"), (where c is any one character,)
13369 convert "c"[0] to an int and pass that to the replacement
13370 function. */
13371 newarg = build_int_cst (integer_type_node, str[0]);
13372 if (fn_putchar)
13373 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13375 else
13377 /* If the string was "string\n", call puts("string"). */
13378 size_t len = strlen (str);
13379 if ((unsigned char)str[len - 1] == target_newline
13380 && (size_t) (int) len == len
13381 && (int) len > 0)
13383 char *newstr;
13384 tree offset_node, string_cst;
13386 /* Create a NUL-terminated string that's one char shorter
13387 than the original, stripping off the trailing '\n'. */
13388 newarg = build_string_literal (len, str);
13389 string_cst = string_constant (newarg, &offset_node);
13390 gcc_checking_assert (string_cst
13391 && (TREE_STRING_LENGTH (string_cst)
13392 == (int) len)
13393 && integer_zerop (offset_node)
13394 && (unsigned char)
13395 TREE_STRING_POINTER (string_cst)[len - 1]
13396 == target_newline);
13397 /* build_string_literal creates a new STRING_CST,
13398 modify it in place to avoid double copying. */
13399 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13400 newstr[len - 1] = '\0';
13401 if (fn_puts)
13402 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13404 else
13405 /* We'd like to arrange to call fputs(string,stdout) here,
13406 but we need stdout and don't have a way to get it yet. */
13407 return NULL_TREE;
13411 /* The other optimizations can be done only on the non-va_list variants. */
13412 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13413 return NULL_TREE;
13415 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13416 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13418 if (!arg || !validate_arg (arg, POINTER_TYPE))
13419 return NULL_TREE;
13420 if (fn_puts)
13421 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13424 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13425 else if (strcmp (fmt_str, target_percent_c) == 0)
13427 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13428 return NULL_TREE;
13429 if (fn_putchar)
13430 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13433 if (!call)
13434 return NULL_TREE;
13436 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13439 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13440 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13441 more than 3 arguments, and ARG may be null in the 2-argument case.
13443 Return NULL_TREE if no simplification was possible, otherwise return the
13444 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13445 code of the function to be simplified. */
13447 static tree
13448 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13449 tree fmt, tree arg, bool ignore,
13450 enum built_in_function fcode)
13452 tree fn_fputc, fn_fputs, call = NULL_TREE;
13453 const char *fmt_str = NULL;
13455 /* If the return value is used, don't do the transformation. */
13456 if (! ignore)
13457 return NULL_TREE;
13459 /* Verify the required arguments in the original call. */
13460 if (!validate_arg (fp, POINTER_TYPE))
13461 return NULL_TREE;
13462 if (!validate_arg (fmt, POINTER_TYPE))
13463 return NULL_TREE;
13465 /* Check whether the format is a literal string constant. */
13466 fmt_str = c_getstr (fmt);
13467 if (fmt_str == NULL)
13468 return NULL_TREE;
13470 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13472 /* If we're using an unlocked function, assume the other
13473 unlocked functions exist explicitly. */
13474 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13475 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13477 else
13479 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13480 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13483 if (!init_target_chars ())
13484 return NULL_TREE;
13486 /* If the format doesn't contain % args or %%, use strcpy. */
13487 if (strchr (fmt_str, target_percent) == NULL)
13489 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13490 && arg)
13491 return NULL_TREE;
13493 /* If the format specifier was "", fprintf does nothing. */
13494 if (fmt_str[0] == '\0')
13496 /* If FP has side-effects, just wait until gimplification is
13497 done. */
13498 if (TREE_SIDE_EFFECTS (fp))
13499 return NULL_TREE;
13501 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13504 /* When "string" doesn't contain %, replace all cases of
13505 fprintf (fp, string) with fputs (string, fp). The fputs
13506 builtin will take care of special cases like length == 1. */
13507 if (fn_fputs)
13508 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13511 /* The other optimizations can be done only on the non-va_list variants. */
13512 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13513 return NULL_TREE;
13515 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13516 else if (strcmp (fmt_str, target_percent_s) == 0)
13518 if (!arg || !validate_arg (arg, POINTER_TYPE))
13519 return NULL_TREE;
13520 if (fn_fputs)
13521 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13524 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13525 else if (strcmp (fmt_str, target_percent_c) == 0)
13527 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13528 return NULL_TREE;
13529 if (fn_fputc)
13530 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13533 if (!call)
13534 return NULL_TREE;
13535 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13538 /* Initialize format string characters in the target charset. */
13540 static bool
13541 init_target_chars (void)
13543 static bool init;
13544 if (!init)
13546 target_newline = lang_hooks.to_target_charset ('\n');
13547 target_percent = lang_hooks.to_target_charset ('%');
13548 target_c = lang_hooks.to_target_charset ('c');
13549 target_s = lang_hooks.to_target_charset ('s');
13550 if (target_newline == 0 || target_percent == 0 || target_c == 0
13551 || target_s == 0)
13552 return false;
13554 target_percent_c[0] = target_percent;
13555 target_percent_c[1] = target_c;
13556 target_percent_c[2] = '\0';
13558 target_percent_s[0] = target_percent;
13559 target_percent_s[1] = target_s;
13560 target_percent_s[2] = '\0';
13562 target_percent_s_newline[0] = target_percent;
13563 target_percent_s_newline[1] = target_s;
13564 target_percent_s_newline[2] = target_newline;
13565 target_percent_s_newline[3] = '\0';
13567 init = true;
13569 return true;
13572 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13573 and no overflow/underflow occurred. INEXACT is true if M was not
13574 exactly calculated. TYPE is the tree type for the result. This
13575 function assumes that you cleared the MPFR flags and then
13576 calculated M to see if anything subsequently set a flag prior to
13577 entering this function. Return NULL_TREE if any checks fail. */
13579 static tree
13580 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13582 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13583 overflow/underflow occurred. If -frounding-math, proceed iff the
13584 result of calling FUNC was exact. */
13585 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13586 && (!flag_rounding_math || !inexact))
13588 REAL_VALUE_TYPE rr;
13590 real_from_mpfr (&rr, m, type, GMP_RNDN);
13591 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13592 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13593 but the mpft_t is not, then we underflowed in the
13594 conversion. */
13595 if (real_isfinite (&rr)
13596 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13598 REAL_VALUE_TYPE rmode;
13600 real_convert (&rmode, TYPE_MODE (type), &rr);
13601 /* Proceed iff the specified mode can hold the value. */
13602 if (real_identical (&rmode, &rr))
13603 return build_real (type, rmode);
13606 return NULL_TREE;
13609 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13610 number and no overflow/underflow occurred. INEXACT is true if M
13611 was not exactly calculated. TYPE is the tree type for the result.
13612 This function assumes that you cleared the MPFR flags and then
13613 calculated M to see if anything subsequently set a flag prior to
13614 entering this function. Return NULL_TREE if any checks fail, if
13615 FORCE_CONVERT is true, then bypass the checks. */
13617 static tree
13618 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13620 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13621 overflow/underflow occurred. If -frounding-math, proceed iff the
13622 result of calling FUNC was exact. */
13623 if (force_convert
13624 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13625 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13626 && (!flag_rounding_math || !inexact)))
13628 REAL_VALUE_TYPE re, im;
13630 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13631 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13632 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13633 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13634 but the mpft_t is not, then we underflowed in the
13635 conversion. */
13636 if (force_convert
13637 || (real_isfinite (&re) && real_isfinite (&im)
13638 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13639 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13641 REAL_VALUE_TYPE re_mode, im_mode;
13643 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13644 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13645 /* Proceed iff the specified mode can hold the value. */
13646 if (force_convert
13647 || (real_identical (&re_mode, &re)
13648 && real_identical (&im_mode, &im)))
13649 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13650 build_real (TREE_TYPE (type), im_mode));
13653 return NULL_TREE;
13656 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13657 FUNC on it and return the resulting value as a tree with type TYPE.
13658 If MIN and/or MAX are not NULL, then the supplied ARG must be
13659 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13660 acceptable values, otherwise they are not. The mpfr precision is
13661 set to the precision of TYPE. We assume that function FUNC returns
13662 zero if the result could be calculated exactly within the requested
13663 precision. */
13665 static tree
13666 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13667 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13668 bool inclusive)
13670 tree result = NULL_TREE;
13672 STRIP_NOPS (arg);
13674 /* To proceed, MPFR must exactly represent the target floating point
13675 format, which only happens when the target base equals two. */
13676 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13677 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13679 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13681 if (real_isfinite (ra)
13682 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13683 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13685 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13686 const int prec = fmt->p;
13687 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13688 int inexact;
13689 mpfr_t m;
13691 mpfr_init2 (m, prec);
13692 mpfr_from_real (m, ra, GMP_RNDN);
13693 mpfr_clear_flags ();
13694 inexact = func (m, m, rnd);
13695 result = do_mpfr_ckconv (m, type, inexact);
13696 mpfr_clear (m);
13700 return result;
13703 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13704 FUNC on it and return the resulting value as a tree with type TYPE.
13705 The mpfr precision is set to the precision of TYPE. We assume that
13706 function FUNC returns zero if the result could be calculated
13707 exactly within the requested precision. */
13709 static tree
13710 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13711 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13713 tree result = NULL_TREE;
13715 STRIP_NOPS (arg1);
13716 STRIP_NOPS (arg2);
13718 /* To proceed, MPFR must exactly represent the target floating point
13719 format, which only happens when the target base equals two. */
13720 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13721 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13722 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13724 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13725 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13727 if (real_isfinite (ra1) && real_isfinite (ra2))
13729 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13730 const int prec = fmt->p;
13731 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13732 int inexact;
13733 mpfr_t m1, m2;
13735 mpfr_inits2 (prec, m1, m2, NULL);
13736 mpfr_from_real (m1, ra1, GMP_RNDN);
13737 mpfr_from_real (m2, ra2, GMP_RNDN);
13738 mpfr_clear_flags ();
13739 inexact = func (m1, m1, m2, rnd);
13740 result = do_mpfr_ckconv (m1, type, inexact);
13741 mpfr_clears (m1, m2, NULL);
13745 return result;
13748 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13749 FUNC on it and return the resulting value as a tree with type TYPE.
13750 The mpfr precision is set to the precision of TYPE. We assume that
13751 function FUNC returns zero if the result could be calculated
13752 exactly within the requested precision. */
13754 static tree
13755 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13756 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13758 tree result = NULL_TREE;
13760 STRIP_NOPS (arg1);
13761 STRIP_NOPS (arg2);
13762 STRIP_NOPS (arg3);
13764 /* To proceed, MPFR must exactly represent the target floating point
13765 format, which only happens when the target base equals two. */
13766 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13767 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13768 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13769 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13771 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13772 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13773 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13775 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13777 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13778 const int prec = fmt->p;
13779 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13780 int inexact;
13781 mpfr_t m1, m2, m3;
13783 mpfr_inits2 (prec, m1, m2, m3, NULL);
13784 mpfr_from_real (m1, ra1, GMP_RNDN);
13785 mpfr_from_real (m2, ra2, GMP_RNDN);
13786 mpfr_from_real (m3, ra3, GMP_RNDN);
13787 mpfr_clear_flags ();
13788 inexact = func (m1, m1, m2, m3, rnd);
13789 result = do_mpfr_ckconv (m1, type, inexact);
13790 mpfr_clears (m1, m2, m3, NULL);
13794 return result;
13797 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13798 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13799 If ARG_SINP and ARG_COSP are NULL then the result is returned
13800 as a complex value.
13801 The type is taken from the type of ARG and is used for setting the
13802 precision of the calculation and results. */
13804 static tree
13805 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13807 tree const type = TREE_TYPE (arg);
13808 tree result = NULL_TREE;
13810 STRIP_NOPS (arg);
13812 /* To proceed, MPFR must exactly represent the target floating point
13813 format, which only happens when the target base equals two. */
13814 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13815 && TREE_CODE (arg) == REAL_CST
13816 && !TREE_OVERFLOW (arg))
13818 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13820 if (real_isfinite (ra))
13822 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13823 const int prec = fmt->p;
13824 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13825 tree result_s, result_c;
13826 int inexact;
13827 mpfr_t m, ms, mc;
13829 mpfr_inits2 (prec, m, ms, mc, NULL);
13830 mpfr_from_real (m, ra, GMP_RNDN);
13831 mpfr_clear_flags ();
13832 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13833 result_s = do_mpfr_ckconv (ms, type, inexact);
13834 result_c = do_mpfr_ckconv (mc, type, inexact);
13835 mpfr_clears (m, ms, mc, NULL);
13836 if (result_s && result_c)
13838 /* If we are to return in a complex value do so. */
13839 if (!arg_sinp && !arg_cosp)
13840 return build_complex (build_complex_type (type),
13841 result_c, result_s);
13843 /* Dereference the sin/cos pointer arguments. */
13844 arg_sinp = build_fold_indirect_ref (arg_sinp);
13845 arg_cosp = build_fold_indirect_ref (arg_cosp);
13846 /* Proceed if valid pointer type were passed in. */
13847 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13848 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13850 /* Set the values. */
13851 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13852 result_s);
13853 TREE_SIDE_EFFECTS (result_s) = 1;
13854 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13855 result_c);
13856 TREE_SIDE_EFFECTS (result_c) = 1;
13857 /* Combine the assignments into a compound expr. */
13858 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13859 result_s, result_c));
13864 return result;
13867 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13868 two-argument mpfr order N Bessel function FUNC on them and return
13869 the resulting value as a tree with type TYPE. The mpfr precision
13870 is set to the precision of TYPE. We assume that function FUNC
13871 returns zero if the result could be calculated exactly within the
13872 requested precision. */
13873 static tree
13874 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13875 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13876 const REAL_VALUE_TYPE *min, bool inclusive)
13878 tree result = NULL_TREE;
13880 STRIP_NOPS (arg1);
13881 STRIP_NOPS (arg2);
13883 /* To proceed, MPFR must exactly represent the target floating point
13884 format, which only happens when the target base equals two. */
13885 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13886 && tree_fits_shwi_p (arg1)
13887 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13889 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13890 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13892 if (n == (long)n
13893 && real_isfinite (ra)
13894 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13896 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13897 const int prec = fmt->p;
13898 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13899 int inexact;
13900 mpfr_t m;
13902 mpfr_init2 (m, prec);
13903 mpfr_from_real (m, ra, GMP_RNDN);
13904 mpfr_clear_flags ();
13905 inexact = func (m, n, m, rnd);
13906 result = do_mpfr_ckconv (m, type, inexact);
13907 mpfr_clear (m);
13911 return result;
13914 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13915 the pointer *(ARG_QUO) and return the result. The type is taken
13916 from the type of ARG0 and is used for setting the precision of the
13917 calculation and results. */
13919 static tree
13920 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13922 tree const type = TREE_TYPE (arg0);
13923 tree result = NULL_TREE;
13925 STRIP_NOPS (arg0);
13926 STRIP_NOPS (arg1);
13928 /* To proceed, MPFR must exactly represent the target floating point
13929 format, which only happens when the target base equals two. */
13930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13931 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13932 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13934 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13935 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13937 if (real_isfinite (ra0) && real_isfinite (ra1))
13939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13940 const int prec = fmt->p;
13941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13942 tree result_rem;
13943 long integer_quo;
13944 mpfr_t m0, m1;
13946 mpfr_inits2 (prec, m0, m1, NULL);
13947 mpfr_from_real (m0, ra0, GMP_RNDN);
13948 mpfr_from_real (m1, ra1, GMP_RNDN);
13949 mpfr_clear_flags ();
13950 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13951 /* Remquo is independent of the rounding mode, so pass
13952 inexact=0 to do_mpfr_ckconv(). */
13953 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13954 mpfr_clears (m0, m1, NULL);
13955 if (result_rem)
13957 /* MPFR calculates quo in the host's long so it may
13958 return more bits in quo than the target int can hold
13959 if sizeof(host long) > sizeof(target int). This can
13960 happen even for native compilers in LP64 mode. In
13961 these cases, modulo the quo value with the largest
13962 number that the target int can hold while leaving one
13963 bit for the sign. */
13964 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13965 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13967 /* Dereference the quo pointer argument. */
13968 arg_quo = build_fold_indirect_ref (arg_quo);
13969 /* Proceed iff a valid pointer type was passed in. */
13970 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13972 /* Set the value. */
13973 tree result_quo
13974 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13975 build_int_cst (TREE_TYPE (arg_quo),
13976 integer_quo));
13977 TREE_SIDE_EFFECTS (result_quo) = 1;
13978 /* Combine the quo assignment with the rem. */
13979 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13980 result_quo, result_rem));
13985 return result;
13988 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13989 resulting value as a tree with type TYPE. The mpfr precision is
13990 set to the precision of TYPE. We assume that this mpfr function
13991 returns zero if the result could be calculated exactly within the
13992 requested precision. In addition, the integer pointer represented
13993 by ARG_SG will be dereferenced and set to the appropriate signgam
13994 (-1,1) value. */
13996 static tree
13997 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13999 tree result = NULL_TREE;
14001 STRIP_NOPS (arg);
14003 /* To proceed, MPFR must exactly represent the target floating point
14004 format, which only happens when the target base equals two. Also
14005 verify ARG is a constant and that ARG_SG is an int pointer. */
14006 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14007 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14008 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14009 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14011 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14013 /* In addition to NaN and Inf, the argument cannot be zero or a
14014 negative integer. */
14015 if (real_isfinite (ra)
14016 && ra->cl != rvc_zero
14017 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14019 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14020 const int prec = fmt->p;
14021 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14022 int inexact, sg;
14023 mpfr_t m;
14024 tree result_lg;
14026 mpfr_init2 (m, prec);
14027 mpfr_from_real (m, ra, GMP_RNDN);
14028 mpfr_clear_flags ();
14029 inexact = mpfr_lgamma (m, &sg, m, rnd);
14030 result_lg = do_mpfr_ckconv (m, type, inexact);
14031 mpfr_clear (m);
14032 if (result_lg)
14034 tree result_sg;
14036 /* Dereference the arg_sg pointer argument. */
14037 arg_sg = build_fold_indirect_ref (arg_sg);
14038 /* Assign the signgam value into *arg_sg. */
14039 result_sg = fold_build2 (MODIFY_EXPR,
14040 TREE_TYPE (arg_sg), arg_sg,
14041 build_int_cst (TREE_TYPE (arg_sg), sg));
14042 TREE_SIDE_EFFECTS (result_sg) = 1;
14043 /* Combine the signgam assignment with the lgamma result. */
14044 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14045 result_sg, result_lg));
14050 return result;
14053 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14054 function FUNC on it and return the resulting value as a tree with
14055 type TYPE. The mpfr precision is set to the precision of TYPE. We
14056 assume that function FUNC returns zero if the result could be
14057 calculated exactly within the requested precision. */
14059 static tree
14060 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14062 tree result = NULL_TREE;
14064 STRIP_NOPS (arg);
14066 /* To proceed, MPFR must exactly represent the target floating point
14067 format, which only happens when the target base equals two. */
14068 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14069 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14070 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14072 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14073 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14075 if (real_isfinite (re) && real_isfinite (im))
14077 const struct real_format *const fmt =
14078 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14079 const int prec = fmt->p;
14080 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14081 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14082 int inexact;
14083 mpc_t m;
14085 mpc_init2 (m, prec);
14086 mpfr_from_real (mpc_realref (m), re, rnd);
14087 mpfr_from_real (mpc_imagref (m), im, rnd);
14088 mpfr_clear_flags ();
14089 inexact = func (m, m, crnd);
14090 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14091 mpc_clear (m);
14095 return result;
14098 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14099 mpc function FUNC on it and return the resulting value as a tree
14100 with type TYPE. The mpfr precision is set to the precision of
14101 TYPE. We assume that function FUNC returns zero if the result
14102 could be calculated exactly within the requested precision. If
14103 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14104 in the arguments and/or results. */
14106 tree
14107 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14108 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14110 tree result = NULL_TREE;
14112 STRIP_NOPS (arg0);
14113 STRIP_NOPS (arg1);
14115 /* To proceed, MPFR must exactly represent the target floating point
14116 format, which only happens when the target base equals two. */
14117 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14118 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14119 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14120 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14121 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14123 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14124 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14125 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14126 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14128 if (do_nonfinite
14129 || (real_isfinite (re0) && real_isfinite (im0)
14130 && real_isfinite (re1) && real_isfinite (im1)))
14132 const struct real_format *const fmt =
14133 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14134 const int prec = fmt->p;
14135 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14136 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14137 int inexact;
14138 mpc_t m0, m1;
14140 mpc_init2 (m0, prec);
14141 mpc_init2 (m1, prec);
14142 mpfr_from_real (mpc_realref (m0), re0, rnd);
14143 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14144 mpfr_from_real (mpc_realref (m1), re1, rnd);
14145 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14146 mpfr_clear_flags ();
14147 inexact = func (m0, m0, m1, crnd);
14148 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14149 mpc_clear (m0);
14150 mpc_clear (m1);
14154 return result;
14157 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14158 a normal call should be emitted rather than expanding the function
14159 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14161 static tree
14162 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14164 int nargs = gimple_call_num_args (stmt);
14166 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14167 (nargs > 0
14168 ? gimple_call_arg_ptr (stmt, 0)
14169 : &error_mark_node), fcode);
14172 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14173 a normal call should be emitted rather than expanding the function
14174 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14175 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14176 passed as second argument. */
14178 tree
14179 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14180 enum built_in_function fcode)
14182 int nargs = gimple_call_num_args (stmt);
14184 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14185 (nargs > 0
14186 ? gimple_call_arg_ptr (stmt, 0)
14187 : &error_mark_node), maxlen, fcode);
14190 /* Builtins with folding operations that operate on "..." arguments
14191 need special handling; we need to store the arguments in a convenient
14192 data structure before attempting any folding. Fortunately there are
14193 only a few builtins that fall into this category. FNDECL is the
14194 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14195 result of the function call is ignored. */
14197 static tree
14198 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14199 bool ignore ATTRIBUTE_UNUSED)
14201 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14202 tree ret = NULL_TREE;
14204 switch (fcode)
14206 case BUILT_IN_SPRINTF_CHK:
14207 case BUILT_IN_VSPRINTF_CHK:
14208 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14209 break;
14211 case BUILT_IN_SNPRINTF_CHK:
14212 case BUILT_IN_VSNPRINTF_CHK:
14213 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14215 default:
14216 break;
14218 if (ret)
14220 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14221 TREE_NO_WARNING (ret) = 1;
14222 return ret;
14224 return NULL_TREE;
14227 /* A wrapper function for builtin folding that prevents warnings for
14228 "statement without effect" and the like, caused by removing the
14229 call node earlier than the warning is generated. */
tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* Calls carrying __builtin_va_arg_pack () must survive until the
	 call is expanded inline, so never fold them here.  */
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call there is no argument array; point at
	 error_mark_node so the folders still receive a valid pointer.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  /* Try the generic fixed-arity folder first, then fall back to
	     the varargs (checked printf-family) folder.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the no-warning NOP_EXPR wrapper that
		     gimple_fold_builtin_varargs may have added.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
14282 /* Look up the function in builtin_decl that corresponds to DECL
14283 and set ASMSPEC as its user assembler name. DECL must be a
14284 function decl that declares a builtin. */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the canonical builtin decl, then update any RTL-level
     libfunc entries that would otherwise still emit the default name.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      /* Block moves may be expanded via an emitted memcpy call.  */
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      /* Block clears may be expanded via an emitted memset call.  */
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* Only redirect the ffs libfunc when int is narrower than a word;
	 otherwise the optab expansion does not call the library.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14328 /* Return true if DECL is a builtin that expands to a constant or similarly
14329 simple code. */
14330 bool
14331 is_simple_builtin (tree decl)
14333 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14334 switch (DECL_FUNCTION_CODE (decl))
14336 /* Builtins that expand to constants. */
14337 case BUILT_IN_CONSTANT_P:
14338 case BUILT_IN_EXPECT:
14339 case BUILT_IN_OBJECT_SIZE:
14340 case BUILT_IN_UNREACHABLE:
14341 /* Simple register moves or loads from stack. */
14342 case BUILT_IN_ASSUME_ALIGNED:
14343 case BUILT_IN_RETURN_ADDRESS:
14344 case BUILT_IN_EXTRACT_RETURN_ADDR:
14345 case BUILT_IN_FROB_RETURN_ADDR:
14346 case BUILT_IN_RETURN:
14347 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14348 case BUILT_IN_FRAME_ADDRESS:
14349 case BUILT_IN_VA_END:
14350 case BUILT_IN_STACK_SAVE:
14351 case BUILT_IN_STACK_RESTORE:
14352 /* Exception state returns or moves registers around. */
14353 case BUILT_IN_EH_FILTER:
14354 case BUILT_IN_EH_POINTER:
14355 case BUILT_IN_EH_COPY_VALUES:
14356 return true;
14358 default:
14359 return false;
14362 return false;
14365 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14366 most probably expanded inline into reasonably simple code. This is a
14367 superset of is_simple_builtin. */
14368 bool
14369 is_inexpensive_builtin (tree decl)
14371 if (!decl)
14372 return false;
14373 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14374 return true;
14375 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14376 switch (DECL_FUNCTION_CODE (decl))
14378 case BUILT_IN_ABS:
14379 case BUILT_IN_ALLOCA:
14380 case BUILT_IN_ALLOCA_WITH_ALIGN:
14381 case BUILT_IN_BSWAP16:
14382 case BUILT_IN_BSWAP32:
14383 case BUILT_IN_BSWAP64:
14384 case BUILT_IN_CLZ:
14385 case BUILT_IN_CLZIMAX:
14386 case BUILT_IN_CLZL:
14387 case BUILT_IN_CLZLL:
14388 case BUILT_IN_CTZ:
14389 case BUILT_IN_CTZIMAX:
14390 case BUILT_IN_CTZL:
14391 case BUILT_IN_CTZLL:
14392 case BUILT_IN_FFS:
14393 case BUILT_IN_FFSIMAX:
14394 case BUILT_IN_FFSL:
14395 case BUILT_IN_FFSLL:
14396 case BUILT_IN_IMAXABS:
14397 case BUILT_IN_FINITE:
14398 case BUILT_IN_FINITEF:
14399 case BUILT_IN_FINITEL:
14400 case BUILT_IN_FINITED32:
14401 case BUILT_IN_FINITED64:
14402 case BUILT_IN_FINITED128:
14403 case BUILT_IN_FPCLASSIFY:
14404 case BUILT_IN_ISFINITE:
14405 case BUILT_IN_ISINF_SIGN:
14406 case BUILT_IN_ISINF:
14407 case BUILT_IN_ISINFF:
14408 case BUILT_IN_ISINFL:
14409 case BUILT_IN_ISINFD32:
14410 case BUILT_IN_ISINFD64:
14411 case BUILT_IN_ISINFD128:
14412 case BUILT_IN_ISNAN:
14413 case BUILT_IN_ISNANF:
14414 case BUILT_IN_ISNANL:
14415 case BUILT_IN_ISNAND32:
14416 case BUILT_IN_ISNAND64:
14417 case BUILT_IN_ISNAND128:
14418 case BUILT_IN_ISNORMAL:
14419 case BUILT_IN_ISGREATER:
14420 case BUILT_IN_ISGREATEREQUAL:
14421 case BUILT_IN_ISLESS:
14422 case BUILT_IN_ISLESSEQUAL:
14423 case BUILT_IN_ISLESSGREATER:
14424 case BUILT_IN_ISUNORDERED:
14425 case BUILT_IN_VA_ARG_PACK:
14426 case BUILT_IN_VA_ARG_PACK_LEN:
14427 case BUILT_IN_VA_COPY:
14428 case BUILT_IN_TRAP:
14429 case BUILT_IN_SAVEREGS:
14430 case BUILT_IN_POPCOUNTL:
14431 case BUILT_IN_POPCOUNTLL:
14432 case BUILT_IN_POPCOUNTIMAX:
14433 case BUILT_IN_POPCOUNT:
14434 case BUILT_IN_PARITYL:
14435 case BUILT_IN_PARITYLL:
14436 case BUILT_IN_PARITYIMAX:
14437 case BUILT_IN_PARITY:
14438 case BUILT_IN_LABS:
14439 case BUILT_IN_LLABS:
14440 case BUILT_IN_PREFETCH:
14441 return true;
14443 default:
14444 return is_simple_builtin (decl);
14447 return false;