/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "hash-set.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
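
/* Illustrative sketch (not part of the original source): how the prefix
   checks above classify a few identifiers; "my_fn" is a hypothetical name.

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("my_fn")                 => false  */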
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
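
/* Worked example (illustrative, not from the original source): for an
   object known to live at a 16-byte boundary plus 4 bytes, this function
   would yield *alignp == 128 and *bitposp == 32 (both in bits), i.e.
   M = 128 divides (address in bits - 32) and N = 32 < M.  */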
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
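
/* Example (illustrative): if ptr & (align - 1) == bitpos holds with
   align == 128 and bitpos == 32, the largest power of two dividing every
   such address is bitpos & -bitpos == 32, which is what is returned.  */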
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
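
/* Typical caller pattern (illustrative sketch, not from the original
   source): a memory builtin deciding whether aligned word accesses are
   safe for its first argument.

     unsigned int align = get_pointer_alignment (CALL_EXPR_ARG (exp, 0));
     if (align >= GET_MODE_ALIGNMENT (word_mode))
       ... emit word-sized accesses ...  */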
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
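
/* Examples (illustrative): c_strlen on the literal "hello" yields
   ssize_int (5); on "foo\0bar" with a constant offset of 4 it yields
   ssize_int (3); with a non-constant offset into "foo\0bar" it yields
   NULL_TREE because of the embedded zero byte.  */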
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
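
/* Example (illustrative): for the tree form of "abcdef" + 2, c_getstr
   returns a host pointer to "cdef"; it returns 0 when the offset is not
   a known constant or points past the end of the string.  */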
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
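
/* Example (illustrative): on a little-endian target, reading "ab" in a
   16-bit integer mode packs 'a' (0x61) into the low byte and 'b' (0x62)
   into the next, producing the constant 0x6261; once a zero byte is
   seen, all remaining bytes are filled with zeros.  */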
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
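
/* Example (illustrative): for a call like memchr (s, 'x', n), the
   constant 'x' is an INTEGER_CST; target_char_cast stores (char) 0x78
   through P and returns 0, while a value that does not survive the
   round-trip through the target char type makes it return 1.  */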
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
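
/* Example (illustrative): a PARM_DECL that is not TREE_ADDRESSABLE is
   returned unchanged, since no intervening code can modify it, whereas
   an arbitrary expression such as *p + 1 is wrapped by save_expr so it
   is evaluated only once.  */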
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
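
/* Example (illustrative): for a memcpy-like builtin a caller writes

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)

   which checks exactly three arguments, while the trailing 0 in
   validate_arglist (exp, POINTER_TYPE, 0) accepts any extra arguments
   after the first pointer.  */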
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
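
/* Example (illustrative): a source-level call

     __builtin_prefetch (p, 1, 3);

   arrives here with arg1 == 1 (prefetch for write) and arg2 == 3 (high
   temporal locality); __builtin_prefetch (p) behaves like
   __builtin_prefetch (p, 0, 3) because of the defaults above.  */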
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
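
/* Example layout (illustrative, for a hypothetical 64-bit target): the
   block starts with the 8-byte incoming arg-pointer, optionally an
   8-byte structure value address, then one slot per argument register
   aligned to its mode, e.g. integer registers at 8 bytes and vector
   registers at 16, with apply_args_mode[] recording the mode per regno
   so the same walk can be replayed when saving and restoring.  */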
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1767 /* Perform an untyped return. */
1769 static void
1770 expand_builtin_return (rtx result)
1772 int size, align, regno;
1773 machine_mode mode;
1774 rtx reg;
1775 rtx_insn *call_fusage = 0;
1777 result = convert_memory_address (Pmode, result);
1779 apply_result_size ();
1780 result = gen_rtx_MEM (BLKmode, result);
1782 #ifdef HAVE_untyped_return
1783 if (HAVE_untyped_return)
1785 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1786 emit_barrier ();
1787 return;
1789 #endif
1791 /* Restore the return value and note that each value is used. */
1792 size = 0;
1793 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1794 if ((mode = apply_result_mode[regno]) != VOIDmode)
1796 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1797 if (size % align != 0)
1798 size = CEIL (size, align) * align;
1799 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1800 emit_move_insn (reg, adjust_address (result, mode, size));
1802 push_to_sequence (call_fusage);
1803 emit_use (reg);
1804 call_fusage = get_insns ();
1805 end_sequence ();
1806 size += GET_MODE_SIZE (mode);
1809 /* Put the USE insns before the return. */
1810 emit_insn (call_fusage);
1812 /* Return whatever values were restored by jumping directly to the end
1813 of the function. */
1814 expand_naked_return ();
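/* Added illustration, not part of the original source: the three
   expanders above implement GCC's untyped-call extension.  A typical
   (hypothetical) use is a transparent forwarding wrapper; 256 below is
   an arbitrary over-estimate of the caller's argument block size.

     extern void callee ();

     void
     forwarder ()
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply (callee, args, 256);
       __builtin_return (ret);
     }
 */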
1817 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1819 static enum type_class
1820 type_to_class (tree type)
1822 switch (TREE_CODE (type))
1824 case VOID_TYPE: return void_type_class;
1825 case INTEGER_TYPE: return integer_type_class;
1826 case ENUMERAL_TYPE: return enumeral_type_class;
1827 case BOOLEAN_TYPE: return boolean_type_class;
1828 case POINTER_TYPE: return pointer_type_class;
1829 case REFERENCE_TYPE: return reference_type_class;
1830 case OFFSET_TYPE: return offset_type_class;
1831 case REAL_TYPE: return real_type_class;
1832 case COMPLEX_TYPE: return complex_type_class;
1833 case FUNCTION_TYPE: return function_type_class;
1834 case METHOD_TYPE: return method_type_class;
1835 case RECORD_TYPE: return record_type_class;
1836 case UNION_TYPE:
1837 case QUAL_UNION_TYPE: return union_type_class;
1838 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1839 ? string_type_class : array_type_class);
1840 case LANG_TYPE: return lang_type_class;
1841 default: return no_type_class;
1845 /* Expand a call EXP to __builtin_classify_type. */
1847 static rtx
1848 expand_builtin_classify_type (tree exp)
1850 if (call_expr_nargs (exp))
1851 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1852 return GEN_INT (no_type_class);
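/* Added illustration: what the expansion above folds to at compile
   time, using the enum type_class values returned by type_to_class.

     int i;  double d;  char *p;
     __builtin_classify_type (i)   -> integer_type_class
     __builtin_classify_type (d)   -> real_type_class
     __builtin_classify_type (p)   -> pointer_type_class
 */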
1855 /* This helper macro, meant to be used in mathfn_built_in below,
1856 determines which among a set of three builtin math functions is
1857 appropriate for a given type mode. The `F' and `L' cases are
1858 automatically generated from the `double' case. */
1859 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1860 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1861 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1862 fcodel = BUILT_IN_MATHFN##L ; break;
1863 /* Similar to above, but appends _R after any F/L suffix. */
1864 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1865 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1866 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1867 fcodel = BUILT_IN_MATHFN##L_R ; break;
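/* Added illustration: via token pasting, CASE_MATHFN (BUILT_IN_SIN)
   expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;
 */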
1869 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1870 if available. If IMPLICIT is true use the implicit builtin declaration,
1871 otherwise use the explicit declaration. If we can't do the conversion,
1872 return zero. */
1874 static tree
1875 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1877 enum built_in_function fcode, fcodef, fcodel, fcode2;
1879 switch (fn)
1881 CASE_MATHFN (BUILT_IN_ACOS)
1882 CASE_MATHFN (BUILT_IN_ACOSH)
1883 CASE_MATHFN (BUILT_IN_ASIN)
1884 CASE_MATHFN (BUILT_IN_ASINH)
1885 CASE_MATHFN (BUILT_IN_ATAN)
1886 CASE_MATHFN (BUILT_IN_ATAN2)
1887 CASE_MATHFN (BUILT_IN_ATANH)
1888 CASE_MATHFN (BUILT_IN_CBRT)
1889 CASE_MATHFN (BUILT_IN_CEIL)
1890 CASE_MATHFN (BUILT_IN_CEXPI)
1891 CASE_MATHFN (BUILT_IN_COPYSIGN)
1892 CASE_MATHFN (BUILT_IN_COS)
1893 CASE_MATHFN (BUILT_IN_COSH)
1894 CASE_MATHFN (BUILT_IN_DREM)
1895 CASE_MATHFN (BUILT_IN_ERF)
1896 CASE_MATHFN (BUILT_IN_ERFC)
1897 CASE_MATHFN (BUILT_IN_EXP)
1898 CASE_MATHFN (BUILT_IN_EXP10)
1899 CASE_MATHFN (BUILT_IN_EXP2)
1900 CASE_MATHFN (BUILT_IN_EXPM1)
1901 CASE_MATHFN (BUILT_IN_FABS)
1902 CASE_MATHFN (BUILT_IN_FDIM)
1903 CASE_MATHFN (BUILT_IN_FLOOR)
1904 CASE_MATHFN (BUILT_IN_FMA)
1905 CASE_MATHFN (BUILT_IN_FMAX)
1906 CASE_MATHFN (BUILT_IN_FMIN)
1907 CASE_MATHFN (BUILT_IN_FMOD)
1908 CASE_MATHFN (BUILT_IN_FREXP)
1909 CASE_MATHFN (BUILT_IN_GAMMA)
1910 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1911 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1912 CASE_MATHFN (BUILT_IN_HYPOT)
1913 CASE_MATHFN (BUILT_IN_ILOGB)
1914 CASE_MATHFN (BUILT_IN_ICEIL)
1915 CASE_MATHFN (BUILT_IN_IFLOOR)
1916 CASE_MATHFN (BUILT_IN_INF)
1917 CASE_MATHFN (BUILT_IN_IRINT)
1918 CASE_MATHFN (BUILT_IN_IROUND)
1919 CASE_MATHFN (BUILT_IN_ISINF)
1920 CASE_MATHFN (BUILT_IN_J0)
1921 CASE_MATHFN (BUILT_IN_J1)
1922 CASE_MATHFN (BUILT_IN_JN)
1923 CASE_MATHFN (BUILT_IN_LCEIL)
1924 CASE_MATHFN (BUILT_IN_LDEXP)
1925 CASE_MATHFN (BUILT_IN_LFLOOR)
1926 CASE_MATHFN (BUILT_IN_LGAMMA)
1927 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1928 CASE_MATHFN (BUILT_IN_LLCEIL)
1929 CASE_MATHFN (BUILT_IN_LLFLOOR)
1930 CASE_MATHFN (BUILT_IN_LLRINT)
1931 CASE_MATHFN (BUILT_IN_LLROUND)
1932 CASE_MATHFN (BUILT_IN_LOG)
1933 CASE_MATHFN (BUILT_IN_LOG10)
1934 CASE_MATHFN (BUILT_IN_LOG1P)
1935 CASE_MATHFN (BUILT_IN_LOG2)
1936 CASE_MATHFN (BUILT_IN_LOGB)
1937 CASE_MATHFN (BUILT_IN_LRINT)
1938 CASE_MATHFN (BUILT_IN_LROUND)
1939 CASE_MATHFN (BUILT_IN_MODF)
1940 CASE_MATHFN (BUILT_IN_NAN)
1941 CASE_MATHFN (BUILT_IN_NANS)
1942 CASE_MATHFN (BUILT_IN_NEARBYINT)
1943 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1944 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1945 CASE_MATHFN (BUILT_IN_POW)
1946 CASE_MATHFN (BUILT_IN_POWI)
1947 CASE_MATHFN (BUILT_IN_POW10)
1948 CASE_MATHFN (BUILT_IN_REMAINDER)
1949 CASE_MATHFN (BUILT_IN_REMQUO)
1950 CASE_MATHFN (BUILT_IN_RINT)
1951 CASE_MATHFN (BUILT_IN_ROUND)
1952 CASE_MATHFN (BUILT_IN_SCALB)
1953 CASE_MATHFN (BUILT_IN_SCALBLN)
1954 CASE_MATHFN (BUILT_IN_SCALBN)
1955 CASE_MATHFN (BUILT_IN_SIGNBIT)
1956 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1957 CASE_MATHFN (BUILT_IN_SIN)
1958 CASE_MATHFN (BUILT_IN_SINCOS)
1959 CASE_MATHFN (BUILT_IN_SINH)
1960 CASE_MATHFN (BUILT_IN_SQRT)
1961 CASE_MATHFN (BUILT_IN_TAN)
1962 CASE_MATHFN (BUILT_IN_TANH)
1963 CASE_MATHFN (BUILT_IN_TGAMMA)
1964 CASE_MATHFN (BUILT_IN_TRUNC)
1965 CASE_MATHFN (BUILT_IN_Y0)
1966 CASE_MATHFN (BUILT_IN_Y1)
1967 CASE_MATHFN (BUILT_IN_YN)
1969 default:
1970 return NULL_TREE;
1973 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1974 fcode2 = fcode;
1975 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1976 fcode2 = fcodef;
1977 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1978 fcode2 = fcodel;
1979 else
1980 return NULL_TREE;
1982 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1983 return NULL_TREE;
1985 return builtin_decl_explicit (fcode2);
1988 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1990 tree
1991 mathfn_built_in (tree type, enum built_in_function fn)
1993 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
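/* Added illustration: the mapping above picks the variant matching
   TYPE, so

     mathfn_built_in (float_type_node, BUILT_IN_SIN)

   yields the implicit declaration of sinf, long_double_type_node would
   yield sinl, and an unsupported type yields NULL_TREE.  */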
1996 /* If errno must be maintained, expand the RTL to check if the result,
1997 TARGET, of a built-in function call, EXP, is NaN, and if so set
1998 errno to EDOM. */
2000 static void
2001 expand_errno_check (tree exp, rtx target)
2003 rtx_code_label *lab = gen_label_rtx ();
2005 /* Test the result; if it is NaN, set errno=EDOM because
2006 the argument was not in the domain. */
2007 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2008 NULL_RTX, NULL_RTX, lab,
2009 /* The jump is very likely. */
2010 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2012 #ifdef TARGET_EDOM
2013 /* If this built-in doesn't throw an exception, set errno directly. */
2014 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2016 #ifdef GEN_ERRNO_RTX
2017 rtx errno_rtx = GEN_ERRNO_RTX;
2018 #else
2019 rtx errno_rtx
2020 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2021 #endif
2022 emit_move_insn (errno_rtx,
2023 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2024 emit_label (lab);
2025 return;
2027 #endif
2029 /* Make sure the library call isn't expanded as a tail call. */
2030 CALL_EXPR_TAILCALL (exp) = 0;
2032 /* We can't set errno=EDOM directly; let the library call do it.
2033 Pop the arguments right away in case the call gets deleted. */
2034 NO_DEFER_POP;
2035 expand_call (exp, target, 0);
2036 OK_DEFER_POP;
2037 emit_label (lab);
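/* Added illustration: the check above preserves the C99 errno contract
   when a math builtin is expanded inline (the -fmath-errno default):

     errno = 0;
     double r = sqrt (-1.0);   // r is NaN; errno is now EDOM

   Under -fno-math-errno this check is never emitted.  */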
2040 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding
2042 the function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's operands. */
2046 static rtx
2047 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2049 optab builtin_optab;
2050 rtx op0;
2051 rtx_insn *insns;
2052 tree fndecl = get_callee_fndecl (exp);
2053 machine_mode mode;
2054 bool errno_set = false;
2055 bool try_widening = false;
2056 tree arg;
2058 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2059 return NULL_RTX;
2061 arg = CALL_EXPR_ARG (exp, 0);
2063 switch (DECL_FUNCTION_CODE (fndecl))
2065 CASE_FLT_FN (BUILT_IN_SQRT):
2066 errno_set = ! tree_expr_nonnegative_p (arg);
2067 try_widening = true;
2068 builtin_optab = sqrt_optab;
2069 break;
2070 CASE_FLT_FN (BUILT_IN_EXP):
2071 errno_set = true; builtin_optab = exp_optab; break;
2072 CASE_FLT_FN (BUILT_IN_EXP10):
2073 CASE_FLT_FN (BUILT_IN_POW10):
2074 errno_set = true; builtin_optab = exp10_optab; break;
2075 CASE_FLT_FN (BUILT_IN_EXP2):
2076 errno_set = true; builtin_optab = exp2_optab; break;
2077 CASE_FLT_FN (BUILT_IN_EXPM1):
2078 errno_set = true; builtin_optab = expm1_optab; break;
2079 CASE_FLT_FN (BUILT_IN_LOGB):
2080 errno_set = true; builtin_optab = logb_optab; break;
2081 CASE_FLT_FN (BUILT_IN_LOG):
2082 errno_set = true; builtin_optab = log_optab; break;
2083 CASE_FLT_FN (BUILT_IN_LOG10):
2084 errno_set = true; builtin_optab = log10_optab; break;
2085 CASE_FLT_FN (BUILT_IN_LOG2):
2086 errno_set = true; builtin_optab = log2_optab; break;
2087 CASE_FLT_FN (BUILT_IN_LOG1P):
2088 errno_set = true; builtin_optab = log1p_optab; break;
2089 CASE_FLT_FN (BUILT_IN_ASIN):
2090 builtin_optab = asin_optab; break;
2091 CASE_FLT_FN (BUILT_IN_ACOS):
2092 builtin_optab = acos_optab; break;
2093 CASE_FLT_FN (BUILT_IN_TAN):
2094 builtin_optab = tan_optab; break;
2095 CASE_FLT_FN (BUILT_IN_ATAN):
2096 builtin_optab = atan_optab; break;
2097 CASE_FLT_FN (BUILT_IN_FLOOR):
2098 builtin_optab = floor_optab; break;
2099 CASE_FLT_FN (BUILT_IN_CEIL):
2100 builtin_optab = ceil_optab; break;
2101 CASE_FLT_FN (BUILT_IN_TRUNC):
2102 builtin_optab = btrunc_optab; break;
2103 CASE_FLT_FN (BUILT_IN_ROUND):
2104 builtin_optab = round_optab; break;
2105 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2106 builtin_optab = nearbyint_optab;
2107 if (flag_trapping_math)
2108 break;
2109 /* Else fall through and expand as rint. */
2110 CASE_FLT_FN (BUILT_IN_RINT):
2111 builtin_optab = rint_optab; break;
2112 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2113 builtin_optab = significand_optab; break;
2114 default:
2115 gcc_unreachable ();
2118 /* Make a suitable register to place result in. */
2119 mode = TYPE_MODE (TREE_TYPE (exp));
2121 if (! flag_errno_math || ! HONOR_NANS (mode))
2122 errno_set = false;
2124 /* Before working hard, check whether the instruction is available, but try
2125 to widen the mode for specific operations. */
2126 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2127 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2128 && (!errno_set || !optimize_insn_for_size_p ()))
2130 rtx result = gen_reg_rtx (mode);
2132 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2133 need to expand the argument again. This way, we will not perform
2134 side-effects more than once. */
2135 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2137 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2139 start_sequence ();
2141 /* Compute into RESULT.
2142 Set RESULT to wherever the result comes back. */
2143 result = expand_unop (mode, builtin_optab, op0, result, 0);
2145 if (result != 0)
2147 if (errno_set)
2148 expand_errno_check (exp, result);
2150 /* Output the entire sequence. */
2151 insns = get_insns ();
2152 end_sequence ();
2153 emit_insn (insns);
2154 return result;
2157 /* If we were unable to expand via the builtin, stop the sequence
2158 (without outputting the insns) and call the library function
2159 with the stabilized argument list. */
2160 end_sequence ();
2163 return expand_call (exp, target, target == const0_rtx);
2166 /* Expand a call to the builtin binary math functions (pow and atan2).
2167 Return NULL_RTX if a normal call should be emitted rather than expanding the
2168 function in-line. EXP is the expression that is a call to the builtin
2169 function; if convenient, the result should be placed in TARGET.
2170 SUBTARGET may be used as the target for computing one of EXP's
2171 operands. */
2173 static rtx
2174 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2176 optab builtin_optab;
2177 rtx op0, op1, result;
2178 rtx_insn *insns;
2179 int op1_type = REAL_TYPE;
2180 tree fndecl = get_callee_fndecl (exp);
2181 tree arg0, arg1;
2182 machine_mode mode;
2183 bool errno_set = true;
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SCALBN):
2188 CASE_FLT_FN (BUILT_IN_SCALBLN):
2189 CASE_FLT_FN (BUILT_IN_LDEXP):
2190 op1_type = INTEGER_TYPE;
2191 default:
2192 break;
2195 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2196 return NULL_RTX;
2198 arg0 = CALL_EXPR_ARG (exp, 0);
2199 arg1 = CALL_EXPR_ARG (exp, 1);
2201 switch (DECL_FUNCTION_CODE (fndecl))
2203 CASE_FLT_FN (BUILT_IN_POW):
2204 builtin_optab = pow_optab; break;
2205 CASE_FLT_FN (BUILT_IN_ATAN2):
2206 builtin_optab = atan2_optab; break;
2207 CASE_FLT_FN (BUILT_IN_SCALB):
2208 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2209 return 0;
2210 builtin_optab = scalb_optab; break;
2211 CASE_FLT_FN (BUILT_IN_SCALBN):
2212 CASE_FLT_FN (BUILT_IN_SCALBLN):
2213 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2214 return 0;
2215 /* Fall through... */
2216 CASE_FLT_FN (BUILT_IN_LDEXP):
2217 builtin_optab = ldexp_optab; break;
2218 CASE_FLT_FN (BUILT_IN_FMOD):
2219 builtin_optab = fmod_optab; break;
2220 CASE_FLT_FN (BUILT_IN_REMAINDER):
2221 CASE_FLT_FN (BUILT_IN_DREM):
2222 builtin_optab = remainder_optab; break;
2223 default:
2224 gcc_unreachable ();
2227 /* Make a suitable register to place result in. */
2228 mode = TYPE_MODE (TREE_TYPE (exp));
2230 /* Before working hard, check whether the instruction is available. */
2231 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2232 return NULL_RTX;
2234 result = gen_reg_rtx (mode);
2236 if (! flag_errno_math || ! HONOR_NANS (mode))
2237 errno_set = false;
2239 if (errno_set && optimize_insn_for_size_p ())
2240 return 0;
2242 /* Always stabilize the argument list. */
2243 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2244 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2246 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2247 op1 = expand_normal (arg1);
2249 start_sequence ();
2251 /* Compute into RESULT.
2252 Set RESULT to wherever the result comes back. */
2253 result = expand_binop (mode, builtin_optab, op0, op1,
2254 result, 0, OPTAB_DIRECT);
2256 /* If we were unable to expand via the builtin, stop the sequence
2257 (without outputting the insns) and call the library function
2258 with the stabilized argument list. */
2259 if (result == 0)
2261 end_sequence ();
2262 return expand_call (exp, target, target == const0_rtx);
2265 if (errno_set)
2266 expand_errno_check (exp, result);
2268 /* Output the entire sequence. */
2269 insns = get_insns ();
2270 end_sequence ();
2271 emit_insn (insns);
2273 return result;
2276 /* Expand a call to the builtin trinary math functions (fma).
2277 Return NULL_RTX if a normal call should be emitted rather than expanding the
2278 function in-line. EXP is the expression that is a call to the builtin
2279 function; if convenient, the result should be placed in TARGET.
2280 SUBTARGET may be used as the target for computing one of EXP's
2281 operands. */
2283 static rtx
2284 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2286 optab builtin_optab;
2287 rtx op0, op1, op2, result;
2288 rtx_insn *insns;
2289 tree fndecl = get_callee_fndecl (exp);
2290 tree arg0, arg1, arg2;
2291 machine_mode mode;
2293 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2294 return NULL_RTX;
2296 arg0 = CALL_EXPR_ARG (exp, 0);
2297 arg1 = CALL_EXPR_ARG (exp, 1);
2298 arg2 = CALL_EXPR_ARG (exp, 2);
2300 switch (DECL_FUNCTION_CODE (fndecl))
2302 CASE_FLT_FN (BUILT_IN_FMA):
2303 builtin_optab = fma_optab; break;
2304 default:
2305 gcc_unreachable ();
2308 /* Make a suitable register to place result in. */
2309 mode = TYPE_MODE (TREE_TYPE (exp));
2311 /* Before working hard, check whether the instruction is available. */
2312 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2313 return NULL_RTX;
2315 result = gen_reg_rtx (mode);
2317 /* Always stabilize the argument list. */
2318 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2319 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2320 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2322 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2323 op1 = expand_normal (arg1);
2324 op2 = expand_normal (arg2);
2326 start_sequence ();
2328 /* Compute into RESULT.
2329 Set RESULT to wherever the result comes back. */
2330 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2331 result, 0);
2333 /* If we were unable to expand via the builtin, stop the sequence
2334 (without outputting the insns) and call the library function
2335 with the stabilized argument list. */
2336 if (result == 0)
2338 end_sequence ();
2339 return expand_call (exp, target, target == const0_rtx);
2342 /* Output the entire sequence. */
2343 insns = get_insns ();
2344 end_sequence ();
2345 emit_insn (insns);
2347 return result;
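/* Added illustration: the fma insn emitted above rounds once, unlike a
   separate multiply and add:

     double a = 1.0 + 0x1p-52, b = 1.0 - 0x1p-52;
     fma (a, b, -1.0);   // exactly -0x1p-104
     a * b - 1.0;        // 0.0: the product already rounded to 1.0
 */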
2350 /* Expand a call to the builtin sin and cos math functions.
2351 Return NULL_RTX if a normal call should be emitted rather than expanding the
2352 function in-line. EXP is the expression that is a call to the builtin
2353 function; if convenient, the result should be placed in TARGET.
2354 SUBTARGET may be used as the target for computing one of EXP's
2355 operands. */
2357 static rtx
2358 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2360 optab builtin_optab;
2361 rtx op0;
2362 rtx_insn *insns;
2363 tree fndecl = get_callee_fndecl (exp);
2364 machine_mode mode;
2365 tree arg;
2367 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2368 return NULL_RTX;
2370 arg = CALL_EXPR_ARG (exp, 0);
2372 switch (DECL_FUNCTION_CODE (fndecl))
2374 CASE_FLT_FN (BUILT_IN_SIN):
2375 CASE_FLT_FN (BUILT_IN_COS):
2376 builtin_optab = sincos_optab; break;
2377 default:
2378 gcc_unreachable ();
2381 /* Make a suitable register to place result in. */
2382 mode = TYPE_MODE (TREE_TYPE (exp));
2384 /* Check if the sincos insn is available; otherwise fall back
2385 to the sin or cos insn. */
2386 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2387 switch (DECL_FUNCTION_CODE (fndecl))
2389 CASE_FLT_FN (BUILT_IN_SIN):
2390 builtin_optab = sin_optab; break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 builtin_optab = cos_optab; break;
2393 default:
2394 gcc_unreachable ();
2397 /* Before working hard, check whether the instruction is available. */
2398 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2400 rtx result = gen_reg_rtx (mode);
2402 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2403 need to expand the argument again. This way, we will not perform
2404 side-effects more than once. */
2405 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2407 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2409 start_sequence ();
2411 /* Compute into RESULT.
2412 Set RESULT to wherever the result comes back. */
2413 if (builtin_optab == sincos_optab)
2415 int ok;
2417 switch (DECL_FUNCTION_CODE (fndecl))
2419 CASE_FLT_FN (BUILT_IN_SIN):
2420 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2421 break;
2422 CASE_FLT_FN (BUILT_IN_COS):
2423 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2424 break;
2425 default:
2426 gcc_unreachable ();
2428 gcc_assert (ok);
2430 else
2431 result = expand_unop (mode, builtin_optab, op0, result, 0);
2433 if (result != 0)
2435 /* Output the entire sequence. */
2436 insns = get_insns ();
2437 end_sequence ();
2438 emit_insn (insns);
2439 return result;
2442 /* If we were unable to expand via the builtin, stop the sequence
2443 (without outputting the insns) and call the library function
2444 with the stabilized argument list. */
2445 end_sequence ();
2448 return expand_call (exp, target, target == const0_rtx);
2451 /* Given an interclass math builtin decl FNDECL and its argument ARG
2452 return an RTL instruction code that implements the functionality.
2453 If that isn't possible or available return CODE_FOR_nothing. */
2455 static enum insn_code
2456 interclass_mathfn_icode (tree arg, tree fndecl)
2458 bool errno_set = false;
2459 optab builtin_optab = unknown_optab;
2460 machine_mode mode;
2462 switch (DECL_FUNCTION_CODE (fndecl))
2464 CASE_FLT_FN (BUILT_IN_ILOGB):
2465 errno_set = true; builtin_optab = ilogb_optab; break;
2466 CASE_FLT_FN (BUILT_IN_ISINF):
2467 builtin_optab = isinf_optab; break;
2468 case BUILT_IN_ISNORMAL:
2469 case BUILT_IN_ISFINITE:
2470 CASE_FLT_FN (BUILT_IN_FINITE):
2471 case BUILT_IN_FINITED32:
2472 case BUILT_IN_FINITED64:
2473 case BUILT_IN_FINITED128:
2474 case BUILT_IN_ISINFD32:
2475 case BUILT_IN_ISINFD64:
2476 case BUILT_IN_ISINFD128:
2477 /* These builtins have no optabs (yet). */
2478 break;
2479 default:
2480 gcc_unreachable ();
2483 /* There's no easy way to detect the case we need to set EDOM. */
2484 if (flag_errno_math && errno_set)
2485 return CODE_FOR_nothing;
2487 /* Optab mode depends on the mode of the input argument. */
2488 mode = TYPE_MODE (TREE_TYPE (arg));
2490 if (builtin_optab)
2491 return optab_handler (builtin_optab, mode);
2492 return CODE_FOR_nothing;
2495 /* Expand a call to one of the builtin math functions that operate on
2496 floating point argument and output an integer result (ilogb, isinf,
2497 isnan, etc).
2498 Return 0 if a normal call should be emitted rather than expanding the
2499 function in-line. EXP is the expression that is a call to the builtin
2500 function; if convenient, the result should be placed in TARGET. */
2502 static rtx
2503 expand_builtin_interclass_mathfn (tree exp, rtx target)
2505 enum insn_code icode = CODE_FOR_nothing;
2506 rtx op0;
2507 tree fndecl = get_callee_fndecl (exp);
2508 machine_mode mode;
2509 tree arg;
2511 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2512 return NULL_RTX;
2514 arg = CALL_EXPR_ARG (exp, 0);
2515 icode = interclass_mathfn_icode (arg, fndecl);
2516 mode = TYPE_MODE (TREE_TYPE (arg));
2518 if (icode != CODE_FOR_nothing)
2520 struct expand_operand ops[1];
2521 rtx_insn *last = get_last_insn ();
2522 tree orig_arg = arg;
2524 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2525 need to expand the argument again. This way, we will not perform
2526 side-effects more than once. */
2527 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2529 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2531 if (mode != GET_MODE (op0))
2532 op0 = convert_to_mode (mode, op0, 0);
2534 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2535 if (maybe_legitimize_operands (icode, 0, 1, ops)
2536 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2537 return ops[0].value;
2539 delete_insns_since (last);
2540 CALL_EXPR_ARG (exp, 0) = orig_arg;
2543 return NULL_RTX;
2546 /* Expand a call to the builtin sincos math function.
2547 Return NULL_RTX if a normal call should be emitted rather than expanding the
2548 function in-line. EXP is the expression that is a call to the builtin
2549 function. */
2551 static rtx
2552 expand_builtin_sincos (tree exp)
2554 rtx op0, op1, op2, target1, target2;
2555 machine_mode mode;
2556 tree arg, sinp, cosp;
2557 int result;
2558 location_t loc = EXPR_LOCATION (exp);
2559 tree alias_type, alias_off;
2561 if (!validate_arglist (exp, REAL_TYPE,
2562 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2563 return NULL_RTX;
2565 arg = CALL_EXPR_ARG (exp, 0);
2566 sinp = CALL_EXPR_ARG (exp, 1);
2567 cosp = CALL_EXPR_ARG (exp, 2);
2569 /* Make a suitable register to place result in. */
2570 mode = TYPE_MODE (TREE_TYPE (arg));
2572 /* Check if sincos insn is available, otherwise emit the call. */
2573 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2574 return NULL_RTX;
2576 target1 = gen_reg_rtx (mode);
2577 target2 = gen_reg_rtx (mode);
2579 op0 = expand_normal (arg);
2580 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2581 alias_off = build_int_cst (alias_type, 0);
2582 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2583 sinp, alias_off));
2584 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2585 cosp, alias_off));
2587 /* Compute into target1 and target2.
2588 Set TARGET to wherever the result comes back. */
2589 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2590 gcc_assert (result);
2592 /* Move target1 and target2 to the memory locations indicated
2593 by op1 and op2. */
2594 emit_move_insn (op1, target1);
2595 emit_move_insn (op2, target2);
2597 return const0_rtx;
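/* Added illustration: the GNU interface served by the expander above;
   with a sincos insn both results come from a single operation:

     double s, c;
     sincos (x, &s, &c);   // s = sin (x), c = cos (x)
 */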
2600 /* Expand a call to the internal cexpi builtin to the sincos math function.
2601 EXP is the expression that is a call to the builtin function; if convenient,
2602 the result should be placed in TARGET. */
2604 static rtx
2605 expand_builtin_cexpi (tree exp, rtx target)
2607 tree fndecl = get_callee_fndecl (exp);
2608 tree arg, type;
2609 machine_mode mode;
2610 rtx op0, op1, op2;
2611 location_t loc = EXPR_LOCATION (exp);
2613 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2614 return NULL_RTX;
2616 arg = CALL_EXPR_ARG (exp, 0);
2617 type = TREE_TYPE (arg);
2618 mode = TYPE_MODE (TREE_TYPE (arg));
2620 /* Try expanding via a sincos optab, fall back to emitting a libcall
2621 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2622 is only generated from sincos or cexp, or when either of them is available. */
2623 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2625 op1 = gen_reg_rtx (mode);
2626 op2 = gen_reg_rtx (mode);
2628 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2630 /* Compute into op1 and op2. */
2631 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2633 else if (targetm.libc_has_function (function_sincos))
2635 tree call, fn = NULL_TREE;
2636 tree top1, top2;
2637 rtx op1a, op2a;
2639 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2640 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2641 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2642 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2643 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2644 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2645 else
2646 gcc_unreachable ();
2648 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2649 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2650 op1a = copy_addr_to_reg (XEXP (op1, 0));
2651 op2a = copy_addr_to_reg (XEXP (op2, 0));
2652 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2653 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2655 /* Make sure not to fold the sincos call again. */
2656 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2657 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2658 call, 3, arg, top1, top2));
2660 else
2662 tree call, fn = NULL_TREE, narg;
2663 tree ctype = build_complex_type (type);
2665 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2666 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2667 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2668 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2669 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2670 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2671 else
2672 gcc_unreachable ();
2674 /* If we don't have a decl for cexp, create one. This is the
2675 friendliest fallback if the user calls __builtin_cexpi
2676 without full C99 function support on the target. */
2677 if (fn == NULL_TREE)
2679 tree fntype;
2680 const char *name = NULL;
2682 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2683 name = "cexpf";
2684 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2685 name = "cexp";
2686 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2687 name = "cexpl";
2689 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2690 fn = build_fn_decl (name, fntype);
2693 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2694 build_real (type, dconst0), arg);
2696 /* Make sure not to fold the cexp call again. */
2697 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2698 return expand_expr (build_call_nary (ctype, call, 1, narg),
2699 target, VOIDmode, EXPAND_NORMAL);
2702 /* Now build the proper return type. */
2703 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2704 make_tree (TREE_TYPE (arg), op2),
2705 make_tree (TREE_TYPE (arg), op1)),
2706 target, VOIDmode, EXPAND_NORMAL);
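/* Added illustration: cexpi is the complex exponential of a pure
   imaginary argument, so the expansion above computes

     __builtin_cexpi (x) == cos (x) + I * sin (x)

   through sincos when possible, else through cexp (I * x).  */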
2709 /* Conveniently construct a function call expression. FNDECL names the
2710 function to be called, N is the number of arguments, and the "..."
2711 parameters are the argument expressions. Unlike build_call_expr
2712 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2714 static tree
2715 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2717 va_list ap;
2718 tree fntype = TREE_TYPE (fndecl);
2719 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2721 va_start (ap, n);
2722 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2723 va_end (ap);
2724 SET_EXPR_LOCATION (fn, loc);
2725 return fn;
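/* Added illustration: how the helper above is used later in this file
   to re-emit a libc call without refolding it, e.g. when lowering
   mempcpy to memcpy:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					dest, src, len);
 */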
2728 /* Expand a call to one of the builtin rounding functions gcc defines
2729 as an extension (lfloor and lceil). As these are gcc extensions we
2730 do not need to worry about setting errno to EDOM.
2731 If expanding via optab fails, lower expression to (int)(floor(x)).
2732 EXP is the expression that is a call to the builtin function;
2733 if convenient, the result should be placed in TARGET. */
2735 static rtx
2736 expand_builtin_int_roundingfn (tree exp, rtx target)
2738 convert_optab builtin_optab;
2739 rtx op0, tmp;
2740 rtx_insn *insns;
2741 tree fndecl = get_callee_fndecl (exp);
2742 enum built_in_function fallback_fn;
2743 tree fallback_fndecl;
2744 machine_mode mode;
2745 tree arg;
2747 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2748 gcc_unreachable ();
2750 arg = CALL_EXPR_ARG (exp, 0);
2752 switch (DECL_FUNCTION_CODE (fndecl))
2754 CASE_FLT_FN (BUILT_IN_ICEIL):
2755 CASE_FLT_FN (BUILT_IN_LCEIL):
2756 CASE_FLT_FN (BUILT_IN_LLCEIL):
2757 builtin_optab = lceil_optab;
2758 fallback_fn = BUILT_IN_CEIL;
2759 break;
2761 CASE_FLT_FN (BUILT_IN_IFLOOR):
2762 CASE_FLT_FN (BUILT_IN_LFLOOR):
2763 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2764 builtin_optab = lfloor_optab;
2765 fallback_fn = BUILT_IN_FLOOR;
2766 break;
2768 default:
2769 gcc_unreachable ();
2772 /* Make a suitable register to place result in. */
2773 mode = TYPE_MODE (TREE_TYPE (exp));
2775 target = gen_reg_rtx (mode);
2777 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2778 need to expand the argument again. This way, we will not perform
2779 side-effects more than once. */
2780 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2782 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2784 start_sequence ();
2786 /* Compute into TARGET. */
2787 if (expand_sfix_optab (target, op0, builtin_optab))
2789 /* Output the entire sequence. */
2790 insns = get_insns ();
2791 end_sequence ();
2792 emit_insn (insns);
2793 return target;
2796 /* If we were unable to expand via the builtin, stop the sequence
2797 (without outputting the insns). */
2798 end_sequence ();
2800 /* Fall back to floating point rounding optab. */
2801 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2803 /* For non-C99 targets we may end up without a fallback fndecl here
2804 if the user called __builtin_lfloor directly. In this case emit
2805 a call to the floor/ceil variants nevertheless. This should result
2806 in the best user experience for targets lacking full C99 support. */
2807 if (fallback_fndecl == NULL_TREE)
2809 tree fntype;
2810 const char *name = NULL;
2812 switch (DECL_FUNCTION_CODE (fndecl))
2814 case BUILT_IN_ICEIL:
2815 case BUILT_IN_LCEIL:
2816 case BUILT_IN_LLCEIL:
2817 name = "ceil";
2818 break;
2819 case BUILT_IN_ICEILF:
2820 case BUILT_IN_LCEILF:
2821 case BUILT_IN_LLCEILF:
2822 name = "ceilf";
2823 break;
2824 case BUILT_IN_ICEILL:
2825 case BUILT_IN_LCEILL:
2826 case BUILT_IN_LLCEILL:
2827 name = "ceill";
2828 break;
2829 case BUILT_IN_IFLOOR:
2830 case BUILT_IN_LFLOOR:
2831 case BUILT_IN_LLFLOOR:
2832 name = "floor";
2833 break;
2834 case BUILT_IN_IFLOORF:
2835 case BUILT_IN_LFLOORF:
2836 case BUILT_IN_LLFLOORF:
2837 name = "floorf";
2838 break;
2839 case BUILT_IN_IFLOORL:
2840 case BUILT_IN_LFLOORL:
2841 case BUILT_IN_LLFLOORL:
2842 name = "floorl";
2843 break;
2844 default:
2845 gcc_unreachable ();
2848 fntype = build_function_type_list (TREE_TYPE (arg),
2849 TREE_TYPE (arg), NULL_TREE);
2850 fallback_fndecl = build_fn_decl (name, fntype);
2853 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2855 tmp = expand_normal (exp);
2856 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2858 /* Truncate the result of floating point optab to integer
2859 via expand_fix (). */
2860 target = gen_reg_rtx (mode);
2861 expand_fix (target, tmp, 0);
2863 return target;
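/* Added illustration: when no lceil/lfloor pattern exists, the
   fallback path above lowers the GCC extension roughly as plain C:

     long l = __builtin_lfloor (x);
     // becomes:  long l = (long) floor (x);
 */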
2866 /* Expand a call to one of the builtin math functions doing integer
2867 conversion (lrint).
2868 Return 0 if a normal call should be emitted rather than expanding the
2869 function in-line. EXP is the expression that is a call to the builtin
2870 function; if convenient, the result should be placed in TARGET. */
2872 static rtx
2873 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2875 convert_optab builtin_optab;
2876 rtx op0;
2877 rtx_insn *insns;
2878 tree fndecl = get_callee_fndecl (exp);
2879 tree arg;
2880 machine_mode mode;
2881 enum built_in_function fallback_fn = BUILT_IN_NONE;
2883 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2884 gcc_unreachable ();
2886 arg = CALL_EXPR_ARG (exp, 0);
2888 switch (DECL_FUNCTION_CODE (fndecl))
2890 CASE_FLT_FN (BUILT_IN_IRINT):
2891 fallback_fn = BUILT_IN_LRINT;
2892 /* FALLTHRU */
2893 CASE_FLT_FN (BUILT_IN_LRINT):
2894 CASE_FLT_FN (BUILT_IN_LLRINT):
2895 builtin_optab = lrint_optab;
2896 break;
2898 CASE_FLT_FN (BUILT_IN_IROUND):
2899 fallback_fn = BUILT_IN_LROUND;
2900 /* FALLTHRU */
2901 CASE_FLT_FN (BUILT_IN_LROUND):
2902 CASE_FLT_FN (BUILT_IN_LLROUND):
2903 builtin_optab = lround_optab;
2904 break;
2906 default:
2907 gcc_unreachable ();
2910 /* There's no easy way to detect the case we need to set EDOM. */
2911 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2912 return NULL_RTX;
2914 /* Make a suitable register to place result in. */
2915 mode = TYPE_MODE (TREE_TYPE (exp));
2917 /* If errno need not be maintained, try expanding inline. */
2918 if (!flag_errno_math)
2920 rtx result = gen_reg_rtx (mode);
2922 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2923 need to expand the argument again. This way, we will not perform
2924 side-effects more than once. */
2925 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2927 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2929 start_sequence ();
2931 if (expand_sfix_optab (result, op0, builtin_optab))
2933 /* Output the entire sequence. */
2934 insns = get_insns ();
2935 end_sequence ();
2936 emit_insn (insns);
2937 return result;
2940 /* If we were unable to expand via the builtin, stop the sequence
2941 (without outputting the insns) and call the library function
2942 with the stabilized argument list. */
2943 end_sequence ();
2946 if (fallback_fn != BUILT_IN_NONE)
2948 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2949 targets, (int) round (x) should never be transformed into
2950 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2951 a call to lround in the hope that the target provides at least some
2952 C99 functions. This should result in the best user experience for
2953 targets lacking full C99 support. */
2954 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2955 fallback_fn, 0);
2957 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2958 fallback_fndecl, 1, arg);
2960 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2961 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2962 return convert_to_mode (mode, target, 0);
2965 return expand_call (exp, target, target == const0_rtx);
2968 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2969 a normal call should be emitted rather than expanding the function
2970 in-line. EXP is the expression that is a call to the builtin
2971 function; if convenient, the result should be placed in TARGET. */
2973 static rtx
2974 expand_builtin_powi (tree exp, rtx target)
2976 tree arg0, arg1;
2977 rtx op0, op1;
2978 machine_mode mode;
2979 machine_mode mode2;
2981 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2984 arg0 = CALL_EXPR_ARG (exp, 0);
2985 arg1 = CALL_EXPR_ARG (exp, 1);
2986 mode = TYPE_MODE (TREE_TYPE (exp));
2988 /* Emit a libcall to libgcc. */
2990 /* Mode of the 2nd argument must match that of an int. */
2991 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2993 if (target == NULL_RTX)
2994 target = gen_reg_rtx (mode);
2996 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2997 if (GET_MODE (op0) != mode)
2998 op0 = convert_to_mode (mode, op0, 0);
2999 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3000 if (GET_MODE (op1) != mode2)
3001 op1 = convert_to_mode (mode2, op1, 0);
3003 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3004 target, LCT_CONST, mode, 2,
3005 op0, mode, op1, mode2);
3007 return target;
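/* Added illustration: __builtin_powi raises a float to an integral
   power; as noted above it is emitted here as a libgcc call, e.g.

     double d = __builtin_powi (x, 3);   // calls __powidf2 (x, 3)
 */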
3010 /* Expand expression EXP, which is a call to the strlen builtin. Return
3011 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3012 try to get the result in TARGET, if convenient. */
3014 static rtx
3015 expand_builtin_strlen (tree exp, rtx target,
3016 machine_mode target_mode)
3018 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3019 return NULL_RTX;
3020 else
3022 struct expand_operand ops[4];
3023 rtx pat;
3024 tree len;
3025 tree src = CALL_EXPR_ARG (exp, 0);
3026 rtx src_reg;
3027 rtx_insn *before_strlen;
3028 machine_mode insn_mode = target_mode;
3029 enum insn_code icode = CODE_FOR_nothing;
3030 unsigned int align;
3032 /* If the length can be computed at compile-time, return it. */
3033 len = c_strlen (src, 0);
3034 if (len)
3035 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3037 /* If the length can be computed at compile-time and is a constant
3038 integer, but there are side-effects in src, evaluate
3039 src for side-effects, then return len.
3040 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3041 can be optimized into: i++; x = 3; */
3042 len = c_strlen (src, 1);
3043 if (len && TREE_CODE (len) == INTEGER_CST)
3045 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3046 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3049 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3051 /* If SRC is not a pointer type, don't do this operation inline. */
3052 if (align == 0)
3053 return NULL_RTX;
3055 /* Bail out if we can't compute strlen in the right mode. */
3056 while (insn_mode != VOIDmode)
3058 icode = optab_handler (strlen_optab, insn_mode);
3059 if (icode != CODE_FOR_nothing)
3060 break;
3062 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3064 if (insn_mode == VOIDmode)
3065 return NULL_RTX;
3067 /* Make a place to hold the source address. We will not expand
3068 the actual source until we are sure that the expansion will
3069 not fail -- there are trees that cannot be expanded twice. */
3070 src_reg = gen_reg_rtx (Pmode);
3072 /* Mark the beginning of the strlen sequence so we can emit the
3073 source operand later. */
3074 before_strlen = get_last_insn ();
3076 create_output_operand (&ops[0], target, insn_mode);
3077 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3078 create_integer_operand (&ops[2], 0);
3079 create_integer_operand (&ops[3], align);
3080 if (!maybe_expand_insn (icode, 4, ops))
3081 return NULL_RTX;
3083 /* Now that we are assured of success, expand the source. */
3084 start_sequence ();
3085 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3086 if (pat != src_reg)
3088 #ifdef POINTERS_EXTEND_UNSIGNED
3089 if (GET_MODE (pat) != Pmode)
3090 pat = convert_to_mode (Pmode, pat,
3091 POINTERS_EXTEND_UNSIGNED);
3092 #endif
3093 emit_move_insn (src_reg, pat);
3095 pat = get_insns ();
3096 end_sequence ();
3098 if (before_strlen)
3099 emit_insn_after (pat, before_strlen);
3100 else
3101 emit_insn_before (pat, get_insns ());
3103 /* Return the value in the proper mode for this function. */
3104 if (GET_MODE (ops[0].value) == target_mode)
3105 target = ops[0].value;
3106 else if (target != 0)
3107 convert_move (target, ops[0].value, 0);
3108 else
3109 target = convert_to_mode (target_mode, ops[0].value, 0);
3111 return target;
3115 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3116 bytes from constant string DATA + OFFSET and return it as target
3117 constant. */
3119 static rtx
3120 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3121 machine_mode mode)
3123 const char *str = (const char *) data;
3125 gcc_assert (offset >= 0
3126 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3127 <= strlen (str) + 1));
3129 return c_readstr (str + offset, mode);
3132 /* LEN specifies the length of the block for the memcpy/memset operation.
3133 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3134 In some cases we can make a very likely guess about the maximum size;
3135 that guess is then stored into PROBABLE_MAX_SIZE. */
3137 static void
3138 determine_block_size (tree len, rtx len_rtx,
3139 unsigned HOST_WIDE_INT *min_size,
3140 unsigned HOST_WIDE_INT *max_size,
3141 unsigned HOST_WIDE_INT *probable_max_size)
3143 if (CONST_INT_P (len_rtx))
3145 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3146 return;
3148 else
3150 wide_int min, max;
3151 enum value_range_type range_type = VR_UNDEFINED;
3153 /* Determine bounds from the type. */
3154 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3155 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3156 else
3157 *min_size = 0;
3158 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3159 *probable_max_size = *max_size
3160 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3161 else
3162 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3164 if (TREE_CODE (len) == SSA_NAME)
3165 range_type = get_range_info (len, &min, &max);
3166 if (range_type == VR_RANGE)
3168 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3169 *min_size = min.to_uhwi ();
3170 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3171 *probable_max_size = *max_size = max.to_uhwi ();
3173 else if (range_type == VR_ANTI_RANGE)
3175 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3176 if (min == 0)
3178 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3179 *min_size = max.to_uhwi () + 1;
3181 /* Code like
3183 int n;
3184 if (n < 100)
3185 memcpy (a, b, n)
3187 produces an anti-range allowing negative values of N. We can still
3188 use that information and guess that N is not negative. */
3190 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3191 *probable_max_size = min.to_uhwi () - 1;
3194 gcc_checking_assert (*max_size <=
3195 (unsigned HOST_WIDE_INT)
3196 GET_MODE_MASK (GET_MODE (len_rtx)));
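/* Worked example (added illustration), with hypothetical buffers
   a and b:

     void f (unsigned n) { if (n > 64) return; memcpy (a, b, n); }

   VRP records n in [0, 64], so determine_block_size sets *min_size = 0
   and *max_size = *probable_max_size = 64, letting the block mover
   choose a short inline sequence.  */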
3199 /* Helper function to do the actual work for expand_builtin_memcpy. */
3201 static rtx
3202 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3204 const char *src_str;
3205 unsigned int src_align = get_pointer_alignment (src);
3206 unsigned int dest_align = get_pointer_alignment (dest);
3207 rtx dest_mem, src_mem, dest_addr, len_rtx;
3208 HOST_WIDE_INT expected_size = -1;
3209 unsigned int expected_align = 0;
3210 unsigned HOST_WIDE_INT min_size;
3211 unsigned HOST_WIDE_INT max_size;
3212 unsigned HOST_WIDE_INT probable_max_size;
3214 /* If DEST is not a pointer type, call the normal function. */
3215 if (dest_align == 0)
3216 return NULL_RTX;
3218 /* If SRC is not a pointer type, don't do this
3219 operation in-line. */
3220 if (src_align == 0)
3221 return NULL_RTX;
3223 if (currently_expanding_gimple_stmt)
3224 stringop_block_profile (currently_expanding_gimple_stmt,
3225 &expected_align, &expected_size);
3227 if (expected_align < dest_align)
3228 expected_align = dest_align;
3229 dest_mem = get_memory_rtx (dest, len);
3230 set_mem_align (dest_mem, dest_align);
3231 len_rtx = expand_normal (len);
3232 determine_block_size (len, len_rtx, &min_size, &max_size,
3233 &probable_max_size);
3234 src_str = c_getstr (src);
3236 /* If SRC is a string constant and block move would be done
3237 by pieces, we can avoid loading the string from memory
3238 and only store the computed constants. */
3239 if (src_str
3240 && CONST_INT_P (len_rtx)
3241 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3242 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3243 CONST_CAST (char *, src_str),
3244 dest_align, false))
3246 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3247 builtin_memcpy_read_str,
3248 CONST_CAST (char *, src_str),
3249 dest_align, false, 0);
3250 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3251 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3252 return dest_mem;
3255 src_mem = get_memory_rtx (src, len);
3256 set_mem_align (src_mem, src_align);
3258 /* Copy word part most expediently. */
3259 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3260 CALL_EXPR_TAILCALL (exp)
3261 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3262 expected_align, expected_size,
3263 min_size, max_size, probable_max_size);
3265 if (dest_addr == 0)
3267 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3268 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3271 return dest_addr;
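/* Added illustration: the string-constant path above turns

     char buf[8];
     memcpy (buf, "hi", 3);

   into direct stores of the constant bytes via store_by_pieces, never
   loading the literal from memory.  */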
3274 /* Expand a call EXP to the memcpy builtin.
3275 Return NULL_RTX if we failed; the caller should emit a normal call,
3276 otherwise try to get the result in TARGET, if convenient (and in
3277 mode MODE if that's convenient). */
3279 static rtx
3280 expand_builtin_memcpy (tree exp, rtx target)
3282 if (!validate_arglist (exp,
3283 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3284 return NULL_RTX;
3285 else
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 1);
3289 tree len = CALL_EXPR_ARG (exp, 2);
3290 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3294 /* Expand an instrumented call EXP to the memcpy builtin.
3295 Return NULL_RTX if we failed; the caller should emit a normal call,
3296 otherwise try to get the result in TARGET, if convenient (and in
3297 mode MODE if that's convenient). */
3299 static rtx
3300 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3302 if (!validate_arglist (exp,
3303 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3304 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3305 INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 2);
3311 tree len = CALL_EXPR_ARG (exp, 4);
3312 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3314 /* Return src bounds with the result. */
3315 if (res)
3317 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3318 expand_normal (CALL_EXPR_ARG (exp, 1)));
3319 res = chkp_join_splitted_slot (res, bnd);
3321 return res;
3325 /* Expand a call EXP to the mempcpy builtin.
3326 Return NULL_RTX if we failed; the caller should emit a normal call,
3327 otherwise try to get the result in TARGET, if convenient (and in
3328 mode MODE if that's convenient). If ENDP is 0 return the
3329 destination pointer, if ENDP is 1 return the end pointer ala
3330 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3331 stpcpy. */
3333 static rtx
3334 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3338 return NULL_RTX;
3339 else
3341 tree dest = CALL_EXPR_ARG (exp, 0);
3342 tree src = CALL_EXPR_ARG (exp, 1);
3343 tree len = CALL_EXPR_ARG (exp, 2);
3344 return expand_builtin_mempcpy_args (dest, src, len,
3345 target, mode, /*endp=*/ 1,
3346 exp);
3350 /* Expand an instrumented call EXP to the mempcpy builtin.
3351 Return NULL_RTX if we failed; the caller should emit a normal call,
3352 otherwise try to get the result in TARGET, if convenient (and in
3353 mode MODE if that's convenient). */
3355 static rtx
3356 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3358 if (!validate_arglist (exp,
3359 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3360 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3361 INTEGER_TYPE, VOID_TYPE))
3362 return NULL_RTX;
3363 else
3365 tree dest = CALL_EXPR_ARG (exp, 0);
3366 tree src = CALL_EXPR_ARG (exp, 2);
3367 tree len = CALL_EXPR_ARG (exp, 4);
3368 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3369 mode, 1, exp);
3371 /* Return src bounds with the result. */
3372 if (res)
3374 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3375 expand_normal (CALL_EXPR_ARG (exp, 1)));
3376 res = chkp_join_splitted_slot (res, bnd);
3378 return res;
3382 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3383 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3384 so that this can also be called without constructing an actual CALL_EXPR.
3385 The other arguments and return value are the same as for
3386 expand_builtin_mempcpy. */
3388 static rtx
3389 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3390 rtx target, machine_mode mode, int endp,
3391 tree orig_exp)
3393 tree fndecl = get_callee_fndecl (orig_exp);
3395 /* If return value is ignored, transform mempcpy into memcpy. */
3396 if (target == const0_rtx
3397 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3398 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3400 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3401 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3402 dest, src, len);
3403 return expand_expr (result, target, mode, EXPAND_NORMAL);
3405 else if (target == const0_rtx
3406 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3408 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3409 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3410 dest, src, len);
3411 return expand_expr (result, target, mode, EXPAND_NORMAL);
3413 else
3415 const char *src_str;
3416 unsigned int src_align = get_pointer_alignment (src);
3417 unsigned int dest_align = get_pointer_alignment (dest);
3418 rtx dest_mem, src_mem, len_rtx;
3420 /* If either SRC or DEST is not a pointer type, don't do this
3421 operation in-line. */
3422 if (dest_align == 0 || src_align == 0)
3423 return NULL_RTX;
3425 /* If LEN is not constant, call the normal function. */
3426 if (! tree_fits_uhwi_p (len))
3427 return NULL_RTX;
3429 len_rtx = expand_normal (len);
3430 src_str = c_getstr (src);
3432 /* If SRC is a string constant and block move would be done
3433 by pieces, we can avoid loading the string from memory
3434 and only store the computed constants. */
3435 if (src_str
3436 && CONST_INT_P (len_rtx)
3437 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3438 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3439 CONST_CAST (char *, src_str),
3440 dest_align, false))
3442 dest_mem = get_memory_rtx (dest, len);
3443 set_mem_align (dest_mem, dest_align);
3444 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3445 builtin_memcpy_read_str,
3446 CONST_CAST (char *, src_str),
3447 dest_align, false, endp);
3448 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3449 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3450 return dest_mem;
3453 if (CONST_INT_P (len_rtx)
3454 && can_move_by_pieces (INTVAL (len_rtx),
3455 MIN (dest_align, src_align)))
3457 dest_mem = get_memory_rtx (dest, len);
3458 set_mem_align (dest_mem, dest_align);
3459 src_mem = get_memory_rtx (src, len);
3460 set_mem_align (src_mem, src_align);
3461 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3462 MIN (dest_align, src_align), endp);
3463 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3464 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3465 return dest_mem;
3468 return NULL_RTX;
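/* Editorial sketch of the ENDP convention used by the copy expanders,
   in terms of the libc calls being open-coded (mempcpy and stpcpy are
   GNU extensions; this block is illustrative only):

     #define _GNU_SOURCE
     #include <string.h>

     char dst[8];
     void *a = memcpy (dst, "abc", 4);    // ENDP == 0: returns dst
     void *b = mempcpy (dst, "abc", 4);   // ENDP == 1: returns dst + 4
     char *c = stpcpy (dst, "abc");       // ENDP == 2: returns dst + 3,
                                          //   the address of the NUL  */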
3472 #ifndef HAVE_movstr
3473 # define HAVE_movstr 0
3474 # define CODE_FOR_movstr CODE_FOR_nothing
3475 #endif
3477 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3478 we failed; the caller should emit a normal call. Otherwise try to
3479 get the result in TARGET, if convenient. If ENDP is 0 return the
3480 destination pointer, if ENDP is 1 return the end pointer a la
3481 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3482 stpcpy. */
3484 static rtx
3485 expand_movstr (tree dest, tree src, rtx target, int endp)
3487 struct expand_operand ops[3];
3488 rtx dest_mem;
3489 rtx src_mem;
3491 if (!HAVE_movstr)
3492 return NULL_RTX;
3494 dest_mem = get_memory_rtx (dest, NULL);
3495 src_mem = get_memory_rtx (src, NULL);
3496 if (!endp)
3498 target = force_reg (Pmode, XEXP (dest_mem, 0));
3499 dest_mem = replace_equiv_address (dest_mem, target);
3502 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3503 create_fixed_operand (&ops[1], dest_mem);
3504 create_fixed_operand (&ops[2], src_mem);
3505 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3506 return NULL_RTX;
3508 if (endp && target != const0_rtx)
3510 target = ops[0].value;
3511 /* movstr is supposed to set end to the address of the NUL
3512 terminator. If the caller requested a mempcpy-like return value,
3513 adjust it. */
3514 if (endp == 1)
3516 rtx tem = plus_constant (GET_MODE (target),
3517 gen_lowpart (GET_MODE (target), target), 1);
3518 emit_move_insn (target, force_operand (tem, NULL_RTX));
3521 return target;
3524 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3525 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3526 try to get the result in TARGET, if convenient (and in mode MODE if that's
3527 convenient). */
3529 static rtx
3530 expand_builtin_strcpy (tree exp, rtx target)
3532 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3534 tree dest = CALL_EXPR_ARG (exp, 0);
3535 tree src = CALL_EXPR_ARG (exp, 1);
3536 return expand_builtin_strcpy_args (dest, src, target);
3538 return NULL_RTX;
3541 /* Helper function to do the actual work for expand_builtin_strcpy. The
3542 arguments to the builtin_strcpy call DEST and SRC are broken out
3543 so that this can also be called without constructing an actual CALL_EXPR.
3544 The other arguments and return value are the same as for
3545 expand_builtin_strcpy. */
3547 static rtx
3548 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3550 return expand_movstr (dest, src, target, /*endp=*/0);
3553 /* Expand a call EXP to the stpcpy builtin.
3554 Return NULL_RTX if we failed; the caller should emit a normal call;
3555 otherwise try to get the result in TARGET, if convenient (and in
3556 mode MODE if that's convenient). */
3558 static rtx
3559 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3561 tree dst, src;
3562 location_t loc = EXPR_LOCATION (exp);
3564 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3565 return NULL_RTX;
3567 dst = CALL_EXPR_ARG (exp, 0);
3568 src = CALL_EXPR_ARG (exp, 1);
3570 /* If return value is ignored, transform stpcpy into strcpy. */
3571 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3573 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3574 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3575 return expand_expr (result, target, mode, EXPAND_NORMAL);
3577 else
3579 tree len, lenp1;
3580 rtx ret;
3582 /* Ensure we get an actual string whose length can be evaluated at
3583 compile-time, not an expression containing a string. This is
3584 because the latter will potentially produce pessimized code
3585 when used to produce the return value. */
3586 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3587 return expand_movstr (dst, src, target, /*endp=*/2);
3589 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3590 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3591 target, mode, /*endp=*/2,
3592 exp);
3594 if (ret)
3595 return ret;
3597 if (TREE_CODE (len) == INTEGER_CST)
3599 rtx len_rtx = expand_normal (len);
3601 if (CONST_INT_P (len_rtx))
3603 ret = expand_builtin_strcpy_args (dst, src, target);
3605 if (ret)
3607 if (! target)
3609 if (mode != VOIDmode)
3610 target = gen_reg_rtx (mode);
3611 else
3612 target = gen_reg_rtx (GET_MODE (ret));
3614 if (GET_MODE (target) != GET_MODE (ret))
3615 ret = gen_lowpart (GET_MODE (target), ret);
3617 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3618 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3619 gcc_assert (ret);
3621 return target;
3626 return expand_movstr (dst, src, target, /*endp=*/2);
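/* Editorial sketch of the transformation above when SRC is a literal:

     char *q = stpcpy (dst, "hi");

   has the compile-time length strlen ("hi") + 1 == 3, so it is
   expanded as if written

     char *q = (char *) mempcpy (dst, "hi", 3) - 1;

   storing the bytes inline and yielding the address of the NUL.  */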
3630 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3631 bytes from constant string DATA + OFFSET and return it as target
3632 constant. */
3634 static rtx
3635 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3636 machine_mode mode)
3638 const char *str = (const char *) data;
3640 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3641 return const0_rtx;
3643 return c_readstr (str + offset, mode);
3646 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3647 NULL_RTX if we failed; the caller should emit a normal call. */
3649 static rtx
3650 expand_builtin_strncpy (tree exp, rtx target)
3652 location_t loc = EXPR_LOCATION (exp);
3654 if (validate_arglist (exp,
3655 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3657 tree dest = CALL_EXPR_ARG (exp, 0);
3658 tree src = CALL_EXPR_ARG (exp, 1);
3659 tree len = CALL_EXPR_ARG (exp, 2);
3660 tree slen = c_strlen (src, 1);
3662 /* We must be passed a constant len and src parameter. */
3663 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3664 return NULL_RTX;
3666 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3668 /* We're required to pad with trailing zeros if the requested
3669 len is greater than strlen(s2)+1. In that case try to
3670 use store_by_pieces; if it fails, punt. */
3671 if (tree_int_cst_lt (slen, len))
3673 unsigned int dest_align = get_pointer_alignment (dest);
3674 const char *p = c_getstr (src);
3675 rtx dest_mem;
3677 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3678 || !can_store_by_pieces (tree_to_uhwi (len),
3679 builtin_strncpy_read_str,
3680 CONST_CAST (char *, p),
3681 dest_align, false))
3682 return NULL_RTX;
3684 dest_mem = get_memory_rtx (dest, len);
3685 store_by_pieces (dest_mem, tree_to_uhwi (len),
3686 builtin_strncpy_read_str,
3687 CONST_CAST (char *, p), dest_align, false, 0);
3688 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3689 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3690 return dest_mem;
3693 return NULL_RTX;
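/* Editorial example of the padding path above:

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   Here strlen ("ab") + 1 == 3 < 8, so builtin_strncpy_read_str must
   supply "ab\0" followed by five more zero bytes (it returns
   const0_rtx for offsets past the end of the string); if those stores
   cannot be done by pieces, the expansion punts to the library call.  */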
3696 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3697 bytes from constant string DATA + OFFSET and return it as target
3698 constant. */
3700 static rtx
3701 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3702 machine_mode mode)
3704 const char *c = (const char *) data;
3705 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3707 memset (p, *c, GET_MODE_SIZE (mode));
3709 return c_readstr (p, mode);
3712 /* Callback routine for store_by_pieces. Return the RTL of a register
3713 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3714 char value given in the RTL register data. For example, if mode is
3715 4 bytes wide, return the RTL for 0x01010101*data. */
3717 static rtx
3718 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3719 machine_mode mode)
3721 rtx target, coeff;
3722 size_t size;
3723 char *p;
3725 size = GET_MODE_SIZE (mode);
3726 if (size == 1)
3727 return (rtx) data;
3729 p = XALLOCAVEC (char, size);
3730 memset (p, 1, size);
3731 coeff = c_readstr (p, mode);
3733 target = convert_to_mode (mode, (rtx) data, 1);
3734 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3735 return force_reg (mode, target);
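/* The multiplication trick above, sketched in plain C for a four-byte
   mode (editorial illustration):

     #include <stdint.h>

     static uint32_t
     splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;  // 0xAB -> 0xABABABAB
     }

   c_readstr on the all-ones buffer yields the 0x0101... coefficient of
   the mode's actual width.  */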
3738 /* Expand expression EXP, which is a call to the memset builtin. Return
3739 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3740 try to get the result in TARGET, if convenient (and in mode MODE if that's
3741 convenient). */
3743 static rtx
3744 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3746 if (!validate_arglist (exp,
3747 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3748 return NULL_RTX;
3749 else
3751 tree dest = CALL_EXPR_ARG (exp, 0);
3752 tree val = CALL_EXPR_ARG (exp, 1);
3753 tree len = CALL_EXPR_ARG (exp, 2);
3754 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3758 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3759 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3760 try to get the result in TARGET, if convenient (and in mode MODE if that's
3761 convenient). */
3763 static rtx
3764 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3766 if (!validate_arglist (exp,
3767 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3768 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3769 return NULL_RTX;
3770 else
3772 tree dest = CALL_EXPR_ARG (exp, 0);
3773 tree val = CALL_EXPR_ARG (exp, 2);
3774 tree len = CALL_EXPR_ARG (exp, 3);
3775 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3777 /* Return src bounds with the result. */
3778 if (res)
3780 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3781 expand_normal (CALL_EXPR_ARG (exp, 1)));
3782 res = chkp_join_splitted_slot (res, bnd);
3784 return res;
3788 /* Helper function to do the actual work for expand_builtin_memset. The
3789 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3790 so that this can also be called without constructing an actual CALL_EXPR.
3791 The other arguments and return value are the same as for
3792 expand_builtin_memset. */
3794 static rtx
3795 expand_builtin_memset_args (tree dest, tree val, tree len,
3796 rtx target, machine_mode mode, tree orig_exp)
3798 tree fndecl, fn;
3799 enum built_in_function fcode;
3800 machine_mode val_mode;
3801 char c;
3802 unsigned int dest_align;
3803 rtx dest_mem, dest_addr, len_rtx;
3804 HOST_WIDE_INT expected_size = -1;
3805 unsigned int expected_align = 0;
3806 unsigned HOST_WIDE_INT min_size;
3807 unsigned HOST_WIDE_INT max_size;
3808 unsigned HOST_WIDE_INT probable_max_size;
3810 dest_align = get_pointer_alignment (dest);
3812 /* If DEST is not a pointer type, don't do this operation in-line. */
3813 if (dest_align == 0)
3814 return NULL_RTX;
3816 if (currently_expanding_gimple_stmt)
3817 stringop_block_profile (currently_expanding_gimple_stmt,
3818 &expected_align, &expected_size);
3820 if (expected_align < dest_align)
3821 expected_align = dest_align;
3823 /* If the LEN parameter is zero, return DEST. */
3824 if (integer_zerop (len))
3826 /* Evaluate and ignore VAL in case it has side-effects. */
3827 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3828 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3831 /* Stabilize the arguments in case we fail. */
3832 dest = builtin_save_expr (dest);
3833 val = builtin_save_expr (val);
3834 len = builtin_save_expr (len);
3836 len_rtx = expand_normal (len);
3837 determine_block_size (len, len_rtx, &min_size, &max_size,
3838 &probable_max_size);
3839 dest_mem = get_memory_rtx (dest, len);
3840 val_mode = TYPE_MODE (unsigned_char_type_node);
3842 if (TREE_CODE (val) != INTEGER_CST)
3844 rtx val_rtx;
3846 val_rtx = expand_normal (val);
3847 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3849 /* Assume that we can memset by pieces if we can store
3850 the coefficients by pieces (in the required modes).
3851 We can't pass builtin_memset_gen_str as that emits RTL. */
3852 c = 1;
3853 if (tree_fits_uhwi_p (len)
3854 && can_store_by_pieces (tree_to_uhwi (len),
3855 builtin_memset_read_str, &c, dest_align,
3856 true))
3858 val_rtx = force_reg (val_mode, val_rtx);
3859 store_by_pieces (dest_mem, tree_to_uhwi (len),
3860 builtin_memset_gen_str, val_rtx, dest_align,
3861 true, 0);
3863 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3864 dest_align, expected_align,
3865 expected_size, min_size, max_size,
3866 probable_max_size))
3867 goto do_libcall;
3869 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3870 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3871 return dest_mem;
3874 if (target_char_cast (val, &c))
3875 goto do_libcall;
3877 if (c)
3879 if (tree_fits_uhwi_p (len)
3880 && can_store_by_pieces (tree_to_uhwi (len),
3881 builtin_memset_read_str, &c, dest_align,
3882 true))
3883 store_by_pieces (dest_mem, tree_to_uhwi (len),
3884 builtin_memset_read_str, &c, dest_align, true, 0);
3885 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3886 gen_int_mode (c, val_mode),
3887 dest_align, expected_align,
3888 expected_size, min_size, max_size,
3889 probable_max_size))
3890 goto do_libcall;
3892 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3893 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3894 return dest_mem;
3897 set_mem_align (dest_mem, dest_align);
3898 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3899 CALL_EXPR_TAILCALL (orig_exp)
3900 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3901 expected_align, expected_size,
3902 min_size, max_size,
3903 probable_max_size);
3905 if (dest_addr == 0)
3907 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3908 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3911 return dest_addr;
3913 do_libcall:
3914 fndecl = get_callee_fndecl (orig_exp);
3915 fcode = DECL_FUNCTION_CODE (fndecl);
3916 if (fcode == BUILT_IN_MEMSET
3917 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3918 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3919 dest, val, len);
3920 else if (fcode == BUILT_IN_BZERO)
3921 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3922 dest, len);
3923 else
3924 gcc_unreachable ();
3925 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3926 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3927 return expand_call (fn, target, target == const0_rtx);
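/* Editorial summary of the strategy ladder above, in decreasing order
   of preference: (1) LEN == 0 evaluates VAL for side effects and
   returns DEST; (2) a constant LEN whose stores are cheap enough goes
   through store_by_pieces; (3) otherwise a target setmem pattern is
   tried via set_storage_via_setmem; (4) a constant VAL of zero falls
   through to clear_storage_hints; (5) everything else becomes the
   library call built at do_libcall, which keeps the original callee
   (memset or bzero) so the fallback matches the source.  */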
3930 /* Expand expression EXP, which is a call to the bzero builtin. Return
3931 NULL_RTX if we failed; the caller should emit a normal call. */
3933 static rtx
3934 expand_builtin_bzero (tree exp)
3936 tree dest, size;
3937 location_t loc = EXPR_LOCATION (exp);
3939 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3940 return NULL_RTX;
3942 dest = CALL_EXPR_ARG (exp, 0);
3943 size = CALL_EXPR_ARG (exp, 1);
3945 /* New argument list transforming bzero(ptr x, int y) to
3946 memset(ptr x, int 0, size_t y). This is done this way
3947 so that if it isn't expanded inline, we fall back to
3948 calling bzero instead of memset. */
3950 return expand_builtin_memset_args (dest, integer_zero_node,
3951 fold_convert_loc (loc,
3952 size_type_node, size),
3953 const0_rtx, VOIDmode, exp);
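/* Editorial sketch of the rewrite above:

     bzero (p, n);

   is expanded as if it had been written

     memset (p, 0, (size_t) n);

   with the result discarded (const0_rtx target); only when inline
   expansion fails does the emitted library call remain bzero.  */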
3956 /* Expand expression EXP, which is a call to the memcmp built-in function.
3957 Return NULL_RTX if we failed and the caller should emit a normal call,
3958 otherwise try to get the result in TARGET, if convenient (and in mode
3959 MODE, if that's convenient). */
3961 static rtx
3962 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3963 ATTRIBUTE_UNUSED machine_mode mode)
3965 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3967 if (!validate_arglist (exp,
3968 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3969 return NULL_RTX;
3971 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3972 implementing memcmp because it will stop if it encounters two
3973 zero bytes. */
3974 #if defined HAVE_cmpmemsi
3976 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3977 rtx result;
3978 rtx insn;
3979 tree arg1 = CALL_EXPR_ARG (exp, 0);
3980 tree arg2 = CALL_EXPR_ARG (exp, 1);
3981 tree len = CALL_EXPR_ARG (exp, 2);
3983 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3984 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3985 machine_mode insn_mode;
3987 if (HAVE_cmpmemsi)
3988 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3989 else
3990 return NULL_RTX;
3992 /* If we don't have POINTER_TYPE, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
3996 /* Make a place to write the result of the instruction. */
3997 result = target;
3998 if (! (result != 0
3999 && REG_P (result) && GET_MODE (result) == insn_mode
4000 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4001 result = gen_reg_rtx (insn_mode);
4003 arg1_rtx = get_memory_rtx (arg1, len);
4004 arg2_rtx = get_memory_rtx (arg2, len);
4005 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4007 /* Set MEM_SIZE as appropriate. */
4008 if (CONST_INT_P (arg3_rtx))
4010 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4011 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4014 if (HAVE_cmpmemsi)
4015 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4016 GEN_INT (MIN (arg1_align, arg2_align)));
4017 else
4018 gcc_unreachable ();
4020 if (insn)
4021 emit_insn (insn);
4022 else
4023 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4024 TYPE_MODE (integer_type_node), 3,
4025 XEXP (arg1_rtx, 0), Pmode,
4026 XEXP (arg2_rtx, 0), Pmode,
4027 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4028 TYPE_UNSIGNED (sizetype)),
4029 TYPE_MODE (sizetype));
4031 /* Return the value in the proper mode for this function. */
4032 mode = TYPE_MODE (TREE_TYPE (exp));
4033 if (GET_MODE (result) == mode)
4034 return result;
4035 else if (target != 0)
4037 convert_move (target, result, 0);
4038 return target;
4040 else
4041 return convert_to_mode (mode, result, 0);
4043 #endif /* HAVE_cmpmemsi. */
4045 return NULL_RTX;
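/* Editorial example of the caveat above:

     memcmp ("a\0x", "a\0y", 3)

   must compare the third byte and return nonzero, but a cmpstrn-style
   pattern would stop at the matching NULs and wrongly report
   equality.  */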
4048 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4049 if we failed; the caller should emit a normal call. Otherwise try to get
4050 the result in TARGET, if convenient. */
4052 static rtx
4053 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4055 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4056 return NULL_RTX;
4058 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4059 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4060 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4062 rtx arg1_rtx, arg2_rtx;
4063 rtx result, insn = NULL_RTX;
4064 tree fndecl, fn;
4065 tree arg1 = CALL_EXPR_ARG (exp, 0);
4066 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4069 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4071 /* If we don't have POINTER_TYPE, call the function. */
4072 if (arg1_align == 0 || arg2_align == 0)
4073 return NULL_RTX;
4075 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4076 arg1 = builtin_save_expr (arg1);
4077 arg2 = builtin_save_expr (arg2);
4079 arg1_rtx = get_memory_rtx (arg1, NULL);
4080 arg2_rtx = get_memory_rtx (arg2, NULL);
4082 #ifdef HAVE_cmpstrsi
4083 /* Try to call cmpstrsi. */
4084 if (HAVE_cmpstrsi)
4086 machine_mode insn_mode
4087 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4089 /* Make a place to write the result of the instruction. */
4090 result = target;
4091 if (! (result != 0
4092 && REG_P (result) && GET_MODE (result) == insn_mode
4093 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4094 result = gen_reg_rtx (insn_mode);
4096 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4097 GEN_INT (MIN (arg1_align, arg2_align)));
4099 #endif
4100 #ifdef HAVE_cmpstrnsi
4101 /* Try to determine at least one length and call cmpstrnsi. */
4102 if (!insn && HAVE_cmpstrnsi)
4104 tree len;
4105 rtx arg3_rtx;
4107 machine_mode insn_mode
4108 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4109 tree len1 = c_strlen (arg1, 1);
4110 tree len2 = c_strlen (arg2, 1);
4112 if (len1)
4113 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4114 if (len2)
4115 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4117 /* If we don't have a constant length for the first, use the length
4118 of the second, if we know it. We don't require a constant for
4119 this case; some cost analysis could be done if both are available
4120 but neither is constant. For now, assume they're equally cheap,
4121 unless one has side effects. If both strings have constant lengths,
4122 use the smaller. */
4124 if (!len1)
4125 len = len2;
4126 else if (!len2)
4127 len = len1;
4128 else if (TREE_SIDE_EFFECTS (len1))
4129 len = len2;
4130 else if (TREE_SIDE_EFFECTS (len2))
4131 len = len1;
4132 else if (TREE_CODE (len1) != INTEGER_CST)
4133 len = len2;
4134 else if (TREE_CODE (len2) != INTEGER_CST)
4135 len = len1;
4136 else if (tree_int_cst_lt (len1, len2))
4137 len = len1;
4138 else
4139 len = len2;
4141 /* If both arguments have side effects, we cannot optimize. */
4142 if (!len || TREE_SIDE_EFFECTS (len))
4143 goto do_libcall;
4145 arg3_rtx = expand_normal (len);
4147 /* Make a place to write the result of the instruction. */
4148 result = target;
4149 if (! (result != 0
4150 && REG_P (result) && GET_MODE (result) == insn_mode
4151 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4152 result = gen_reg_rtx (insn_mode);
4154 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4155 GEN_INT (MIN (arg1_align, arg2_align)));
4157 #endif
4159 if (insn)
4161 machine_mode mode;
4162 emit_insn (insn);
4164 /* Return the value in the proper mode for this function. */
4165 mode = TYPE_MODE (TREE_TYPE (exp));
4166 if (GET_MODE (result) == mode)
4167 return result;
4168 if (target == 0)
4169 return convert_to_mode (mode, result, 0);
4170 convert_move (target, result, 0);
4171 return target;
4174 /* Expand the library call ourselves using a stabilized argument
4175 list to avoid re-evaluating the function's arguments twice. */
4176 #ifdef HAVE_cmpstrnsi
4177 do_libcall:
4178 #endif
4179 fndecl = get_callee_fndecl (exp);
4180 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4181 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4182 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4183 return expand_call (fn, target, target == const0_rtx);
4185 #endif
4186 return NULL_RTX;
4189 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4190 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4191 the result in TARGET, if convenient. */
4193 static rtx
4194 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4195 ATTRIBUTE_UNUSED machine_mode mode)
4197 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4199 if (!validate_arglist (exp,
4200 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4201 return NULL_RTX;
4203 /* If c_strlen can determine an expression for one of the string
4204 lengths, and it doesn't have side effects, then emit cmpstrnsi
4205 using length MIN(strlen(string)+1, arg3). */
4206 #ifdef HAVE_cmpstrnsi
4207 if (HAVE_cmpstrnsi)
4209 tree len, len1, len2;
4210 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4211 rtx result, insn;
4212 tree fndecl, fn;
4213 tree arg1 = CALL_EXPR_ARG (exp, 0);
4214 tree arg2 = CALL_EXPR_ARG (exp, 1);
4215 tree arg3 = CALL_EXPR_ARG (exp, 2);
4217 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4218 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4219 machine_mode insn_mode
4220 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4222 len1 = c_strlen (arg1, 1);
4223 len2 = c_strlen (arg2, 1);
4225 if (len1)
4226 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4227 if (len2)
4228 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4230 /* If we don't have a constant length for the first, use the length
4231 of the second, if we know it. We don't require a constant for
4232 this case; some cost analysis could be done if both are available
4233 but neither is constant. For now, assume they're equally cheap,
4234 unless one has side effects. If both strings have constant lengths,
4235 use the smaller. */
4237 if (!len1)
4238 len = len2;
4239 else if (!len2)
4240 len = len1;
4241 else if (TREE_SIDE_EFFECTS (len1))
4242 len = len2;
4243 else if (TREE_SIDE_EFFECTS (len2))
4244 len = len1;
4245 else if (TREE_CODE (len1) != INTEGER_CST)
4246 len = len2;
4247 else if (TREE_CODE (len2) != INTEGER_CST)
4248 len = len1;
4249 else if (tree_int_cst_lt (len1, len2))
4250 len = len1;
4251 else
4252 len = len2;
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len || TREE_SIDE_EFFECTS (len))
4256 return NULL_RTX;
4258 /* The actual new length parameter is MIN(len,arg3). */
4259 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4260 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4262 /* If we don't have POINTER_TYPE, call the function. */
4263 if (arg1_align == 0 || arg2_align == 0)
4264 return NULL_RTX;
4266 /* Make a place to write the result of the instruction. */
4267 result = target;
4268 if (! (result != 0
4269 && REG_P (result) && GET_MODE (result) == insn_mode
4270 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4271 result = gen_reg_rtx (insn_mode);
4273 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4274 arg1 = builtin_save_expr (arg1);
4275 arg2 = builtin_save_expr (arg2);
4276 len = builtin_save_expr (len);
4278 arg1_rtx = get_memory_rtx (arg1, len);
4279 arg2_rtx = get_memory_rtx (arg2, len);
4280 arg3_rtx = expand_normal (len);
4281 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4282 GEN_INT (MIN (arg1_align, arg2_align)));
4283 if (insn)
4285 emit_insn (insn);
4287 /* Return the value in the proper mode for this function. */
4288 mode = TYPE_MODE (TREE_TYPE (exp));
4289 if (GET_MODE (result) == mode)
4290 return result;
4291 if (target == 0)
4292 return convert_to_mode (mode, result, 0);
4293 convert_move (target, result, 0);
4294 return target;
4297 /* Expand the library call ourselves using a stabilized argument
4298 list to avoid re-evaluating the function's arguments twice. */
4299 fndecl = get_callee_fndecl (exp);
4300 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4301 arg1, arg2, len);
4302 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4303 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4304 return expand_call (fn, target, target == const0_rtx);
4306 #endif
4307 return NULL_RTX;
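/* Editorial example of the length computation above:

     strncmp (s, "ab", 100)

   is expanded with length MIN (strlen ("ab") + 1, 100) == 3, since no
   byte beyond the first NUL of a string argument can affect the
   result.  */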
4310 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4311 if that's convenient. */
4313 rtx
4314 expand_builtin_saveregs (void)
4316 rtx val;
4317 rtx_insn *seq;
4319 /* Don't do __builtin_saveregs more than once in a function.
4320 Save the result of the first call and reuse it. */
4321 if (saveregs_value != 0)
4322 return saveregs_value;
4324 /* When this function is called, it means that registers must be
4325 saved on entry to this function. So we migrate the call to the
4326 first insn of this function. */
4328 start_sequence ();
4330 /* Do whatever the machine needs done in this case. */
4331 val = targetm.calls.expand_builtin_saveregs ();
4333 seq = get_insns ();
4334 end_sequence ();
4336 saveregs_value = val;
4338 /* Put the insns after the NOTE that starts the function. If this
4339 is inside a start_sequence, make the outer-level insn chain current, so
4340 the code is placed at the start of the function. */
4341 push_topmost_sequence ();
4342 emit_insn_after (seq, entry_of_function ());
4343 pop_topmost_sequence ();
4345 return val;
4348 /* Expand a call to __builtin_next_arg. */
4350 static rtx
4351 expand_builtin_next_arg (void)
4353 /* Checking arguments is already done in fold_builtin_next_arg
4354 that must be called before this function. */
4355 return expand_binop (ptr_mode, add_optab,
4356 crtl->args.internal_arg_pointer,
4357 crtl->args.arg_offset_rtx,
4358 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4361 /* Make it easier for the backends by protecting the valist argument
4362 from multiple evaluations. */
4364 static tree
4365 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4367 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4369 /* The current way of determining the type of valist is completely
4370 bogus. We should have the information on the va builtin instead. */
4371 if (!vatype)
4372 vatype = targetm.fn_abi_va_list (cfun->decl);
4374 if (TREE_CODE (vatype) == ARRAY_TYPE)
4376 if (TREE_SIDE_EFFECTS (valist))
4377 valist = save_expr (valist);
4379 /* For this case, the backends will be expecting a pointer to
4380 vatype, but it's possible we've actually been given an array
4381 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4382 So fix it. */
4383 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4385 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4386 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4389 else
4391 tree pt = build_pointer_type (vatype);
4393 if (! needs_lvalue)
4395 if (! TREE_SIDE_EFFECTS (valist))
4396 return valist;
4398 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4399 TREE_SIDE_EFFECTS (valist) = 1;
4402 if (TREE_SIDE_EFFECTS (valist))
4403 valist = save_expr (valist);
4404 valist = fold_build2_loc (loc, MEM_REF,
4405 vatype, valist, build_int_cst (pt, 0));
4408 return valist;
4411 /* The "standard" definition of va_list is void*. */
4413 tree
4414 std_build_builtin_va_list (void)
4416 return ptr_type_node;
4419 /* The "standard" abi va_list is va_list_type_node. */
4421 tree
4422 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4424 return va_list_type_node;
4427 /* The "standard" type of va_list is va_list_type_node. */
4429 tree
4430 std_canonical_va_list_type (tree type)
4432 tree wtype, htype;
4434 if (INDIRECT_REF_P (type))
4435 type = TREE_TYPE (type);
4436 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4437 type = TREE_TYPE (type);
4438 wtype = va_list_type_node;
4439 htype = type;
4440 /* Treat structure va_list types. */
4441 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4442 htype = TREE_TYPE (htype);
4443 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4445 /* If va_list is an array type, the argument may have decayed
4446 to a pointer type, e.g. by being passed to another function.
4447 In that case, unwrap both types so that we can compare the
4448 underlying records. */
4449 if (TREE_CODE (htype) == ARRAY_TYPE
4450 || POINTER_TYPE_P (htype))
4452 wtype = TREE_TYPE (wtype);
4453 htype = TREE_TYPE (htype);
4456 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4457 return va_list_type_node;
4459 return NULL_TREE;
4462 /* The "standard" implementation of va_start: just assign `nextarg' to
4463 the variable. */
4465 void
4466 std_expand_builtin_va_start (tree valist, rtx nextarg)
4468 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4469 convert_move (va_r, nextarg, 0);
4471 /* We do not have any valid bounds for the pointer, so
4472 just store zero bounds for it. */
4473 if (chkp_function_instrumented_p (current_function_decl))
4474 chkp_expand_bounds_reset_for_mem (valist,
4475 make_tree (TREE_TYPE (valist),
4476 nextarg));
4479 /* Expand EXP, a call to __builtin_va_start. */
4481 static rtx
4482 expand_builtin_va_start (tree exp)
4484 rtx nextarg;
4485 tree valist;
4486 location_t loc = EXPR_LOCATION (exp);
4488 if (call_expr_nargs (exp) < 2)
4490 error_at (loc, "too few arguments to function %<va_start%>");
4491 return const0_rtx;
4494 if (fold_builtin_next_arg (exp, true))
4495 return const0_rtx;
4497 nextarg = expand_builtin_next_arg ();
4498 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4500 if (targetm.expand_builtin_va_start)
4501 targetm.expand_builtin_va_start (valist, nextarg);
4502 else
4503 std_expand_builtin_va_start (valist, nextarg);
4505 return const0_rtx;
4508 /* Expand EXP, a call to __builtin_va_end. */
4510 static rtx
4511 expand_builtin_va_end (tree exp)
4513 tree valist = CALL_EXPR_ARG (exp, 0);
4515 /* Evaluate for side effects, if needed. I hate macros that don't
4516 do that. */
4517 if (TREE_SIDE_EFFECTS (valist))
4518 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4520 return const0_rtx;
4523 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4524 builtin rather than just as an assignment in stdarg.h because of the
4525 nastiness of array-type va_list types. */
4527 static rtx
4528 expand_builtin_va_copy (tree exp)
4530 tree dst, src, t;
4531 location_t loc = EXPR_LOCATION (exp);
4533 dst = CALL_EXPR_ARG (exp, 0);
4534 src = CALL_EXPR_ARG (exp, 1);
4536 dst = stabilize_va_list_loc (loc, dst, 1);
4537 src = stabilize_va_list_loc (loc, src, 0);
4539 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4541 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4543 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4544 TREE_SIDE_EFFECTS (t) = 1;
4545 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4547 else
4549 rtx dstb, srcb, size;
4551 /* Evaluate to pointers. */
4552 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4553 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4554 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4555 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4557 dstb = convert_memory_address (Pmode, dstb);
4558 srcb = convert_memory_address (Pmode, srcb);
4560 /* "Dereference" to BLKmode memories. */
4561 dstb = gen_rtx_MEM (BLKmode, dstb);
4562 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4563 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4564 srcb = gen_rtx_MEM (BLKmode, srcb);
4565 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4566 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4568 /* Copy. */
4569 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4572 return const0_rtx;
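/* Editorial note on the array branch above: on targets where va_list
   is an array type, e.g. the x86-64 ABI's

     typedef struct __va_list_tag __builtin_va_list[1];

   a plain "dst = src" assignment in stdarg.h would be invalid (arrays
   are not assignable), so va_copy must be a builtin and the copy is
   emitted as a block move of the underlying structure.  */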
4575 /* Expand a call to one of the builtin functions __builtin_frame_address or
4576 __builtin_return_address. */
4578 static rtx
4579 expand_builtin_frame_address (tree fndecl, tree exp)
4581 /* The argument must be a nonnegative integer constant.
4582 It counts the number of frames to scan up the stack.
4583 The value is the return address saved in that frame. */
4584 if (call_expr_nargs (exp) == 0)
4585 /* Warning about missing arg was already issued. */
4586 return const0_rtx;
4587 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4589 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4590 error ("invalid argument to %<__builtin_frame_address%>");
4591 else
4592 error ("invalid argument to %<__builtin_return_address%>");
4593 return const0_rtx;
4595 else
4597 rtx tem
4598 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4599 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4601 /* Some ports cannot access arbitrary stack frames. */
4602 if (tem == NULL)
4604 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4605 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4606 else
4607 warning (0, "unsupported argument to %<__builtin_return_address%>");
4608 return const0_rtx;
4611 /* For __builtin_frame_address, return what we've got. */
4612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4613 return tem;
4615 if (!REG_P (tem)
4616 && ! CONSTANT_P (tem))
4617 tem = copy_addr_to_reg (tem);
4618 return tem;
4622 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4623 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4624 is the same as for allocate_dynamic_stack_space. */
4626 static rtx
4627 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4629 rtx op0;
4630 rtx result;
4631 bool valid_arglist;
4632 unsigned int align;
4633 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4634 == BUILT_IN_ALLOCA_WITH_ALIGN);
4636 valid_arglist
4637 = (alloca_with_align
4638 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4639 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4641 if (!valid_arglist)
4642 return NULL_RTX;
4644 /* Compute the argument. */
4645 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4647 /* Compute the alignment. */
4648 align = (alloca_with_align
4649 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4650 : BIGGEST_ALIGNMENT);
4652 /* Allocate the desired space. */
4653 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4654 result = convert_memory_address (ptr_mode, result);
4656 return result;
4659 /* Expand a call to bswap builtin in EXP.
4660 Return NULL_RTX if a normal call should be emitted rather than expanding the
4661 function in-line. If convenient, the result should be placed in TARGET.
4662 SUBTARGET may be used as the target for computing one of EXP's operands. */
4664 static rtx
4665 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4666 rtx subtarget)
4668 tree arg;
4669 rtx op0;
4671 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4672 return NULL_RTX;
4674 arg = CALL_EXPR_ARG (exp, 0);
4675 op0 = expand_expr (arg,
4676 subtarget && GET_MODE (subtarget) == target_mode
4677 ? subtarget : NULL_RTX,
4678 target_mode, EXPAND_NORMAL);
4679 if (GET_MODE (op0) != target_mode)
4680 op0 = convert_to_mode (target_mode, op0, 1);
4682 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4684 gcc_assert (target);
4686 return convert_to_mode (target_mode, target, 1);
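/* Editorial usage sketch: a call such as

     __builtin_bswap32 (0x11223344)   // yields 0x44332211

   reaches this expander, which emits the target's byte-swap insn via
   bswap_optab where one exists.  */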
4689 /* Expand a call to a unary builtin in EXP.
4690 Return NULL_RTX if a normal call should be emitted rather than expanding the
4691 function in-line. If convenient, the result should be placed in TARGET.
4692 SUBTARGET may be used as the target for computing one of EXP's operands. */
4694 static rtx
4695 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4696 rtx subtarget, optab op_optab)
4698 rtx op0;
4700 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4701 return NULL_RTX;
4703 /* Compute the argument. */
4704 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4705 (subtarget
4706 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4707 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4708 VOIDmode, EXPAND_NORMAL);
4709 /* Compute op, into TARGET if possible.
4710 Set TARGET to wherever the result comes back. */
4711 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4712 op_optab, op0, target, op_optab != clrsb_optab);
4713 gcc_assert (target);
4715 return convert_to_mode (target_mode, target, 0);
4718 /* Expand a call to __builtin_expect. We just return our argument
4719 as the builtin_expect semantics should already have been applied by
4720 the tree branch prediction pass. */
4722 static rtx
4723 expand_builtin_expect (tree exp, rtx target)
4725 tree arg;
4727 if (call_expr_nargs (exp) < 2)
4728 return const0_rtx;
4729 arg = CALL_EXPR_ARG (exp, 0);
4731 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4732 /* When guessing was done, the hints should be already stripped away. */
4733 gcc_assert (!flag_guess_branch_prob
4734 || optimize == 0 || seen_error ());
4735 return target;
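/* Editorial example: by the time

     if (__builtin_expect (x == 0, 0)) ...

   reaches RTL expansion, the tree branch prediction pass has already
   consumed the hint, so only the first argument, x == 0, is expanded
   here.  */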
4738 /* Expand a call to __builtin_assume_aligned. We just return our first
4739 argument as the builtin_assume_aligned semantics should already have
4740 been applied by CCP. */
4742 static rtx
4743 expand_builtin_assume_aligned (tree exp, rtx target)
4745 if (call_expr_nargs (exp) < 2)
4746 return const0_rtx;
4747 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4748 EXPAND_NORMAL);
4749 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4750 && (call_expr_nargs (exp) < 3
4751 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4752 return target;
4755 void
4756 expand_builtin_trap (void)
4758 #ifdef HAVE_trap
4759 if (HAVE_trap)
4761 rtx insn = emit_insn (gen_trap ());
4762 /* For trap insns when not accumulating outgoing args force
4763 REG_ARGS_SIZE note to prevent crossjumping of calls with
4764 different args sizes. */
4765 if (!ACCUMULATE_OUTGOING_ARGS)
4766 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4768 else
4769 #endif
4770 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4771 emit_barrier ();
4774 /* Expand a call to __builtin_unreachable. We do nothing except emit
4775 a barrier saying that control flow will not pass here.
4777 It is the responsibility of the program being compiled to ensure
4778 that control flow never reaches __builtin_unreachable. */
4779 static void
4780 expand_builtin_unreachable (void)
4782 emit_barrier ();
4785 /* Expand EXP, a call to fabs, fabsf or fabsl.
4786 Return NULL_RTX if a normal call should be emitted rather than expanding
4787 the function inline. If convenient, the result should be placed
4788 in TARGET. SUBTARGET may be used as the target for computing
4789 the operand. */
4791 static rtx
4792 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4794 machine_mode mode;
4795 tree arg;
4796 rtx op0;
4798 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4799 return NULL_RTX;
4801 arg = CALL_EXPR_ARG (exp, 0);
4802 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4803 mode = TYPE_MODE (TREE_TYPE (arg));
4804 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4805 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4808 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4809 Return NULL if a normal call should be emitted rather than expanding the
4810 function inline. If convenient, the result should be placed in TARGET.
4811 SUBTARGET may be used as the target for computing the operand. */
4813 static rtx
4814 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4816 rtx op0, op1;
4817 tree arg;
4819 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4820 return NULL_RTX;
4822 arg = CALL_EXPR_ARG (exp, 0);
4823 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4825 arg = CALL_EXPR_ARG (exp, 1);
4826 op1 = expand_normal (arg);
4828 return expand_copysign (op0, op1, target);
4831 /* Expand a call to __builtin___clear_cache. */
4833 static rtx
4834 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4836 #ifndef HAVE_clear_cache
4837 #ifdef CLEAR_INSN_CACHE
4838 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4839 does something. Just do the default expansion to a call to
4840 __clear_cache(). */
4841 return NULL_RTX;
4842 #else
4843 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4844 does nothing. There is no need to call it. Do nothing. */
4845 return const0_rtx;
4846 #endif /* CLEAR_INSN_CACHE */
4847 #else
4848 /* We have a "clear_cache" insn, and it will handle everything. */
4849 tree begin, end;
4850 rtx begin_rtx, end_rtx;
4852 /* We must not expand to a library call. If we did, any
4853 fallback library function in libgcc that might contain a call to
4854 __builtin___clear_cache() would recurse infinitely. */
4855 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4857 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4858 return const0_rtx;
4861 if (HAVE_clear_cache)
4863 struct expand_operand ops[2];
4865 begin = CALL_EXPR_ARG (exp, 0);
4866 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4868 end = CALL_EXPR_ARG (exp, 1);
4869 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4871 create_address_operand (&ops[0], begin_rtx);
4872 create_address_operand (&ops[1], end_rtx);
4873 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4874 return const0_rtx;
4876 return const0_rtx;
4877 #endif /* HAVE_clear_cache */
4880 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4882 static rtx
4883 round_trampoline_addr (rtx tramp)
4885 rtx temp, addend, mask;
4887 /* If we don't need too much alignment, we'll have been guaranteed
4888 proper alignment by get_trampoline_type. */
4889 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4890 return tramp;
4892 /* Round address up to desired boundary. */
4893 temp = gen_reg_rtx (Pmode);
4894 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4895 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4897 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4898 temp, 0, OPTAB_LIB_WIDEN);
4899 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4900 temp, 0, OPTAB_LIB_WIDEN);
4902 return tramp;
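/* The rounding above in plain C (editorial sketch; A is the alignment
   in bytes, a power of two):

     #include <stdint.h>

     static uintptr_t
     round_up (uintptr_t addr, uintptr_t a)
     {
       return (addr + (a - 1)) & -a;
     }

   matching the PLUS with A - 1 followed by the AND with -A emitted
   above.  */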
4905 static rtx
4906 expand_builtin_init_trampoline (tree exp, bool onstack)
4908 tree t_tramp, t_func, t_chain;
4909 rtx m_tramp, r_tramp, r_chain, tmp;
4911 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4912 POINTER_TYPE, VOID_TYPE))
4913 return NULL_RTX;
4915 t_tramp = CALL_EXPR_ARG (exp, 0);
4916 t_func = CALL_EXPR_ARG (exp, 1);
4917 t_chain = CALL_EXPR_ARG (exp, 2);
4919 r_tramp = expand_normal (t_tramp);
4920 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4921 MEM_NOTRAP_P (m_tramp) = 1;
4923 /* If ONSTACK, the TRAMP argument should be the address of a field
4924 within the local function's FRAME decl. Either way, let's see if
4925 we can fill in the MEM_ATTRs for this memory. */
4926 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4927 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4929 /* Creator of a heap trampoline is responsible for making sure the
4930 address is aligned to at least STACK_BOUNDARY. Normally malloc
4931 will ensure this anyhow. */
4932 tmp = round_trampoline_addr (r_tramp);
4933 if (tmp != r_tramp)
4935 m_tramp = change_address (m_tramp, BLKmode, tmp);
4936 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4937 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4940 /* The FUNC argument should be the address of the nested function.
4941 Extract the actual function decl to pass to the hook. */
4942 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4943 t_func = TREE_OPERAND (t_func, 0);
4944 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4946 r_chain = expand_normal (t_chain);
4948 /* Generate insns to initialize the trampoline. */
4949 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4951 if (onstack)
4953 trampolines_created = 1;
4955 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4956 "trampoline generated for nested function %qD", t_func);
4959 return const0_rtx;
4962 static rtx
4963 expand_builtin_adjust_trampoline (tree exp)
4965 rtx tramp;
4967 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4968 return NULL_RTX;
4970 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4971 tramp = round_trampoline_addr (tramp);
4972 if (targetm.calls.trampoline_adjust_address)
4973 tramp = targetm.calls.trampoline_adjust_address (tramp);
4975 return tramp;
4978 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4979 function. The function first checks whether the back end provides
4980 an insn to implement signbit for the respective mode. If not, it
4981 checks whether the floating point format of the value is such that
4982 the sign bit can be extracted. If that is not the case, the
4983 function returns NULL_RTX to indicate that a normal call should be
4984 emitted rather than expanding the function in-line. EXP is the
4985 expression that is a call to the builtin function; if convenient,
4986 the result should be placed in TARGET. */
4987 static rtx
4988 expand_builtin_signbit (tree exp, rtx target)
4990 const struct real_format *fmt;
4991 machine_mode fmode, imode, rmode;
4992 tree arg;
4993 int word, bitpos;
4994 enum insn_code icode;
4995 rtx temp;
4996 location_t loc = EXPR_LOCATION (exp);
4998 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4999 return NULL_RTX;
5001 arg = CALL_EXPR_ARG (exp, 0);
5002 fmode = TYPE_MODE (TREE_TYPE (arg));
5003 rmode = TYPE_MODE (TREE_TYPE (exp));
5004 fmt = REAL_MODE_FORMAT (fmode);
5006 arg = builtin_save_expr (arg);
5008 /* Expand the argument yielding a RTX expression. */
5009 temp = expand_normal (arg);
5011 /* Check if the back end provides an insn that handles signbit for the
5012 argument's mode. */
5013 icode = optab_handler (signbit_optab, fmode);
5014 if (icode != CODE_FOR_nothing)
5016 rtx_insn *last = get_last_insn ();
5017 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5018 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5019 return target;
5020 delete_insns_since (last);
5023 /* For floating point formats without a sign bit, implement signbit
5024 as "ARG < 0.0". */
5025 bitpos = fmt->signbit_ro;
5026 if (bitpos < 0)
5028 /* But we can't do this if the format supports signed zero. */
5029 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5030 return NULL_RTX;
5032 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5033 build_real (TREE_TYPE (arg), dconst0));
5034 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5037 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5039 imode = int_mode_for_mode (fmode);
5040 if (imode == BLKmode)
5041 return NULL_RTX;
5042 temp = gen_lowpart (imode, temp);
5044 else
5046 imode = word_mode;
5047 /* Handle targets with different FP word orders. */
5048 if (FLOAT_WORDS_BIG_ENDIAN)
5049 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5050 else
5051 word = bitpos / BITS_PER_WORD;
5052 temp = operand_subword_force (temp, word, fmode);
5053 bitpos = bitpos % BITS_PER_WORD;
5056 /* Force the intermediate word_mode (or narrower) result into a
5057 register. This avoids attempting to create paradoxical SUBREGs
5058 of floating point modes below. */
5059 temp = force_reg (imode, temp);
5061 /* If the bitpos is within the "result mode" lowpart, the operation
5062 can be implemented with a single bitwise AND. Otherwise, we need
5063 a right shift and an AND. */
5065 if (bitpos < GET_MODE_BITSIZE (rmode))
5067 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5069 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5070 temp = gen_lowpart (rmode, temp);
5071 temp = expand_binop (rmode, and_optab, temp,
5072 immed_wide_int_const (mask, rmode),
5073 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5075 else
5077 /* Perform a logical right shift to place the signbit in the least
5078 significant bit, then truncate the result to the desired mode
5079 and mask just this bit. */
5080 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5081 temp = gen_lowpart (rmode, temp);
5082 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5083 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5086 return temp;
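/* Editorial sketch of the shift-and-mask fallback for IEEE double on
   a 64-bit target (bitpos == 63, with a narrower result mode):

     #include <stdint.h>
     #include <string.h>

     static int
     signbit_fallback (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);   // the gen_lowpart step
       return (int) (bits >> 63) & 1;     // logical shift, then AND
     }

   corresponding to expand_shift followed by the and_optab step
   above.  */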
5089 /* Expand fork or exec calls. TARGET is the desired target of the
5090 call. EXP is the call. FN is the
5091 identifier of the actual function. IGNORE is nonzero if the
5092 value is to be ignored. */
5094 static rtx
5095 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5097 tree id, decl;
5098 tree call;
5100 /* If we are not profiling, just call the function. */
5101 if (!profile_arc_flag)
5102 return NULL_RTX;
5104 /* Otherwise call the wrapper. This should be equivalent for the rest of
5105 the compiler, so the code does not diverge, and the wrapper may run the
5106 code necessary for keeping the profiling sane. */
5108 switch (DECL_FUNCTION_CODE (fn))
5110 case BUILT_IN_FORK:
5111 id = get_identifier ("__gcov_fork");
5112 break;
5114 case BUILT_IN_EXECL:
5115 id = get_identifier ("__gcov_execl");
5116 break;
5118 case BUILT_IN_EXECV:
5119 id = get_identifier ("__gcov_execv");
5120 break;
5122 case BUILT_IN_EXECLP:
5123 id = get_identifier ("__gcov_execlp");
5124 break;
5126 case BUILT_IN_EXECLE:
5127 id = get_identifier ("__gcov_execle");
5128 break;
5130 case BUILT_IN_EXECVP:
5131 id = get_identifier ("__gcov_execvp");
5132 break;
5134 case BUILT_IN_EXECVE:
5135 id = get_identifier ("__gcov_execve");
5136 break;
5138 default:
5139 gcc_unreachable ();
5142 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5143 FUNCTION_DECL, id, TREE_TYPE (fn));
5144 DECL_EXTERNAL (decl) = 1;
5145 TREE_PUBLIC (decl) = 1;
5146 DECL_ARTIFICIAL (decl) = 1;
5147 TREE_NOTHROW (decl) = 1;
5148 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5149 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5150 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5151 return expand_call (call, target, ignore);
5156 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5157 the pointer in these functions is void*, the tree optimizers may remove
5158 casts. The mode computed in expand_builtin isn't reliable either, due
5159 to __sync_bool_compare_and_swap.
5161 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5162 group of builtins. This gives us log2 of the mode size. */
5164 static inline machine_mode
5165 get_builtin_sync_mode (int fcode_diff)
5167 /* The size is not negotiable, so ask not to get BLKmode in return
5168 if the target indicates that a smaller size would be better. */
5169 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
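/* Editorial illustration: the __sync_*_N builtins are laid out so
   FCODE_DIFF is log2 of the access size in bytes (modes shown for a
   typical target):

     diff 0 -> 1 byte   (QImode)  e.g. __sync_fetch_and_add_1
     diff 1 -> 2 bytes  (HImode)       __sync_fetch_and_add_2
     diff 2 -> 4 bytes  (SImode)       __sync_fetch_and_add_4
     diff 3 -> 8 bytes  (DImode)       __sync_fetch_and_add_8
     diff 4 -> 16 bytes (TImode)       __sync_fetch_and_add_16  */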
5172 /* Expand the memory expression LOC and return the appropriate memory operand
5173 for the builtin_sync operations. */
5175 static rtx
5176 get_builtin_sync_mem (tree loc, machine_mode mode)
5178 rtx addr, mem;
5180 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5181 addr = convert_memory_address (Pmode, addr);
5183 /* Note that we explicitly do not want any alias information for this
5184 memory, so that we kill all other live memories. Otherwise we don't
5185 satisfy the full barrier semantics of the intrinsic. */
5186 mem = validize_mem (gen_rtx_MEM (mode, addr));
5188 /* The alignment needs to be at least that of the mode. */
5189 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5190 get_pointer_alignment (loc)));
5191 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5192 MEM_VOLATILE_P (mem) = 1;
5194 return mem;
5197 /* Make sure an argument is in the right mode.
5198 EXP is the tree argument.
5199 MODE is the mode it should be in. */
5201 static rtx
5202 expand_expr_force_mode (tree exp, machine_mode mode)
5204 rtx val;
5205 machine_mode old_mode;
5207 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5208 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5209 of CONST_INTs, where we know the old_mode only from the call argument. */
5211 old_mode = GET_MODE (val);
5212 if (old_mode == VOIDmode)
5213 old_mode = TYPE_MODE (TREE_TYPE (exp));
5214 val = convert_modes (mode, old_mode, val, 1);
5215 return val;
5219 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5220 EXP is the CALL_EXPR. CODE is the rtx code
5221 that corresponds to the arithmetic or logical operation from the name;
5222 an exception here is that NOT actually means NAND. TARGET is an optional
5223 place for us to store the results; AFTER is true if this is the
5224 fetch_and_xxx form. */
5226 static rtx
5227 expand_builtin_sync_operation (machine_mode mode, tree exp,
5228 enum rtx_code code, bool after,
5229 rtx target)
5231 rtx val, mem;
5232 location_t loc = EXPR_LOCATION (exp);
5234 if (code == NOT && warn_sync_nand)
5236 tree fndecl = get_callee_fndecl (exp);
5237 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5239 static bool warned_f_a_n, warned_n_a_f;
5241 switch (fcode)
5243 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5244 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5245 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5246 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5247 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5248 if (warned_f_a_n)
5249 break;
5251 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5252 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5253 warned_f_a_n = true;
5254 break;
5256 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5257 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5258 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5259 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5260 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5261 if (warned_n_a_f)
5262 break;
5264 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5265 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5266 warned_n_a_f = true;
5267 break;
5269 default:
5270 gcc_unreachable ();
5274 /* Expand the operands. */
5275 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5276 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5278 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5279 after);
5282 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5283 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5284 true if this is the boolean form. TARGET is a place for us to store the
5285 results; this is NOT optional if IS_BOOL is true. */
5287 static rtx
5288 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5289 bool is_bool, rtx target)
5291 rtx old_val, new_val, mem;
5292 rtx *pbool, *poval;
5294 /* Expand the operands. */
5295 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5296 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5297 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5299 pbool = poval = NULL;
5300 if (target != const0_rtx)
5302 if (is_bool)
5303 pbool = &target;
5304 else
5305 poval = &target;
5307 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5308 false, MEMMODEL_SYNC_SEQ_CST,
5309 MEMMODEL_SYNC_SEQ_CST))
5310 return NULL_RTX;
5312 return target;
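/* Editor's example (illustrative): the two compare-and-swap forms
   expanded above differ only in what they return; both are full
   (MEMMODEL_SYNC_SEQ_CST) barriers:

     int x = 0;
     int ok  = __sync_bool_compare_and_swap (&x, 0, 1);  // ok == 1, x == 1
     int old = __sync_val_compare_and_swap (&x, 1, 2);   // old == 1, x == 2
*/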
5315 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5316 general form is actually an atomic exchange, and some targets only
5317 support a reduced form with the second argument being a constant 1.
5318 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5319 the results. */
5321 static rtx
5322 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5323 rtx target)
5325 rtx val, mem;
5327 /* Expand the operands. */
5328 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5329 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5331 return expand_sync_lock_test_and_set (target, mem, val);
5334 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5336 static void
5337 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5339 rtx mem;
5341 /* Expand the operands. */
5342 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5344 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
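/* Editor's example (illustrative): the classic pairing of the two
   expanders above as a test-and-set spinlock.  Per the GCC docs,
   __sync_lock_test_and_set is an acquire barrier and
   __sync_lock_release stores 0 with release semantics:

     static volatile int lock;

     while (__sync_lock_test_and_set (&lock, 1))
       ;                            // spin until we observe a 0 -> 1 transition
     // ... critical section ...
     __sync_lock_release (&lock);   // lock = 0, release barrier
*/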
5347 /* Given an integer representing an ``enum memmodel'', verify its
5348 correctness and return the memory model enum. */
5350 static enum memmodel
5351 get_memmodel (tree exp)
5353 rtx op;
5354 unsigned HOST_WIDE_INT val;
5356 /* If the parameter is not a constant, it's a run time value so we'll just
5357 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5358 if (TREE_CODE (exp) != INTEGER_CST)
5359 return MEMMODEL_SEQ_CST;
5361 op = expand_normal (exp);
5363 val = INTVAL (op);
5364 if (targetm.memmodel_check)
5365 val = targetm.memmodel_check (val);
5366 else if (val & ~MEMMODEL_MASK)
5368 warning (OPT_Winvalid_memory_model,
5369 "Unknown architecture specifier in memory model to builtin.");
5370 return MEMMODEL_SEQ_CST;
5373 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5374 if (memmodel_base (val) >= MEMMODEL_LAST)
5376 warning (OPT_Winvalid_memory_model,
5377 "invalid memory model argument to builtin");
5378 return MEMMODEL_SEQ_CST;
5381 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5382 be conservative and promote consume to acquire. */
5383 if (val == MEMMODEL_CONSUME)
5384 val = MEMMODEL_ACQUIRE;
5386 return (enum memmodel) val;
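/* Editor's note (illustrative): the model argument is expected to be
   one of the __ATOMIC_* constants; per the code above, anything that
   is not a compile-time constant is conservatively treated as seq_cst,
   and consume is promoted to acquire:

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);  // constant, honored
     int m = choose_model ();                     // hypothetical helper
     __atomic_store_n (&x, 1, m);                 // expanded as seq_cst
*/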
5389 /* Expand the __atomic_exchange intrinsic:
5390 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5391 EXP is the CALL_EXPR.
5392 TARGET is an optional place for us to store the results. */
5394 static rtx
5395 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5397 rtx val, mem;
5398 enum memmodel model;
5400 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5402 if (!flag_inline_atomics)
5403 return NULL_RTX;
5405 /* Expand the operands. */
5406 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5407 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5409 return expand_atomic_exchange (target, mem, val, model);
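/* Editor's example (illustrative): the type-generic builtin lowered by
   the expander above:

     int x = 1;
     int old = __atomic_exchange_n (&x, 2, __ATOMIC_SEQ_CST);
     // old == 1, x == 2
*/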
5412 /* Expand the __atomic_compare_exchange intrinsic:
5413 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5414 TYPE desired, BOOL weak,
5415 enum memmodel success,
5416 enum memmodel failure)
5417 EXP is the CALL_EXPR.
5418 TARGET is an optional place for us to store the results. */
5420 static rtx
5421 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5422 rtx target)
5424 rtx expect, desired, mem, oldval;
5425 rtx_code_label *label;
5426 enum memmodel success, failure;
5427 tree weak;
5428 bool is_weak;
5430 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5431 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5433 if (failure > success)
5435 warning (OPT_Winvalid_memory_model,
5436 "failure memory model cannot be stronger than success memory "
5437 "model for %<__atomic_compare_exchange%>");
5438 success = MEMMODEL_SEQ_CST;
5441 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5443 warning (OPT_Winvalid_memory_model,
5444 "invalid failure memory model for "
5445 "%<__atomic_compare_exchange%>");
5446 failure = MEMMODEL_SEQ_CST;
5447 success = MEMMODEL_SEQ_CST;
5451 if (!flag_inline_atomics)
5452 return NULL_RTX;
5454 /* Expand the operands. */
5455 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5457 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5458 expect = convert_memory_address (Pmode, expect);
5459 expect = gen_rtx_MEM (mode, expect);
5460 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5462 weak = CALL_EXPR_ARG (exp, 3);
5463 is_weak = false;
5464 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5465 is_weak = true;
5467 if (target == const0_rtx)
5468 target = NULL;
5470 /* Lest the rtl backend create a race condition with an improper store
5471 to memory, always create a new pseudo for OLDVAL. */
5472 oldval = NULL;
5474 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5475 is_weak, success, failure))
5476 return NULL_RTX;
5478 /* Conditionally store back to EXPECT, lest we create a race condition
5479 with an improper store to memory. */
5480 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5481 the normal case where EXPECT is totally private, i.e. a register. At
5482 which point the store can be unconditional. */
5483 label = gen_label_rtx ();
5484 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5485 emit_move_insn (expect, oldval);
5486 emit_label (label);
5488 return target;
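/* Editor's example (illustrative): the conditional store-back to
   EXPECT emitted above is what makes the usual retry loop work, since
   a failed exchange refreshes "expected" from memory:

     int expected = __atomic_load_n (&x, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&x, &expected, expected + 1,
                                          false /* strong */,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   // expected now holds the value actually seen; just retry
*/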
5491 /* Expand the __atomic_load intrinsic:
5492 TYPE __atomic_load (TYPE *object, enum memmodel)
5493 EXP is the CALL_EXPR.
5494 TARGET is an optional place for us to store the results. */
5496 static rtx
5497 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5499 rtx mem;
5500 enum memmodel model;
5502 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5503 if (is_mm_release (model) || is_mm_acq_rel (model))
5505 warning (OPT_Winvalid_memory_model,
5506 "invalid memory model for %<__atomic_load%>");
5507 model = MEMMODEL_SEQ_CST;
5510 if (!flag_inline_atomics)
5511 return NULL_RTX;
5513 /* Expand the operand. */
5514 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516 return expand_atomic_load (target, mem, model);
5520 /* Expand the __atomic_store intrinsic:
5521 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5522 EXP is the CALL_EXPR. The result is not used, so the caller returns
5523 const0_rtx on success. */
5525 static rtx
5526 expand_builtin_atomic_store (machine_mode mode, tree exp)
5528 rtx mem, val;
5529 enum memmodel model;
5531 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5532 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5533 || is_mm_release (model)))
5535 warning (OPT_Winvalid_memory_model,
5536 "invalid memory model for %<__atomic_store%>");
5537 model = MEMMODEL_SEQ_CST;
5540 if (!flag_inline_atomics)
5541 return NULL_RTX;
5543 /* Expand the operands. */
5544 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5545 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5547 return expand_atomic_store (mem, val, model, false);
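/* Editor's note (illustrative): the model checks in the two expanders
   above mirror the C11 rules - a load may not be release or acq_rel,
   and a store may only be relaxed, release or seq_cst:

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);  // OK
     __atomic_store_n (&x, v, __ATOMIC_RELEASE);      // OK
     __atomic_store_n (&x, v, __ATOMIC_ACQUIRE);      // warns; seq_cst used
*/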
5550 /* Expand the __atomic_fetch_XXX intrinsic:
5551 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5552 EXP is the CALL_EXPR.
5553 TARGET is an optional place for us to store the results.
5554 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5555 FETCH_AFTER is true if returning the result of the operation.
5556 FETCH_AFTER is false if returning the value before the operation.
5557 IGNORE is true if the result is not used.
5558 EXT_CALL is the correct builtin for an external call if this cannot be
5559 resolved to an instruction sequence. */
5561 static rtx
5562 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5563 enum rtx_code code, bool fetch_after,
5564 bool ignore, enum built_in_function ext_call)
5566 rtx val, mem, ret;
5567 enum memmodel model;
5568 tree fndecl;
5569 tree addr;
5571 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5573 /* Expand the operands. */
5574 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5575 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5577 /* Only try generating instructions if inlining is turned on. */
5578 if (flag_inline_atomics)
5580 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5581 if (ret)
5582 return ret;
5585 /* If no different routine is needed for the library call, we are done. */
5586 if (ext_call == BUILT_IN_NONE)
5587 return NULL_RTX;
5589 /* Change the call to the specified function. */
5590 fndecl = get_callee_fndecl (exp);
5591 addr = CALL_EXPR_FN (exp);
5592 STRIP_NOPS (addr);
5594 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5595 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5597 /* Expand the call here so we can emit trailing code. */
5598 ret = expand_call (exp, target, ignore);
5600 /* Restore the original function, just in case it matters. */
5601 TREE_OPERAND (addr, 0) = fndecl;
5603 /* Then issue the arithmetic correction to return the right result. */
5604 if (!ignore)
5606 if (code == NOT)
5608 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5609 OPTAB_LIB_WIDEN);
5610 ret = expand_simple_unop (mode, NOT, ret, target, true);
5612 else
5613 ret = expand_simple_binop (mode, code, ret, val, target, true,
5614 OPTAB_LIB_WIDEN);
5616 return ret;
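/* Editor's note (illustrative): a worked instance of the correction
   above.  If __atomic_add_fetch_4 cannot be inlined, the call is
   redirected to __atomic_fetch_add_4, which returns the old value, and
   the result is fixed up as ret = old + val.  For the nand forms the
   fixup is the two-step ret = ~(old & val), matching the AND/NOT pair
   emitted above.  */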
5620 #ifndef HAVE_atomic_clear
5621 # define HAVE_atomic_clear 0
5622 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5623 #endif
5625 /* Expand an atomic clear operation.
5626 void __atomic_clear (BOOL *obj, enum memmodel)
5627 EXP is the call expression. */
5629 static rtx
5630 expand_builtin_atomic_clear (tree exp)
5632 machine_mode mode;
5633 rtx mem, ret;
5634 enum memmodel model;
5636 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5637 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5638 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5640 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5642 warning (OPT_Winvalid_memory_model,
5643 "invalid memory model for %<__atomic_store%>");
5644 model = MEMMODEL_SEQ_CST;
5647 if (HAVE_atomic_clear)
5649 emit_insn (gen_atomic_clear (mem, model));
5650 return const0_rtx;
5653 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
5654 Failing both, issue a plain store. The only way this can fail is if the
5655 bool type is larger than a word size. Unlikely, but handle it anyway for
5656 completeness. Assume a single-threaded model in that case, since there is
5657 no atomic support and no barriers are required. */
5658 ret = expand_atomic_store (mem, const0_rtx, model, true);
5659 if (!ret)
5660 emit_move_insn (mem, const0_rtx);
5661 return const0_rtx;
5664 /* Expand an atomic test_and_set operation.
5665 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5666 EXP is the call expression. */
5668 static rtx
5669 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5671 rtx mem;
5672 enum memmodel model;
5673 machine_mode mode;
5675 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5676 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5677 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5679 return expand_atomic_test_and_set (target, mem, model);
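/* Editor's example (illustrative): the C11-style flag protocol built
   from the two expanders above; both operate on a bool-sized object:

     static volatile unsigned char flag;

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                    // previous value was "set"; spin
     // ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/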
5683 /* Return true if an object of size ARG0, optionally at address ARG1, is always
5684 lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5686 static tree
5687 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5689 int size;
5690 machine_mode mode;
5691 unsigned int mode_align, type_align;
5693 if (TREE_CODE (arg0) != INTEGER_CST)
5694 return NULL_TREE;
5696 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5697 mode = mode_for_size (size, MODE_INT, 0);
5698 mode_align = GET_MODE_ALIGNMENT (mode);
5700 if (TREE_CODE (arg1) == INTEGER_CST)
5702 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5704 /* Either this argument is null, or it's a fake pointer encoding
5705 the alignment of the object. */
5706 val = val & -val;
5707 val *= BITS_PER_UNIT;
5709 if (val == 0 || mode_align < val)
5710 type_align = mode_align;
5711 else
5712 type_align = val;
5714 else
5716 tree ttype = TREE_TYPE (arg1);
5718 /* This function is usually invoked and folded immediately by the front
5719 end before anything else has a chance to look at it. The pointer
5720 parameter at this point is usually cast to a void *, so check for that
5721 and look past the cast. */
5722 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5723 && VOID_TYPE_P (TREE_TYPE (ttype)))
5724 arg1 = TREE_OPERAND (arg1, 0);
5726 ttype = TREE_TYPE (arg1);
5727 gcc_assert (POINTER_TYPE_P (ttype));
5729 /* Get the underlying type of the object. */
5730 ttype = TREE_TYPE (ttype);
5731 type_align = TYPE_ALIGN (ttype);
5734 /* If the object has smaller alignment, the lock free routines cannot
5735 be used. */
5736 if (type_align < mode_align)
5737 return boolean_false_node;
5739 /* Check if a compare_and_swap pattern exists for the mode which represents
5740 the required size. The pattern is not allowed to fail, so the existence
5741 of the pattern indicates support is present. */
5742 if (can_compare_and_swap_p (mode, true))
5743 return boolean_true_node;
5744 else
5745 return boolean_false_node;
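/* Editor's note (illustrative): a worked instance of the fake-pointer
   trick above.  For __atomic_always_lock_free (4, (void *) 8) the
   "pointer" 8 only encodes alignment: 8 & -8 == 8, times BITS_PER_UNIT
   gives 64 bits, which the code then caps at the SImode alignment
   (32 bits on a typical target) before the comparison.  Passing a null
   pointer means "typical alignment for the size".  */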
5748 /* Expand a call EXP to __atomic_always_lock_free. The first argument
5749 is the size of the object, and the second is a pointer to the object
5750 itself (or NULL, in which case the result is based on the typical
5751 alignment for an object of the specified size). Return const1_rtx if
5752 the object is always lock free, otherwise const0_rtx. */
5755 static rtx
5756 expand_builtin_atomic_always_lock_free (tree exp)
5758 tree size;
5759 tree arg0 = CALL_EXPR_ARG (exp, 0);
5760 tree arg1 = CALL_EXPR_ARG (exp, 1);
5762 if (TREE_CODE (arg0) != INTEGER_CST)
5764 error ("non-constant argument 1 to __atomic_always_lock_free");
5765 return const0_rtx;
5768 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5769 if (size == boolean_true_node)
5770 return const1_rtx;
5771 return const0_rtx;
5774 /* Return boolean_true_node if it can be determined that the object ARG1 of
5775 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
5777 static tree
5778 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5780 if (!flag_inline_atomics)
5781 return NULL_TREE;
5783 /* If it isn't always lock free, don't generate a result. */
5784 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5785 return boolean_true_node;
5787 return NULL_TREE;
5790 /* Expand a call EXP to __atomic_is_lock_free. The arguments are as for
5791 __atomic_always_lock_free above. Return const1_rtx if it can be proven
5792 at compile time that the object is lock free, otherwise NULL_RTX so a
5793 library call is emitted. */
5797 static rtx
5798 expand_builtin_atomic_is_lock_free (tree exp)
5800 tree size;
5801 tree arg0 = CALL_EXPR_ARG (exp, 0);
5802 tree arg1 = CALL_EXPR_ARG (exp, 1);
5804 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5806 error ("non-integer argument 1 to __atomic_is_lock_free");
5807 return NULL_RTX;
5810 if (!flag_inline_atomics)
5811 return NULL_RTX;
5813 /* If the value is known at compile time, return the RTX for it. */
5814 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5815 if (size == boolean_true_node)
5816 return const1_rtx;
5818 return NULL_RTX;
5821 /* Expand the __atomic_thread_fence intrinsic:
5822 void __atomic_thread_fence (enum memmodel)
5823 EXP is the CALL_EXPR. */
5825 static void
5826 expand_builtin_atomic_thread_fence (tree exp)
5828 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5829 expand_mem_thread_fence (model);
5832 /* Expand the __atomic_signal_fence intrinsic:
5833 void __atomic_signal_fence (enum memmodel)
5834 EXP is the CALL_EXPR. */
5836 static void
5837 expand_builtin_atomic_signal_fence (tree exp)
5839 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5840 expand_mem_signal_fence (model);
5843 /* Expand the __sync_synchronize intrinsic. */
5845 static void
5846 expand_builtin_sync_synchronize (void)
5848 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
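/* Editor's note (illustrative): the three fence expanders above differ
   in scope and model:

     __atomic_thread_fence (__ATOMIC_RELEASE);  // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler barrier only;
                                                // orders against a signal
                                                // handler on this thread
     __sync_synchronize ();                     // legacy full barrier
*/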
5851 static rtx
5852 expand_builtin_thread_pointer (tree exp, rtx target)
5854 enum insn_code icode;
5855 if (!validate_arglist (exp, VOID_TYPE))
5856 return const0_rtx;
5857 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5858 if (icode != CODE_FOR_nothing)
5860 struct expand_operand op;
5861 /* If the target is not suitable then create a new target. */
5862 if (target == NULL_RTX
5863 || !REG_P (target)
5864 || GET_MODE (target) != Pmode)
5865 target = gen_reg_rtx (Pmode);
5866 create_output_operand (&op, target, Pmode);
5867 expand_insn (icode, 1, &op);
5868 return target;
5870 error ("__builtin_thread_pointer is not supported on this target");
5871 return const0_rtx;
5874 static void
5875 expand_builtin_set_thread_pointer (tree exp)
5877 enum insn_code icode;
5878 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5879 return;
5880 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5881 if (icode != CODE_FOR_nothing)
5883 struct expand_operand op;
5884 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5885 Pmode, EXPAND_NORMAL);
5886 create_input_operand (&op, val, Pmode);
5887 expand_insn (icode, 1, &op);
5888 return;
5890 error ("__builtin_set_thread_pointer is not supported on this target");
5894 /* Emit code to restore the stack pointer from a previously saved value. */
5896 static void
5897 expand_stack_restore (tree var)
5899 rtx_insn *prev;
5900 rtx sa = expand_normal (var);
5902 sa = convert_memory_address (Pmode, sa);
5904 prev = get_last_insn ();
5905 emit_stack_restore (SAVE_BLOCK, sa);
5906 fixup_args_size_notes (prev, get_last_insn (), 0);
5910 /* Emit code to save the current value of the stack pointer. */
5912 static rtx
5913 expand_stack_save (void)
5915 rtx ret = NULL_RTX;
5917 do_pending_stack_adjust ();
5918 emit_stack_save (SAVE_BLOCK, &ret);
5919 return ret;
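/* Editor's note (illustrative): these two expanders back the internal
   __builtin_stack_save / __builtin_stack_restore pair that the
   gimplifier wraps around variable-length-array scopes:

     {
       int vla[n];   // stack_save on entry ...
       ...
     }               // ... stack_restore on scope exit
*/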
5923 /* Expand OpenACC acc_on_device.
5925 This has to happen late (that is, not in early folding; expand_builtin_*,
5926 rather than fold_builtin_*), as we have to act differently for host and
5927 acceleration device (ACCEL_COMPILER conditional). */
5929 static rtx
5930 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5931 rtx target ATTRIBUTE_UNUSED)
5933 #ifdef ACCEL_COMPILER
5934 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5935 return NULL_RTX;
5937 tree arg = CALL_EXPR_ARG (exp, 0);
5939 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5940 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5941 rtx v = expand_normal (arg), v1, v2;
5942 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5943 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5944 machine_mode target_mode = TYPE_MODE (integer_type_node);
5945 if (!target || !register_operand (target, target_mode))
5946 target = gen_reg_rtx (target_mode);
5947 emit_move_insn (target, const1_rtx);
5948 rtx_code_label *done_label = gen_label_rtx ();
5949 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5950 NULL_RTX, done_label, PROB_EVEN);
5951 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5952 NULL_RTX, done_label, PROB_EVEN);
5953 emit_move_insn (target, const0_rtx);
5954 emit_label (done_label);
5956 return target;
5957 #else
5958 return NULL;
5959 #endif
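/* Editor's example (illustrative): in OpenACC user code,

     if (acc_on_device (acc_device_nvidia))
       ...   // device-specialized path

   compiles, under ACCEL_COMPILER, to the two comparisons above against
   GOMP_DEVICE_NOT_HOST and the concrete ACCEL_COMPILER_acc_device
   value; the host case is left to the normal call path.  */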
5963 /* Expand an expression EXP that calls a built-in function,
5964 with result going to TARGET if that's convenient
5965 (and in mode MODE if that's convenient).
5966 SUBTARGET may be used as the target for computing one of EXP's operands.
5967 IGNORE is nonzero if the value is to be ignored. */
5970 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5971 int ignore)
5973 tree fndecl = get_callee_fndecl (exp);
5974 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5975 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5976 int flags;
5978 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5979 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5981 /* When ASan is enabled, we don't want to expand some memory/string
5982 builtins; instead we rely on libsanitizer's hooks. This allows us to
5983 avoid redundant checks and be sure that a possible overflow will be
5984 detected by ASan. */
5986 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5987 return expand_call (exp, target, ignore);
5989 /* When not optimizing, generate calls to library functions for a certain
5990 set of builtins. */
5991 if (!optimize
5992 && !called_as_built_in (fndecl)
5993 && fcode != BUILT_IN_FORK
5994 && fcode != BUILT_IN_EXECL
5995 && fcode != BUILT_IN_EXECV
5996 && fcode != BUILT_IN_EXECLP
5997 && fcode != BUILT_IN_EXECLE
5998 && fcode != BUILT_IN_EXECVP
5999 && fcode != BUILT_IN_EXECVE
6000 && fcode != BUILT_IN_ALLOCA
6001 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6002 && fcode != BUILT_IN_FREE
6003 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6004 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6005 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6006 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6007 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6008 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6009 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6010 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6011 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6012 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6013 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6014 && fcode != BUILT_IN_CHKP_BNDRET)
6015 return expand_call (exp, target, ignore);
6017 /* The built-in function expanders test for target == const0_rtx
6018 to determine whether the function's result will be ignored. */
6019 if (ignore)
6020 target = const0_rtx;
6022 /* If the result of a pure or const built-in function is ignored, and
6023 none of its arguments are volatile, we can avoid expanding the
6024 built-in call and just evaluate the arguments for side-effects. */
6025 if (target == const0_rtx
6026 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6027 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6029 bool volatilep = false;
6030 tree arg;
6031 call_expr_arg_iterator iter;
6033 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6034 if (TREE_THIS_VOLATILE (arg))
6036 volatilep = true;
6037 break;
6040 if (! volatilep)
6042 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6043 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6044 return const0_rtx;
6048 /* expand_builtin_with_bounds is supposed to be used for
6049 instrumented builtin calls. */
6050 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6052 switch (fcode)
6054 CASE_FLT_FN (BUILT_IN_FABS):
6055 case BUILT_IN_FABSD32:
6056 case BUILT_IN_FABSD64:
6057 case BUILT_IN_FABSD128:
6058 target = expand_builtin_fabs (exp, target, subtarget);
6059 if (target)
6060 return target;
6061 break;
6063 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6064 target = expand_builtin_copysign (exp, target, subtarget);
6065 if (target)
6066 return target;
6067 break;
6069 /* Just do a normal library call if we were unable to fold
6070 the values. */
6071 CASE_FLT_FN (BUILT_IN_CABS):
6072 break;
6074 CASE_FLT_FN (BUILT_IN_EXP):
6075 CASE_FLT_FN (BUILT_IN_EXP10):
6076 CASE_FLT_FN (BUILT_IN_POW10):
6077 CASE_FLT_FN (BUILT_IN_EXP2):
6078 CASE_FLT_FN (BUILT_IN_EXPM1):
6079 CASE_FLT_FN (BUILT_IN_LOGB):
6080 CASE_FLT_FN (BUILT_IN_LOG):
6081 CASE_FLT_FN (BUILT_IN_LOG10):
6082 CASE_FLT_FN (BUILT_IN_LOG2):
6083 CASE_FLT_FN (BUILT_IN_LOG1P):
6084 CASE_FLT_FN (BUILT_IN_TAN):
6085 CASE_FLT_FN (BUILT_IN_ASIN):
6086 CASE_FLT_FN (BUILT_IN_ACOS):
6087 CASE_FLT_FN (BUILT_IN_ATAN):
6088 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6089 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6090 because of possible accuracy problems. */
6091 if (! flag_unsafe_math_optimizations)
6092 break;
6093 CASE_FLT_FN (BUILT_IN_SQRT):
6094 CASE_FLT_FN (BUILT_IN_FLOOR):
6095 CASE_FLT_FN (BUILT_IN_CEIL):
6096 CASE_FLT_FN (BUILT_IN_TRUNC):
6097 CASE_FLT_FN (BUILT_IN_ROUND):
6098 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6099 CASE_FLT_FN (BUILT_IN_RINT):
6100 target = expand_builtin_mathfn (exp, target, subtarget);
6101 if (target)
6102 return target;
6103 break;
6105 CASE_FLT_FN (BUILT_IN_FMA):
6106 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6107 if (target)
6108 return target;
6109 break;
6111 CASE_FLT_FN (BUILT_IN_ILOGB):
6112 if (! flag_unsafe_math_optimizations)
6113 break;
6114 CASE_FLT_FN (BUILT_IN_ISINF):
6115 CASE_FLT_FN (BUILT_IN_FINITE):
6116 case BUILT_IN_ISFINITE:
6117 case BUILT_IN_ISNORMAL:
6118 target = expand_builtin_interclass_mathfn (exp, target);
6119 if (target)
6120 return target;
6121 break;
6123 CASE_FLT_FN (BUILT_IN_ICEIL):
6124 CASE_FLT_FN (BUILT_IN_LCEIL):
6125 CASE_FLT_FN (BUILT_IN_LLCEIL):
6126 CASE_FLT_FN (BUILT_IN_LFLOOR):
6127 CASE_FLT_FN (BUILT_IN_IFLOOR):
6128 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6129 target = expand_builtin_int_roundingfn (exp, target);
6130 if (target)
6131 return target;
6132 break;
6134 CASE_FLT_FN (BUILT_IN_IRINT):
6135 CASE_FLT_FN (BUILT_IN_LRINT):
6136 CASE_FLT_FN (BUILT_IN_LLRINT):
6137 CASE_FLT_FN (BUILT_IN_IROUND):
6138 CASE_FLT_FN (BUILT_IN_LROUND):
6139 CASE_FLT_FN (BUILT_IN_LLROUND):
6140 target = expand_builtin_int_roundingfn_2 (exp, target);
6141 if (target)
6142 return target;
6143 break;
6145 CASE_FLT_FN (BUILT_IN_POWI):
6146 target = expand_builtin_powi (exp, target);
6147 if (target)
6148 return target;
6149 break;
6151 CASE_FLT_FN (BUILT_IN_ATAN2):
6152 CASE_FLT_FN (BUILT_IN_LDEXP):
6153 CASE_FLT_FN (BUILT_IN_SCALB):
6154 CASE_FLT_FN (BUILT_IN_SCALBN):
6155 CASE_FLT_FN (BUILT_IN_SCALBLN):
6156 if (! flag_unsafe_math_optimizations)
6157 break;
6159 CASE_FLT_FN (BUILT_IN_FMOD):
6160 CASE_FLT_FN (BUILT_IN_REMAINDER):
6161 CASE_FLT_FN (BUILT_IN_DREM):
6162 CASE_FLT_FN (BUILT_IN_POW):
6163 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6164 if (target)
6165 return target;
6166 break;
6168 CASE_FLT_FN (BUILT_IN_CEXPI):
6169 target = expand_builtin_cexpi (exp, target);
6170 gcc_assert (target);
6171 return target;
6173 CASE_FLT_FN (BUILT_IN_SIN):
6174 CASE_FLT_FN (BUILT_IN_COS):
6175 if (! flag_unsafe_math_optimizations)
6176 break;
6177 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6178 if (target)
6179 return target;
6180 break;
6182 CASE_FLT_FN (BUILT_IN_SINCOS):
6183 if (! flag_unsafe_math_optimizations)
6184 break;
6185 target = expand_builtin_sincos (exp);
6186 if (target)
6187 return target;
6188 break;
6190 case BUILT_IN_APPLY_ARGS:
6191 return expand_builtin_apply_args ();
6193 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6194 FUNCTION with a copy of the parameters described by
6195 ARGUMENTS, and ARGSIZE. It returns a block of memory
6196 allocated on the stack into which is stored all the registers
6197 that might possibly be used for returning the result of a
6198 function. ARGUMENTS is the value returned by
6199 __builtin_apply_args. ARGSIZE is the number of bytes of
6200 arguments that must be copied. ??? How should this value be
6201 computed? We'll also need a safe worst case value for varargs
6202 functions. */
6203 case BUILT_IN_APPLY:
6204 if (!validate_arglist (exp, POINTER_TYPE,
6205 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6206 && !validate_arglist (exp, REFERENCE_TYPE,
6207 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6208 return const0_rtx;
6209 else
6211 rtx ops[3];
6213 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6214 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6215 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6217 return expand_builtin_apply (ops[0], ops[1], ops[2]);
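/* Editor's example (illustrative): the classic forwarding idiom built
   from the three builtins handled here, with 64 as a worst-case guess
   for the argument size (see the ??? comment above); wrapped_fn is a
   hypothetical target function:

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*)()) wrapped_fn, args, 64);
     __builtin_return (ret);
*/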
6220 /* __builtin_return (RESULT) causes the function to return the
6221 value described by RESULT. RESULT is address of the block of
6222 memory returned by __builtin_apply. */
6223 case BUILT_IN_RETURN:
6224 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6225 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6226 return const0_rtx;
6228 case BUILT_IN_SAVEREGS:
6229 return expand_builtin_saveregs ();
6231 case BUILT_IN_VA_ARG_PACK:
6232 /* All valid uses of __builtin_va_arg_pack () are removed during
6233 inlining. */
6234 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6235 return const0_rtx;
6237 case BUILT_IN_VA_ARG_PACK_LEN:
6238 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6239 inlining. */
6240 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6241 return const0_rtx;
6243 /* Return the address of the first anonymous stack arg. */
6244 case BUILT_IN_NEXT_ARG:
6245 if (fold_builtin_next_arg (exp, false))
6246 return const0_rtx;
6247 return expand_builtin_next_arg ();
6249 case BUILT_IN_CLEAR_CACHE:
6250 target = expand_builtin___clear_cache (exp);
6251 if (target)
6252 return target;
6253 break;
6255 case BUILT_IN_CLASSIFY_TYPE:
6256 return expand_builtin_classify_type (exp);
6258 case BUILT_IN_CONSTANT_P:
6259 return const0_rtx;
6261 case BUILT_IN_FRAME_ADDRESS:
6262 case BUILT_IN_RETURN_ADDRESS:
6263 return expand_builtin_frame_address (fndecl, exp);
6265 /* Returns the address of the area where the structure is returned.
6266 0 otherwise. */
6267 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6268 if (call_expr_nargs (exp) != 0
6269 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6270 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6271 return const0_rtx;
6272 else
6273 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6275 case BUILT_IN_ALLOCA:
6276 case BUILT_IN_ALLOCA_WITH_ALIGN:
6277 /* If the allocation stems from the declaration of a variable-sized
6278 object, it cannot accumulate. */
6279 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6280 if (target)
6281 return target;
6282 break;
6284 case BUILT_IN_STACK_SAVE:
6285 return expand_stack_save ();
6287 case BUILT_IN_STACK_RESTORE:
6288 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6289 return const0_rtx;
6291 case BUILT_IN_BSWAP16:
6292 case BUILT_IN_BSWAP32:
6293 case BUILT_IN_BSWAP64:
6294 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6295 if (target)
6296 return target;
6297 break;
6299 CASE_INT_FN (BUILT_IN_FFS):
6300 target = expand_builtin_unop (target_mode, exp, target,
6301 subtarget, ffs_optab);
6302 if (target)
6303 return target;
6304 break;
6306 CASE_INT_FN (BUILT_IN_CLZ):
6307 target = expand_builtin_unop (target_mode, exp, target,
6308 subtarget, clz_optab);
6309 if (target)
6310 return target;
6311 break;
6313 CASE_INT_FN (BUILT_IN_CTZ):
6314 target = expand_builtin_unop (target_mode, exp, target,
6315 subtarget, ctz_optab);
6316 if (target)
6317 return target;
6318 break;
6320 CASE_INT_FN (BUILT_IN_CLRSB):
6321 target = expand_builtin_unop (target_mode, exp, target,
6322 subtarget, clrsb_optab);
6323 if (target)
6324 return target;
6325 break;
6327 CASE_INT_FN (BUILT_IN_POPCOUNT):
6328 target = expand_builtin_unop (target_mode, exp, target,
6329 subtarget, popcount_optab);
6330 if (target)
6331 return target;
6332 break;
6334 CASE_INT_FN (BUILT_IN_PARITY):
6335 target = expand_builtin_unop (target_mode, exp, target,
6336 subtarget, parity_optab);
6337 if (target)
6338 return target;
6339 break;
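/* Editor's note (illustrative): worked values for the unary integer
   builtins expanded above, for a 32-bit int and the argument 0x18
   (binary 11000):

     __builtin_ffs (0x18)       == 4   // 1-based index of lowest set bit
     __builtin_clz (0x18u)      == 27
     __builtin_ctz (0x18u)      == 3
     __builtin_popcount (0x18u) == 2
     __builtin_parity (0x18u)   == 0

   clz and ctz are undefined for a zero argument.  */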
6341 case BUILT_IN_STRLEN:
6342 target = expand_builtin_strlen (exp, target, target_mode);
6343 if (target)
6344 return target;
6345 break;
6347 case BUILT_IN_STRCPY:
6348 target = expand_builtin_strcpy (exp, target);
6349 if (target)
6350 return target;
6351 break;
6353 case BUILT_IN_STRNCPY:
6354 target = expand_builtin_strncpy (exp, target);
6355 if (target)
6356 return target;
6357 break;
6359 case BUILT_IN_STPCPY:
6360 target = expand_builtin_stpcpy (exp, target, mode);
6361 if (target)
6362 return target;
6363 break;
6365 case BUILT_IN_MEMCPY:
6366 target = expand_builtin_memcpy (exp, target);
6367 if (target)
6368 return target;
6369 break;
6371 case BUILT_IN_MEMPCPY:
6372 target = expand_builtin_mempcpy (exp, target, mode);
6373 if (target)
6374 return target;
6375 break;
6377 case BUILT_IN_MEMSET:
6378 target = expand_builtin_memset (exp, target, mode);
6379 if (target)
6380 return target;
6381 break;
6383 case BUILT_IN_BZERO:
6384 target = expand_builtin_bzero (exp);
6385 if (target)
6386 return target;
6387 break;
6389 case BUILT_IN_STRCMP:
6390 target = expand_builtin_strcmp (exp, target);
6391 if (target)
6392 return target;
6393 break;
6395 case BUILT_IN_STRNCMP:
6396 target = expand_builtin_strncmp (exp, target, mode);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_BCMP:
6402 case BUILT_IN_MEMCMP:
6403 target = expand_builtin_memcmp (exp, target, mode);
6404 if (target)
6405 return target;
6406 break;
6408 case BUILT_IN_SETJMP:
6409 /* This should have been lowered to the builtins below. */
6410 gcc_unreachable ();
6412 case BUILT_IN_SETJMP_SETUP:
6413 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6414 and the receiver label. */
6415 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6417 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6418 VOIDmode, EXPAND_NORMAL);
6419 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6420 rtx label_r = label_rtx (label);
6422 /* This is copied from the handling of non-local gotos. */
6423 expand_builtin_setjmp_setup (buf_addr, label_r);
6424 nonlocal_goto_handler_labels
6425 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6426 nonlocal_goto_handler_labels);
6427 /* ??? Do not let expand_label treat us as such since we would
6428 not want to be both on the list of non-local labels and on
6429 the list of forced labels. */
6430 FORCED_LABEL (label) = 0;
6431 return const0_rtx;
6433 break;
6435 case BUILT_IN_SETJMP_RECEIVER:
6436 /* __builtin_setjmp_receiver is passed the receiver label. */
6437 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6439 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6440 rtx label_r = label_rtx (label);
6442 expand_builtin_setjmp_receiver (label_r);
6443 return const0_rtx;
6445 break;
6447 /* __builtin_longjmp is passed a pointer to an array of five words.
6448 It's similar to the C library longjmp function but works with
6449 __builtin_setjmp above. */
6450 case BUILT_IN_LONGJMP:
6451 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6453 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6454 VOIDmode, EXPAND_NORMAL);
6455 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6457 if (value != const1_rtx)
6459 error ("%<__builtin_longjmp%> second argument must be 1");
6460 return const0_rtx;
6463 expand_builtin_longjmp (buf_addr, value);
6464 return const0_rtx;
6466 break;
6468 case BUILT_IN_NONLOCAL_GOTO:
6469 target = expand_builtin_nonlocal_goto (exp);
6470 if (target)
6471 return target;
6472 break;
6474 /* This updates the setjmp buffer that is its argument with the value
6475 of the current stack pointer. */
6476 case BUILT_IN_UPDATE_SETJMP_BUF:
6477 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6479 rtx buf_addr
6480 = expand_normal (CALL_EXPR_ARG (exp, 0));
6482 expand_builtin_update_setjmp_buf (buf_addr);
6483 return const0_rtx;
6485 break;
6487 case BUILT_IN_TRAP:
6488 expand_builtin_trap ();
6489 return const0_rtx;
6491 case BUILT_IN_UNREACHABLE:
6492 expand_builtin_unreachable ();
6493 return const0_rtx;
6495 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6496 case BUILT_IN_SIGNBITD32:
6497 case BUILT_IN_SIGNBITD64:
6498 case BUILT_IN_SIGNBITD128:
6499 target = expand_builtin_signbit (exp, target);
6500 if (target)
6501 return target;
6502 break;
6504 /* Various hooks for the DWARF 2 __throw routine. */
6505 case BUILT_IN_UNWIND_INIT:
6506 expand_builtin_unwind_init ();
6507 return const0_rtx;
6508 case BUILT_IN_DWARF_CFA:
6509 return virtual_cfa_rtx;
6510 #ifdef DWARF2_UNWIND_INFO
6511 case BUILT_IN_DWARF_SP_COLUMN:
6512 return expand_builtin_dwarf_sp_column ();
6513 case BUILT_IN_INIT_DWARF_REG_SIZES:
6514 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6515 return const0_rtx;
6516 #endif
6517 case BUILT_IN_FROB_RETURN_ADDR:
6518 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6519 case BUILT_IN_EXTRACT_RETURN_ADDR:
6520 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6521 case BUILT_IN_EH_RETURN:
6522 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6523 CALL_EXPR_ARG (exp, 1));
6524 return const0_rtx;
6525 #ifdef EH_RETURN_DATA_REGNO
6526 case BUILT_IN_EH_RETURN_DATA_REGNO:
6527 return expand_builtin_eh_return_data_regno (exp);
6528 #endif
6529 case BUILT_IN_EXTEND_POINTER:
6530 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6531 case BUILT_IN_EH_POINTER:
6532 return expand_builtin_eh_pointer (exp);
6533 case BUILT_IN_EH_FILTER:
6534 return expand_builtin_eh_filter (exp);
6535 case BUILT_IN_EH_COPY_VALUES:
6536 return expand_builtin_eh_copy_values (exp);
6538 case BUILT_IN_VA_START:
6539 return expand_builtin_va_start (exp);
6540 case BUILT_IN_VA_END:
6541 return expand_builtin_va_end (exp);
6542 case BUILT_IN_VA_COPY:
6543 return expand_builtin_va_copy (exp);
6544 case BUILT_IN_EXPECT:
6545 return expand_builtin_expect (exp, target);
6546 case BUILT_IN_ASSUME_ALIGNED:
6547 return expand_builtin_assume_aligned (exp, target);
6548 case BUILT_IN_PREFETCH:
6549 expand_builtin_prefetch (exp);
6550 return const0_rtx;
6552 case BUILT_IN_INIT_TRAMPOLINE:
6553 return expand_builtin_init_trampoline (exp, true);
6554 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6555 return expand_builtin_init_trampoline (exp, false);
6556 case BUILT_IN_ADJUST_TRAMPOLINE:
6557 return expand_builtin_adjust_trampoline (exp);
6559 case BUILT_IN_FORK:
6560 case BUILT_IN_EXECL:
6561 case BUILT_IN_EXECV:
6562 case BUILT_IN_EXECLP:
6563 case BUILT_IN_EXECLE:
6564 case BUILT_IN_EXECVP:
6565 case BUILT_IN_EXECVE:
6566 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6567 if (target)
6568 return target;
6569 break;
6571 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6572 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6573 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6574 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6575 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6577 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6578 if (target)
6579 return target;
6580 break;
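/* Editor's note (illustrative): throughout the _1/_2/_4/_8/_16 case
   groups here, subtracting the first code of the group yields log2 of
   the access size in bytes, which get_builtin_sync_mode maps to
   QImode, HImode, SImode, DImode or TImode respectively.  */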
6582 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6583 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6584 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6585 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6586 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6588 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6589 if (target)
6590 return target;
6591 break;
6593 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6594 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6595 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6596 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6597 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6599 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6600 if (target)
6601 return target;
6602 break;
6604 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6605 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6606 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6607 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6608 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6610 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6611 if (target)
6612 return target;
6613 break;
6615 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6616 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6617 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6618 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6619 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6621 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6627 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6628 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6629 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6630 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6632 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6633 if (target)
6634 return target;
6635 break;
6637 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6638 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6639 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6640 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6641 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6643 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6649 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6650 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6651 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6652 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6653 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6654 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6655 if (target)
6656 return target;
6657 break;
6659 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6660 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6661 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6662 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6663 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6664 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6665 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6666 if (target)
6667 return target;
6668 break;
6670 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6671 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6672 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6673 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6674 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6676 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6677 if (target)
6678 return target;
6679 break;
6681 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6682 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6683 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6684 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6685 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6687 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6693 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6694 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6695 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6696 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6698 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6704 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6705 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6706 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6707 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6708 if (mode == VOIDmode)
6709 mode = TYPE_MODE (boolean_type_node);
6710 if (!target || !register_operand (target, mode))
6711 target = gen_reg_rtx (mode);
6713 mode = get_builtin_sync_mode
6714 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6715 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6716 if (target)
6717 return target;
6718 break;
6720 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6721 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6722 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6723 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6724 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6725 mode = get_builtin_sync_mode
6726 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6727 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6733 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6734 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6735 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6736 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6737 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6738 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6744 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6745 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6746 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6747 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6749 expand_builtin_sync_lock_release (mode, exp);
6750 return const0_rtx;
6752 case BUILT_IN_SYNC_SYNCHRONIZE:
6753 expand_builtin_sync_synchronize ();
6754 return const0_rtx;
6756 case BUILT_IN_ATOMIC_EXCHANGE_1:
6757 case BUILT_IN_ATOMIC_EXCHANGE_2:
6758 case BUILT_IN_ATOMIC_EXCHANGE_4:
6759 case BUILT_IN_ATOMIC_EXCHANGE_8:
6760 case BUILT_IN_ATOMIC_EXCHANGE_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6762 target = expand_builtin_atomic_exchange (mode, exp, target);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6768 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6769 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6770 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6771 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6773 unsigned int nargs, z;
6774 vec<tree, va_gc> *vec;
6776 mode =
6777 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6778 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6779 if (target)
6780 return target;
6782 /* If this is turned into an external library call, the weak parameter
6783 must be dropped to match the expected parameter list. */
6784 nargs = call_expr_nargs (exp);
6785 vec_alloc (vec, nargs - 1);
6786 for (z = 0; z < 3; z++)
6787 vec->quick_push (CALL_EXPR_ARG (exp, z));
6788 /* Skip the boolean weak parameter. */
6789 for (z = 4; z < 6; z++)
6790 vec->quick_push (CALL_EXPR_ARG (exp, z));
6791 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6792 break;
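/* Editor's note (illustrative): the libatomic entry point has no weak
   parameter, hence the argument list rebuilt above; its expected shape
   is roughly

     bool __atomic_compare_exchange_N (T *ptr, T *expected, T desired,
                                       int success_order, int failure_order);
*/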
6795 case BUILT_IN_ATOMIC_LOAD_1:
6796 case BUILT_IN_ATOMIC_LOAD_2:
6797 case BUILT_IN_ATOMIC_LOAD_4:
6798 case BUILT_IN_ATOMIC_LOAD_8:
6799 case BUILT_IN_ATOMIC_LOAD_16:
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6801 target = expand_builtin_atomic_load (mode, exp, target);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_ATOMIC_STORE_1:
6807 case BUILT_IN_ATOMIC_STORE_2:
6808 case BUILT_IN_ATOMIC_STORE_4:
6809 case BUILT_IN_ATOMIC_STORE_8:
6810 case BUILT_IN_ATOMIC_STORE_16:
6811 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6812 target = expand_builtin_atomic_store (mode, exp);
6813 if (target)
6814 return const0_rtx;
6815 break;
6817 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6818 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6819 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6820 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6821 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6823 enum built_in_function lib;
6824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6825 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6826 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6828 ignore, lib);
6829 if (target)
6830 return target;
6831 break;
6833 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6834 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6835 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6836 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6837 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6839 enum built_in_function lib;
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6841 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6842 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6844 ignore, lib);
6845 if (target)
6846 return target;
6847 break;
6849 case BUILT_IN_ATOMIC_AND_FETCH_1:
6850 case BUILT_IN_ATOMIC_AND_FETCH_2:
6851 case BUILT_IN_ATOMIC_AND_FETCH_4:
6852 case BUILT_IN_ATOMIC_AND_FETCH_8:
6853 case BUILT_IN_ATOMIC_AND_FETCH_16:
6855 enum built_in_function lib;
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6857 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6858 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6859 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6860 ignore, lib);
6861 if (target)
6862 return target;
6863 break;
6865 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6866 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6867 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6868 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6869 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6871 enum built_in_function lib;
6872 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6873 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6874 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6875 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6876 ignore, lib);
6877 if (target)
6878 return target;
6879 break;
6881 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6882 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6883 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6884 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6885 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6887 enum built_in_function lib;
6888 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6889 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6890 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6891 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6892 ignore, lib);
6893 if (target)
6894 return target;
6895 break;
6897 case BUILT_IN_ATOMIC_OR_FETCH_1:
6898 case BUILT_IN_ATOMIC_OR_FETCH_2:
6899 case BUILT_IN_ATOMIC_OR_FETCH_4:
6900 case BUILT_IN_ATOMIC_OR_FETCH_8:
6901 case BUILT_IN_ATOMIC_OR_FETCH_16:
6903 enum built_in_function lib;
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6905 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6906 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6907 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6908 ignore, lib);
6909 if (target)
6910 return target;
6911 break;
6913 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6914 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6915 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6916 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6917 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6918 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6919 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6920 ignore, BUILT_IN_NONE);
6921 if (target)
6922 return target;
6923 break;
6925 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6926 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6927 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6928 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6929 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6930 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6931 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6932 ignore, BUILT_IN_NONE);
6933 if (target)
6934 return target;
6935 break;
6937 case BUILT_IN_ATOMIC_FETCH_AND_1:
6938 case BUILT_IN_ATOMIC_FETCH_AND_2:
6939 case BUILT_IN_ATOMIC_FETCH_AND_4:
6940 case BUILT_IN_ATOMIC_FETCH_AND_8:
6941 case BUILT_IN_ATOMIC_FETCH_AND_16:
6942 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6943 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6944 ignore, BUILT_IN_NONE);
6945 if (target)
6946 return target;
6947 break;
6949 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6950 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6951 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6952 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6953 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6954 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6955 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6956 ignore, BUILT_IN_NONE);
6957 if (target)
6958 return target;
6959 break;
6961 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6962 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6963 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6964 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6965 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6966 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6967 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6968 ignore, BUILT_IN_NONE);
6969 if (target)
6970 return target;
6971 break;
6973 case BUILT_IN_ATOMIC_FETCH_OR_1:
6974 case BUILT_IN_ATOMIC_FETCH_OR_2:
6975 case BUILT_IN_ATOMIC_FETCH_OR_4:
6976 case BUILT_IN_ATOMIC_FETCH_OR_8:
6977 case BUILT_IN_ATOMIC_FETCH_OR_16:
6978 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6979 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6980 ignore, BUILT_IN_NONE);
6981 if (target)
6982 return target;
6983 break;
6985 case BUILT_IN_ATOMIC_TEST_AND_SET:
6986 return expand_builtin_atomic_test_and_set (exp, target);
6988 case BUILT_IN_ATOMIC_CLEAR:
6989 return expand_builtin_atomic_clear (exp);
6991 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6992 return expand_builtin_atomic_always_lock_free (exp);
6994 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6995 target = expand_builtin_atomic_is_lock_free (exp);
6996 if (target)
6997 return target;
6998 break;
7000 case BUILT_IN_ATOMIC_THREAD_FENCE:
7001 expand_builtin_atomic_thread_fence (exp);
7002 return const0_rtx;
7004 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7005 expand_builtin_atomic_signal_fence (exp);
7006 return const0_rtx;
7008 case BUILT_IN_OBJECT_SIZE:
7009 return expand_builtin_object_size (exp);
7011 case BUILT_IN_MEMCPY_CHK:
7012 case BUILT_IN_MEMPCPY_CHK:
7013 case BUILT_IN_MEMMOVE_CHK:
7014 case BUILT_IN_MEMSET_CHK:
7015 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7016 if (target)
7017 return target;
7018 break;
7020 case BUILT_IN_STRCPY_CHK:
7021 case BUILT_IN_STPCPY_CHK:
7022 case BUILT_IN_STRNCPY_CHK:
7023 case BUILT_IN_STPNCPY_CHK:
7024 case BUILT_IN_STRCAT_CHK:
7025 case BUILT_IN_STRNCAT_CHK:
7026 case BUILT_IN_SNPRINTF_CHK:
7027 case BUILT_IN_VSNPRINTF_CHK:
7028 maybe_emit_chk_warning (exp, fcode);
7029 break;
7031 case BUILT_IN_SPRINTF_CHK:
7032 case BUILT_IN_VSPRINTF_CHK:
7033 maybe_emit_sprintf_chk_warning (exp, fcode);
7034 break;
7036 case BUILT_IN_FREE:
7037 if (warn_free_nonheap_object)
7038 maybe_emit_free_warning (exp);
7039 break;
7041 case BUILT_IN_THREAD_POINTER:
7042 return expand_builtin_thread_pointer (exp, target);
7044 case BUILT_IN_SET_THREAD_POINTER:
7045 expand_builtin_set_thread_pointer (exp);
7046 return const0_rtx;
7048 case BUILT_IN_CILK_DETACH:
7049 expand_builtin_cilk_detach (exp);
7050 return const0_rtx;
7052 case BUILT_IN_CILK_POP_FRAME:
7053 expand_builtin_cilk_pop_frame (exp);
7054 return const0_rtx;
7056 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7057 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7058 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7059 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7060 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7061 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7062 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7063 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7064 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7065 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7066 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7067 /* We allow user CHKP builtins if Pointer Bounds
7068 Checker is off. */
7069 if (!chkp_function_instrumented_p (current_function_decl))
7071 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7072 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7073 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7074 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7075 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7076 return expand_normal (CALL_EXPR_ARG (exp, 0));
7077 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7078 return expand_normal (size_zero_node);
7079 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7080 return expand_normal (size_int (-1));
7081 else
7082 return const0_rtx;
7084 /* FALLTHROUGH */
7086 case BUILT_IN_CHKP_BNDMK:
7087 case BUILT_IN_CHKP_BNDSTX:
7088 case BUILT_IN_CHKP_BNDCL:
7089 case BUILT_IN_CHKP_BNDCU:
7090 case BUILT_IN_CHKP_BNDLDX:
7091 case BUILT_IN_CHKP_BNDRET:
7092 case BUILT_IN_CHKP_INTERSECT:
7093 case BUILT_IN_CHKP_NARROW:
7094 case BUILT_IN_CHKP_EXTRACT_LOWER:
7095 case BUILT_IN_CHKP_EXTRACT_UPPER:
7096 /* Software implementation of Pointer Bounds Checker is NYI.
7097 Target support is required. */
7098 error ("Your target platform does not support -fcheck-pointer-bounds");
7099 break;
7101 case BUILT_IN_ACC_ON_DEVICE:
7102 target = expand_builtin_acc_on_device (exp, target);
7103 if (target)
7104 return target;
7105 break;
7107 default: /* just do library call, if unknown builtin */
7108 break;
7111 /* The switch statement above can drop through to cause the function
7112 to be called normally. */
7113 return expand_call (exp, target, ignore);
7116 /* Similar to expand_builtin but is used for instrumented calls. */
7119 expand_builtin_with_bounds (tree exp, rtx target,
7120 rtx subtarget ATTRIBUTE_UNUSED,
7121 machine_mode mode, int ignore)
7123 tree fndecl = get_callee_fndecl (exp);
7124 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7126 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7128 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7129 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7131 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7132 && fcode < END_CHKP_BUILTINS);
7134 switch (fcode)
7136 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7137 target = expand_builtin_memcpy_with_bounds (exp, target);
7138 if (target)
7139 return target;
7140 break;
7142 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7143 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7144 if (target)
7145 return target;
7146 break;
7148 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7149 target = expand_builtin_memset_with_bounds (exp, target, mode);
7150 if (target)
7151 return target;
7152 break;
7154 default:
7155 break;
7158 /* The switch statement above can drop through to cause the function
7159 to be called normally. */
7160 return expand_call (exp, target, ignore);
7163 /* Determine whether a tree node represents a call to a built-in
7164 function. If the tree T is a call to a built-in function with
7165 the right number of arguments of the appropriate types, return
7166 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7167 Otherwise the return value is END_BUILTINS. */
7169 enum built_in_function
7170 builtin_mathfn_code (const_tree t)
7172 const_tree fndecl, arg, parmlist;
7173 const_tree argtype, parmtype;
7174 const_call_expr_arg_iterator iter;
7176 if (TREE_CODE (t) != CALL_EXPR
7177 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7178 return END_BUILTINS;
7180 fndecl = get_callee_fndecl (t);
7181 if (fndecl == NULL_TREE
7182 || TREE_CODE (fndecl) != FUNCTION_DECL
7183 || ! DECL_BUILT_IN (fndecl)
7184 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7185 return END_BUILTINS;
7187 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7188 init_const_call_expr_arg_iterator (t, &iter);
7189 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7191 /* If a function doesn't take a variable number of arguments,
7192 the last element in the list will have type `void'. */
7193 parmtype = TREE_VALUE (parmlist);
7194 if (VOID_TYPE_P (parmtype))
7196 if (more_const_call_expr_args_p (&iter))
7197 return END_BUILTINS;
7198 return DECL_FUNCTION_CODE (fndecl);
7201 if (! more_const_call_expr_args_p (&iter))
7202 return END_BUILTINS;
7204 arg = next_const_call_expr_arg (&iter);
7205 argtype = TREE_TYPE (arg);
7207 if (SCALAR_FLOAT_TYPE_P (parmtype))
7209 if (! SCALAR_FLOAT_TYPE_P (argtype))
7210 return END_BUILTINS;
7212 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7214 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7215 return END_BUILTINS;
7217 else if (POINTER_TYPE_P (parmtype))
7219 if (! POINTER_TYPE_P (argtype))
7220 return END_BUILTINS;
7222 else if (INTEGRAL_TYPE_P (parmtype))
7224 if (! INTEGRAL_TYPE_P (argtype))
7225 return END_BUILTINS;
7227 else
7228 return END_BUILTINS;
7231 /* Variable-length argument list. */
7232 return DECL_FUNCTION_CODE (fndecl);
7235 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7236 evaluate to a constant. */
7238 static tree
7239 fold_builtin_constant_p (tree arg)
7241 /* We return 1 for a numeric type that's known to be a constant
7242 value at compile-time or for an aggregate type that's a
7243 literal constant. */
7244 STRIP_NOPS (arg);
7246 /* If we know this is a constant, return the constant one. */
7247 if (CONSTANT_CLASS_P (arg)
7248 || (TREE_CODE (arg) == CONSTRUCTOR
7249 && TREE_CONSTANT (arg)))
7250 return integer_one_node;
7251 if (TREE_CODE (arg) == ADDR_EXPR)
7253 tree op = TREE_OPERAND (arg, 0);
7254 if (TREE_CODE (op) == STRING_CST
7255 || (TREE_CODE (op) == ARRAY_REF
7256 && integer_zerop (TREE_OPERAND (op, 1))
7257 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7258 return integer_one_node;
7261 /* If this expression has side effects, show we don't know it to be a
7262 constant. Likewise if it's a pointer or aggregate type, since in
7263 those cases we only want literals, as those are only optimized
7264 when generating RTL, not later.
7265 And finally, if we are compiling an initializer, not code, we
7266 need to return a definite result now; there's not going to be any
7267 more optimization done. */
7268 if (TREE_SIDE_EFFECTS (arg)
7269 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7270 || POINTER_TYPE_P (TREE_TYPE (arg))
7271 || cfun == 0
7272 || folding_initializer
7273 || force_folding_builtin_constant_p)
7274 return integer_zero_node;
7276 return NULL_TREE;
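/* For example, the folding above turns

     __builtin_constant_p (42)    into 1
     __builtin_constant_p ("abc") into 1
     __builtin_constant_p (x++)   into 0  (it has side effects)

   and returns NULL_TREE when the argument may still turn into a
   constant later, so that folding can be retried after optimization.  */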
7279 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7280 return it as a truthvalue. */
7282 static tree
7283 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7284 tree predictor)
7286 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7288 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7289 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7290 ret_type = TREE_TYPE (TREE_TYPE (fn));
7291 pred_type = TREE_VALUE (arg_types);
7292 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7294 pred = fold_convert_loc (loc, pred_type, pred);
7295 expected = fold_convert_loc (loc, expected_type, expected);
7296 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7297 predictor);
7299 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7300 build_int_cst (ret_type, 0));
7303 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7304 NULL_TREE if no simplification is possible. */
7306 tree
7307 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7309 tree inner, fndecl, inner_arg0;
7310 enum tree_code code;
7312 /* Distribute the expected value over short-circuiting operators.
7313 See through the cast from truthvalue_type_node to long. */
7314 inner_arg0 = arg0;
7315 while (CONVERT_EXPR_P (inner_arg0)
7316 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7317 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7318 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7320 /* If this is a builtin_expect within a builtin_expect, keep the
7321 inner one. See through a comparison against a constant. It
7322 might have been added to create a truthvalue. */
7323 inner = inner_arg0;
7325 if (COMPARISON_CLASS_P (inner)
7326 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7327 inner = TREE_OPERAND (inner, 0);
7329 if (TREE_CODE (inner) == CALL_EXPR
7330 && (fndecl = get_callee_fndecl (inner))
7331 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7332 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7333 return arg0;
7335 inner = inner_arg0;
7336 code = TREE_CODE (inner);
7337 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7339 tree op0 = TREE_OPERAND (inner, 0);
7340 tree op1 = TREE_OPERAND (inner, 1);
7342 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7343 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7344 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7346 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7349 /* If the argument isn't invariant then there's nothing else we can do. */
7350 if (!TREE_CONSTANT (inner_arg0))
7351 return NULL_TREE;
7353 /* If we expect that a comparison against the argument will fold to
7354 a constant, return the constant. In practice, this means a true
7355 constant or the address of a non-weak symbol. */
7356 inner = inner_arg0;
7357 STRIP_NOPS (inner);
7358 if (TREE_CODE (inner) == ADDR_EXPR)
7362 inner = TREE_OPERAND (inner, 0);
7364 while (TREE_CODE (inner) == COMPONENT_REF
7365 || TREE_CODE (inner) == ARRAY_REF);
7366 if ((TREE_CODE (inner) == VAR_DECL
7367 || TREE_CODE (inner) == FUNCTION_DECL)
7368 && DECL_WEAK (inner))
7369 return NULL_TREE;
7372 /* Otherwise, ARG0 already has the proper type for the return value. */
7373 return arg0;
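/* For example, the distribution above rewrites

     __builtin_expect (a && b, 1)

   as the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the type of the original argument, so that each
   short-circuited operand carries its own branch prediction.  */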
7376 /* Fold a call to __builtin_classify_type with argument ARG. */
7378 static tree
7379 fold_builtin_classify_type (tree arg)
7381 if (arg == 0)
7382 return build_int_cst (integer_type_node, no_type_class);
7384 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7387 /* Fold a call to __builtin_strlen with argument ARG. */
7389 static tree
7390 fold_builtin_strlen (location_t loc, tree type, tree arg)
7392 if (!validate_arg (arg, POINTER_TYPE))
7393 return NULL_TREE;
7394 else
7396 tree len = c_strlen (arg, 0);
7398 if (len)
7399 return fold_convert_loc (loc, type, len);
7401 return NULL_TREE;
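/* For example, strlen ("hello") folds to the constant 5 via c_strlen,
   converted to TYPE (the return type of strlen, typically size_t).  */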
7405 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7407 static tree
7408 fold_builtin_inf (location_t loc, tree type, int warn)
7410 REAL_VALUE_TYPE real;
7412 /* __builtin_inff is intended to be usable to define INFINITY on all
7413 targets. If an infinity is not available, INFINITY expands "to a
7414 positive constant of type float that overflows at translation
7415 time", footnote "In this case, using INFINITY will violate the
7416 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7417 Thus we pedwarn to ensure this constraint violation is
7418 diagnosed. */
7419 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7420 pedwarn (loc, 0, "target format does not support infinity");
7422 real_inf (&real);
7423 return build_real (type, real);
7426 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7428 static tree
7429 fold_builtin_nan (tree arg, tree type, int quiet)
7431 REAL_VALUE_TYPE real;
7432 const char *str;
7434 if (!validate_arg (arg, POINTER_TYPE))
7435 return NULL_TREE;
7436 str = c_getstr (arg);
7437 if (!str)
7438 return NULL_TREE;
7440 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7441 return NULL_TREE;
7443 return build_real (type, real);
7446 /* Return true if the floating point expression T has an integer value.
7447 We also allow +Inf, -Inf and NaN to be considered integer values. */
7449 static bool
7450 integer_valued_real_p (tree t)
7452 switch (TREE_CODE (t))
7454 case FLOAT_EXPR:
7455 return true;
7457 case ABS_EXPR:
7458 case SAVE_EXPR:
7459 return integer_valued_real_p (TREE_OPERAND (t, 0));
7461 case COMPOUND_EXPR:
7462 case MODIFY_EXPR:
7463 case BIND_EXPR:
7464 return integer_valued_real_p (TREE_OPERAND (t, 1));
7466 case PLUS_EXPR:
7467 case MINUS_EXPR:
7468 case MULT_EXPR:
7469 case MIN_EXPR:
7470 case MAX_EXPR:
7471 return integer_valued_real_p (TREE_OPERAND (t, 0))
7472 && integer_valued_real_p (TREE_OPERAND (t, 1));
7474 case COND_EXPR:
7475 return integer_valued_real_p (TREE_OPERAND (t, 1))
7476 && integer_valued_real_p (TREE_OPERAND (t, 2));
7478 case REAL_CST:
7479 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7481 CASE_CONVERT:
7483 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7484 if (TREE_CODE (type) == INTEGER_TYPE)
7485 return true;
7486 if (TREE_CODE (type) == REAL_TYPE)
7487 return integer_valued_real_p (TREE_OPERAND (t, 0));
7488 break;
7491 case CALL_EXPR:
7492 switch (builtin_mathfn_code (t))
7494 CASE_FLT_FN (BUILT_IN_CEIL):
7495 CASE_FLT_FN (BUILT_IN_FLOOR):
7496 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7497 CASE_FLT_FN (BUILT_IN_RINT):
7498 CASE_FLT_FN (BUILT_IN_ROUND):
7499 CASE_FLT_FN (BUILT_IN_TRUNC):
7500 return true;
7502 CASE_FLT_FN (BUILT_IN_FMIN):
7503 CASE_FLT_FN (BUILT_IN_FMAX):
7504 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7505 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7507 default:
7508 break;
7510 break;
7512 default:
7513 break;
7515 return false;
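/* For example, this predicate is true for

     (double) i               -- a FLOAT_EXPR from an integer
     floor (x), trunc (x)     -- integer rounding functions
     fmin (floor (x), 2.0)    -- both operands integer valued

   and false for a plain REAL_CST such as 0.5.  */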
7518 /* FNDECL is assumed to be a builtin where truncation can be propagated
7519 across (for instance floor((double)f) == (double)floorf (f)).
7520 Do the transformation for a call with argument ARG. */
7522 static tree
7523 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7525 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7527 if (!validate_arg (arg, REAL_TYPE))
7528 return NULL_TREE;
7530 /* Integer rounding functions are idempotent. */
7531 if (fcode == builtin_mathfn_code (arg))
7532 return arg;
7534 /* If argument is already integer valued, and we don't need to worry
7535 about setting errno, there's no need to perform rounding. */
7536 if (! flag_errno_math && integer_valued_real_p (arg))
7537 return arg;
7539 if (optimize)
7541 tree arg0 = strip_float_extensions (arg);
7542 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7543 tree newtype = TREE_TYPE (arg0);
7544 tree decl;
7546 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7547 && (decl = mathfn_built_in (newtype, fcode)))
7548 return fold_convert_loc (loc, ftype,
7549 build_call_expr_loc (loc, decl, 1,
7550 fold_convert_loc (loc,
7551 newtype,
7552 arg0)));
7554 return NULL_TREE;
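/* For example, floor (floor (x)) collapses to floor (x) by the
   idempotence check above, and when optimizing, floor ((double) f)
   for a float F narrows to (double) floorf (f).  */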
7557 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7558 the argument, for instance lround((double)f) -> lroundf (f).
7559 Do the transformation for a call with argument ARG. */
7561 static tree
7562 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7564 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7566 if (!validate_arg (arg, REAL_TYPE))
7567 return NULL_TREE;
7569 /* If argument is already integer valued, and we don't need to worry
7570 about setting errno, there's no need to perform rounding. */
7571 if (! flag_errno_math && integer_valued_real_p (arg))
7572 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7573 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7575 if (optimize)
7577 tree ftype = TREE_TYPE (arg);
7578 tree arg0 = strip_float_extensions (arg);
7579 tree newtype = TREE_TYPE (arg0);
7580 tree decl;
7582 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7583 && (decl = mathfn_built_in (newtype, fcode)))
7584 return build_call_expr_loc (loc, decl, 1,
7585 fold_convert_loc (loc, newtype, arg0));
7588 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7589 sizeof (int) == sizeof (long). */
7590 if (TYPE_PRECISION (integer_type_node)
7591 == TYPE_PRECISION (long_integer_type_node))
7593 tree newfn = NULL_TREE;
7594 switch (fcode)
7596 CASE_FLT_FN (BUILT_IN_ICEIL):
7597 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7598 break;
7600 CASE_FLT_FN (BUILT_IN_IFLOOR):
7601 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7602 break;
7604 CASE_FLT_FN (BUILT_IN_IROUND):
7605 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7606 break;
7608 CASE_FLT_FN (BUILT_IN_IRINT):
7609 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7610 break;
7612 default:
7613 break;
7616 if (newfn)
7618 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7619 return fold_convert_loc (loc,
7620 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7624 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7625 sizeof (long long) == sizeof (long). */
7626 if (TYPE_PRECISION (long_long_integer_type_node)
7627 == TYPE_PRECISION (long_integer_type_node))
7629 tree newfn = NULL_TREE;
7630 switch (fcode)
7632 CASE_FLT_FN (BUILT_IN_LLCEIL):
7633 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7634 break;
7636 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7637 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7638 break;
7640 CASE_FLT_FN (BUILT_IN_LLROUND):
7641 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7642 break;
7644 CASE_FLT_FN (BUILT_IN_LLRINT):
7645 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7646 break;
7648 default:
7649 break;
7652 if (newfn)
7654 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7655 return fold_convert_loc (loc,
7656 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7660 return NULL_TREE;
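/* For example, when optimizing, lround ((double) f) for a float F
   narrows to lroundf (f), and on an LP64 target llround (x) is
   canonicalized to lround (x) because long and long long have the
   same precision there.  */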
7663 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7664 return type. Return NULL_TREE if no simplification can be made. */
7666 static tree
7667 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7669 tree res;
7671 if (!validate_arg (arg, COMPLEX_TYPE)
7672 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7673 return NULL_TREE;
7675 /* Calculate the result when the argument is a constant. */
7676 if (TREE_CODE (arg) == COMPLEX_CST
7677 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7678 type, mpfr_hypot)))
7679 return res;
7681 if (TREE_CODE (arg) == COMPLEX_EXPR)
7683 tree real = TREE_OPERAND (arg, 0);
7684 tree imag = TREE_OPERAND (arg, 1);
7686 /* If either part is zero, cabs is fabs of the other. */
7687 if (real_zerop (real))
7688 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7689 if (real_zerop (imag))
7690 return fold_build1_loc (loc, ABS_EXPR, type, real);
7692 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7693 if (flag_unsafe_math_optimizations
7694 && operand_equal_p (real, imag, OEP_PURE_SAME))
7696 const REAL_VALUE_TYPE sqrt2_trunc
7697 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7698 STRIP_NOPS (real);
7699 return fold_build2_loc (loc, MULT_EXPR, type,
7700 fold_build1_loc (loc, ABS_EXPR, type, real),
7701 build_real (type, sqrt2_trunc));
7705 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7706 if (TREE_CODE (arg) == NEGATE_EXPR
7707 || TREE_CODE (arg) == CONJ_EXPR)
7708 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7710 /* Don't do this when optimizing for size. */
7711 if (flag_unsafe_math_optimizations
7712 && optimize && optimize_function_for_speed_p (cfun))
7714 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7716 if (sqrtfn != NULL_TREE)
7718 tree rpart, ipart, result;
7720 arg = builtin_save_expr (arg);
7722 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7723 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7725 rpart = builtin_save_expr (rpart);
7726 ipart = builtin_save_expr (ipart);
7728 result = fold_build2_loc (loc, PLUS_EXPR, type,
7729 fold_build2_loc (loc, MULT_EXPR, type,
7730 rpart, rpart),
7731 fold_build2_loc (loc, MULT_EXPR, type,
7732 ipart, ipart));
7734 return build_call_expr_loc (loc, sqrtfn, 1, result);
7738 return NULL_TREE;
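/* For example,

     cabs (x + 0.0i) -> fabs (x)
     cabs (-z)       -> cabs (z)

   and with -funsafe-math-optimizations, when optimizing for speed,
   cabs (z) is expanded to sqrt (r*r + i*i) on the saved real and
   imaginary parts.  */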
7741 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7742 complex tree type of the result. If NEG is true, the imaginary
7743 zero is negative. */
7745 static tree
7746 build_complex_cproj (tree type, bool neg)
7748 REAL_VALUE_TYPE rinf, rzero = dconst0;
7750 real_inf (&rinf);
7751 rzero.sign = neg;
7752 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7753 build_real (TREE_TYPE (type), rzero));
7756 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7757 return type. Return NULL_TREE if no simplification can be made. */
7759 static tree
7760 fold_builtin_cproj (location_t loc, tree arg, tree type)
7762 if (!validate_arg (arg, COMPLEX_TYPE)
7763 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7764 return NULL_TREE;
7766 /* If there are no infinities, return arg. */
7767 if (! HONOR_INFINITIES (type))
7768 return non_lvalue_loc (loc, arg);
7770 /* Calculate the result when the argument is a constant. */
7771 if (TREE_CODE (arg) == COMPLEX_CST)
7773 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7774 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7776 if (real_isinf (real) || real_isinf (imag))
7777 return build_complex_cproj (type, imag->sign);
7778 else
7779 return arg;
7781 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7783 tree real = TREE_OPERAND (arg, 0);
7784 tree imag = TREE_OPERAND (arg, 1);
7786 STRIP_NOPS (real);
7787 STRIP_NOPS (imag);
7789 /* If the real part is inf and the imag part is known to be
7790 nonnegative, return (inf + 0i). Remember side-effects are
7791 possible in the imag part. */
7792 if (TREE_CODE (real) == REAL_CST
7793 && real_isinf (TREE_REAL_CST_PTR (real))
7794 && tree_expr_nonnegative_p (imag))
7795 return omit_one_operand_loc (loc, type,
7796 build_complex_cproj (type, false),
7797 arg);
7799 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7800 Remember side-effects are possible in the real part. */
7801 if (TREE_CODE (imag) == REAL_CST
7802 && real_isinf (TREE_REAL_CST_PTR (imag)))
7803 return
7804 omit_one_operand_loc (loc, type,
7805 build_complex_cproj (type, TREE_REAL_CST_PTR
7806 (imag)->sign), arg);
7809 return NULL_TREE;
7812 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7813 Return NULL_TREE if no simplification can be made. */
7815 static tree
7816 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7819 enum built_in_function fcode;
7820 tree res;
7822 if (!validate_arg (arg, REAL_TYPE))
7823 return NULL_TREE;
7825 /* Calculate the result when the argument is a constant. */
7826 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7827 return res;
7829 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7830 fcode = builtin_mathfn_code (arg);
7831 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7833 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7834 arg = fold_build2_loc (loc, MULT_EXPR, type,
7835 CALL_EXPR_ARG (arg, 0),
7836 build_real (type, dconsthalf));
7837 return build_call_expr_loc (loc, expfn, 1, arg);
7840 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7841 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7843 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7845 if (powfn)
7847 tree arg0 = CALL_EXPR_ARG (arg, 0);
7848 tree tree_root;
7849 /* The inner root was either sqrt or cbrt. */
7850 /* This was a conditional expression but it triggered a bug
7851 in Sun C 5.5. */
7852 REAL_VALUE_TYPE dconstroot;
7853 if (BUILTIN_SQRT_P (fcode))
7854 dconstroot = dconsthalf;
7855 else
7856 dconstroot = dconst_third ();
7858 /* Adjust for the outer root. */
7859 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7860 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7861 tree_root = build_real (type, dconstroot);
7862 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7866 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7867 if (flag_unsafe_math_optimizations
7868 && (fcode == BUILT_IN_POW
7869 || fcode == BUILT_IN_POWF
7870 || fcode == BUILT_IN_POWL))
7872 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7873 tree arg0 = CALL_EXPR_ARG (arg, 0);
7874 tree arg1 = CALL_EXPR_ARG (arg, 1);
7875 tree narg1;
7876 if (!tree_expr_nonnegative_p (arg0))
7877 arg0 = build1 (ABS_EXPR, type, arg0);
7878 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7879 build_real (type, dconsthalf));
7880 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7883 return NULL_TREE;
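/* For example, with -funsafe-math-optimizations the folds above give

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)  */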
7886 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7887 Return NULL_TREE if no simplification can be made. */
7889 static tree
7890 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7892 const enum built_in_function fcode = builtin_mathfn_code (arg);
7893 tree res;
7895 if (!validate_arg (arg, REAL_TYPE))
7896 return NULL_TREE;
7898 /* Calculate the result when the argument is a constant. */
7899 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7900 return res;
7902 if (flag_unsafe_math_optimizations)
7904 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7905 if (BUILTIN_EXPONENT_P (fcode))
7907 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7908 const REAL_VALUE_TYPE third_trunc =
7909 real_value_truncate (TYPE_MODE (type), dconst_third ());
7910 arg = fold_build2_loc (loc, MULT_EXPR, type,
7911 CALL_EXPR_ARG (arg, 0),
7912 build_real (type, third_trunc));
7913 return build_call_expr_loc (loc, expfn, 1, arg);
7916 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7917 if (BUILTIN_SQRT_P (fcode))
7919 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7921 if (powfn)
7923 tree arg0 = CALL_EXPR_ARG (arg, 0);
7924 tree tree_root;
7925 REAL_VALUE_TYPE dconstroot = dconst_third ();
7927 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7928 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7929 tree_root = build_real (type, dconstroot);
7930 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7934 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7935 if (BUILTIN_CBRT_P (fcode))
7937 tree arg0 = CALL_EXPR_ARG (arg, 0);
7938 if (tree_expr_nonnegative_p (arg0))
7940 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7942 if (powfn)
7944 tree tree_root;
7945 REAL_VALUE_TYPE dconstroot;
7947 real_arithmetic (&dconstroot, MULT_EXPR,
7948 dconst_third_ptr (), dconst_third_ptr ());
7949 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7950 tree_root = build_real (type, dconstroot);
7951 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7956 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7957 if (fcode == BUILT_IN_POW
7958 || fcode == BUILT_IN_POWF
7959 || fcode == BUILT_IN_POWL)
7961 tree arg00 = CALL_EXPR_ARG (arg, 0);
7962 tree arg01 = CALL_EXPR_ARG (arg, 1);
7963 if (tree_expr_nonnegative_p (arg00))
7965 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7966 const REAL_VALUE_TYPE dconstroot
7967 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7968 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7969 build_real (type, dconstroot));
7970 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7974 return NULL_TREE;
7977 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7978 TYPE is the type of the return value. Return NULL_TREE if no
7979 simplification can be made. */
7981 static tree
7982 fold_builtin_cos (location_t loc,
7983 tree arg, tree type, tree fndecl)
7985 tree res, narg;
7987 if (!validate_arg (arg, REAL_TYPE))
7988 return NULL_TREE;
7990 /* Calculate the result when the argument is a constant. */
7991 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7992 return res;
7994 /* Optimize cos(-x) into cos (x). */
7995 if ((narg = fold_strip_sign_ops (arg)))
7996 return build_call_expr_loc (loc, fndecl, 1, narg);
7998 return NULL_TREE;
8001 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
8002 Return NULL_TREE if no simplification can be made. */
8004 static tree
8005 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
8007 if (validate_arg (arg, REAL_TYPE))
8009 tree res, narg;
8011 /* Calculate the result when the argument is a constant. */
8012 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
8013 return res;
8015 /* Optimize cosh(-x) into cosh (x). */
8016 if ((narg = fold_strip_sign_ops (arg)))
8017 return build_call_expr_loc (loc, fndecl, 1, narg);
8020 return NULL_TREE;
8023 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8024 argument ARG. TYPE is the type of the return value. Return
8025 NULL_TREE if no simplification can be made. */
8027 static tree
8028 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8029 bool hyper)
8031 if (validate_arg (arg, COMPLEX_TYPE)
8032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8034 tree tmp;
8036 /* Calculate the result when the argument is a constant. */
8037 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8038 return tmp;
8040 /* Optimize fn(-x) into fn(x). */
8041 if ((tmp = fold_strip_sign_ops (arg)))
8042 return build_call_expr_loc (loc, fndecl, 1, tmp);
8045 return NULL_TREE;
8048 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8049 Return NULL_TREE if no simplification can be made. */
8051 static tree
8052 fold_builtin_tan (tree arg, tree type)
8054 enum built_in_function fcode;
8055 tree res;
8057 if (!validate_arg (arg, REAL_TYPE))
8058 return NULL_TREE;
8060 /* Calculate the result when the argument is a constant. */
8061 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8062 return res;
8064 /* Optimize tan(atan(x)) = x. */
8065 fcode = builtin_mathfn_code (arg);
8066 if (flag_unsafe_math_optimizations
8067 && (fcode == BUILT_IN_ATAN
8068 || fcode == BUILT_IN_ATANF
8069 || fcode == BUILT_IN_ATANL))
8070 return CALL_EXPR_ARG (arg, 0);
8072 return NULL_TREE;
8075 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8076 NULL_TREE if no simplification can be made. */
8078 static tree
8079 fold_builtin_sincos (location_t loc,
8080 tree arg0, tree arg1, tree arg2)
8082 tree type;
8083 tree res, fn, call;
8085 if (!validate_arg (arg0, REAL_TYPE)
8086 || !validate_arg (arg1, POINTER_TYPE)
8087 || !validate_arg (arg2, POINTER_TYPE))
8088 return NULL_TREE;
8090 type = TREE_TYPE (arg0);
8092 /* Calculate the result when the argument is a constant. */
8093 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8094 return res;
8096 /* Canonicalize sincos to cexpi. */
8097 if (!targetm.libc_has_function (function_c99_math_complex))
8098 return NULL_TREE;
8099 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8100 if (!fn)
8101 return NULL_TREE;
8103 call = build_call_expr_loc (loc, fn, 1, arg0);
8104 call = builtin_save_expr (call);
8106 return build2 (COMPOUND_EXPR, void_type_node,
8107 build2 (MODIFY_EXPR, void_type_node,
8108 build_fold_indirect_ref_loc (loc, arg1),
8109 build1 (IMAGPART_EXPR, type, call)),
8110 build2 (MODIFY_EXPR, void_type_node,
8111 build_fold_indirect_ref_loc (loc, arg2),
8112 build1 (REALPART_EXPR, type, call)));
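/* For example, when the C library provides the C99 complex functions,
   sincos (x, &s, &c) is canonicalized to the equivalent of

     tmp = cexpi (x); s = __imag tmp; c = __real tmp;

   so later passes need to handle only the cexpi form.  */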
8115 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8116 NULL_TREE if no simplification can be made. */
8118 static tree
8119 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8121 tree rtype;
8122 tree realp, imagp, ifn;
8123 tree res;
8125 if (!validate_arg (arg0, COMPLEX_TYPE)
8126 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8127 return NULL_TREE;
8129 /* Calculate the result when the argument is a constant. */
8130 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8131 return res;
8133 rtype = TREE_TYPE (TREE_TYPE (arg0));
8135 /* If we can figure out the real part of arg0 and it is constant zero,
8136 fold to cexpi. */
8137 if (!targetm.libc_has_function (function_c99_math_complex))
8138 return NULL_TREE;
8139 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8140 if (!ifn)
8141 return NULL_TREE;
8143 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8144 && real_zerop (realp))
8146 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8147 return build_call_expr_loc (loc, ifn, 1, narg);
8150 /* If we can easily decompose the real and imaginary parts, split cexp
8151 into exp (r) * cexpi (i). */
8152 if (flag_unsafe_math_optimizations
8153 && realp)
8155 tree rfn, rcall, icall;
8157 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8158 if (!rfn)
8159 return NULL_TREE;
8161 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8162 if (!imagp)
8163 return NULL_TREE;
8165 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8166 icall = builtin_save_expr (icall);
8167 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8168 rcall = builtin_save_expr (rcall);
8169 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8170 fold_build2_loc (loc, MULT_EXPR, rtype,
8171 rcall,
8172 fold_build1_loc (loc, REALPART_EXPR,
8173 rtype, icall)),
8174 fold_build2_loc (loc, MULT_EXPR, rtype,
8175 rcall,
8176 fold_build1_loc (loc, IMAGPART_EXPR,
8177 rtype, icall)));
8180 return NULL_TREE;
8183 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8184 Return NULL_TREE if no simplification can be made. */
8186 static tree
8187 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8189 if (!validate_arg (arg, REAL_TYPE))
8190 return NULL_TREE;
8192 /* Optimize trunc of constant value. */
8193 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8195 REAL_VALUE_TYPE r, x;
8196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8198 x = TREE_REAL_CST (arg);
8199 real_trunc (&r, TYPE_MODE (type), &x);
8200 return build_real (type, r);
8203 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8206 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8207 Return NULL_TREE if no simplification can be made. */
8209 static tree
8210 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8212 if (!validate_arg (arg, REAL_TYPE))
8213 return NULL_TREE;
8215 /* Optimize floor of constant value. */
8216 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8218 REAL_VALUE_TYPE x;
8220 x = TREE_REAL_CST (arg);
8221 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8223 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8224 REAL_VALUE_TYPE r;
8226 real_floor (&r, TYPE_MODE (type), &x);
8227 return build_real (type, r);
8231 /* Fold floor (x) where x is nonnegative to trunc (x). */
8232 if (tree_expr_nonnegative_p (arg))
8234 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8235 if (truncfn)
8236 return build_call_expr_loc (loc, truncfn, 1, arg);
8239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8242 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8243 Return NULL_TREE if no simplification can be made. */
8245 static tree
8246 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8248 if (!validate_arg (arg, REAL_TYPE))
8249 return NULL_TREE;
8251 /* Optimize ceil of constant value. */
8252 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8254 REAL_VALUE_TYPE x;
8256 x = TREE_REAL_CST (arg);
8257 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8260 REAL_VALUE_TYPE r;
8262 real_ceil (&r, TYPE_MODE (type), &x);
8263 return build_real (type, r);
8267 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8270 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8271 Return NULL_TREE if no simplification can be made. */
8273 static tree
8274 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8276 if (!validate_arg (arg, REAL_TYPE))
8277 return NULL_TREE;
8279 /* Optimize round of constant value. */
8280 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8282 REAL_VALUE_TYPE x;
8284 x = TREE_REAL_CST (arg);
8285 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8287 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8288 REAL_VALUE_TYPE r;
8290 real_round (&r, TYPE_MODE (type), &x);
8291 return build_real (type, r);
8295 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8298 /* Fold function call to builtin lround, lroundf or lroundl (or the
8299 corresponding long long versions) and other rounding functions. ARG
8300 is the argument to the call. Return NULL_TREE if no simplification
8301 can be made. */
8303 static tree
8304 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8306 if (!validate_arg (arg, REAL_TYPE))
8307 return NULL_TREE;
8309 /* Optimize lround of constant value. */
8310 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8312 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8314 if (real_isfinite (&x))
8316 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8317 tree ftype = TREE_TYPE (arg);
8318 REAL_VALUE_TYPE r;
8319 bool fail = false;
8321 switch (DECL_FUNCTION_CODE (fndecl))
8323 CASE_FLT_FN (BUILT_IN_IFLOOR):
8324 CASE_FLT_FN (BUILT_IN_LFLOOR):
8325 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8326 real_floor (&r, TYPE_MODE (ftype), &x);
8327 break;
8329 CASE_FLT_FN (BUILT_IN_ICEIL):
8330 CASE_FLT_FN (BUILT_IN_LCEIL):
8331 CASE_FLT_FN (BUILT_IN_LLCEIL):
8332 real_ceil (&r, TYPE_MODE (ftype), &x);
8333 break;
8335 CASE_FLT_FN (BUILT_IN_IROUND):
8336 CASE_FLT_FN (BUILT_IN_LROUND):
8337 CASE_FLT_FN (BUILT_IN_LLROUND):
8338 real_round (&r, TYPE_MODE (ftype), &x);
8339 break;
8341 default:
8342 gcc_unreachable ();
8345 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8346 if (!fail)
8347 return wide_int_to_tree (itype, val);
8351 switch (DECL_FUNCTION_CODE (fndecl))
8353 CASE_FLT_FN (BUILT_IN_LFLOOR):
8354 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8355 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8356 if (tree_expr_nonnegative_p (arg))
8357 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8358 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8359 break;
8360 default:;
8363 return fold_fixed_mathfn (loc, fndecl, arg);
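/* For example, lround (2.5) folds to 3 at compile time (real_round
   rounds halfway cases away from zero), and lfloor (x) for
   nonnegative X becomes a plain FIX_TRUNC_EXPR conversion.  */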
8366 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8367 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8368 the argument to the call. Return NULL_TREE if no simplification can
8369 be made. */
8371 static tree
8372 fold_builtin_bitop (tree fndecl, tree arg)
8374 if (!validate_arg (arg, INTEGER_TYPE))
8375 return NULL_TREE;
8377 /* Optimize for constant argument. */
8378 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8380 tree type = TREE_TYPE (arg);
8381 int result;
8383 switch (DECL_FUNCTION_CODE (fndecl))
8385 CASE_INT_FN (BUILT_IN_FFS):
8386 result = wi::ffs (arg);
8387 break;
8389 CASE_INT_FN (BUILT_IN_CLZ):
8390 if (wi::ne_p (arg, 0))
8391 result = wi::clz (arg);
8392 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8393 result = TYPE_PRECISION (type);
8394 break;
8396 CASE_INT_FN (BUILT_IN_CTZ):
8397 if (wi::ne_p (arg, 0))
8398 result = wi::ctz (arg);
8399 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8400 result = TYPE_PRECISION (type);
8401 break;
8403 CASE_INT_FN (BUILT_IN_CLRSB):
8404 result = wi::clrsb (arg);
8405 break;
8407 CASE_INT_FN (BUILT_IN_POPCOUNT):
8408 result = wi::popcount (arg);
8409 break;
8411 CASE_INT_FN (BUILT_IN_PARITY):
8412 result = wi::parity (arg);
8413 break;
8415 default:
8416 gcc_unreachable ();
8419 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8422 return NULL_TREE;
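/* For example, with constant arguments,

     __builtin_ffs (8)         -> 4
     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1
     __builtin_clz (1)         -> TYPE_PRECISION (int) - 1, i.e. 31
                                  for a 32-bit int  */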
8425 /* Fold function call to builtin_bswap and the short, long and long long
8426 variants. Return NULL_TREE if no simplification can be made. */
8427 static tree
8428 fold_builtin_bswap (tree fndecl, tree arg)
8430 if (! validate_arg (arg, INTEGER_TYPE))
8431 return NULL_TREE;
8433 /* Optimize constant value. */
8434 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8436 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8438 switch (DECL_FUNCTION_CODE (fndecl))
8440 case BUILT_IN_BSWAP16:
8441 case BUILT_IN_BSWAP32:
8442 case BUILT_IN_BSWAP64:
8444 signop sgn = TYPE_SIGN (type);
8445 tree result =
8446 wide_int_to_tree (type,
8447 wide_int::from (arg, TYPE_PRECISION (type),
8448 sgn).bswap ());
8449 return result;
8451 default:
8452 gcc_unreachable ();
8456 return NULL_TREE;
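/* For example, __builtin_bswap32 (0x12345678) folds to 0x78563412
   and __builtin_bswap16 (0x1234) folds to 0x3412.  */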
8459 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8460 NULL_TREE if no simplification can be made. */
8462 static tree
8463 fold_builtin_hypot (location_t loc, tree fndecl,
8464 tree arg0, tree arg1, tree type)
8466 tree res, narg0, narg1;
8468 if (!validate_arg (arg0, REAL_TYPE)
8469 || !validate_arg (arg1, REAL_TYPE))
8470 return NULL_TREE;
8472 /* Calculate the result when the argument is a constant. */
8473 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8474 return res;
8476 /* If either argument to hypot has a negate or abs, strip that off.
8477 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8478 narg0 = fold_strip_sign_ops (arg0);
8479 narg1 = fold_strip_sign_ops (arg1);
8480 if (narg0 || narg1)
8482 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8483 narg1 ? narg1 : arg1);
8486 /* If either argument is zero, hypot is fabs of the other. */
8487 if (real_zerop (arg0))
8488 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8489 else if (real_zerop (arg1))
8490 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8492 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8493 if (flag_unsafe_math_optimizations
8494 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8496 const REAL_VALUE_TYPE sqrt2_trunc
8497 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8498 return fold_build2_loc (loc, MULT_EXPR, type,
8499 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8500 build_real (type, sqrt2_trunc));
8503 return NULL_TREE;
8507 /* Fold a builtin function call to pow, powf, or powl. Return
8508 NULL_TREE if no simplification can be made. */
8509 static tree
8510 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8512 tree res;
8514 if (!validate_arg (arg0, REAL_TYPE)
8515 || !validate_arg (arg1, REAL_TYPE))
8516 return NULL_TREE;
8518 /* Calculate the result when the argument is a constant. */
8519 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8520 return res;
8522 /* Optimize pow(1.0,y) = 1.0. */
8523 if (real_onep (arg0))
8524 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8526 if (TREE_CODE (arg1) == REAL_CST
8527 && !TREE_OVERFLOW (arg1))
8529 REAL_VALUE_TYPE cint;
8530 REAL_VALUE_TYPE c;
8531 HOST_WIDE_INT n;
8533 c = TREE_REAL_CST (arg1);
8535 /* Optimize pow(x,0.0) = 1.0. */
8536 if (REAL_VALUES_EQUAL (c, dconst0))
8537 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8538 arg0);
8540 /* Optimize pow(x,1.0) = x. */
8541 if (REAL_VALUES_EQUAL (c, dconst1))
8542 return arg0;
8544 /* Optimize pow(x,-1.0) = 1.0/x. */
8545 if (REAL_VALUES_EQUAL (c, dconstm1))
8546 return fold_build2_loc (loc, RDIV_EXPR, type,
8547 build_real (type, dconst1), arg0);
8549 /* Optimize pow(x,0.5) = sqrt(x). */
8550 if (flag_unsafe_math_optimizations
8551 && REAL_VALUES_EQUAL (c, dconsthalf))
8553 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8555 if (sqrtfn != NULL_TREE)
8556 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8559 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8560 if (flag_unsafe_math_optimizations)
8562 const REAL_VALUE_TYPE dconstroot
8563 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8565 if (REAL_VALUES_EQUAL (c, dconstroot))
8567 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8568 if (cbrtfn != NULL_TREE)
8569 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8573 /* Check for an integer exponent. */
8574 n = real_to_integer (&c);
8575 real_from_integer (&cint, VOIDmode, n, SIGNED);
8576 if (real_identical (&c, &cint))
8578 /* Attempt to evaluate pow at compile-time, unless this should
8579 raise an exception. */
8580 if (TREE_CODE (arg0) == REAL_CST
8581 && !TREE_OVERFLOW (arg0)
8582 && (n > 0
8583 || (!flag_trapping_math && !flag_errno_math)
8584 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8586 REAL_VALUE_TYPE x;
8587 bool inexact;
8589 x = TREE_REAL_CST (arg0);
8590 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8591 if (flag_unsafe_math_optimizations || !inexact)
8592 return build_real (type, x);
8595 /* Strip sign ops from even integer powers. */
8596 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8598 tree narg0 = fold_strip_sign_ops (arg0);
8599 if (narg0)
8600 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8605 if (flag_unsafe_math_optimizations)
8607 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8609 /* Optimize pow(expN(x),y) = expN(x*y). */
8610 if (BUILTIN_EXPONENT_P (fcode))
8612 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8613 tree arg = CALL_EXPR_ARG (arg0, 0);
8614 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8615 return build_call_expr_loc (loc, expfn, 1, arg);
8618 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8619 if (BUILTIN_SQRT_P (fcode))
8621 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8622 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8623 build_real (type, dconsthalf));
8624 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8627 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8628 if (BUILTIN_CBRT_P (fcode))
8630 tree arg = CALL_EXPR_ARG (arg0, 0);
8631 if (tree_expr_nonnegative_p (arg))
8633 const REAL_VALUE_TYPE dconstroot
8634 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8635 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8636 build_real (type, dconstroot));
8637 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8641 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8642 if (fcode == BUILT_IN_POW
8643 || fcode == BUILT_IN_POWF
8644 || fcode == BUILT_IN_POWL)
8646 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8647 if (tree_expr_nonnegative_p (arg00))
8649 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8650 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8651 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8656 return NULL_TREE;
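/* For example,

     pow (x, 1.0)      -> x
     pow (x, -1.0)     -> 1.0 / x
     pow (x, 0.5)      -> sqrt (x)           (-funsafe-math-optimizations)
     pow (sqrt (x), y) -> pow (x, y * 0.5)   (likewise)  */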
8659 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8660 Return NULL_TREE if no simplification can be made. */
8661 static tree
8662 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8663 tree arg0, tree arg1, tree type)
8665 if (!validate_arg (arg0, REAL_TYPE)
8666 || !validate_arg (arg1, INTEGER_TYPE))
8667 return NULL_TREE;
8669 /* Optimize pow(1.0,y) = 1.0. */
8670 if (real_onep (arg0))
8671 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8673 if (tree_fits_shwi_p (arg1))
8675 HOST_WIDE_INT c = tree_to_shwi (arg1);
8677 /* Evaluate powi at compile-time. */
8678 if (TREE_CODE (arg0) == REAL_CST
8679 && !TREE_OVERFLOW (arg0))
8681 REAL_VALUE_TYPE x;
8682 x = TREE_REAL_CST (arg0);
8683 real_powi (&x, TYPE_MODE (type), &x, c);
8684 return build_real (type, x);
8687 /* Optimize pow(x,0) = 1.0. */
8688 if (c == 0)
8689 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8690 arg0);
8692 /* Optimize pow(x,1) = x. */
8693 if (c == 1)
8694 return arg0;
8696 /* Optimize pow(x,-1) = 1.0/x. */
8697 if (c == -1)
8698 return fold_build2_loc (loc, RDIV_EXPR, type,
8699 build_real (type, dconst1), arg0);
8702 return NULL_TREE;
8705 /* A subroutine of fold_builtin to fold the various exponent
8706 functions. Return NULL_TREE if no simplification can be made.
8707 FUNC is the corresponding MPFR exponent function. */
8709 static tree
8710 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8711 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8713 if (validate_arg (arg, REAL_TYPE))
8715 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8716 tree res;
8718 /* Calculate the result when the argument is a constant. */
8719 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8720 return res;
8722 /* Optimize expN(logN(x)) = x. */
8723 if (flag_unsafe_math_optimizations)
8725 const enum built_in_function fcode = builtin_mathfn_code (arg);
8727 if ((func == mpfr_exp
8728 && (fcode == BUILT_IN_LOG
8729 || fcode == BUILT_IN_LOGF
8730 || fcode == BUILT_IN_LOGL))
8731 || (func == mpfr_exp2
8732 && (fcode == BUILT_IN_LOG2
8733 || fcode == BUILT_IN_LOG2F
8734 || fcode == BUILT_IN_LOG2L))
8735 || (func == mpfr_exp10
8736 && (fcode == BUILT_IN_LOG10
8737 || fcode == BUILT_IN_LOG10F
8738 || fcode == BUILT_IN_LOG10L)))
8739 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8743 return NULL_TREE;
8746 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8747 arguments to the call, and TYPE is its return type.
8748 Return NULL_TREE if no simplification can be made. */
8750 static tree
8751 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8753 if (!validate_arg (arg1, POINTER_TYPE)
8754 || !validate_arg (arg2, INTEGER_TYPE)
8755 || !validate_arg (len, INTEGER_TYPE))
8756 return NULL_TREE;
8757 else
8759 const char *p1;
8761 if (TREE_CODE (arg2) != INTEGER_CST
8762 || !tree_fits_uhwi_p (len))
8763 return NULL_TREE;
8765 p1 = c_getstr (arg1);
8766 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8768 char c;
8769 const char *r;
8770 tree tem;
8772 if (target_char_cast (arg2, &c))
8773 return NULL_TREE;
8775 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8777 if (r == NULL)
8778 return build_int_cst (TREE_TYPE (arg1), 0);
8780 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8781 return fold_convert_loc (loc, type, tem);
8783 return NULL_TREE;
8787 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8788 Return NULL_TREE if no simplification can be made. */
8790 static tree
8791 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8793 const char *p1, *p2;
8795 if (!validate_arg (arg1, POINTER_TYPE)
8796 || !validate_arg (arg2, POINTER_TYPE)
8797 || !validate_arg (len, INTEGER_TYPE))
8798 return NULL_TREE;
8800 /* If the LEN parameter is zero, return zero. */
8801 if (integer_zerop (len))
8802 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8803 arg1, arg2);
8805 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8806 if (operand_equal_p (arg1, arg2, 0))
8807 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8809 p1 = c_getstr (arg1);
8810 p2 = c_getstr (arg2);
8812 /* If all arguments are constant, and the value of len is not greater
8813 than the lengths of arg1 and arg2, evaluate at compile-time. */
8814 if (tree_fits_uhwi_p (len) && p1 && p2
8815 && compare_tree_int (len, strlen (p1) + 1) <= 0
8816 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8818 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8820 if (r > 0)
8821 return integer_one_node;
8822 else if (r < 0)
8823 return integer_minus_one_node;
8824 else
8825 return integer_zero_node;
8828 /* If the len parameter is one, return an expression corresponding to
8829 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8830 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8832 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8833 tree cst_uchar_ptr_node
8834 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8836 tree ind1
8837 = fold_convert_loc (loc, integer_type_node,
8838 build1 (INDIRECT_REF, cst_uchar_node,
8839 fold_convert_loc (loc,
8840 cst_uchar_ptr_node,
8841 arg1)));
8842 tree ind2
8843 = fold_convert_loc (loc, integer_type_node,
8844 build1 (INDIRECT_REF, cst_uchar_node,
8845 fold_convert_loc (loc,
8846 cst_uchar_ptr_node,
8847 arg2)));
8848 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8851 return NULL_TREE;
8854 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8855 Return NULL_TREE if no simplification can be made. */
8857 static tree
8858 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8860 const char *p1, *p2;
8862 if (!validate_arg (arg1, POINTER_TYPE)
8863 || !validate_arg (arg2, POINTER_TYPE))
8864 return NULL_TREE;
8866 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8867 if (operand_equal_p (arg1, arg2, 0))
8868 return integer_zero_node;
8870 p1 = c_getstr (arg1);
8871 p2 = c_getstr (arg2);
8873 if (p1 && p2)
8875 const int i = strcmp (p1, p2);
8876 if (i < 0)
8877 return integer_minus_one_node;
8878 else if (i > 0)
8879 return integer_one_node;
8880 else
8881 return integer_zero_node;
8884 /* If the second arg is "", return *(const unsigned char*)arg1. */
8885 if (p2 && *p2 == '\0')
8887 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8888 tree cst_uchar_ptr_node
8889 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8891 return fold_convert_loc (loc, integer_type_node,
8892 build1 (INDIRECT_REF, cst_uchar_node,
8893 fold_convert_loc (loc,
8894 cst_uchar_ptr_node,
8895 arg1)));
8898 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8899 if (p1 && *p1 == '\0')
8901 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8902 tree cst_uchar_ptr_node
8903 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8905 tree temp
8906 = fold_convert_loc (loc, integer_type_node,
8907 build1 (INDIRECT_REF, cst_uchar_node,
8908 fold_convert_loc (loc,
8909 cst_uchar_ptr_node,
8910 arg2)));
8911 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8914 return NULL_TREE;
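/* For example, strcmp ("abc", "abd") folds to -1 at compile time, and
   strcmp (s, "") folds to *(const unsigned char *) s.  */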
8917 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8918 Return NULL_TREE if no simplification can be made. */
8920 static tree
8921 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8923 const char *p1, *p2;
8925 if (!validate_arg (arg1, POINTER_TYPE)
8926 || !validate_arg (arg2, POINTER_TYPE)
8927 || !validate_arg (len, INTEGER_TYPE))
8928 return NULL_TREE;
8930 /* If the LEN parameter is zero, return zero. */
8931 if (integer_zerop (len))
8932 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8933 arg1, arg2);
8935 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8936 if (operand_equal_p (arg1, arg2, 0))
8937 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8939 p1 = c_getstr (arg1);
8940 p2 = c_getstr (arg2);
8942 if (tree_fits_uhwi_p (len) && p1 && p2)
8944 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8945 if (i > 0)
8946 return integer_one_node;
8947 else if (i < 0)
8948 return integer_minus_one_node;
8949 else
8950 return integer_zero_node;
8953 /* If the second arg is "", and the length is greater than zero,
8954 return *(const unsigned char*)arg1. */
8955 if (p2 && *p2 == '\0'
8956 && TREE_CODE (len) == INTEGER_CST
8957 && tree_int_cst_sgn (len) == 1)
8959 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8960 tree cst_uchar_ptr_node
8961 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8963 return fold_convert_loc (loc, integer_type_node,
8964 build1 (INDIRECT_REF, cst_uchar_node,
8965 fold_convert_loc (loc,
8966 cst_uchar_ptr_node,
8967 arg1)));
8970 /* If the first arg is "", and the length is greater than zero,
8971 return -*(const unsigned char*)arg2. */
8972 if (p1 && *p1 == '\0'
8973 && TREE_CODE (len) == INTEGER_CST
8974 && tree_int_cst_sgn (len) == 1)
8976 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8977 tree cst_uchar_ptr_node
8978 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8980 tree temp = fold_convert_loc (loc, integer_type_node,
8981 build1 (INDIRECT_REF, cst_uchar_node,
8982 fold_convert_loc (loc,
8983 cst_uchar_ptr_node,
8984 arg2)));
8985 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8988 /* If the len parameter is one, return an expression corresponding to
8989 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8990 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8992 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8993 tree cst_uchar_ptr_node
8994 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8996 tree ind1 = fold_convert_loc (loc, integer_type_node,
8997 build1 (INDIRECT_REF, cst_uchar_node,
8998 fold_convert_loc (loc,
8999 cst_uchar_ptr_node,
9000 arg1)));
9001 tree ind2 = fold_convert_loc (loc, integer_type_node,
9002 build1 (INDIRECT_REF, cst_uchar_node,
9003 fold_convert_loc (loc,
9004 cst_uchar_ptr_node,
9005 arg2)));
9006 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9009 return NULL_TREE;
9012 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9013 ARG. Return NULL_TREE if no simplification can be made. */
9015 static tree
9016 fold_builtin_signbit (location_t loc, tree arg, tree type)
9018 if (!validate_arg (arg, REAL_TYPE))
9019 return NULL_TREE;
9021 /* If ARG is a compile-time constant, determine the result. */
9022 if (TREE_CODE (arg) == REAL_CST
9023 && !TREE_OVERFLOW (arg))
9025 REAL_VALUE_TYPE c;
9027 c = TREE_REAL_CST (arg);
9028 return (REAL_VALUE_NEGATIVE (c)
9029 ? build_one_cst (type)
9030 : build_zero_cst (type));
9033 /* If ARG is non-negative, the result is always zero. */
9034 if (tree_expr_nonnegative_p (arg))
9035 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9037 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9038 if (!HONOR_SIGNED_ZEROS (arg))
9039 return fold_convert (type,
9040 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9041 build_real (TREE_TYPE (arg), dconst0)));
9043 return NULL_TREE;
9046 /* Fold function call to builtin copysign, copysignf or copysignl with
9047 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9048 be made. */
9050 static tree
9051 fold_builtin_copysign (location_t loc, tree fndecl,
9052 tree arg1, tree arg2, tree type)
9054 tree tem;
9056 if (!validate_arg (arg1, REAL_TYPE)
9057 || !validate_arg (arg2, REAL_TYPE))
9058 return NULL_TREE;
9060 /* copysign(X,X) is X. */
9061 if (operand_equal_p (arg1, arg2, 0))
9062 return fold_convert_loc (loc, type, arg1);
9064 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9065 if (TREE_CODE (arg1) == REAL_CST
9066 && TREE_CODE (arg2) == REAL_CST
9067 && !TREE_OVERFLOW (arg1)
9068 && !TREE_OVERFLOW (arg2))
9070 REAL_VALUE_TYPE c1, c2;
9072 c1 = TREE_REAL_CST (arg1);
9073 c2 = TREE_REAL_CST (arg2);
9074 /* c1.sign := c2.sign. */
9075 real_copysign (&c1, &c2);
9076 return build_real (type, c1);
9079 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9080 Remember to evaluate Y for side-effects. */
9081 if (tree_expr_nonnegative_p (arg2))
9082 return omit_one_operand_loc (loc, type,
9083 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9084 arg2);
9086 /* Strip sign changing operations for the first argument. */
9087 tem = fold_strip_sign_ops (arg1);
9088 if (tem)
9089 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9091 return NULL_TREE;
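/* Editor's sketch -- not part of builtins.c.  The copysign identities
   used above, checked against a C99 libm (assumes IEEE signed zeros).  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  assert (copysign (-3.0, -3.0) == -3.0);	/* copysign(X,X) == X */
  assert (copysign (-3.0, 2.0) == fabs (-3.0)); /* nonnegative Y -> fabs(X) */
  assert (copysign (3.0, -0.0) == -3.0);	/* the sign of -0.0 is copied */
  return 0;
}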
9094 /* Fold a call to builtin isascii with argument ARG. */
9096 static tree
9097 fold_builtin_isascii (location_t loc, tree arg)
9099 if (!validate_arg (arg, INTEGER_TYPE))
9100 return NULL_TREE;
9101 else
9103 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9104 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9105 build_int_cst (integer_type_node,
9106 ~ (unsigned HOST_WIDE_INT) 0x7f));
9107 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9108 arg, integer_zero_node);
9112 /* Fold a call to builtin toascii with argument ARG. */
9114 static tree
9115 fold_builtin_toascii (location_t loc, tree arg)
9117 if (!validate_arg (arg, INTEGER_TYPE))
9118 return NULL_TREE;
9120 /* Transform toascii(c) -> (c & 0x7f). */
9121 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9122 build_int_cst (integer_type_node, 0x7f));
9125 /* Fold a call to builtin isdigit with argument ARG. */
9127 static tree
9128 fold_builtin_isdigit (location_t loc, tree arg)
9130 if (!validate_arg (arg, INTEGER_TYPE))
9131 return NULL_TREE;
9132 else
9134 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9135 /* According to the C standard, isdigit is unaffected by locale.
9136 However, it definitely is affected by the target character set. */
9137 unsigned HOST_WIDE_INT target_digit0
9138 = lang_hooks.to_target_charset ('0');
9140 if (target_digit0 == 0)
9141 return NULL_TREE;
9143 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9144 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9145 build_int_cst (unsigned_type_node, target_digit0));
9146 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9147 build_int_cst (unsigned_type_node, 9));
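/* Editor's sketch -- not part of builtins.c.  The three character-class
   folds above, verified exhaustively over the usual argument range for
   an ASCII execution character set.  */
#include <assert.h>
int
main (void)
{
  for (int c = -1; c < 256; c++)
    {
      /* isascii(c) -> ((c & ~0x7f) == 0) */
      assert (((c & ~0x7f) == 0) == (c >= 0 && c <= 0x7f));
      /* isdigit(c) -> (unsigned)(c) - '0' <= 9 */
      assert (((unsigned) c - '0' <= 9) == (c >= '0' && c <= '9'));
    }
  /* toascii(c) -> (c & 0x7f) */
  assert ((('A' + 0x80) & 0x7f) == 'A');
  return 0;
}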
9151 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9153 static tree
9154 fold_builtin_fabs (location_t loc, tree arg, tree type)
9156 if (!validate_arg (arg, REAL_TYPE))
9157 return NULL_TREE;
9159 arg = fold_convert_loc (loc, type, arg);
9160 if (TREE_CODE (arg) == REAL_CST)
9161 return fold_abs_const (arg, type);
9162 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9165 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9167 static tree
9168 fold_builtin_abs (location_t loc, tree arg, tree type)
9170 if (!validate_arg (arg, INTEGER_TYPE))
9171 return NULL_TREE;
9173 arg = fold_convert_loc (loc, type, arg);
9174 if (TREE_CODE (arg) == INTEGER_CST)
9175 return fold_abs_const (arg, type);
9176 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9179 /* Fold a fma operation with arguments ARG[012]. */
9181 tree
9182 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9183 tree type, tree arg0, tree arg1, tree arg2)
9185 if (TREE_CODE (arg0) == REAL_CST
9186 && TREE_CODE (arg1) == REAL_CST
9187 && TREE_CODE (arg2) == REAL_CST)
9188 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9190 return NULL_TREE;
9193 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9195 static tree
9196 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9198 if (validate_arg (arg0, REAL_TYPE)
9199 && validate_arg (arg1, REAL_TYPE)
9200 && validate_arg (arg2, REAL_TYPE))
9202 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9203 if (tem)
9204 return tem;
9206 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9207 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9208 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9210 return NULL_TREE;
9213 /* Fold a call to builtin fmin or fmax. */
9215 static tree
9216 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9217 tree type, bool max)
9219 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9221 /* Calculate the result when the argument is a constant. */
9222 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9224 if (res)
9225 return res;
9227 /* If either argument is NaN, return the other one. Avoid the
9228 transformation if we get (and honor) a signalling NaN. Using
9229 omit_one_operand() ensures we create a non-lvalue. */
9230 if (TREE_CODE (arg0) == REAL_CST
9231 && real_isnan (&TREE_REAL_CST (arg0))
9232 && (! HONOR_SNANS (arg0)
9233 || ! TREE_REAL_CST (arg0).signalling))
9234 return omit_one_operand_loc (loc, type, arg1, arg0);
9235 if (TREE_CODE (arg1) == REAL_CST
9236 && real_isnan (&TREE_REAL_CST (arg1))
9237 && (! HONOR_SNANS (arg1)
9238 || ! TREE_REAL_CST (arg1).signalling))
9239 return omit_one_operand_loc (loc, type, arg0, arg1);
9241 /* Transform fmin/fmax(x,x) -> x. */
9242 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9243 return omit_one_operand_loc (loc, type, arg0, arg1);
9245 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9246 functions to return the numeric arg if the other one is NaN.
9247 These tree codes don't honor that, so only transform if
9248 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9249 handled, so we don't have to worry about it either. */
9250 if (flag_finite_math_only)
9251 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9252 fold_convert_loc (loc, type, arg0),
9253 fold_convert_loc (loc, type, arg1));
9255 return NULL_TREE;
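/* Editor's sketch -- not part of builtins.c.  Why the MIN_EXPR/MAX_EXPR
   rewrite above needs -ffinite-math-only: the naive conditional form
   loses the C99 "return the numeric argument" NaN behaviour depending
   on operand order.  Assumes a quiet-NaN-capable libm.  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  double x = 2.0, qnan = nan ("");
  /* C99: if one argument is a NaN, fmin/fmax return the other one.  */
  assert (fmax (qnan, x) == x && fmin (x, qnan) == x);
  /* "a > b ? a : b" happens to match here...  */
  assert ((qnan > x ? qnan : x) == x);
  /* ...but returns the NaN when the operands are swapped.  */
  assert (isnan (x > qnan ? x : qnan));
  return 0;
}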
9258 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9260 static tree
9261 fold_builtin_carg (location_t loc, tree arg, tree type)
9263 if (validate_arg (arg, COMPLEX_TYPE)
9264 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9266 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9268 if (atan2_fn)
9270 tree new_arg = builtin_save_expr (arg);
9271 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9272 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9273 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9277 return NULL_TREE;
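/* Editor's sketch -- not part of builtins.c.  The carg -> atan2 rewrite
   above, checked numerically (libm results may differ in the last ulp,
   hence the tolerance).  */
#include <assert.h>
#include <complex.h>
#include <math.h>
int
main (void)
{
  double complex z = -1.0 + 1.0 * I;
  /* carg(a+bi) == atan2(b, a) */
  assert (fabs (carg (z) - atan2 (cimag (z), creal (z))) < 1e-12);
  return 0;
}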
9280 /* Fold a call to builtin logb/ilogb. */
9282 static tree
9283 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9285 if (! validate_arg (arg, REAL_TYPE))
9286 return NULL_TREE;
9288 STRIP_NOPS (arg);
9290 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9292 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9294 switch (value->cl)
9296 case rvc_nan:
9297 case rvc_inf:
9298 /* If arg is Inf or NaN and we're logb, return it. */
9299 if (TREE_CODE (rettype) == REAL_TYPE)
9301 /* For logb(-Inf) we have to return +Inf. */
9302 if (real_isinf (value) && real_isneg (value))
9304 REAL_VALUE_TYPE tem;
9305 real_inf (&tem);
9306 return build_real (rettype, tem);
9308 return fold_convert_loc (loc, rettype, arg);
9310 /* Fall through... */
9311 case rvc_zero:
9312 /* Zero may set errno and/or raise an exception for logb, and
9313 for ilogb we don't know FP_ILOGB0. */
9314 return NULL_TREE;
9315 case rvc_normal:
9316 /* For normal numbers, proceed iff radix == 2. In GCC,
9317 normalized significands are in the range [0.5, 1.0). We
9318 want the exponent as if they were [1.0, 2.0) so get the
9319 exponent and subtract 1. */
9320 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9321 return fold_convert_loc (loc, rettype,
9322 build_int_cst (integer_type_node,
9323 REAL_EXP (value)-1));
9324 break;
9328 return NULL_TREE;
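/* Editor's sketch -- not part of builtins.c.  The radix-2 exponent
   convention used above: frexp-style significands live in [0.5, 1.0),
   so logb is the frexp exponent minus one.  Assumes IEEE double.  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  int e;
  frexp (12.5, &e);		/* 12.5 == 0.78125 * 2^4, so e == 4 */
  assert (logb (12.5) == e - 1);	/* exponent for a [1.0, 2.0) significand */
  assert (logb (-INFINITY) == INFINITY);	/* logb(-Inf) is +Inf */
  return 0;
}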
9331 /* Fold a call to builtin significand, if radix == 2. */
9333 static tree
9334 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9336 if (! validate_arg (arg, REAL_TYPE))
9337 return NULL_TREE;
9339 STRIP_NOPS (arg);
9341 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9343 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9345 switch (value->cl)
9347 case rvc_zero:
9348 case rvc_nan:
9349 case rvc_inf:
9350 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9351 return fold_convert_loc (loc, rettype, arg);
9352 case rvc_normal:
9353 /* For normal numbers, proceed iff radix == 2. */
9354 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9356 REAL_VALUE_TYPE result = *value;
9357 /* In GCC, normalized significands are in the range [0.5,
9358 1.0). We want them to be [1.0, 2.0) so set the
9359 exponent to 1. */
9360 SET_REAL_EXP (&result, 1);
9361 return build_real (rettype, result);
9363 break;
9367 return NULL_TREE;
9370 /* Fold a call to builtin frexp, we can assume the base is 2. */
9372 static tree
9373 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9375 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9376 return NULL_TREE;
9378 STRIP_NOPS (arg0);
9380 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9381 return NULL_TREE;
9383 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9385 /* Proceed if a valid pointer type was passed in. */
9386 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9388 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9389 tree frac, exp;
9391 switch (value->cl)
9393 case rvc_zero:
9394 /* For +-0, return (*exp = 0, +-0). */
9395 exp = integer_zero_node;
9396 frac = arg0;
9397 break;
9398 case rvc_nan:
9399 case rvc_inf:
9400 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9401 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9402 case rvc_normal:
9404 /* Since the frexp function always expects base 2, and in
9405 GCC normalized significands are already in the range
9406 [0.5, 1.0), we have exactly what frexp wants. */
9407 REAL_VALUE_TYPE frac_rvt = *value;
9408 SET_REAL_EXP (&frac_rvt, 0);
9409 frac = build_real (rettype, frac_rvt);
9410 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9412 break;
9413 default:
9414 gcc_unreachable ();
9417 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9418 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9419 TREE_SIDE_EFFECTS (arg1) = 1;
9420 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9423 return NULL_TREE;
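/* Editor's sketch -- not part of builtins.c.  The frexp cases folded
   above, as observable library behaviour (assumes IEEE double).  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  int e;
  /* Normal case: x == frac * 2^e with frac in [0.5, 1.0).  */
  assert (frexp (12.5, &e) == 0.78125 && e == 4);
  /* For +-Inf (and NaN), *e is unspecified and the input comes back.  */
  assert (isinf (frexp (INFINITY, &e)));
  return 0;
}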
9426 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9427 then we can assume the base is two. If it's false, then we have to
9428 check the mode of the TYPE parameter in certain cases. */
9430 static tree
9431 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9432 tree type, bool ldexp)
9434 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9436 STRIP_NOPS (arg0);
9437 STRIP_NOPS (arg1);
9439 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9440 if (real_zerop (arg0) || integer_zerop (arg1)
9441 || (TREE_CODE (arg0) == REAL_CST
9442 && !real_isfinite (&TREE_REAL_CST (arg0))))
9443 return omit_one_operand_loc (loc, type, arg0, arg1);
9445 /* If both arguments are constant, then try to evaluate it. */
9446 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9447 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9448 && tree_fits_shwi_p (arg1))
9450 /* Bound the maximum adjustment to twice the range of the
9451 mode's valid exponents. Use abs to ensure the range is
9452 positive as a sanity check. */
9453 const long max_exp_adj = 2 *
9454 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9455 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9457 /* Get the user-requested adjustment. */
9458 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9460 /* The requested adjustment must be inside this range. This
9461 is a preliminary cap to avoid things like overflow, we
9462 may still fail to compute the result for other reasons. */
9463 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9465 REAL_VALUE_TYPE initial_result;
9467 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9469 /* Ensure we didn't overflow. */
9470 if (! real_isinf (&initial_result))
9472 const REAL_VALUE_TYPE trunc_result
9473 = real_value_truncate (TYPE_MODE (type), initial_result);
9475 /* Only proceed if the target mode can hold the
9476 resulting value. */
9477 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9478 return build_real (type, trunc_result);
9484 return NULL_TREE;
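/* Editor's sketch -- not part of builtins.c.  The ldexp identities and
   the constant fold above, at the source level (assumes IEEE double,
   where 1.5 * 2^4 is exactly representable).  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  /* ldexp(x, 0) and ldexp(0, n) return the first operand unchanged.  */
  assert (ldexp (3.5, 0) == 3.5 && ldexp (0.0, 42) == 0.0);
  /* Constant folding evaluates in-range cases exactly.  */
  assert (ldexp (1.5, 4) == 24.0);
  return 0;
}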
9487 /* Fold a call to builtin modf. */
9489 static tree
9490 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9492 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9493 return NULL_TREE;
9495 STRIP_NOPS (arg0);
9497 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9498 return NULL_TREE;
9500 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9502 /* Proceed if a valid pointer type was passed in. */
9503 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9505 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9506 REAL_VALUE_TYPE trunc, frac;
9508 switch (value->cl)
9510 case rvc_nan:
9511 case rvc_zero:
9512 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9513 trunc = frac = *value;
9514 break;
9515 case rvc_inf:
9516 /* For +-Inf, return (*arg1 = arg0, +-0). */
9517 frac = dconst0;
9518 frac.sign = value->sign;
9519 trunc = *value;
9520 break;
9521 case rvc_normal:
9522 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9523 real_trunc (&trunc, VOIDmode, value);
9524 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9525 /* If the original number was negative and already
9526 integral, then the fractional part is -0.0. */
9527 if (value->sign && frac.cl == rvc_zero)
9528 frac.sign = value->sign;
9529 break;
9532 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9533 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9534 build_real (rettype, trunc));
9535 TREE_SIDE_EFFECTS (arg1) = 1;
9536 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9537 build_real (rettype, frac));
9540 return NULL_TREE;
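/* Editor's sketch -- not part of builtins.c.  The modf special cases
   folded above (assumes IEEE double with signed zeros).  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  double ip;
  /* For +-Inf: (*iptr = arg, result is +-0).  */
  assert (modf (INFINITY, &ip) == 0.0 && isinf (ip));
  /* A negative, already-integral input yields a fraction of -0.0.  */
  assert (signbit (modf (-2.0, &ip)) && ip == -2.0);
  return 0;
}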
9543 /* Given a location LOC, an interclass builtin function decl FNDECL
9544 and its single argument ARG, return a folded expression computing
9545 the same, or NULL_TREE if we either couldn't or didn't want to fold
9546 (the latter happens if there's an RTL instruction available). */
9548 static tree
9549 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9551 machine_mode mode;
9553 if (!validate_arg (arg, REAL_TYPE))
9554 return NULL_TREE;
9556 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9557 return NULL_TREE;
9559 mode = TYPE_MODE (TREE_TYPE (arg));
9561 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9563 /* If there is no optab, try generic code. */
9564 switch (DECL_FUNCTION_CODE (fndecl))
9566 tree result;
9568 CASE_FLT_FN (BUILT_IN_ISINF):
9570 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9571 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9572 tree type = TREE_TYPE (arg);
9573 REAL_VALUE_TYPE r;
9574 char buf[128];
9576 if (is_ibm_extended)
9578 /* NaN and Inf are encoded in the high-order double value
9579 only. The low-order value is not significant. */
9580 type = double_type_node;
9581 mode = DFmode;
9582 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9584 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9585 real_from_string (&r, buf);
9586 result = build_call_expr (isgr_fn, 2,
9587 fold_build1_loc (loc, ABS_EXPR, type, arg),
9588 build_real (type, r));
9589 return result;
9591 CASE_FLT_FN (BUILT_IN_FINITE):
9592 case BUILT_IN_ISFINITE:
9594 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9595 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9596 tree type = TREE_TYPE (arg);
9597 REAL_VALUE_TYPE r;
9598 char buf[128];
9600 if (is_ibm_extended)
9602 /* NaN and Inf are encoded in the high-order double value
9603 only. The low-order value is not significant. */
9604 type = double_type_node;
9605 mode = DFmode;
9606 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9608 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9609 real_from_string (&r, buf);
9610 result = build_call_expr (isle_fn, 2,
9611 fold_build1_loc (loc, ABS_EXPR, type, arg),
9612 build_real (type, r));
9613 /*result = fold_build2_loc (loc, UNGT_EXPR,
9614 TREE_TYPE (TREE_TYPE (fndecl)),
9615 fold_build1_loc (loc, ABS_EXPR, type, arg),
9616 build_real (type, r));
9617 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9618 TREE_TYPE (TREE_TYPE (fndecl)),
9619 result);*/
9620 return result;
9622 case BUILT_IN_ISNORMAL:
9624 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9625 islessequal(fabs(x),DBL_MAX). */
9626 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9627 tree type = TREE_TYPE (arg);
9628 tree orig_arg, max_exp, min_exp;
9629 machine_mode orig_mode = mode;
9630 REAL_VALUE_TYPE rmax, rmin;
9631 char buf[128];
9633 orig_arg = arg = builtin_save_expr (arg);
9634 if (is_ibm_extended)
9636 /* Use double to test the normal range of IBM extended
9637 precision. Emin for IBM extended precision is
9638 different to emin for IEEE double, being 53 higher
9639 since the low double exponent is at least 53 lower
9640 than the high double exponent. */
9641 type = double_type_node;
9642 mode = DFmode;
9643 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9645 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9647 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9648 real_from_string (&rmax, buf);
9649 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9650 real_from_string (&rmin, buf);
9651 max_exp = build_real (type, rmax);
9652 min_exp = build_real (type, rmin);
9654 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9655 if (is_ibm_extended)
9657 /* Testing the high end of the range is done just using
9658 the high double, using the same test as isfinite().
9659 For the subnormal end of the range we first test the
9660 high double, then if its magnitude is equal to the
9661 limit of 0x1p-969, we test whether the low double is
9662 non-zero and opposite sign to the high double. */
9663 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9664 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9665 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9666 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9667 arg, min_exp);
9668 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9669 complex_double_type_node, orig_arg);
9670 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9671 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9672 tree zero = build_real (type, dconst0);
9673 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9674 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9675 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9676 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9677 fold_build3 (COND_EXPR,
9678 integer_type_node,
9679 hilt, logt, lolt));
9680 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9681 eq_min, ok_lo);
9682 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9683 gt_min, eq_min);
9685 else
9687 tree const isge_fn
9688 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9689 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9691 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9692 max_exp, min_exp);
9693 return result;
9695 default:
9696 break;
9699 return NULL_TREE;
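/* Editor's sketch -- not part of builtins.c.  The generic expansions
   above, written directly in C99 (assumes IEEE double; DBL_MAX plays
   the role of the get_max_float() constant).  */
#include <assert.h>
#include <float.h>
#include <math.h>
static int my_isinf (double x) { return isgreater (fabs (x), DBL_MAX); }
static int my_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }
int
main (void)
{
  double probes[] = { 0.0, -4.2, DBL_MAX, INFINITY, -INFINITY, NAN };
  for (int i = 0; i < 6; i++)
    {
      assert (my_isinf (probes[i]) == !!isinf (probes[i]));
      assert (my_isfinite (probes[i]) == !!isfinite (probes[i]));
    }
  return 0;
}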
9702 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9703 ARG is the argument for the call. */
9705 static tree
9706 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9708 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9709 REAL_VALUE_TYPE r;
9711 if (!validate_arg (arg, REAL_TYPE))
9712 return NULL_TREE;
9714 switch (builtin_index)
9716 case BUILT_IN_ISINF:
9717 if (!HONOR_INFINITIES (arg))
9718 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9720 if (TREE_CODE (arg) == REAL_CST)
9722 r = TREE_REAL_CST (arg);
9723 if (real_isinf (&r))
9724 return real_compare (GT_EXPR, &r, &dconst0)
9725 ? integer_one_node : integer_minus_one_node;
9726 else
9727 return integer_zero_node;
9730 return NULL_TREE;
9732 case BUILT_IN_ISINF_SIGN:
9734 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9735 /* In a boolean context, GCC will fold the inner COND_EXPR to
9736 1. So e.g. "if (isinf_sign(x))" would be folded to just
9737 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9738 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9739 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9740 tree tmp = NULL_TREE;
9742 arg = builtin_save_expr (arg);
9744 if (signbit_fn && isinf_fn)
9746 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9747 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9749 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9750 signbit_call, integer_zero_node);
9751 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9752 isinf_call, integer_zero_node);
9754 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9755 integer_minus_one_node, integer_one_node);
9756 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9757 isinf_call, tmp,
9758 integer_zero_node);
9761 return tmp;
9764 case BUILT_IN_ISFINITE:
9765 if (!HONOR_NANS (arg)
9766 && !HONOR_INFINITIES (arg))
9767 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9769 if (TREE_CODE (arg) == REAL_CST)
9771 r = TREE_REAL_CST (arg);
9772 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9775 return NULL_TREE;
9777 case BUILT_IN_ISNAN:
9778 if (!HONOR_NANS (arg))
9779 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9781 if (TREE_CODE (arg) == REAL_CST)
9783 r = TREE_REAL_CST (arg);
9784 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9788 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9789 if (is_ibm_extended)
9791 /* NaN and Inf are encoded in the high-order double value
9792 only. The low-order value is not significant. */
9793 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9796 arg = builtin_save_expr (arg);
9797 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9799 default:
9800 gcc_unreachable ();
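/* Editor's sketch -- not part of builtins.c.  The UNORDERED_EXPR fold
   above relies on a NaN being the only value unordered with itself,
   so isnan(x) is equivalent to isunordered(x, x).  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  double qnan = nan ("");
  assert (isunordered (qnan, qnan) && !isunordered (1.0, 1.0));
  return 0;
}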
9804 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9805 This builtin will generate code to return the appropriate floating
9806 point classification depending on the value of the floating point
9807 number passed in. The possible return values must be supplied as
9808 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9809 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9810 one floating point argument which is "type generic". */
9812 static tree
9813 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9815 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9816 arg, type, res, tmp;
9817 machine_mode mode;
9818 REAL_VALUE_TYPE r;
9819 char buf[128];
9821 /* Verify the required arguments in the original call. */
9822 if (nargs != 6
9823 || !validate_arg (args[0], INTEGER_TYPE)
9824 || !validate_arg (args[1], INTEGER_TYPE)
9825 || !validate_arg (args[2], INTEGER_TYPE)
9826 || !validate_arg (args[3], INTEGER_TYPE)
9827 || !validate_arg (args[4], INTEGER_TYPE)
9828 || !validate_arg (args[5], REAL_TYPE))
9829 return NULL_TREE;
9831 fp_nan = args[0];
9832 fp_infinite = args[1];
9833 fp_normal = args[2];
9834 fp_subnormal = args[3];
9835 fp_zero = args[4];
9836 arg = args[5];
9837 type = TREE_TYPE (arg);
9838 mode = TYPE_MODE (type);
9839 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9841 /* fpclassify(x) ->
9842 isnan(x) ? FP_NAN :
9843 (fabs(x) == Inf ? FP_INFINITE :
9844 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9845 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9847 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9848 build_real (type, dconst0));
9849 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9850 tmp, fp_zero, fp_subnormal);
9852 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9853 real_from_string (&r, buf);
9854 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9855 arg, build_real (type, r));
9856 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9858 if (HONOR_INFINITIES (mode))
9860 real_inf (&r);
9861 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9862 build_real (type, r));
9863 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9864 fp_infinite, res);
9867 if (HONOR_NANS (mode))
9869 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9870 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9873 return res;
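/* Editor's sketch -- not part of builtins.c.  The nested conditional
   built above, spelled out for IEEE double, where DBL_MIN equals the
   0x1p(emin-1) smallest-normal constant used in the code.  */
#include <assert.h>
#include <float.h>
#include <math.h>
static int
my_fpclassify (double x)
{
  double ax = fabs (x);
  return isunordered (x, x) ? FP_NAN
	 : ax == INFINITY ? FP_INFINITE
	 : ax >= DBL_MIN ? FP_NORMAL
	 : ax == 0.0 ? FP_ZERO : FP_SUBNORMAL;
}
int
main (void)
{
  assert (my_fpclassify (NAN) == FP_NAN);
  assert (my_fpclassify (-INFINITY) == FP_INFINITE);
  assert (my_fpclassify (1.0) == FP_NORMAL);
  assert (my_fpclassify (0.0) == FP_ZERO);
  assert (my_fpclassify (DBL_MIN / 2) == FP_SUBNORMAL);
  return 0;
}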
9876 /* Fold a call to an unordered comparison function such as
9877 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9878 being called and ARG0 and ARG1 are the arguments for the call.
9879 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9880 the opposite of the desired result. UNORDERED_CODE is used
9881 for modes that can hold NaNs and ORDERED_CODE is used for
9882 the rest. */
9884 static tree
9885 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9886 enum tree_code unordered_code,
9887 enum tree_code ordered_code)
9889 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9890 enum tree_code code;
9891 tree type0, type1;
9892 enum tree_code code0, code1;
9893 tree cmp_type = NULL_TREE;
9895 type0 = TREE_TYPE (arg0);
9896 type1 = TREE_TYPE (arg1);
9898 code0 = TREE_CODE (type0);
9899 code1 = TREE_CODE (type1);
9901 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9902 /* Choose the wider of two real types. */
9903 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9904 ? type0 : type1;
9905 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9906 cmp_type = type0;
9907 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9908 cmp_type = type1;
9910 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9911 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9913 if (unordered_code == UNORDERED_EXPR)
9915 if (!HONOR_NANS (arg0))
9916 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9917 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9920 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9921 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9922 fold_build2_loc (loc, code, type, arg0, arg1));
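/* Editor's sketch -- not part of builtins.c.  The unordered comparisons
   folded above are quiet: isgreater(x, y) behaves like x > y but is
   simply false (with no FE_INVALID) when either operand is a NaN.  */
#include <assert.h>
#include <math.h>
int
main (void)
{
  double qnan = nan ("");
  assert (!isgreater (qnan, 1.0) && isgreater (2.0, 1.0));
  assert (isunordered (qnan, 1.0) && !isunordered (2.0, 1.0));
  return 0;
}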
9925 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9926 arithmetics if it can never overflow, or into internal functions that
9927 return both result of arithmetics and overflowed boolean flag in
9928 a complex integer result, or some other check for overflow. */
9930 static tree
9931 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9932 tree arg0, tree arg1, tree arg2)
9934 enum internal_fn ifn = IFN_LAST;
9935 tree type = TREE_TYPE (TREE_TYPE (arg2));
9936 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9937 switch (fcode)
9939 case BUILT_IN_ADD_OVERFLOW:
9940 case BUILT_IN_SADD_OVERFLOW:
9941 case BUILT_IN_SADDL_OVERFLOW:
9942 case BUILT_IN_SADDLL_OVERFLOW:
9943 case BUILT_IN_UADD_OVERFLOW:
9944 case BUILT_IN_UADDL_OVERFLOW:
9945 case BUILT_IN_UADDLL_OVERFLOW:
9946 ifn = IFN_ADD_OVERFLOW;
9947 break;
9948 case BUILT_IN_SUB_OVERFLOW:
9949 case BUILT_IN_SSUB_OVERFLOW:
9950 case BUILT_IN_SSUBL_OVERFLOW:
9951 case BUILT_IN_SSUBLL_OVERFLOW:
9952 case BUILT_IN_USUB_OVERFLOW:
9953 case BUILT_IN_USUBL_OVERFLOW:
9954 case BUILT_IN_USUBLL_OVERFLOW:
9955 ifn = IFN_SUB_OVERFLOW;
9956 break;
9957 case BUILT_IN_MUL_OVERFLOW:
9958 case BUILT_IN_SMUL_OVERFLOW:
9959 case BUILT_IN_SMULL_OVERFLOW:
9960 case BUILT_IN_SMULLL_OVERFLOW:
9961 case BUILT_IN_UMUL_OVERFLOW:
9962 case BUILT_IN_UMULL_OVERFLOW:
9963 case BUILT_IN_UMULLL_OVERFLOW:
9964 ifn = IFN_MUL_OVERFLOW;
9965 break;
9966 default:
9967 gcc_unreachable ();
9969 tree ctype = build_complex_type (type);
9970 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9971 2, arg0, arg1);
9972 tree tgt = save_expr (call);
9973 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9974 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9975 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9976 tree store
9977 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9978 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
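/* Editor's sketch -- not part of builtins.c.  The user-visible contract
   of the fold above: the internal function's REALPART is the truncated
   arithmetic result stored through the pointer, and its IMAGPART is the
   boolean overflow flag returned by the call.  */
#include <assert.h>
#include <limits.h>
int
main (void)
{
  int res;
  assert (__builtin_add_overflow (INT_MAX, 1, &res));
  assert (res == INT_MIN);	/* wrapped two's-complement value */
  assert (!__builtin_mul_overflow (3, 4, &res) && res == 12);
  return 0;
}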
9981 /* Fold a call to built-in function FNDECL with 0 arguments.
9982 This function returns NULL_TREE if no simplification was possible. */
9984 static tree
9985 fold_builtin_0 (location_t loc, tree fndecl)
9987 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9988 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9989 switch (fcode)
9991 CASE_FLT_FN (BUILT_IN_INF):
9992 case BUILT_IN_INFD32:
9993 case BUILT_IN_INFD64:
9994 case BUILT_IN_INFD128:
9995 return fold_builtin_inf (loc, type, true);
9997 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9998 return fold_builtin_inf (loc, type, false);
10000 case BUILT_IN_CLASSIFY_TYPE:
10001 return fold_builtin_classify_type (NULL_TREE);
10003 default:
10004 break;
10006 return NULL_TREE;
10009 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10010 This function returns NULL_TREE if no simplification was possible. */
10012 static tree
10013 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10015 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10016 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10017 switch (fcode)
10019 case BUILT_IN_CONSTANT_P:
10021 tree val = fold_builtin_constant_p (arg0);
10023 /* Gimplification will pull the CALL_EXPR for the builtin out of
10024 an if condition. When not optimizing, we'll not CSE it back.
10025 To avoid link-error regressions, return false now. */
10026 if (!val && !optimize)
10027 val = integer_zero_node;
10029 return val;
10032 case BUILT_IN_CLASSIFY_TYPE:
10033 return fold_builtin_classify_type (arg0);
10035 case BUILT_IN_STRLEN:
10036 return fold_builtin_strlen (loc, type, arg0);
10038 CASE_FLT_FN (BUILT_IN_FABS):
10039 case BUILT_IN_FABSD32:
10040 case BUILT_IN_FABSD64:
10041 case BUILT_IN_FABSD128:
10042 return fold_builtin_fabs (loc, arg0, type);
10044 case BUILT_IN_ABS:
10045 case BUILT_IN_LABS:
10046 case BUILT_IN_LLABS:
10047 case BUILT_IN_IMAXABS:
10048 return fold_builtin_abs (loc, arg0, type);
10050 CASE_FLT_FN (BUILT_IN_CONJ):
10051 if (validate_arg (arg0, COMPLEX_TYPE)
10052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10053 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10054 break;
10056 CASE_FLT_FN (BUILT_IN_CREAL):
10057 if (validate_arg (arg0, COMPLEX_TYPE)
10058 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10059 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10060 break;
10062 CASE_FLT_FN (BUILT_IN_CIMAG):
10063 if (validate_arg (arg0, COMPLEX_TYPE)
10064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10065 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10066 break;
10068 CASE_FLT_FN (BUILT_IN_CCOS):
10069 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10071 CASE_FLT_FN (BUILT_IN_CCOSH):
10072 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10074 CASE_FLT_FN (BUILT_IN_CPROJ):
10075 return fold_builtin_cproj (loc, arg0, type);
10077 CASE_FLT_FN (BUILT_IN_CSIN):
10078 if (validate_arg (arg0, COMPLEX_TYPE)
10079 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10080 return do_mpc_arg1 (arg0, type, mpc_sin);
10081 break;
10083 CASE_FLT_FN (BUILT_IN_CSINH):
10084 if (validate_arg (arg0, COMPLEX_TYPE)
10085 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10086 return do_mpc_arg1 (arg0, type, mpc_sinh);
10087 break;
10089 CASE_FLT_FN (BUILT_IN_CTAN):
10090 if (validate_arg (arg0, COMPLEX_TYPE)
10091 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10092 return do_mpc_arg1 (arg0, type, mpc_tan);
10093 break;
10095 CASE_FLT_FN (BUILT_IN_CTANH):
10096 if (validate_arg (arg0, COMPLEX_TYPE)
10097 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10098 return do_mpc_arg1 (arg0, type, mpc_tanh);
10099 break;
10101 CASE_FLT_FN (BUILT_IN_CLOG):
10102 if (validate_arg (arg0, COMPLEX_TYPE)
10103 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10104 return do_mpc_arg1 (arg0, type, mpc_log);
10105 break;
10107 CASE_FLT_FN (BUILT_IN_CSQRT):
10108 if (validate_arg (arg0, COMPLEX_TYPE)
10109 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10110 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10111 break;
10113 CASE_FLT_FN (BUILT_IN_CASIN):
10114 if (validate_arg (arg0, COMPLEX_TYPE)
10115 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10116 return do_mpc_arg1 (arg0, type, mpc_asin);
10117 break;
10119 CASE_FLT_FN (BUILT_IN_CACOS):
10120 if (validate_arg (arg0, COMPLEX_TYPE)
10121 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10122 return do_mpc_arg1 (arg0, type, mpc_acos);
10123 break;
10125 CASE_FLT_FN (BUILT_IN_CATAN):
10126 if (validate_arg (arg0, COMPLEX_TYPE)
10127 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10128 return do_mpc_arg1 (arg0, type, mpc_atan);
10129 break;
10131 CASE_FLT_FN (BUILT_IN_CASINH):
10132 if (validate_arg (arg0, COMPLEX_TYPE)
10133 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10134 return do_mpc_arg1 (arg0, type, mpc_asinh);
10135 break;
10137 CASE_FLT_FN (BUILT_IN_CACOSH):
10138 if (validate_arg (arg0, COMPLEX_TYPE)
10139 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10140 return do_mpc_arg1 (arg0, type, mpc_acosh);
10141 break;
10143 CASE_FLT_FN (BUILT_IN_CATANH):
10144 if (validate_arg (arg0, COMPLEX_TYPE)
10145 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10146 return do_mpc_arg1 (arg0, type, mpc_atanh);
10147 break;
10149 CASE_FLT_FN (BUILT_IN_CABS):
10150 return fold_builtin_cabs (loc, arg0, type, fndecl);
10152 CASE_FLT_FN (BUILT_IN_CARG):
10153 return fold_builtin_carg (loc, arg0, type);
10155 CASE_FLT_FN (BUILT_IN_SQRT):
10156 return fold_builtin_sqrt (loc, arg0, type);
10158 CASE_FLT_FN (BUILT_IN_CBRT):
10159 return fold_builtin_cbrt (loc, arg0, type);
10161 CASE_FLT_FN (BUILT_IN_ASIN):
10162 if (validate_arg (arg0, REAL_TYPE))
10163 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10164 &dconstm1, &dconst1, true);
10165 break;
10167 CASE_FLT_FN (BUILT_IN_ACOS):
10168 if (validate_arg (arg0, REAL_TYPE))
10169 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10170 &dconstm1, &dconst1, true);
10171 break;
10173 CASE_FLT_FN (BUILT_IN_ATAN):
10174 if (validate_arg (arg0, REAL_TYPE))
10175 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10176 break;
10178 CASE_FLT_FN (BUILT_IN_ASINH):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10181 break;
10183 CASE_FLT_FN (BUILT_IN_ACOSH):
10184 if (validate_arg (arg0, REAL_TYPE))
10185 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10186 &dconst1, NULL, true);
10187 break;
10189 CASE_FLT_FN (BUILT_IN_ATANH):
10190 if (validate_arg (arg0, REAL_TYPE))
10191 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10192 &dconstm1, &dconst1, false);
10193 break;
10195 CASE_FLT_FN (BUILT_IN_SIN):
10196 if (validate_arg (arg0, REAL_TYPE))
10197 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10198 break;
10200 CASE_FLT_FN (BUILT_IN_COS):
10201 return fold_builtin_cos (loc, arg0, type, fndecl);
10203 CASE_FLT_FN (BUILT_IN_TAN):
10204 return fold_builtin_tan (arg0, type);
10206 CASE_FLT_FN (BUILT_IN_CEXP):
10207 return fold_builtin_cexp (loc, arg0, type);
10209 CASE_FLT_FN (BUILT_IN_CEXPI):
10210 if (validate_arg (arg0, REAL_TYPE))
10211 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10212 break;
10214 CASE_FLT_FN (BUILT_IN_SINH):
10215 if (validate_arg (arg0, REAL_TYPE))
10216 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10217 break;
10219 CASE_FLT_FN (BUILT_IN_COSH):
10220 return fold_builtin_cosh (loc, arg0, type, fndecl);
10222 CASE_FLT_FN (BUILT_IN_TANH):
10223 if (validate_arg (arg0, REAL_TYPE))
10224 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10225 break;
10227 CASE_FLT_FN (BUILT_IN_ERF):
10228 if (validate_arg (arg0, REAL_TYPE))
10229 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10230 break;
10232 CASE_FLT_FN (BUILT_IN_ERFC):
10233 if (validate_arg (arg0, REAL_TYPE))
10234 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10235 break;
10237 CASE_FLT_FN (BUILT_IN_TGAMMA):
10238 if (validate_arg (arg0, REAL_TYPE))
10239 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10240 break;
10242 CASE_FLT_FN (BUILT_IN_EXP):
10243 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10245 CASE_FLT_FN (BUILT_IN_EXP2):
10246 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10248 CASE_FLT_FN (BUILT_IN_EXP10):
10249 CASE_FLT_FN (BUILT_IN_POW10):
10250 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10252 CASE_FLT_FN (BUILT_IN_EXPM1):
10253 if (validate_arg (arg0, REAL_TYPE))
10254 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10255 break;
10257 CASE_FLT_FN (BUILT_IN_LOG):
10258 if (validate_arg (arg0, REAL_TYPE))
10259 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10260 break;
10262 CASE_FLT_FN (BUILT_IN_LOG2):
10263 if (validate_arg (arg0, REAL_TYPE))
10264 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10265 break;
10267 CASE_FLT_FN (BUILT_IN_LOG10):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10270 break;
10272 CASE_FLT_FN (BUILT_IN_LOG1P):
10273 if (validate_arg (arg0, REAL_TYPE))
10274 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10275 &dconstm1, NULL, false);
10276 break;
10278 CASE_FLT_FN (BUILT_IN_J0):
10279 if (validate_arg (arg0, REAL_TYPE))
10280 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10281 NULL, NULL, 0);
10282 break;
10284 CASE_FLT_FN (BUILT_IN_J1):
10285 if (validate_arg (arg0, REAL_TYPE))
10286 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10287 NULL, NULL, 0);
10288 break;
10290 CASE_FLT_FN (BUILT_IN_Y0):
10291 if (validate_arg (arg0, REAL_TYPE))
10292 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10293 &dconst0, NULL, false);
10294 break;
10296 CASE_FLT_FN (BUILT_IN_Y1):
10297 if (validate_arg (arg0, REAL_TYPE))
10298 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10299 &dconst0, NULL, false);
10300 break;
10302 CASE_FLT_FN (BUILT_IN_NAN):
10303 case BUILT_IN_NAND32:
10304 case BUILT_IN_NAND64:
10305 case BUILT_IN_NAND128:
10306 return fold_builtin_nan (arg0, type, true);
10308 CASE_FLT_FN (BUILT_IN_NANS):
10309 return fold_builtin_nan (arg0, type, false);
10311 CASE_FLT_FN (BUILT_IN_FLOOR):
10312 return fold_builtin_floor (loc, fndecl, arg0);
10314 CASE_FLT_FN (BUILT_IN_CEIL):
10315 return fold_builtin_ceil (loc, fndecl, arg0);
10317 CASE_FLT_FN (BUILT_IN_TRUNC):
10318 return fold_builtin_trunc (loc, fndecl, arg0);
10320 CASE_FLT_FN (BUILT_IN_ROUND):
10321 return fold_builtin_round (loc, fndecl, arg0);
10323 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10324 CASE_FLT_FN (BUILT_IN_RINT):
10325 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10327 CASE_FLT_FN (BUILT_IN_ICEIL):
10328 CASE_FLT_FN (BUILT_IN_LCEIL):
10329 CASE_FLT_FN (BUILT_IN_LLCEIL):
10330 CASE_FLT_FN (BUILT_IN_LFLOOR):
10331 CASE_FLT_FN (BUILT_IN_IFLOOR):
10332 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10333 CASE_FLT_FN (BUILT_IN_IROUND):
10334 CASE_FLT_FN (BUILT_IN_LROUND):
10335 CASE_FLT_FN (BUILT_IN_LLROUND):
10336 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10338 CASE_FLT_FN (BUILT_IN_IRINT):
10339 CASE_FLT_FN (BUILT_IN_LRINT):
10340 CASE_FLT_FN (BUILT_IN_LLRINT):
10341 return fold_fixed_mathfn (loc, fndecl, arg0);
10343 case BUILT_IN_BSWAP16:
10344 case BUILT_IN_BSWAP32:
10345 case BUILT_IN_BSWAP64:
10346 return fold_builtin_bswap (fndecl, arg0);
10348 CASE_INT_FN (BUILT_IN_FFS):
10349 CASE_INT_FN (BUILT_IN_CLZ):
10350 CASE_INT_FN (BUILT_IN_CTZ):
10351 CASE_INT_FN (BUILT_IN_CLRSB):
10352 CASE_INT_FN (BUILT_IN_POPCOUNT):
10353 CASE_INT_FN (BUILT_IN_PARITY):
10354 return fold_builtin_bitop (fndecl, arg0);
10356 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10357 return fold_builtin_signbit (loc, arg0, type);
10359 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10360 return fold_builtin_significand (loc, arg0, type);
10362 CASE_FLT_FN (BUILT_IN_ILOGB):
10363 CASE_FLT_FN (BUILT_IN_LOGB):
10364 return fold_builtin_logb (loc, arg0, type);
10366 case BUILT_IN_ISASCII:
10367 return fold_builtin_isascii (loc, arg0);
10369 case BUILT_IN_TOASCII:
10370 return fold_builtin_toascii (loc, arg0);
10372 case BUILT_IN_ISDIGIT:
10373 return fold_builtin_isdigit (loc, arg0);
10375 CASE_FLT_FN (BUILT_IN_FINITE):
10376 case BUILT_IN_FINITED32:
10377 case BUILT_IN_FINITED64:
10378 case BUILT_IN_FINITED128:
10379 case BUILT_IN_ISFINITE:
10381 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10382 if (ret)
10383 return ret;
10384 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10387 CASE_FLT_FN (BUILT_IN_ISINF):
10388 case BUILT_IN_ISINFD32:
10389 case BUILT_IN_ISINFD64:
10390 case BUILT_IN_ISINFD128:
10392 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10393 if (ret)
10394 return ret;
10395 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10398 case BUILT_IN_ISNORMAL:
10399 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10401 case BUILT_IN_ISINF_SIGN:
10402 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10404 CASE_FLT_FN (BUILT_IN_ISNAN):
10405 case BUILT_IN_ISNAND32:
10406 case BUILT_IN_ISNAND64:
10407 case BUILT_IN_ISNAND128:
10408 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10410 case BUILT_IN_FREE:
10411 if (integer_zerop (arg0))
10412 return build_empty_stmt (loc);
10413 break;
10415 default:
10416 break;
10419 return NULL_TREE;
10423 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10424 This function returns NULL_TREE if no simplification was possible. */
10426 static tree
10427 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10429 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10430 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10432 switch (fcode)
10434 CASE_FLT_FN (BUILT_IN_JN):
10435 if (validate_arg (arg0, INTEGER_TYPE)
10436 && validate_arg (arg1, REAL_TYPE))
10437 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10438 break;
10440 CASE_FLT_FN (BUILT_IN_YN):
10441 if (validate_arg (arg0, INTEGER_TYPE)
10442 && validate_arg (arg1, REAL_TYPE))
10443 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10444 &dconst0, false);
10445 break;
10447 CASE_FLT_FN (BUILT_IN_DREM):
10448 CASE_FLT_FN (BUILT_IN_REMAINDER):
10449 if (validate_arg (arg0, REAL_TYPE)
10450 && validate_arg (arg1, REAL_TYPE))
10451 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10452 break;
10454 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10455 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10456 if (validate_arg (arg0, REAL_TYPE)
10457 && validate_arg (arg1, POINTER_TYPE))
10458 return do_mpfr_lgamma_r (arg0, arg1, type);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_ATAN2):
10462 if (validate_arg (arg0, REAL_TYPE)
10463 && validate_arg (arg1, REAL_TYPE))
10464 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10465 break;
10467 CASE_FLT_FN (BUILT_IN_FDIM):
10468 if (validate_arg (arg0, REAL_TYPE)
10469 && validate_arg (arg1, REAL_TYPE))
10470 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10471 break;
10473 CASE_FLT_FN (BUILT_IN_HYPOT):
10474 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10476 CASE_FLT_FN (BUILT_IN_CPOW):
10477 if (validate_arg (arg0, COMPLEX_TYPE)
10478 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10479 && validate_arg (arg1, COMPLEX_TYPE)
10480 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10481 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10482 break;
10484 CASE_FLT_FN (BUILT_IN_LDEXP):
10485 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10486 CASE_FLT_FN (BUILT_IN_SCALBN):
10487 CASE_FLT_FN (BUILT_IN_SCALBLN):
10488 return fold_builtin_load_exponent (loc, arg0, arg1,
10489 type, /*ldexp=*/false);
10491 CASE_FLT_FN (BUILT_IN_FREXP):
10492 return fold_builtin_frexp (loc, arg0, arg1, type);
10494 CASE_FLT_FN (BUILT_IN_MODF):
10495 return fold_builtin_modf (loc, arg0, arg1, type);
10497 case BUILT_IN_STRSTR:
10498 return fold_builtin_strstr (loc, arg0, arg1, type);
10500 case BUILT_IN_STRSPN:
10501 return fold_builtin_strspn (loc, arg0, arg1);
10503 case BUILT_IN_STRCSPN:
10504 return fold_builtin_strcspn (loc, arg0, arg1);
10506 case BUILT_IN_STRCHR:
10507 case BUILT_IN_INDEX:
10508 return fold_builtin_strchr (loc, arg0, arg1, type);
10510 case BUILT_IN_STRRCHR:
10511 case BUILT_IN_RINDEX:
10512 return fold_builtin_strrchr (loc, arg0, arg1, type);
10514 case BUILT_IN_STRCMP:
10515 return fold_builtin_strcmp (loc, arg0, arg1);
10517 case BUILT_IN_STRPBRK:
10518 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10520 case BUILT_IN_EXPECT:
10521 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10523 CASE_FLT_FN (BUILT_IN_POW):
10524 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10526 CASE_FLT_FN (BUILT_IN_POWI):
10527 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10529 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10530 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10532 CASE_FLT_FN (BUILT_IN_FMIN):
10533 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10535 CASE_FLT_FN (BUILT_IN_FMAX):
10536 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10538 case BUILT_IN_ISGREATER:
10539 return fold_builtin_unordered_cmp (loc, fndecl,
10540 arg0, arg1, UNLE_EXPR, LE_EXPR);
10541 case BUILT_IN_ISGREATEREQUAL:
10542 return fold_builtin_unordered_cmp (loc, fndecl,
10543 arg0, arg1, UNLT_EXPR, LT_EXPR);
10544 case BUILT_IN_ISLESS:
10545 return fold_builtin_unordered_cmp (loc, fndecl,
10546 arg0, arg1, UNGE_EXPR, GE_EXPR);
10547 case BUILT_IN_ISLESSEQUAL:
10548 return fold_builtin_unordered_cmp (loc, fndecl,
10549 arg0, arg1, UNGT_EXPR, GT_EXPR);
10550 case BUILT_IN_ISLESSGREATER:
10551 return fold_builtin_unordered_cmp (loc, fndecl,
10552 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10553 case BUILT_IN_ISUNORDERED:
10554 return fold_builtin_unordered_cmp (loc, fndecl,
10555 arg0, arg1, UNORDERED_EXPR,
10556 NOP_EXPR);
10558 /* We do the folding for va_start in the expander. */
10559 case BUILT_IN_VA_START:
10560 break;
10562 case BUILT_IN_OBJECT_SIZE:
10563 return fold_builtin_object_size (arg0, arg1);
10565 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10566 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10568 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10569 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10571 default:
10572 break;
10574 return NULL_TREE;
10577 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10578 and ARG2.
10579 This function returns NULL_TREE if no simplification was possible. */
10581 static tree
10582 fold_builtin_3 (location_t loc, tree fndecl,
10583 tree arg0, tree arg1, tree arg2)
10585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10586 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10587 switch (fcode)
10590 CASE_FLT_FN (BUILT_IN_SINCOS):
10591 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10593 CASE_FLT_FN (BUILT_IN_FMA):
10594 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10595 break;
10597 CASE_FLT_FN (BUILT_IN_REMQUO):
10598 if (validate_arg (arg0, REAL_TYPE)
10599 && validate_arg (arg1, REAL_TYPE)
10600 && validate_arg (arg2, POINTER_TYPE))
10601 return do_mpfr_remquo (arg0, arg1, arg2);
10602 break;
10604 case BUILT_IN_STRNCMP:
10605 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10607 case BUILT_IN_MEMCHR:
10608 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10610 case BUILT_IN_BCMP:
10611 case BUILT_IN_MEMCMP:
10612 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10614 case BUILT_IN_EXPECT:
10615 return fold_builtin_expect (loc, arg0, arg1, arg2);
10617 case BUILT_IN_ADD_OVERFLOW:
10618 case BUILT_IN_SUB_OVERFLOW:
10619 case BUILT_IN_MUL_OVERFLOW:
10620 case BUILT_IN_SADD_OVERFLOW:
10621 case BUILT_IN_SADDL_OVERFLOW:
10622 case BUILT_IN_SADDLL_OVERFLOW:
10623 case BUILT_IN_SSUB_OVERFLOW:
10624 case BUILT_IN_SSUBL_OVERFLOW:
10625 case BUILT_IN_SSUBLL_OVERFLOW:
10626 case BUILT_IN_SMUL_OVERFLOW:
10627 case BUILT_IN_SMULL_OVERFLOW:
10628 case BUILT_IN_SMULLL_OVERFLOW:
10629 case BUILT_IN_UADD_OVERFLOW:
10630 case BUILT_IN_UADDL_OVERFLOW:
10631 case BUILT_IN_UADDLL_OVERFLOW:
10632 case BUILT_IN_USUB_OVERFLOW:
10633 case BUILT_IN_USUBL_OVERFLOW:
10634 case BUILT_IN_USUBLL_OVERFLOW:
10635 case BUILT_IN_UMUL_OVERFLOW:
10636 case BUILT_IN_UMULL_OVERFLOW:
10637 case BUILT_IN_UMULLL_OVERFLOW:
10638 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10640 default:
10641 break;
10643 return NULL_TREE;
10646 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10647 arguments. IGNORE is true if the result of the
10648 function call is ignored. This function returns NULL_TREE if no
10649 simplification was possible. */
10651 tree
10652 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10654 tree ret = NULL_TREE;
10656 switch (nargs)
10658 case 0:
10659 ret = fold_builtin_0 (loc, fndecl);
10660 break;
10661 case 1:
10662 ret = fold_builtin_1 (loc, fndecl, args[0]);
10663 break;
10664 case 2:
10665 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10666 break;
10667 case 3:
10668 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10669 break;
10670 default:
10671 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10672 break;
10674 if (ret)
10676 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10677 SET_EXPR_LOCATION (ret, loc);
10678 TREE_NO_WARNING (ret) = 1;
10679 return ret;
10681 return NULL_TREE;
10684 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10685 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10686 of arguments in ARGS to be omitted. OLDNARGS is the number of
10687 elements in ARGS. */
10689 static tree
10690 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10691 int skip, tree fndecl, int n, va_list newargs)
10693 int nargs = oldnargs - skip + n;
10694 tree *buffer;
10696 if (n > 0)
10698 int i, j;
10700 buffer = XALLOCAVEC (tree, nargs);
10701 for (i = 0; i < n; i++)
10702 buffer[i] = va_arg (newargs, tree);
10703 for (j = skip; j < oldnargs; j++, i++)
10704 buffer[i] = args[j];
10706 else
10707 buffer = args + skip;
10709 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10712 /* Return true if FNDECL shouldn't be folded right now.
10713 If a built-in function has an inline attribute always_inline
10714 wrapper, defer folding it after always_inline functions have
10715 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10716 might not be performed. */
10718 bool
10719 avoid_folding_inline_builtin (tree fndecl)
10721 return (DECL_DECLARED_INLINE_P (fndecl)
10722 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10723 && cfun
10724 && !cfun->always_inline_functions_inlined
10725 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10728 /* A wrapper function for builtin folding that prevents warnings for
10729 "statement without effect" and the like, caused by removing the
10730 call node earlier than the warning is generated. */
10732 tree
10733 fold_call_expr (location_t loc, tree exp, bool ignore)
10735 tree ret = NULL_TREE;
10736 tree fndecl = get_callee_fndecl (exp);
10737 if (fndecl
10738 && TREE_CODE (fndecl) == FUNCTION_DECL
10739 && DECL_BUILT_IN (fndecl)
10740 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10741 yet. Defer folding until we see all the arguments
10742 (after inlining). */
10743 && !CALL_EXPR_VA_ARG_PACK (exp))
10745 int nargs = call_expr_nargs (exp);
10747 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10748 instead last argument is __builtin_va_arg_pack (). Defer folding
10749 even in that case, until arguments are finalized. */
10750 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10752 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10753 if (fndecl2
10754 && TREE_CODE (fndecl2) == FUNCTION_DECL
10755 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10756 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10757 return NULL_TREE;
10760 if (avoid_folding_inline_builtin (fndecl))
10761 return NULL_TREE;
10763 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10764 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10765 CALL_EXPR_ARGP (exp), ignore);
10766 else
10768 tree *args = CALL_EXPR_ARGP (exp);
10769 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10770 if (ret)
10771 return ret;
10774 return NULL_TREE;
10777 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10778 N arguments are passed in the array ARGARRAY. Return a folded
10779 expression or NULL_TREE if no simplification was possible. */
10781 tree
10782 fold_builtin_call_array (location_t loc, tree,
10783 tree fn,
10784 int n,
10785 tree *argarray)
10787 if (TREE_CODE (fn) != ADDR_EXPR)
10788 return NULL_TREE;
10790 tree fndecl = TREE_OPERAND (fn, 0);
10791 if (TREE_CODE (fndecl) == FUNCTION_DECL
10792 && DECL_BUILT_IN (fndecl))
10794 /* If last argument is __builtin_va_arg_pack (), arguments to this
10795 function are not finalized yet. Defer folding until they are. */
10796 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10798 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10799 if (fndecl2
10800 && TREE_CODE (fndecl2) == FUNCTION_DECL
10801 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10802 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10803 return NULL_TREE;
10805 if (avoid_folding_inline_builtin (fndecl))
10806 return NULL_TREE;
10807 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10808 return targetm.fold_builtin (fndecl, n, argarray, false);
10809 else
10810 return fold_builtin_n (loc, fndecl, argarray, n, false);
10813 return NULL_TREE;
10816 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10817 along with N new arguments specified as the "..." parameters. SKIP
10818 is the number of arguments in EXP to be omitted. This function is used
10819 to do varargs-to-varargs transformations. */
10821 static tree
10822 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10824 va_list ap;
10825 tree t;
10827 va_start (ap, n);
10828 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10829 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10830 va_end (ap);
10832 return t;
10835 /* Validate a single argument ARG against a tree code CODE representing
10836 a type. */
10838 static bool
10839 validate_arg (const_tree arg, enum tree_code code)
10841 if (!arg)
10842 return false;
10843 else if (code == POINTER_TYPE)
10844 return POINTER_TYPE_P (TREE_TYPE (arg));
10845 else if (code == INTEGER_TYPE)
10846 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10847 return code == TREE_CODE (TREE_TYPE (arg));
10850 /* This function validates the types of a function call argument list
10851 against a specified list of tree_codes. If the last specifier is a 0,
10852 that represents an ellipsis, otherwise the last specifier must be a
10853 VOID_TYPE.
10855 This is the GIMPLE version of validate_arglist. Eventually we want to
10856 completely convert builtins.c to work from GIMPLEs and the tree based
10857 validate_arglist will then be removed. */
10859 bool
10860 validate_gimple_arglist (const gcall *call, ...)
10862 enum tree_code code;
10863 bool res = 0;
10864 va_list ap;
10865 const_tree arg;
10866 size_t i;
10868 va_start (ap, call);
10869 i = 0;
10873 code = (enum tree_code) va_arg (ap, int);
10874 switch (code)
10876 case 0:
10877 /* This signifies an ellipsis; any further arguments are all ok. */
10878 res = true;
10879 goto end;
10880 case VOID_TYPE:
10881 /* This signifies an endlink, if no arguments remain, return
10882 true, otherwise return false. */
10883 res = (i == gimple_call_num_args (call));
10884 goto end;
10885 default:
10886 /* If no parameters remain or the parameter's code does not
10887 match the specified code, return false. Otherwise continue
10888 checking any remaining arguments. */
10889 arg = gimple_call_arg (call, i++);
10890 if (!validate_arg (arg, code))
10891 goto end;
10892 break;
10895 while (1);
10897 /* We need gotos here since we can only have one VA_CLOSE in a
10898 function. */
10899 end: ;
10900 va_end (ap);
10902 return res;
10905 /* Default target-specific builtin expander that does nothing. */
10908 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10909 rtx target ATTRIBUTE_UNUSED,
10910 rtx subtarget ATTRIBUTE_UNUSED,
10911 machine_mode mode ATTRIBUTE_UNUSED,
10912 int ignore ATTRIBUTE_UNUSED)
10914 return NULL_RTX;
10917 /* Returns true if EXP represents data that would potentially reside
10918 in a readonly section. */
10920 bool
10921 readonly_data_expr (tree exp)
10923 STRIP_NOPS (exp);
10925 if (TREE_CODE (exp) != ADDR_EXPR)
10926 return false;
10928 exp = get_base_address (TREE_OPERAND (exp, 0));
10929 if (!exp)
10930 return false;
10932 /* Make sure we call decl_readonly_section only for trees it
10933 can handle (since it returns true for everything it doesn't
10934 understand). */
10935 if (TREE_CODE (exp) == STRING_CST
10936 || TREE_CODE (exp) == CONSTRUCTOR
10937 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10938 return decl_readonly_section (exp, 0);
10939 else
10940 return false;
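/* Illustration (hypothetical): for

     static const char greeting[] = "hello";

   the expression &greeting satisfies readonly_data_expr, since the
   static variable lands in a read-only section; this is what later
   lets __memmove_chk degrade to __memcpy_chk, because a read-only
   source cannot overlap a writable destination.  */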
10943 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10944 to the call, and TYPE is its return type.
10946 Return NULL_TREE if no simplification was possible, otherwise return the
10947 simplified form of the call as a tree.
10949 The simplified form may be a constant or other expression which
10950 computes the same value, but in a more efficient manner (including
10951 calls to other builtin functions).
10953 The call may contain arguments which need to be evaluated, but
10954 which are not useful to determine the result of the call. In
10955 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10956 COMPOUND_EXPR will be an argument which must be evaluated.
10957 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10958 COMPOUND_EXPR in the chain will contain the tree for the simplified
10959 form of the builtin function call. */
10961 static tree
10962 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10964 if (!validate_arg (s1, POINTER_TYPE)
10965 || !validate_arg (s2, POINTER_TYPE))
10966 return NULL_TREE;
10967 else
10969 tree fn;
10970 const char *p1, *p2;
10972 p2 = c_getstr (s2);
10973 if (p2 == NULL)
10974 return NULL_TREE;
10976 p1 = c_getstr (s1);
10977 if (p1 != NULL)
10979 const char *r = strstr (p1, p2);
10980 tree tem;
10982 if (r == NULL)
10983 return build_int_cst (TREE_TYPE (s1), 0);
10985 /* Return an offset into the constant string argument. */
10986 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10987 return fold_convert_loc (loc, type, tem);
10990 /* The argument is const char *, and the result is char *, so we need
10991 a type conversion here to avoid a warning. */
10992 if (p2[0] == '\0')
10993 return fold_convert_loc (loc, type, s1);
10995 if (p2[1] != '\0')
10996 return NULL_TREE;
10998 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10999 if (!fn)
11000 return NULL_TREE;
11002 /* New argument list transforming strstr(s1, s2) to
11003 strchr(s1, s2[0]). */
11004 return build_call_expr_loc (loc, fn, 2, s1,
11005 build_int_cst (integer_type_node, p2[0]));
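/* Illustration (hypothetical user code): the strstr folds above give

     strstr ("haystack", "stack")  ->  "haystack" + 3   (compile time)
     strstr (s, "")                ->  (char *) s
     strstr (s, "k")               ->  strchr (s, 'k')

   while a multi-character, non-constant needle keeps the strstr call.  */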
11009 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11010 the call, and TYPE is its return type.
11012 Return NULL_TREE if no simplification was possible, otherwise return the
11013 simplified form of the call as a tree.
11015 The simplified form may be a constant or other expression which
11016 computes the same value, but in a more efficient manner (including
11017 calls to other builtin functions).
11019 The call may contain arguments which need to be evaluated, but
11020 which are not useful to determine the result of the call. In
11021 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11022 COMPOUND_EXPR will be an argument which must be evaluated.
11023 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11024 COMPOUND_EXPR in the chain will contain the tree for the simplified
11025 form of the builtin function call. */
11027 static tree
11028 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11030 if (!validate_arg (s1, POINTER_TYPE)
11031 || !validate_arg (s2, INTEGER_TYPE))
11032 return NULL_TREE;
11033 else
11035 const char *p1;
11037 if (TREE_CODE (s2) != INTEGER_CST)
11038 return NULL_TREE;
11040 p1 = c_getstr (s1);
11041 if (p1 != NULL)
11043 char c;
11044 const char *r;
11045 tree tem;
11047 if (target_char_cast (s2, &c))
11048 return NULL_TREE;
11050 r = strchr (p1, c);
11052 if (r == NULL)
11053 return build_int_cst (TREE_TYPE (s1), 0);
11055 /* Return an offset into the constant string argument. */
11056 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11057 return fold_convert_loc (loc, type, tem);
11059 return NULL_TREE;
11063 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11064 the call, and TYPE is its return type.
11066 Return NULL_TREE if no simplification was possible, otherwise return the
11067 simplified form of the call as a tree.
11069 The simplified form may be a constant or other expression which
11070 computes the same value, but in a more efficient manner (including
11071 calls to other builtin functions).
11073 The call may contain arguments which need to be evaluated, but
11074 which are not useful to determine the result of the call. In
11075 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11076 COMPOUND_EXPR will be an argument which must be evaluated.
11077 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11078 COMPOUND_EXPR in the chain will contain the tree for the simplified
11079 form of the builtin function call. */
11081 static tree
11082 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11084 if (!validate_arg (s1, POINTER_TYPE)
11085 || !validate_arg (s2, INTEGER_TYPE))
11086 return NULL_TREE;
11087 else
11089 tree fn;
11090 const char *p1;
11092 if (TREE_CODE (s2) != INTEGER_CST)
11093 return NULL_TREE;
11095 p1 = c_getstr (s1);
11096 if (p1 != NULL)
11098 char c;
11099 const char *r;
11100 tree tem;
11102 if (target_char_cast (s2, &c))
11103 return NULL_TREE;
11105 r = strrchr (p1, c);
11107 if (r == NULL)
11108 return build_int_cst (TREE_TYPE (s1), 0);
11110 /* Return an offset into the constant string argument. */
11111 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11112 return fold_convert_loc (loc, type, tem);
11115 if (! integer_zerop (s2))
11116 return NULL_TREE;
11118 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11119 if (!fn)
11120 return NULL_TREE;
11122 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11123 return build_call_expr_loc (loc, fn, 2, s1, s2);
11127 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11128 to the call, and TYPE is its return type.
11130 Return NULL_TREE if no simplification was possible, otherwise return the
11131 simplified form of the call as a tree.
11133 The simplified form may be a constant or other expression which
11134 computes the same value, but in a more efficient manner (including
11135 calls to other builtin functions).
11137 The call may contain arguments which need to be evaluated, but
11138 which are not useful to determine the result of the call. In
11139 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11140 COMPOUND_EXPR will be an argument which must be evaluated.
11141 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11142 COMPOUND_EXPR in the chain will contain the tree for the simplified
11143 form of the builtin function call. */
11145 static tree
11146 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11148 if (!validate_arg (s1, POINTER_TYPE)
11149 || !validate_arg (s2, POINTER_TYPE))
11150 return NULL_TREE;
11151 else
11153 tree fn;
11154 const char *p1, *p2;
11156 p2 = c_getstr (s2);
11157 if (p2 == NULL)
11158 return NULL_TREE;
11160 p1 = c_getstr (s1);
11161 if (p1 != NULL)
11163 const char *r = strpbrk (p1, p2);
11164 tree tem;
11166 if (r == NULL)
11167 return build_int_cst (TREE_TYPE (s1), 0);
11169 /* Return an offset into the constant string argument. */
11170 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11171 return fold_convert_loc (loc, type, tem);
11174 if (p2[0] == '\0')
11175 /* strpbrk(x, "") == NULL.
11176 Evaluate and ignore s1 in case it had side-effects. */
11177 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11179 if (p2[1] != '\0')
11180 return NULL_TREE; /* Really call strpbrk. */
11182 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11183 if (!fn)
11184 return NULL_TREE;
11186 /* New argument list transforming strpbrk(s1, s2) to
11187 strchr(s1, s2[0]). */
11188 return build_call_expr_loc (loc, fn, 2, s1,
11189 build_int_cst (integer_type_node, p2[0]));
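/* Illustration (hypothetical user code): correspondingly,

     strpbrk (s, "")   ->  NULL, with S still evaluated once
     strpbrk (s, ",")  ->  strchr (s, ',')

   and two constant arguments are resolved at compile time.  */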
11193 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11194 to the call.
11196 Return NULL_TREE if no simplification was possible, otherwise return the
11197 simplified form of the call as a tree.
11199 The simplified form may be a constant or other expression which
11200 computes the same value, but in a more efficient manner (including
11201 calls to other builtin functions).
11203 The call may contain arguments which need to be evaluated, but
11204 which are not useful to determine the result of the call. In
11205 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11206 COMPOUND_EXPR will be an argument which must be evaluated.
11207 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11208 COMPOUND_EXPR in the chain will contain the tree for the simplified
11209 form of the builtin function call. */
11211 static tree
11212 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11214 if (!validate_arg (s1, POINTER_TYPE)
11215 || !validate_arg (s2, POINTER_TYPE))
11216 return NULL_TREE;
11217 else
11219 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11221 /* If both arguments are constants, evaluate at compile-time. */
11222 if (p1 && p2)
11224 const size_t r = strspn (p1, p2);
11225 return build_int_cst (size_type_node, r);
11228 /* If either argument is "", the result is zero. */
11229 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11230 /* Evaluate and ignore both arguments in case either one has
11231 side-effects. */
11232 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11233 s1, s2);
11234 return NULL_TREE;
11238 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11239 to the call.
11241 Return NULL_TREE if no simplification was possible, otherwise return the
11242 simplified form of the call as a tree.
11244 The simplified form may be a constant or other expression which
11245 computes the same value, but in a more efficient manner (including
11246 calls to other builtin functions).
11248 The call may contain arguments which need to be evaluated, but
11249 which are not useful to determine the result of the call. In
11250 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11251 COMPOUND_EXPR will be an argument which must be evaluated.
11252 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11253 COMPOUND_EXPR in the chain will contain the tree for the simplified
11254 form of the builtin function call. */
11256 static tree
11257 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11259 if (!validate_arg (s1, POINTER_TYPE)
11260 || !validate_arg (s2, POINTER_TYPE))
11261 return NULL_TREE;
11262 else
11264 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11266 /* If both arguments are constants, evaluate at compile-time. */
11267 if (p1 && p2)
11269 const size_t r = strcspn (p1, p2);
11270 return build_int_cst (size_type_node, r);
11273 /* If the first argument is "", the result is zero. */
11274 if (p1 && *p1 == '\0')
11276 /* Evaluate and ignore argument s2 in case it has
11277 side-effects. */
11278 return omit_one_operand_loc (loc, size_type_node,
11279 size_zero_node, s2);
11282 /* If the second argument is "", return __builtin_strlen(s1). */
11283 if (p2 && *p2 == '\0')
11285 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11287 /* If the replacement _DECL isn't initialized, don't do the
11288 transformation. */
11289 if (!fn)
11290 return NULL_TREE;
11292 return build_call_expr_loc (loc, fn, 1, s1);
11294 return NULL_TREE;
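/* Illustration (hypothetical user code): taken together, the folds in
   fold_builtin_strspn and fold_builtin_strcspn behave as

     strspn ("aab", "a")  ->  2                 (compile time)
     strcspn ("", s)      ->  0, S still evaluated
     strcspn (s, "")      ->  strlen (s)

   with every other form left to the library call.  */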
11298 /* Fold the next_arg or va_start call EXP. Returns true if an error
11299 was produced, false otherwise. This is done so that we don't output
11300 the error or warning more than once. */
11302 bool
11303 fold_builtin_next_arg (tree exp, bool va_start_p)
11305 tree fntype = TREE_TYPE (current_function_decl);
11306 int nargs = call_expr_nargs (exp);
11307 tree arg;
11308 /* There is a good chance the current input_location points inside the
11309 definition of the va_start macro (perhaps on the token for
11310 builtin) in a system header, so warnings will not be emitted.
11311 Use the location in real source code. */
11312 source_location current_location =
11313 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11314 NULL);
11316 if (!stdarg_p (fntype))
11318 error ("%<va_start%> used in function with fixed args");
11319 return true;
11322 if (va_start_p)
11324 if (va_start_p && (nargs != 2))
11326 error ("wrong number of arguments to function %<va_start%>");
11327 return true;
11329 arg = CALL_EXPR_ARG (exp, 1);
11331 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11332 when we checked the arguments and if needed issued a warning. */
11333 else
11335 if (nargs == 0)
11337 /* Evidently an out of date version of <stdarg.h>; can't validate
11338 va_start's second argument, but can still work as intended. */
11339 warning_at (current_location,
11340 OPT_Wvarargs,
11341 "%<__builtin_next_arg%> called without an argument");
11342 return true;
11344 else if (nargs > 1)
11346 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11347 return true;
11349 arg = CALL_EXPR_ARG (exp, 0);
11352 if (TREE_CODE (arg) == SSA_NAME)
11353 arg = SSA_NAME_VAR (arg);
11355 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11356 or __builtin_next_arg (0) the first time we see it, after checking
11357 the arguments and if needed issuing a warning. */
11358 if (!integer_zerop (arg))
11360 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11362 /* Strip off all nops for the sake of the comparison. This
11363 is not quite the same as STRIP_NOPS. It does more.
11364 We must also strip off INDIRECT_EXPR for C++ reference
11365 parameters. */
11366 while (CONVERT_EXPR_P (arg)
11367 || TREE_CODE (arg) == INDIRECT_REF)
11368 arg = TREE_OPERAND (arg, 0);
11369 if (arg != last_parm)
11371 /* FIXME: Sometimes the tree optimizers hand us something other
11372 than the last argument even though the user used the last
11373 argument. We just warn and carry on treating the argument as
11374 if it were the last one, which may produce wrong code because
11375 of it. */
11376 warning_at (current_location,
11377 OPT_Wvarargs,
11378 "second parameter of %<va_start%> not last named argument");
11381 /* Undefined by C99 7.15.1.4p4 (va_start):
11382 "If the parameter parmN is declared with the register storage
11383 class, with a function or array type, or with a type that is
11384 not compatible with the type that results after application of
11385 the default argument promotions, the behavior is undefined."
11386 */
11387 else if (DECL_REGISTER (arg))
11389 warning_at (current_location,
11390 OPT_Wvarargs,
11391 "undefined behaviour when second parameter of "
11392 "%<va_start%> is declared with %<register%> storage");
11395 /* We want to verify the second parameter just once before the tree
11396 optimizers are run and then avoid keeping it in the tree,
11397 as otherwise we could warn even for correct code like:
11398 void foo (int i, ...)
11399 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11400 if (va_start_p)
11401 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11402 else
11403 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11405 return false;
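/* Illustration (hypothetical): given the correct use

     void
     foo (int i, ...)
     {
       va_list ap;
       va_start (ap, i);
       va_end (ap);
     }

   the first call to this function checks I against the last named
   parameter and then rewrites the call to __builtin_va_start (ap, 0),
   so later optimizer passes cannot re-trigger the warnings above.  */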
11409 /* Expand a call EXP to __builtin_object_size. */
11411 static rtx
11412 expand_builtin_object_size (tree exp)
11414 tree ost;
11415 int object_size_type;
11416 tree fndecl = get_callee_fndecl (exp);
11418 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11420 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11421 exp, fndecl);
11422 expand_builtin_trap ();
11423 return const0_rtx;
11426 ost = CALL_EXPR_ARG (exp, 1);
11427 STRIP_NOPS (ost);
11429 if (TREE_CODE (ost) != INTEGER_CST
11430 || tree_int_cst_sgn (ost) < 0
11431 || compare_tree_int (ost, 3) > 0)
11433 error ("%Klast argument of %D is not integer constant between 0 and 3",
11434 exp, fndecl);
11435 expand_builtin_trap ();
11436 return const0_rtx;
11439 object_size_type = tree_to_shwi (ost);
11441 return object_size_type < 2 ? constm1_rtx : const0_rtx;
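/* Illustration (hypothetical; get_buffer is a placeholder): when the
   size is unknown at expansion time, the fallback above produces the
   documented "don't know" answers:

     char *p = get_buffer ();
     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0

   i.e. the maximum estimate for types 0 and 1 and the minimum for
   types 2 and 3.  */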
11444 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11445 FCODE is the BUILT_IN_* to use.
11446 Return NULL_RTX if we failed; the caller should emit a normal call,
11447 otherwise try to get the result in TARGET, if convenient (and in
11448 mode MODE if that's convenient). */
11450 static rtx
11451 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11452 enum built_in_function fcode)
11454 tree dest, src, len, size;
11456 if (!validate_arglist (exp,
11457 POINTER_TYPE,
11458 fcode == BUILT_IN_MEMSET_CHK
11459 ? INTEGER_TYPE : POINTER_TYPE,
11460 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11461 return NULL_RTX;
11463 dest = CALL_EXPR_ARG (exp, 0);
11464 src = CALL_EXPR_ARG (exp, 1);
11465 len = CALL_EXPR_ARG (exp, 2);
11466 size = CALL_EXPR_ARG (exp, 3);
11468 if (! tree_fits_uhwi_p (size))
11469 return NULL_RTX;
11471 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11473 tree fn;
11475 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11477 warning_at (tree_nonartificial_location (exp),
11478 0, "%Kcall to %D will always overflow destination buffer",
11479 exp, get_callee_fndecl (exp));
11480 return NULL_RTX;
11483 fn = NULL_TREE;
11484 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11485 mem{cpy,pcpy,move,set} is available. */
11486 switch (fcode)
11488 case BUILT_IN_MEMCPY_CHK:
11489 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11490 break;
11491 case BUILT_IN_MEMPCPY_CHK:
11492 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11493 break;
11494 case BUILT_IN_MEMMOVE_CHK:
11495 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11496 break;
11497 case BUILT_IN_MEMSET_CHK:
11498 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11499 break;
11500 default:
11501 break;
11504 if (! fn)
11505 return NULL_RTX;
11507 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11508 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11509 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11510 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11512 else if (fcode == BUILT_IN_MEMSET_CHK)
11513 return NULL_RTX;
11514 else
11516 unsigned int dest_align = get_pointer_alignment (dest);
11518 /* If DEST is not a pointer type, call the normal function. */
11519 if (dest_align == 0)
11520 return NULL_RTX;
11522 /* If SRC and DEST are the same (and not volatile), do nothing. */
11523 if (operand_equal_p (src, dest, 0))
11525 tree expr;
11527 if (fcode != BUILT_IN_MEMPCPY_CHK)
11529 /* Evaluate and ignore LEN in case it has side-effects. */
11530 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11531 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11534 expr = fold_build_pointer_plus (dest, len);
11535 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11538 /* __memmove_chk special case. */
11539 if (fcode == BUILT_IN_MEMMOVE_CHK)
11541 unsigned int src_align = get_pointer_alignment (src);
11543 if (src_align == 0)
11544 return NULL_RTX;
11546 /* If src is categorized for a readonly section we can use
11547 normal __memcpy_chk. */
11548 if (readonly_data_expr (src))
11550 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11551 if (!fn)
11552 return NULL_RTX;
11553 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11554 dest, src, len, size);
11555 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11556 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11557 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11560 return NULL_RTX;
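/* Illustration (hypothetical): for

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   LEN (4) is known not to exceed SIZE (8), so the call is expanded as
   a plain memcpy; with LEN == 16 instead, the overflow warning above
   fires and NULL_RTX is returned so the checked library call is kept.  */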
11564 /* Emit warning if a buffer overflow is detected at compile time. */
11566 static void
11567 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11569 int is_strlen = 0;
11570 tree len, size;
11571 location_t loc = tree_nonartificial_location (exp);
11573 switch (fcode)
11575 case BUILT_IN_STRCPY_CHK:
11576 case BUILT_IN_STPCPY_CHK:
11577 /* For __strcat_chk the warning will be emitted only if overflowing
11578 by at least strlen (dest) + 1 bytes. */
11579 case BUILT_IN_STRCAT_CHK:
11580 len = CALL_EXPR_ARG (exp, 1);
11581 size = CALL_EXPR_ARG (exp, 2);
11582 is_strlen = 1;
11583 break;
11584 case BUILT_IN_STRNCAT_CHK:
11585 case BUILT_IN_STRNCPY_CHK:
11586 case BUILT_IN_STPNCPY_CHK:
11587 len = CALL_EXPR_ARG (exp, 2);
11588 size = CALL_EXPR_ARG (exp, 3);
11589 break;
11590 case BUILT_IN_SNPRINTF_CHK:
11591 case BUILT_IN_VSNPRINTF_CHK:
11592 len = CALL_EXPR_ARG (exp, 1);
11593 size = CALL_EXPR_ARG (exp, 3);
11594 break;
11595 default:
11596 gcc_unreachable ();
11599 if (!len || !size)
11600 return;
11602 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11603 return;
11605 if (is_strlen)
11607 len = c_strlen (len, 1);
11608 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11609 return;
11611 else if (fcode == BUILT_IN_STRNCAT_CHK)
11613 tree src = CALL_EXPR_ARG (exp, 1);
11614 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11615 return;
11616 src = c_strlen (src, 1);
11617 if (! src || ! tree_fits_uhwi_p (src))
11619 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11620 exp, get_callee_fndecl (exp));
11621 return;
11623 else if (tree_int_cst_lt (src, size))
11624 return;
11626 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11627 return;
11629 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11630 exp, get_callee_fndecl (exp));
11633 /* Emit warning if a buffer overflow is detected at compile time
11634 in __sprintf_chk/__vsprintf_chk calls. */
11636 static void
11637 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11639 tree size, len, fmt;
11640 const char *fmt_str;
11641 int nargs = call_expr_nargs (exp);
11643 /* Verify the required arguments in the original call. */
11645 if (nargs < 4)
11646 return;
11647 size = CALL_EXPR_ARG (exp, 2);
11648 fmt = CALL_EXPR_ARG (exp, 3);
11650 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11651 return;
11653 /* Check whether the format is a literal string constant. */
11654 fmt_str = c_getstr (fmt);
11655 if (fmt_str == NULL)
11656 return;
11658 if (!init_target_chars ())
11659 return;
11661 /* If the format doesn't contain % args or %%, we know its size. */
11662 if (strchr (fmt_str, target_percent) == 0)
11663 len = build_int_cstu (size_type_node, strlen (fmt_str));
11664 /* If the format is "%s" and the first ... argument is a string literal,
11665 we know it too. */
11666 else if (fcode == BUILT_IN_SPRINTF_CHK
11667 && strcmp (fmt_str, target_percent_s) == 0)
11669 tree arg;
11671 if (nargs < 5)
11672 return;
11673 arg = CALL_EXPR_ARG (exp, 4);
11674 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11675 return;
11677 len = c_strlen (arg, 1);
11678 if (!len || ! tree_fits_uhwi_p (len))
11679 return;
11681 else
11682 return;
11684 if (! tree_int_cst_lt (len, size))
11685 warning_at (tree_nonartificial_location (exp),
11686 0, "%Kcall to %D will always overflow destination buffer",
11687 exp, get_callee_fndecl (exp));
11692 /* Emit a warning if free is called with the address of a variable. */
11692 static void
11693 maybe_emit_free_warning (tree exp)
11695 tree arg = CALL_EXPR_ARG (exp, 0);
11697 STRIP_NOPS (arg);
11698 if (TREE_CODE (arg) != ADDR_EXPR)
11699 return;
11701 arg = get_base_address (TREE_OPERAND (arg, 0));
11702 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11703 return;
11705 if (SSA_VAR_P (arg))
11706 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11707 "%Kattempt to free a non-heap object %qD", exp, arg);
11708 else
11709 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11710 "%Kattempt to free a non-heap object", exp);
11713 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11714 if possible. */
11716 static tree
11717 fold_builtin_object_size (tree ptr, tree ost)
11719 unsigned HOST_WIDE_INT bytes;
11720 int object_size_type;
11722 if (!validate_arg (ptr, POINTER_TYPE)
11723 || !validate_arg (ost, INTEGER_TYPE))
11724 return NULL_TREE;
11726 STRIP_NOPS (ost);
11728 if (TREE_CODE (ost) != INTEGER_CST
11729 || tree_int_cst_sgn (ost) < 0
11730 || compare_tree_int (ost, 3) > 0)
11731 return NULL_TREE;
11733 object_size_type = tree_to_shwi (ost);
11735 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11736 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11737 and (size_t) 0 for types 2 and 3. */
11738 if (TREE_SIDE_EFFECTS (ptr))
11739 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11741 if (TREE_CODE (ptr) == ADDR_EXPR)
11743 bytes = compute_builtin_object_size (ptr, object_size_type);
11744 if (wi::fits_to_tree_p (bytes, size_type_node))
11745 return build_int_cstu (size_type_node, bytes);
11747 else if (TREE_CODE (ptr) == SSA_NAME)
11749 /* If object size is not known yet, delay folding until
11750 later. Maybe subsequent passes will help determine
11751 it. */
11752 bytes = compute_builtin_object_size (ptr, object_size_type);
11753 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11754 && wi::fits_to_tree_p (bytes, size_type_node))
11755 return build_int_cstu (size_type_node, bytes);
11758 return NULL_TREE;
11761 /* Builtins with folding operations that operate on "..." arguments
11762 need special handling; we need to store the arguments in a convenient
11763 data structure before attempting any folding. Fortunately there are
11764 only a few builtins that fall into this category. FNDECL is the
11765 function, EXP is the CALL_EXPR for the call. */
11767 static tree
11768 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11771 tree ret = NULL_TREE;
11773 switch (fcode)
11775 case BUILT_IN_FPCLASSIFY:
11776 ret = fold_builtin_fpclassify (loc, args, nargs);
11777 break;
11779 default:
11780 break;
11782 if (ret)
11784 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11785 SET_EXPR_LOCATION (ret, loc);
11786 TREE_NO_WARNING (ret) = 1;
11787 return ret;
11789 return NULL_TREE;
11792 /* Initialize format string characters in the target charset. */
11794 bool
11795 init_target_chars (void)
11797 static bool init;
11798 if (!init)
11800 target_newline = lang_hooks.to_target_charset ('\n');
11801 target_percent = lang_hooks.to_target_charset ('%');
11802 target_c = lang_hooks.to_target_charset ('c');
11803 target_s = lang_hooks.to_target_charset ('s');
11804 if (target_newline == 0 || target_percent == 0 || target_c == 0
11805 || target_s == 0)
11806 return false;
11808 target_percent_c[0] = target_percent;
11809 target_percent_c[1] = target_c;
11810 target_percent_c[2] = '\0';
11812 target_percent_s[0] = target_percent;
11813 target_percent_s[1] = target_s;
11814 target_percent_s[2] = '\0';
11816 target_percent_s_newline[0] = target_percent;
11817 target_percent_s_newline[1] = target_s;
11818 target_percent_s_newline[2] = target_newline;
11819 target_percent_s_newline[3] = '\0';
11821 init = true;
11823 return true;
11826 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11827 and no overflow/underflow occurred. INEXACT is true if M was not
11828 exactly calculated. TYPE is the tree type for the result. This
11829 function assumes that you cleared the MPFR flags and then
11830 calculated M to see if anything subsequently set a flag prior to
11831 entering this function. Return NULL_TREE if any checks fail. */
11833 static tree
11834 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11836 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11837 overflow/underflow occurred. If -frounding-math, proceed iff the
11838 result of calling FUNC was exact. */
11839 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11840 && (!flag_rounding_math || !inexact))
11842 REAL_VALUE_TYPE rr;
11844 real_from_mpfr (&rr, m, type, GMP_RNDN);
11845 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11846 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11847 but the mpfr_t is not, then we underflowed in the
11848 conversion. */
11849 if (real_isfinite (&rr)
11850 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11852 REAL_VALUE_TYPE rmode;
11854 real_convert (&rmode, TYPE_MODE (type), &rr);
11855 /* Proceed iff the specified mode can hold the value. */
11856 if (real_identical (&rmode, &rr))
11857 return build_real (type, rmode);
11860 return NULL_TREE;
11863 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11864 number and no overflow/underflow occurred. INEXACT is true if M
11865 was not exactly calculated. TYPE is the tree type for the result.
11866 This function assumes that you cleared the MPFR flags and then
11867 calculated M to see if anything subsequently set a flag prior to
11868 entering this function. Return NULL_TREE if any checks fail; if
11869 FORCE_CONVERT is true, bypass the checks. */
11871 static tree
11872 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11874 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11875 overflow/underflow occurred. If -frounding-math, proceed iff the
11876 result of calling FUNC was exact. */
11877 if (force_convert
11878 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11879 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11880 && (!flag_rounding_math || !inexact)))
11882 REAL_VALUE_TYPE re, im;
11884 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11885 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11886 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11887 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11888 but the mpfr_t is not, then we underflowed in the
11889 conversion. */
11890 if (force_convert
11891 || (real_isfinite (&re) && real_isfinite (&im)
11892 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11893 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11895 REAL_VALUE_TYPE re_mode, im_mode;
11897 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11898 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11899 /* Proceed iff the specified mode can hold the value. */
11900 if (force_convert
11901 || (real_identical (&re_mode, &re)
11902 && real_identical (&im_mode, &im)))
11903 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11904 build_real (TREE_TYPE (type), im_mode));
11907 return NULL_TREE;
11910 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11911 FUNC on it and return the resulting value as a tree with type TYPE.
11912 If MIN and/or MAX are not NULL, then the supplied ARG must be
11913 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11914 acceptable values, otherwise they are not. The mpfr precision is
11915 set to the precision of TYPE. We assume that function FUNC returns
11916 zero if the result could be calculated exactly within the requested
11917 precision. */
11919 static tree
11920 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11921 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11922 bool inclusive)
11924 tree result = NULL_TREE;
11926 STRIP_NOPS (arg);
11928 /* To proceed, MPFR must exactly represent the target floating point
11929 format, which only happens when the target base equals two. */
11930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11931 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11933 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11935 if (real_isfinite (ra)
11936 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11937 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11940 const int prec = fmt->p;
11941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11942 int inexact;
11943 mpfr_t m;
11945 mpfr_init2 (m, prec);
11946 mpfr_from_real (m, ra, GMP_RNDN);
11947 mpfr_clear_flags ();
11948 inexact = func (m, m, rnd);
11949 result = do_mpfr_ckconv (m, type, inexact);
11950 mpfr_clear (m);
11954 return result;
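/* Illustration (a self-contained sketch of the MPFR pattern used
   above, hypothetical and outside GCC):

     #include <stdio.h>
     #include <mpfr.h>

     int
     main (void)
     {
       mpfr_t m;
       int inexact;

       mpfr_init2 (m, 53);             // 53 bits, like IEEE double
       mpfr_set_d (m, 1.0, GMP_RNDN);
       mpfr_clear_flags ();            // so later flag tests are clean
       inexact = mpfr_sin (m, m, GMP_RNDN);
       printf ("sin(1) = %.17g, inexact = %d\n",
               mpfr_get_d (m, GMP_RNDN), inexact);
       mpfr_clear (m);
       return 0;
     }

   as in do_mpfr_arg1, the flags are cleared immediately before the
   operation so that an overflow/underflow check afterwards reflects
   only that one computation.  */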
11957 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11958 FUNC on it and return the resulting value as a tree with type TYPE.
11959 The mpfr precision is set to the precision of TYPE. We assume that
11960 function FUNC returns zero if the result could be calculated
11961 exactly within the requested precision. */
11963 static tree
11964 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11965 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11967 tree result = NULL_TREE;
11969 STRIP_NOPS (arg1);
11970 STRIP_NOPS (arg2);
11972 /* To proceed, MPFR must exactly represent the target floating point
11973 format, which only happens when the target base equals two. */
11974 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11975 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11976 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11978 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11979 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11981 if (real_isfinite (ra1) && real_isfinite (ra2))
11983 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11984 const int prec = fmt->p;
11985 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11986 int inexact;
11987 mpfr_t m1, m2;
11989 mpfr_inits2 (prec, m1, m2, NULL);
11990 mpfr_from_real (m1, ra1, GMP_RNDN);
11991 mpfr_from_real (m2, ra2, GMP_RNDN);
11992 mpfr_clear_flags ();
11993 inexact = func (m1, m1, m2, rnd);
11994 result = do_mpfr_ckconv (m1, type, inexact);
11995 mpfr_clears (m1, m2, NULL);
11999 return result;
12002 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12003 FUNC on it and return the resulting value as a tree with type TYPE.
12004 The mpfr precision is set to the precision of TYPE. We assume that
12005 function FUNC returns zero if the result could be calculated
12006 exactly within the requested precision. */
12008 static tree
12009 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12010 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12012 tree result = NULL_TREE;
12014 STRIP_NOPS (arg1);
12015 STRIP_NOPS (arg2);
12016 STRIP_NOPS (arg3);
12018 /* To proceed, MPFR must exactly represent the target floating point
12019 format, which only happens when the target base equals two. */
12020 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12021 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12022 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12023 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12025 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12026 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12027 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12029 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12031 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12032 const int prec = fmt->p;
12033 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12034 int inexact;
12035 mpfr_t m1, m2, m3;
12037 mpfr_inits2 (prec, m1, m2, m3, NULL);
12038 mpfr_from_real (m1, ra1, GMP_RNDN);
12039 mpfr_from_real (m2, ra2, GMP_RNDN);
12040 mpfr_from_real (m3, ra3, GMP_RNDN);
12041 mpfr_clear_flags ();
12042 inexact = func (m1, m1, m2, m3, rnd);
12043 result = do_mpfr_ckconv (m1, type, inexact);
12044 mpfr_clears (m1, m2, m3, NULL);
12048 return result;
12051 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12052 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12053 If ARG_SINP and ARG_COSP are NULL then the result is returned
12054 as a complex value.
12055 The type is taken from the type of ARG and is used for setting the
12056 precision of the calculation and results. */
12058 static tree
12059 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12061 tree const type = TREE_TYPE (arg);
12062 tree result = NULL_TREE;
12064 STRIP_NOPS (arg);
12066 /* To proceed, MPFR must exactly represent the target floating point
12067 format, which only happens when the target base equals two. */
12068 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12069 && TREE_CODE (arg) == REAL_CST
12070 && !TREE_OVERFLOW (arg))
12072 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12074 if (real_isfinite (ra))
12076 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12077 const int prec = fmt->p;
12078 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12079 tree result_s, result_c;
12080 int inexact;
12081 mpfr_t m, ms, mc;
12083 mpfr_inits2 (prec, m, ms, mc, NULL);
12084 mpfr_from_real (m, ra, GMP_RNDN);
12085 mpfr_clear_flags ();
12086 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12087 result_s = do_mpfr_ckconv (ms, type, inexact);
12088 result_c = do_mpfr_ckconv (mc, type, inexact);
12089 mpfr_clears (m, ms, mc, NULL);
12090 if (result_s && result_c)
12092 /* If we are to return in a complex value do so. */
12093 if (!arg_sinp && !arg_cosp)
12094 return build_complex (build_complex_type (type),
12095 result_c, result_s);
12097 /* Dereference the sin/cos pointer arguments. */
12098 arg_sinp = build_fold_indirect_ref (arg_sinp);
12099 arg_cosp = build_fold_indirect_ref (arg_cosp);
12100 /* Proceed if valid pointer types were passed in. */
12101 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12102 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12104 /* Set the values. */
12105 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12106 result_s);
12107 TREE_SIDE_EFFECTS (result_s) = 1;
12108 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12109 result_c);
12110 TREE_SIDE_EFFECTS (result_c) = 1;
12111 /* Combine the assignments into a compound expr. */
12112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12113 result_s, result_c));
12118 return result;
12121 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12122 two-argument mpfr order N Bessel function FUNC on them and return
12123 the resulting value as a tree with type TYPE. The mpfr precision
12124 is set to the precision of TYPE. We assume that function FUNC
12125 returns zero if the result could be calculated exactly within the
12126 requested precision. */
12127 static tree
12128 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12129 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12130 const REAL_VALUE_TYPE *min, bool inclusive)
12132 tree result = NULL_TREE;
12134 STRIP_NOPS (arg1);
12135 STRIP_NOPS (arg2);
12137 /* To proceed, MPFR must exactly represent the target floating point
12138 format, which only happens when the target base equals two. */
12139 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12140 && tree_fits_shwi_p (arg1)
12141 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12143 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12144 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12146 if (n == (long)n
12147 && real_isfinite (ra)
12148 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12150 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12151 const int prec = fmt->p;
12152 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12153 int inexact;
12154 mpfr_t m;
12156 mpfr_init2 (m, prec);
12157 mpfr_from_real (m, ra, GMP_RNDN);
12158 mpfr_clear_flags ();
12159 inexact = func (m, n, m, rnd);
12160 result = do_mpfr_ckconv (m, type, inexact);
12161 mpfr_clear (m);
12165 return result;
12168 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12169 the pointer *(ARG_QUO) and return the result. The type is taken
12170 from the type of ARG0 and is used for setting the precision of the
12171 calculation and results. */
12173 static tree
12174 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12176 tree const type = TREE_TYPE (arg0);
12177 tree result = NULL_TREE;
12179 STRIP_NOPS (arg0);
12180 STRIP_NOPS (arg1);
12182 /* To proceed, MPFR must exactly represent the target floating point
12183 format, which only happens when the target base equals two. */
12184 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12185 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12186 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12188 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12189 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12191 if (real_isfinite (ra0) && real_isfinite (ra1))
12193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12194 const int prec = fmt->p;
12195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12196 tree result_rem;
12197 long integer_quo;
12198 mpfr_t m0, m1;
12200 mpfr_inits2 (prec, m0, m1, NULL);
12201 mpfr_from_real (m0, ra0, GMP_RNDN);
12202 mpfr_from_real (m1, ra1, GMP_RNDN);
12203 mpfr_clear_flags ();
12204 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12205 /* Remquo is independent of the rounding mode, so pass
12206 inexact=0 to do_mpfr_ckconv(). */
12207 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12208 mpfr_clears (m0, m1, NULL);
12209 if (result_rem)
12211 /* MPFR calculates quo in the host's long so it may
12212 return more bits in quo than the target int can hold
12213 if sizeof(host long) > sizeof(target int). This can
12214 happen even for native compilers in LP64 mode. In
12215 these cases, reduce the quo value modulo the largest
12216 number that the target int can hold while leaving one
12217 bit for the sign. */
12218 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12219 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12221 /* Dereference the quo pointer argument. */
12222 arg_quo = build_fold_indirect_ref (arg_quo);
12223 /* Proceed iff a valid pointer type was passed in. */
12224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12226 /* Set the value. */
12227 tree result_quo
12228 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12229 build_int_cst (TREE_TYPE (arg_quo),
12230 integer_quo));
12231 TREE_SIDE_EFFECTS (result_quo) = 1;
12232 /* Combine the quo assignment with the rem. */
12233 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12234 result_quo, result_rem));
12239 return result;
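/* Illustration (hypothetical): the library semantics being folded are

     #include <math.h>
     int quo;
     double rem = remquo (7.0, 2.0, &quo);  // rem == -1.0

   7.0 / 2.0 == 3.5 rounds to the even integer 4, so the remainder is
   7.0 - 4 * 2.0 == -1.0; C99 only guarantees the low bits of QUO
   (glibc stores 4 here), which is why the host "long" result is
   reduced before being written into a target int above.  */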
12242 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12243 resulting value as a tree with type TYPE. The mpfr precision is
12244 set to the precision of TYPE. We assume that this mpfr function
12245 returns zero if the result could be calculated exactly within the
12246 requested precision. In addition, the integer pointer represented
12247 by ARG_SG will be dereferenced and set to the appropriate signgam
12248 (-1,1) value. */
12250 static tree
12251 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12253 tree result = NULL_TREE;
12255 STRIP_NOPS (arg);
12257 /* To proceed, MPFR must exactly represent the target floating point
12258 format, which only happens when the target base equals two. Also
12259 verify ARG is a constant and that ARG_SG is an int pointer. */
12260 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12261 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12262 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12263 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12265 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12267 /* In addition to NaN and Inf, the argument cannot be zero or a
12268 negative integer. */
12269 if (real_isfinite (ra)
12270 && ra->cl != rvc_zero
12271 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12273 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12274 const int prec = fmt->p;
12275 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12276 int inexact, sg;
12277 mpfr_t m;
12278 tree result_lg;
12280 mpfr_init2 (m, prec);
12281 mpfr_from_real (m, ra, GMP_RNDN);
12282 mpfr_clear_flags ();
12283 inexact = mpfr_lgamma (m, &sg, m, rnd);
12284 result_lg = do_mpfr_ckconv (m, type, inexact);
12285 mpfr_clear (m);
12286 if (result_lg)
12288 tree result_sg;
12290 /* Dereference the arg_sg pointer argument. */
12291 arg_sg = build_fold_indirect_ref (arg_sg);
12292 /* Assign the signgam value into *arg_sg. */
12293 result_sg = fold_build2 (MODIFY_EXPR,
12294 TREE_TYPE (arg_sg), arg_sg,
12295 build_int_cst (TREE_TYPE (arg_sg), sg));
12296 TREE_SIDE_EFFECTS (result_sg) = 1;
12297 /* Combine the signgam assignment with the lgamma result. */
12298 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12299 result_sg, result_lg));
12304 return result;
12307 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12308 function FUNC on it and return the resulting value as a tree with
12309 type TYPE. The mpfr precision is set to the precision of TYPE. We
12310 assume that function FUNC returns zero if the result could be
12311 calculated exactly within the requested precision. */
12313 static tree
12314 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12316 tree result = NULL_TREE;
12318 STRIP_NOPS (arg);
12320 /* To proceed, MPFR must exactly represent the target floating point
12321 format, which only happens when the target base equals two. */
12322 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12323 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12324 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12326 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12327 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12329 if (real_isfinite (re) && real_isfinite (im))
12331 const struct real_format *const fmt =
12332 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12333 const int prec = fmt->p;
12334 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12335 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12336 int inexact;
12337 mpc_t m;
12339 mpc_init2 (m, prec);
12340 mpfr_from_real (mpc_realref (m), re, rnd);
12341 mpfr_from_real (mpc_imagref (m), im, rnd);
12342 mpfr_clear_flags ();
12343 inexact = func (m, m, crnd);
12344 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12345 mpc_clear (m);
12349 return result;
12352 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12353 mpc function FUNC on it and return the resulting value as a tree
12354 with type TYPE. The mpfr precision is set to the precision of
12355 TYPE. We assume that function FUNC returns zero if the result
12356 could be calculated exactly within the requested precision. If
12357 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12358 in the arguments and/or results. */
12360 tree
12361 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12362 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12364 tree result = NULL_TREE;
12366 STRIP_NOPS (arg0);
12367 STRIP_NOPS (arg1);
12369 /* To proceed, MPFR must exactly represent the target floating point
12370 format, which only happens when the target base equals two. */
12371 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12373 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12374 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12375 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12377 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12378 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12379 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12380 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12382 if (do_nonfinite
12383 || (real_isfinite (re0) && real_isfinite (im0)
12384 && real_isfinite (re1) && real_isfinite (im1)))
12386 const struct real_format *const fmt =
12387 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12388 const int prec = fmt->p;
12389 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12390 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12391 int inexact;
12392 mpc_t m0, m1;
12394 mpc_init2 (m0, prec);
12395 mpc_init2 (m1, prec);
12396 mpfr_from_real (mpc_realref (m0), re0, rnd);
12397 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12398 mpfr_from_real (mpc_realref (m1), re1, rnd);
12399 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12400 mpfr_clear_flags ();
12401 inexact = func (m0, m0, m1, crnd);
12402 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12403 mpc_clear (m0);
12404 mpc_clear (m1);
12408 return result;
12411 /* A wrapper function for builtin folding that prevents warnings for
12412 "statement without effect" and the like, caused by removing the
12413 call node earlier than the warning is generated. */
12415 tree
12416 fold_call_stmt (gcall *stmt, bool ignore)
12418 tree ret = NULL_TREE;
12419 tree fndecl = gimple_call_fndecl (stmt);
12420 location_t loc = gimple_location (stmt);
12421 if (fndecl
12422 && TREE_CODE (fndecl) == FUNCTION_DECL
12423 && DECL_BUILT_IN (fndecl)
12424 && !gimple_call_va_arg_pack_p (stmt))
12426 int nargs = gimple_call_num_args (stmt);
12427 tree *args = (nargs > 0
12428 ? gimple_call_arg_ptr (stmt, 0)
12429 : &error_mark_node);
12431 if (avoid_folding_inline_builtin (fndecl))
12432 return NULL_TREE;
12433 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12435 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12437 else
12439 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12440 if (ret)
12442 /* Propagate location information from original call to
12443 expansion of builtin. Otherwise things like
12444 maybe_emit_chk_warning, that operate on the expansion
12445 of a builtin, will use the wrong location information. */
12446 if (gimple_has_location (stmt))
12448 tree realret = ret;
12449 if (TREE_CODE (ret) == NOP_EXPR)
12450 realret = TREE_OPERAND (ret, 0);
12451 if (CAN_HAVE_LOCATION_P (realret)
12452 && !EXPR_HAS_LOCATION (realret))
12453 SET_EXPR_LOCATION (realret, loc);
12454 return realret;
12456 return ret;
12460 return NULL_TREE;
12463 /* Look up the function in builtin_decl that corresponds to DECL
12464 and set ASMSPEC as its user assembler name. DECL must be a
12465 function decl that declares a builtin. */
12467 void
12468 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12470 tree builtin;
12471 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12472 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12473 && asmspec != 0);
12475 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12476 set_user_assembler_name (builtin, asmspec);
12477 switch (DECL_FUNCTION_CODE (decl))
12479 case BUILT_IN_MEMCPY:
12480 init_block_move_fn (asmspec);
12481 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12482 break;
12483 case BUILT_IN_MEMSET:
12484 init_block_clear_fn (asmspec);
12485 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12486 break;
12487 case BUILT_IN_MEMMOVE:
12488 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12489 break;
12490 case BUILT_IN_MEMCMP:
12491 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12492 break;
12493 case BUILT_IN_ABORT:
12494 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12495 break;
12496 case BUILT_IN_FFS:
12497 if (INT_TYPE_SIZE < BITS_PER_WORD)
12499 set_user_assembler_libfunc ("ffs", asmspec);
12500 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12501 MODE_INT, 0), "ffs");
12503 break;
12504 default:
12505 break;
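/* Illustration (hypothetical user code): a redeclaration such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   arrives here with ASMSPEC == "my_memcpy", so both direct calls and
   the block-move expansion machinery emit references to the renamed
   symbol instead of plain memcpy.  */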
12509 /* Return true if DECL is a builtin that expands to a constant or similarly
12510 simple code. */
12511 bool
12512 is_simple_builtin (tree decl)
12514 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12515 switch (DECL_FUNCTION_CODE (decl))
12517 /* Builtins that expand to constants. */
12518 case BUILT_IN_CONSTANT_P:
12519 case BUILT_IN_EXPECT:
12520 case BUILT_IN_OBJECT_SIZE:
12521 case BUILT_IN_UNREACHABLE:
12522 /* Simple register moves or loads from stack. */
12523 case BUILT_IN_ASSUME_ALIGNED:
12524 case BUILT_IN_RETURN_ADDRESS:
12525 case BUILT_IN_EXTRACT_RETURN_ADDR:
12526 case BUILT_IN_FROB_RETURN_ADDR:
12527 case BUILT_IN_RETURN:
12528 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12529 case BUILT_IN_FRAME_ADDRESS:
12530 case BUILT_IN_VA_END:
12531 case BUILT_IN_STACK_SAVE:
12532 case BUILT_IN_STACK_RESTORE:
12533 /* Exception state returns or moves registers around. */
12534 case BUILT_IN_EH_FILTER:
12535 case BUILT_IN_EH_POINTER:
12536 case BUILT_IN_EH_COPY_VALUES:
12537 return true;
12539 default:
12540 return false;
12543 return false;
12546 /* Return true if DECL is a builtin that is not expensive, i.e. one that is
12547 most probably expanded inline into reasonably simple code. This is a
12548 superset of is_simple_builtin. */
12549 bool
12550 is_inexpensive_builtin (tree decl)
12552 if (!decl)
12553 return false;
12554 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12555 return true;
12556 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12557 switch (DECL_FUNCTION_CODE (decl))
12559 case BUILT_IN_ABS:
12560 case BUILT_IN_ALLOCA:
12561 case BUILT_IN_ALLOCA_WITH_ALIGN:
12562 case BUILT_IN_BSWAP16:
12563 case BUILT_IN_BSWAP32:
12564 case BUILT_IN_BSWAP64:
12565 case BUILT_IN_CLZ:
12566 case BUILT_IN_CLZIMAX:
12567 case BUILT_IN_CLZL:
12568 case BUILT_IN_CLZLL:
12569 case BUILT_IN_CTZ:
12570 case BUILT_IN_CTZIMAX:
12571 case BUILT_IN_CTZL:
12572 case BUILT_IN_CTZLL:
12573 case BUILT_IN_FFS:
12574 case BUILT_IN_FFSIMAX:
12575 case BUILT_IN_FFSL:
12576 case BUILT_IN_FFSLL:
12577 case BUILT_IN_IMAXABS:
12578 case BUILT_IN_FINITE:
12579 case BUILT_IN_FINITEF:
12580 case BUILT_IN_FINITEL:
12581 case BUILT_IN_FINITED32:
12582 case BUILT_IN_FINITED64:
12583 case BUILT_IN_FINITED128:
12584 case BUILT_IN_FPCLASSIFY:
12585 case BUILT_IN_ISFINITE:
12586 case BUILT_IN_ISINF_SIGN:
12587 case BUILT_IN_ISINF:
12588 case BUILT_IN_ISINFF:
12589 case BUILT_IN_ISINFL:
12590 case BUILT_IN_ISINFD32:
12591 case BUILT_IN_ISINFD64:
12592 case BUILT_IN_ISINFD128:
12593 case BUILT_IN_ISNAN:
12594 case BUILT_IN_ISNANF:
12595 case BUILT_IN_ISNANL:
12596 case BUILT_IN_ISNAND32:
12597 case BUILT_IN_ISNAND64:
12598 case BUILT_IN_ISNAND128:
12599 case BUILT_IN_ISNORMAL:
12600 case BUILT_IN_ISGREATER:
12601 case BUILT_IN_ISGREATEREQUAL:
12602 case BUILT_IN_ISLESS:
12603 case BUILT_IN_ISLESSEQUAL:
12604 case BUILT_IN_ISLESSGREATER:
12605 case BUILT_IN_ISUNORDERED:
12606 case BUILT_IN_VA_ARG_PACK:
12607 case BUILT_IN_VA_ARG_PACK_LEN:
12608 case BUILT_IN_VA_COPY:
12609 case BUILT_IN_TRAP:
12610 case BUILT_IN_SAVEREGS:
12611 case BUILT_IN_POPCOUNTL:
12612 case BUILT_IN_POPCOUNTLL:
12613 case BUILT_IN_POPCOUNTIMAX:
12614 case BUILT_IN_POPCOUNT:
12615 case BUILT_IN_PARITYL:
12616 case BUILT_IN_PARITYLL:
12617 case BUILT_IN_PARITYIMAX:
12618 case BUILT_IN_PARITY:
12619 case BUILT_IN_LABS:
12620 case BUILT_IN_LLABS:
12621 case BUILT_IN_PREFETCH:
12622 case BUILT_IN_ACC_ON_DEVICE:
12623 return true;
12625 default:
12626 return is_simple_builtin (decl);
12629 return false;