/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), trail1special (true), base0 (true)
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}

/* Return the maximum amount of space remaining and if non-null, set
   the argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into the referenced object is zero-based (i.e., it's
	 not referenced by a pointer into the middle of some unknown
	 object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size ()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
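
/* A worked example (illustrative only, not from the GCC sources):
   with BASE0 set, SIZRNG = [4, 8] and OFFRNG = [2, 3],
   size_remaining () returns 8 - 2 == 6 and sets *PMIN to 4 - 2 == 2.
   With OFFRNG = [8, 9] it returns 0 and sets *PMIN to -1, the special
   value meaning the lower offset bound is valid and just past the end
   of the object.  */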

/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  return;
	}

      offrng[1] = maxoff;
      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
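
/* E.g., is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false (illustrative only).  */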

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
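
/* A worked example (illustrative only): if the address is known to be
   32-byte aligned except for a constant misalignment of 8 bytes
   (ALIGN == 256 bits, BITPOS == 64 bits), the alignment that can be
   guaranteed for the object itself is least_bit_hwi (64) == 64 bits,
   i.e. 8 bytes.  */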

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
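
/* A worked example (illustrative only, not from the GCC sources):

     string_length ("ab\0cd", 1, 6) == 2

   counts the leading non-zero chars, and with ELTSIZE == 2 the byte
   sequence { 'a', 0, 'b', 0, 0, 0 } also yields 2, counting two-byte
   elements up to the first all-zero element.  */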

/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, returns false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull it also issues
   a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is true, but not more.  */
  tree size;
  /* True if the size is exact, i.e., when SRC involves no non-constant
     offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in an SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
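
/* A worked example (illustrative only, not from the GCC sources):
   given

     const char a[4] = "abcd";

   the array has no terminating NUL, so unterminated_array on an
   ADDR_EXPR of A returns the declaration of A and sets *SIZE to 4
   and *EXACT to true, while for an array initialized from "abc" it
   returns NULL_TREE.  */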

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If ELTOFF is larger than STRELTS but less than MAXELTS the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
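
/* A worked example (illustrative only, not from the GCC sources):
   c_strlen on the STRING_CST "hello" yields ssize_int (5), and on a
   wide string constant L"hi" yields ssize_int (2) when ELTSIZE
   matches the size of the target wide character.  */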

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p /* = true */)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
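
/* A worked example (illustrative only, not from the GCC sources):
   assuming 8-bit units and a 4-byte SImode, c_readstr ("abc", SImode)
   yields the constant 0x00636261 on a little-endian target ('a' in
   the least significant byte) and 0x61626300 on a big-endian one;
   the terminating NUL and everything following it read as zero.  */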

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
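
/* A worked example (illustrative only): with an 8-bit target char an
   INTEGER_CST of 65 stores 'A' through *P and returns 0.  A nonzero
   result is only possible when the target char is wider than the
   host char and the value does not fit, e.g. 0x141 with a 16-bit
   target char and an 8-bit host char.  */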

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
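
/* The checks in this file follow the pattern (see e.g.
   expand_builtin_nonlocal_goto below)

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   which accepts exactly two pointer arguments; a trailing 0 instead
   of VOID_TYPE would instead accept any further arguments as an
   ellipsis.  */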

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
1645 /* Expand a call to __builtin_prefetch. For a target that does not support
1646 data prefetch, evaluate the memory address argument in case it has side
1647 effects. */
1649 static void
1650 expand_builtin_prefetch (tree exp)
1652 tree arg0, arg1, arg2;
1653 int nargs;
1654 rtx op0, op1, op2;
1656 if (!validate_arglist (exp, POINTER_TYPE, 0))
1657 return;
1659 arg0 = CALL_EXPR_ARG (exp, 0);
1661 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1662 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1663 locality). */
1664 nargs = call_expr_nargs (exp);
1665 if (nargs > 1)
1666 arg1 = CALL_EXPR_ARG (exp, 1);
1667 else
1668 arg1 = integer_zero_node;
1669 if (nargs > 2)
1670 arg2 = CALL_EXPR_ARG (exp, 2);
1671 else
1672 arg2 = integer_three_node;
1674 /* Argument 0 is an address. */
1675 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1677 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1678 if (TREE_CODE (arg1) != INTEGER_CST)
1680 error ("second argument to %<__builtin_prefetch%> must be a constant");
1681 arg1 = integer_zero_node;
1683 op1 = expand_normal (arg1);
1684 /* Argument 1 must be either zero or one. */
1685 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1687 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1688 " using zero");
1689 op1 = const0_rtx;
1692 /* Argument 2 (locality) must be a compile-time constant int. */
1693 if (TREE_CODE (arg2) != INTEGER_CST)
1695 error ("third argument to %<__builtin_prefetch%> must be a constant");
1696 arg2 = integer_zero_node;
1698 op2 = expand_normal (arg2);
1699 /* Argument 2 must be 0, 1, 2, or 3. */
1700 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1702 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1703 op2 = const0_rtx;
1706 if (targetm.have_prefetch ())
1708 class expand_operand ops[3];
1710 create_address_operand (&ops[0], op0);
1711 create_integer_operand (&ops[1], INTVAL (op1));
1712 create_integer_operand (&ops[2], INTVAL (op2));
1713 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1714 return;
1717 /* Don't do anything with direct references to volatile memory, but
1718 generate code to handle other side effects. */
1719 if (!MEM_P (op0) && side_effects_p (op0))
1720 emit_insn (op0);
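/* A minimal usage sketch (hypothetical caller code, not part of GCC)
   matching the checks above: the second and third arguments must be
   integer constants, rw in {0, 1} and locality in 0..3.

     extern double a[1024], b[1024];
     for (int i = 0; i < 1016; i++)
       {
         __builtin_prefetch (&a[i + 8], 0, 3);   // read, high locality
         __builtin_prefetch (&b[i + 8], 1, 0);   // write, low locality
         a[i] += b[i];
       }
*/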
1723 /* Get a MEM rtx for expression EXP which is the address of an operand
1724 to be used in a string instruction (cmpstrsi, cpymemsi, ...). LEN is
1725 the maximum length of the block of memory that might be accessed or
1726 NULL if unknown. */
1728 static rtx
1729 get_memory_rtx (tree exp, tree len)
1731 tree orig_exp = exp;
1732 rtx addr, mem;
1734 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1735 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1736 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1737 exp = TREE_OPERAND (exp, 0);
1739 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1740 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1742 /* Get an expression we can use to find the attributes to assign to MEM.
1743 First remove any nops. */
1744 while (CONVERT_EXPR_P (exp)
1745 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1746 exp = TREE_OPERAND (exp, 0);
1748 /* Build a MEM_REF representing the whole accessed area as a byte blob
1749 (as builtin stringops may alias with anything). */
1750 exp = fold_build2 (MEM_REF,
1751 build_array_type (char_type_node,
1752 build_range_type (sizetype,
1753 size_one_node, len)),
1754 exp, build_int_cst (ptr_type_node, 0));
1756 /* If the MEM_REF has no acceptable address, try to get the base object
1757 from the original address we got, and build an all-aliasing
1758 unknown-sized access to that one. */
1759 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1760 set_mem_attributes (mem, exp, 0);
1761 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1762 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1763 0))))
1765 exp = build_fold_addr_expr (exp);
1766 exp = fold_build2 (MEM_REF,
1767 build_array_type (char_type_node,
1768 build_range_type (sizetype,
1769 size_zero_node,
1770 NULL)),
1771 exp, build_int_cst (ptr_type_node, 0));
1772 set_mem_attributes (mem, exp, 0);
1774 set_mem_alias_set (mem, 0);
1775 return mem;
1778 /* Built-in functions to perform an untyped call and return. */
1780 #define apply_args_mode \
1781 (this_target_builtins->x_apply_args_mode)
1782 #define apply_result_mode \
1783 (this_target_builtins->x_apply_result_mode)
1785 /* Return the size required for the block returned by __builtin_apply_args,
1786 and initialize apply_args_mode. */
1788 static int
1789 apply_args_size (void)
1791 static int size = -1;
1792 int align;
1793 unsigned int regno;
1795 /* The values computed by this function never change. */
1796 if (size < 0)
1798 /* The first value is the incoming arg-pointer. */
1799 size = GET_MODE_SIZE (Pmode);
1801 /* The second value is the structure value address unless this is
1802 passed as an "invisible" first argument. */
1803 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1804 size += GET_MODE_SIZE (Pmode);
1806 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1807 if (FUNCTION_ARG_REGNO_P (regno))
1809 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1811 gcc_assert (mode != VOIDmode);
1813 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1814 if (size % align != 0)
1815 size = CEIL (size, align) * align;
1816 size += GET_MODE_SIZE (mode);
1817 apply_args_mode[regno] = mode;
1819 else
1821 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1824 return size;
1827 /* Return the size required for the block returned by __builtin_apply,
1828 and initialize apply_result_mode. */
1830 static int
1831 apply_result_size (void)
1833 static int size = -1;
1834 int align, regno;
1836 /* The values computed by this function never change. */
1837 if (size < 0)
1839 size = 0;
1841 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1842 if (targetm.calls.function_value_regno_p (regno))
1844 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1846 gcc_assert (mode != VOIDmode);
1848 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1849 if (size % align != 0)
1850 size = CEIL (size, align) * align;
1851 size += GET_MODE_SIZE (mode);
1852 apply_result_mode[regno] = mode;
1854 else
1855 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1857 /* Allow targets that use untyped_call and untyped_return to override
1858 the size so that machine-specific information can be stored here. */
1859 #ifdef APPLY_RESULT_SIZE
1860 size = APPLY_RESULT_SIZE;
1861 #endif
1863 return size;
1866 /* Create a vector describing the result block RESULT. If SAVEP is true,
1867 the result block is used to save the values; otherwise it is used to
1868 restore the values. */
1870 static rtx
1871 result_vector (int savep, rtx result)
1873 int regno, size, align, nelts;
1874 fixed_size_mode mode;
1875 rtx reg, mem;
1876 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1878 size = nelts = 0;
1879 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1880 if ((mode = apply_result_mode[regno]) != VOIDmode)
1882 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1883 if (size % align != 0)
1884 size = CEIL (size, align) * align;
1885 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1886 mem = adjust_address (result, mode, size);
1887 savevec[nelts++] = (savep
1888 ? gen_rtx_SET (mem, reg)
1889 : gen_rtx_SET (reg, mem));
1890 size += GET_MODE_SIZE (mode);
1892 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1895 /* Save the state required to perform an untyped call with the same
1896 arguments as were passed to the current function. */
1898 static rtx
1899 expand_builtin_apply_args_1 (void)
1901 rtx registers, tem;
1902 int size, align, regno;
1903 fixed_size_mode mode;
1904 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1906 /* Create a block where the arg-pointer, structure value address,
1907 and argument registers can be saved. */
1908 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1910 /* Walk past the arg-pointer and structure value address. */
1911 size = GET_MODE_SIZE (Pmode);
1912 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1913 size += GET_MODE_SIZE (Pmode);
1915 /* Save each register used in calling a function to the block. */
1916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1917 if ((mode = apply_args_mode[regno]) != VOIDmode)
1919 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1920 if (size % align != 0)
1921 size = CEIL (size, align) * align;
1923 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1925 emit_move_insn (adjust_address (registers, mode, size), tem);
1926 size += GET_MODE_SIZE (mode);
1929 /* Save the arg pointer to the block. */
1930 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1931 /* We need the pointer as the caller actually passed it to us, not
1932 as we might have pretended it was passed. Make sure it's a valid
1933 operand, as emit_move_insn isn't expected to handle a PLUS. */
1934 if (STACK_GROWS_DOWNWARD)
1935 tem
1936 = force_operand (plus_constant (Pmode, tem,
1937 crtl->args.pretend_args_size),
1938 NULL_RTX);
1939 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1941 size = GET_MODE_SIZE (Pmode);
1943 /* Save the structure value address unless this is passed as an
1944 "invisible" first argument. */
1945 if (struct_incoming_value)
1946 emit_move_insn (adjust_address (registers, Pmode, size),
1947 copy_to_reg (struct_incoming_value));
1949 /* Return the address of the block. */
1950 return copy_addr_to_reg (XEXP (registers, 0));
1953 /* __builtin_apply_args returns a block of memory allocated on
1954 the stack into which is stored the arg pointer, structure
1955 value address, static chain, and all the registers that might
1956 possibly be used in performing a function call. The code is
1957 moved to the start of the function so the incoming values are
1958 saved. */
1960 static rtx
1961 expand_builtin_apply_args (void)
1963 /* Don't do __builtin_apply_args more than once in a function.
1964 Save the result of the first call and reuse it. */
1965 if (apply_args_value != 0)
1966 return apply_args_value;
1968 /* When this function is called, it means that registers must be
1969 saved on entry to this function. So we migrate the
1970 call to the first insn of this function. */
1971 rtx temp;
1973 start_sequence ();
1974 temp = expand_builtin_apply_args_1 ();
1975 rtx_insn *seq = get_insns ();
1976 end_sequence ();
1978 apply_args_value = temp;
1980 /* Put the insns after the NOTE that starts the function.
1981 If this is inside a start_sequence, make the outer-level insn
1982 chain current, so the code is placed at the start of the
1983 function. If internal_arg_pointer is a non-virtual pseudo,
1984 it needs to be placed after the insns that initialize
1985 that pseudo. */
1986 push_topmost_sequence ();
1987 if (REG_P (crtl->args.internal_arg_pointer)
1988 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1989 emit_insn_before (seq, parm_birth_insn);
1990 else
1991 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1992 pop_topmost_sequence ();
1993 return temp;
1997 /* Perform an untyped call and save the state required to perform an
1998 untyped return of whatever value was returned by the given function. */
2000 static rtx
2001 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2003 int size, align, regno;
2004 fixed_size_mode mode;
2005 rtx incoming_args, result, reg, dest, src;
2006 rtx_call_insn *call_insn;
2007 rtx old_stack_level = 0;
2008 rtx call_fusage = 0;
2009 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2011 arguments = convert_memory_address (Pmode, arguments);
2013 /* Create a block where the return registers can be saved. */
2014 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2016 /* Fetch the arg pointer from the ARGUMENTS block. */
2017 incoming_args = gen_reg_rtx (Pmode);
2018 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2019 if (!STACK_GROWS_DOWNWARD)
2020 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2021 incoming_args, 0, OPTAB_LIB_WIDEN);
2023 /* Push a new argument block and copy the arguments. Do not allow
2024 the (potential) memcpy call below to interfere with our stack
2025 manipulations. */
2026 do_pending_stack_adjust ();
2027 NO_DEFER_POP;
2029 /* Save the stack with nonlocal if available. */
2030 if (targetm.have_save_stack_nonlocal ())
2031 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2032 else
2033 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2035 /* Allocate a block of memory onto the stack and copy the memory
2036 arguments to the outgoing arguments address. We can pass TRUE
2037 as the 4th argument because we just saved the stack pointer
2038 and will restore it right after the call. */
2039 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2041 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2042 may have already set current_function_calls_alloca to true.
2043 current_function_calls_alloca won't be set if argsize is zero,
2044 so we have to guarantee need_drap is true here. */
2045 if (SUPPORTS_STACK_ALIGNMENT)
2046 crtl->need_drap = true;
2048 dest = virtual_outgoing_args_rtx;
2049 if (!STACK_GROWS_DOWNWARD)
2051 if (CONST_INT_P (argsize))
2052 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2053 else
2054 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2056 dest = gen_rtx_MEM (BLKmode, dest);
2057 set_mem_align (dest, PARM_BOUNDARY);
2058 src = gen_rtx_MEM (BLKmode, incoming_args);
2059 set_mem_align (src, PARM_BOUNDARY);
2060 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2062 /* Refer to the argument block. */
2063 apply_args_size ();
2064 arguments = gen_rtx_MEM (BLKmode, arguments);
2065 set_mem_align (arguments, PARM_BOUNDARY);
2067 /* Walk past the arg-pointer and structure value address. */
2068 size = GET_MODE_SIZE (Pmode);
2069 if (struct_value)
2070 size += GET_MODE_SIZE (Pmode);
2072 /* Restore each of the registers previously saved. Make USE insns
2073 for each of these registers for use in making the call. */
2074 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2075 if ((mode = apply_args_mode[regno]) != VOIDmode)
2077 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2078 if (size % align != 0)
2079 size = CEIL (size, align) * align;
2080 reg = gen_rtx_REG (mode, regno);
2081 emit_move_insn (reg, adjust_address (arguments, mode, size));
2082 use_reg (&call_fusage, reg);
2083 size += GET_MODE_SIZE (mode);
2086 /* Restore the structure value address unless this is passed as an
2087 "invisible" first argument. */
2088 size = GET_MODE_SIZE (Pmode);
2089 if (struct_value)
2091 rtx value = gen_reg_rtx (Pmode);
2092 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2093 emit_move_insn (struct_value, value);
2094 if (REG_P (struct_value))
2095 use_reg (&call_fusage, struct_value);
2098 /* All arguments and registers used for the call are set up by now! */
2099 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2101 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
2102 copy is needed, and we don't want to load it into a register as an
2103 optimization, because prepare_call_address already did so if needed. */
2104 if (GET_CODE (function) != SYMBOL_REF)
2105 function = memory_address (FUNCTION_MODE, function);
2107 /* Generate the actual call instruction and save the return value. */
2108 if (targetm.have_untyped_call ())
2110 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2111 emit_call_insn (targetm.gen_untyped_call (mem, result,
2112 result_vector (1, result)));
2114 else if (targetm.have_call_value ())
2116 rtx valreg = 0;
2118 /* Locate the unique return register. It is not possible to
2119 express a call that sets more than one return register using
2120 call_value; use untyped_call for that. In fact, untyped_call
2121 only needs to save the return registers in the given block. */
2122 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2123 if ((mode = apply_result_mode[regno]) != VOIDmode)
2125 gcc_assert (!valreg); /* have_untyped_call required. */
2127 valreg = gen_rtx_REG (mode, regno);
2130 emit_insn (targetm.gen_call_value (valreg,
2131 gen_rtx_MEM (FUNCTION_MODE, function),
2132 const0_rtx, NULL_RTX, const0_rtx));
2134 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2136 else
2137 gcc_unreachable ();
2139 /* Find the CALL insn we just emitted, and attach the register usage
2140 information. */
2141 call_insn = last_call_insn ();
2142 add_function_usage_to (call_insn, call_fusage);
2144 /* Restore the stack. */
2145 if (targetm.have_save_stack_nonlocal ())
2146 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2147 else
2148 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2149 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2151 OK_DEFER_POP;
2153 /* Return the address of the result block. */
2154 result = copy_addr_to_reg (XEXP (result, 0));
2155 return convert_memory_address (ptr_mode, result);
2158 /* Perform an untyped return. */
2160 static void
2161 expand_builtin_return (rtx result)
2163 int size, align, regno;
2164 fixed_size_mode mode;
2165 rtx reg;
2166 rtx_insn *call_fusage = 0;
2168 result = convert_memory_address (Pmode, result);
2170 apply_result_size ();
2171 result = gen_rtx_MEM (BLKmode, result);
2173 if (targetm.have_untyped_return ())
2175 rtx vector = result_vector (0, result);
2176 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2177 emit_barrier ();
2178 return;
2181 /* Restore the return value and note that each value is used. */
2182 size = 0;
2183 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2184 if ((mode = apply_result_mode[regno]) != VOIDmode)
2186 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2187 if (size % align != 0)
2188 size = CEIL (size, align) * align;
2189 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2190 emit_move_insn (reg, adjust_address (result, mode, size));
2192 push_to_sequence (call_fusage);
2193 emit_use (reg);
2194 call_fusage = get_insns ();
2195 end_sequence ();
2196 size += GET_MODE_SIZE (mode);
2199 /* Put the USE insns before the return. */
2200 emit_insn (call_fusage);
2202 /* Return whatever value was restored by jumping directly to the end
2203 of the function. */
2204 expand_naked_return ();
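/* A usage sketch for the three builtins expanded above (hypothetical
   caller code, not part of GCC; the 64-byte argument-block size is an
   assumption for illustration only):

     extern void target_fn (int, double);

     void forwarder (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }
*/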
2207 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2209 static enum type_class
2210 type_to_class (tree type)
2212 switch (TREE_CODE (type))
2214 case VOID_TYPE: return void_type_class;
2215 case INTEGER_TYPE: return integer_type_class;
2216 case ENUMERAL_TYPE: return enumeral_type_class;
2217 case BOOLEAN_TYPE: return boolean_type_class;
2218 case POINTER_TYPE: return pointer_type_class;
2219 case REFERENCE_TYPE: return reference_type_class;
2220 case OFFSET_TYPE: return offset_type_class;
2221 case REAL_TYPE: return real_type_class;
2222 case COMPLEX_TYPE: return complex_type_class;
2223 case FUNCTION_TYPE: return function_type_class;
2224 case METHOD_TYPE: return method_type_class;
2225 case RECORD_TYPE: return record_type_class;
2226 case UNION_TYPE:
2227 case QUAL_UNION_TYPE: return union_type_class;
2228 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2229 ? string_type_class : array_type_class);
2230 case LANG_TYPE: return lang_type_class;
2231 case OPAQUE_TYPE: return opaque_type_class;
2232 default: return no_type_class;
2236 /* Expand a call EXP to __builtin_classify_type. */
2238 static rtx
2239 expand_builtin_classify_type (tree exp)
2241 if (call_expr_nargs (exp))
2242 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2243 return GEN_INT (no_type_class);
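/* A usage sketch (hypothetical, not part of GCC); the constants
   returned correspond to the enum type_class values in typeclass.h:

     int ic = __builtin_classify_type (42);          // integer_type_class
     int rc = __builtin_classify_type (3.14);        // real_type_class
     int pc = __builtin_classify_type ((void *) 0);  // pointer_type_class
*/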
2246 /* This helper macro, meant to be used in mathfn_built_in below, determines
2247 which among a set of builtin math functions is appropriate for a given type
2248 mode. The `F' (float) and `L' (long double) variants are automatically
2249 generated from the 'double' case. If a function supports the _Float<N>
2250 and _Float<N>X types, additional variants with 'F32', 'F64', 'F128',
2251 etc. suffixes are considered as well. */
2252 #define CASE_MATHFN(MATHFN) \
2253 CASE_CFN_##MATHFN: \
2254 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2255 fcodel = BUILT_IN_##MATHFN##L ; break;
2256 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2257 types. */
2258 #define CASE_MATHFN_FLOATN(MATHFN) \
2259 CASE_CFN_##MATHFN: \
2260 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2261 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2262 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2263 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2264 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2265 break;
2266 /* Similar to above, but appends _R after any F/L suffix. */
2267 #define CASE_MATHFN_REENT(MATHFN) \
2268 case CFN_BUILT_IN_##MATHFN##_R: \
2269 case CFN_BUILT_IN_##MATHFN##F_R: \
2270 case CFN_BUILT_IN_##MATHFN##L_R: \
2271 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2272 fcodel = BUILT_IN_##MATHFN##L_R ; break;
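/* For reference, CASE_MATHFN (SIN) expands to roughly the following,
   assuming the generated CASE_CFN_SIN macro from case-cfn-macros.h
   covers the internal function and the three standard variants:

     case CFN_SIN:
     case CFN_BUILT_IN_SIN:
     case CFN_BUILT_IN_SINF:
     case CFN_BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;
*/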
2274 /* Return a function equivalent to FN but operating on floating-point
2275 values of type TYPE, or END_BUILTINS if no such function exists.
2276 This is purely an operation on function codes; it does not guarantee
2277 that the target actually has an implementation of the function. */
2279 static built_in_function
2280 mathfn_built_in_2 (tree type, combined_fn fn)
2282 tree mtype;
2283 built_in_function fcode, fcodef, fcodel;
2284 built_in_function fcodef16 = END_BUILTINS;
2285 built_in_function fcodef32 = END_BUILTINS;
2286 built_in_function fcodef64 = END_BUILTINS;
2287 built_in_function fcodef128 = END_BUILTINS;
2288 built_in_function fcodef32x = END_BUILTINS;
2289 built_in_function fcodef64x = END_BUILTINS;
2290 built_in_function fcodef128x = END_BUILTINS;
2292 switch (fn)
2294 #define SEQ_OF_CASE_MATHFN \
2295 CASE_MATHFN (ACOS) \
2296 CASE_MATHFN (ACOSH) \
2297 CASE_MATHFN (ASIN) \
2298 CASE_MATHFN (ASINH) \
2299 CASE_MATHFN (ATAN) \
2300 CASE_MATHFN (ATAN2) \
2301 CASE_MATHFN (ATANH) \
2302 CASE_MATHFN (CBRT) \
2303 CASE_MATHFN_FLOATN (CEIL) \
2304 CASE_MATHFN (CEXPI) \
2305 CASE_MATHFN_FLOATN (COPYSIGN) \
2306 CASE_MATHFN (COS) \
2307 CASE_MATHFN (COSH) \
2308 CASE_MATHFN (DREM) \
2309 CASE_MATHFN (ERF) \
2310 CASE_MATHFN (ERFC) \
2311 CASE_MATHFN (EXP) \
2312 CASE_MATHFN (EXP10) \
2313 CASE_MATHFN (EXP2) \
2314 CASE_MATHFN (EXPM1) \
2315 CASE_MATHFN (FABS) \
2316 CASE_MATHFN (FDIM) \
2317 CASE_MATHFN_FLOATN (FLOOR) \
2318 CASE_MATHFN_FLOATN (FMA) \
2319 CASE_MATHFN_FLOATN (FMAX) \
2320 CASE_MATHFN_FLOATN (FMIN) \
2321 CASE_MATHFN (FMOD) \
2322 CASE_MATHFN (FREXP) \
2323 CASE_MATHFN (GAMMA) \
2324 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2325 CASE_MATHFN (HUGE_VAL) \
2326 CASE_MATHFN (HYPOT) \
2327 CASE_MATHFN (ILOGB) \
2328 CASE_MATHFN (ICEIL) \
2329 CASE_MATHFN (IFLOOR) \
2330 CASE_MATHFN (INF) \
2331 CASE_MATHFN (IRINT) \
2332 CASE_MATHFN (IROUND) \
2333 CASE_MATHFN (ISINF) \
2334 CASE_MATHFN (J0) \
2335 CASE_MATHFN (J1) \
2336 CASE_MATHFN (JN) \
2337 CASE_MATHFN (LCEIL) \
2338 CASE_MATHFN (LDEXP) \
2339 CASE_MATHFN (LFLOOR) \
2340 CASE_MATHFN (LGAMMA) \
2341 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2342 CASE_MATHFN (LLCEIL) \
2343 CASE_MATHFN (LLFLOOR) \
2344 CASE_MATHFN (LLRINT) \
2345 CASE_MATHFN (LLROUND) \
2346 CASE_MATHFN (LOG) \
2347 CASE_MATHFN (LOG10) \
2348 CASE_MATHFN (LOG1P) \
2349 CASE_MATHFN (LOG2) \
2350 CASE_MATHFN (LOGB) \
2351 CASE_MATHFN (LRINT) \
2352 CASE_MATHFN (LROUND) \
2353 CASE_MATHFN (MODF) \
2354 CASE_MATHFN (NAN) \
2355 CASE_MATHFN (NANS) \
2356 CASE_MATHFN_FLOATN (NEARBYINT) \
2357 CASE_MATHFN (NEXTAFTER) \
2358 CASE_MATHFN (NEXTTOWARD) \
2359 CASE_MATHFN (POW) \
2360 CASE_MATHFN (POWI) \
2361 CASE_MATHFN (POW10) \
2362 CASE_MATHFN (REMAINDER) \
2363 CASE_MATHFN (REMQUO) \
2364 CASE_MATHFN_FLOATN (RINT) \
2365 CASE_MATHFN_FLOATN (ROUND) \
2366 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2367 CASE_MATHFN (SCALB) \
2368 CASE_MATHFN (SCALBLN) \
2369 CASE_MATHFN (SCALBN) \
2370 CASE_MATHFN (SIGNBIT) \
2371 CASE_MATHFN (SIGNIFICAND) \
2372 CASE_MATHFN (SIN) \
2373 CASE_MATHFN (SINCOS) \
2374 CASE_MATHFN (SINH) \
2375 CASE_MATHFN_FLOATN (SQRT) \
2376 CASE_MATHFN (TAN) \
2377 CASE_MATHFN (TANH) \
2378 CASE_MATHFN (TGAMMA) \
2379 CASE_MATHFN_FLOATN (TRUNC) \
2380 CASE_MATHFN (Y0) \
2381 CASE_MATHFN (Y1) \
2382 CASE_MATHFN (YN)
2384 SEQ_OF_CASE_MATHFN
2386 default:
2387 return END_BUILTINS;
2390 mtype = TYPE_MAIN_VARIANT (type);
2391 if (mtype == double_type_node)
2392 return fcode;
2393 else if (mtype == float_type_node)
2394 return fcodef;
2395 else if (mtype == long_double_type_node)
2396 return fcodel;
2397 else if (mtype == float16_type_node)
2398 return fcodef16;
2399 else if (mtype == float32_type_node)
2400 return fcodef32;
2401 else if (mtype == float64_type_node)
2402 return fcodef64;
2403 else if (mtype == float128_type_node)
2404 return fcodef128;
2405 else if (mtype == float32x_type_node)
2406 return fcodef32x;
2407 else if (mtype == float64x_type_node)
2408 return fcodef64x;
2409 else if (mtype == float128x_type_node)
2410 return fcodef128x;
2411 else
2412 return END_BUILTINS;
2415 #undef CASE_MATHFN
2416 #undef CASE_MATHFN_FLOATN
2417 #undef CASE_MATHFN_REENT
2419 /* Return the math function equivalent to FN but operating directly on TYPE,
2420 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2421 otherwise use the explicit declaration. If we can't do the conversion,
2422 return null. */
2424 static tree
2425 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2427 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2428 if (fcode2 == END_BUILTINS)
2429 return NULL_TREE;
2431 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2432 return NULL_TREE;
2434 return builtin_decl_explicit (fcode2);
2437 /* Like mathfn_built_in_1, but always use the implicit array. */
2439 tree
2440 mathfn_built_in (tree type, combined_fn fn)
2442 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2445 /* Like mathfn_built_in_1, but take a built_in_function and
2446 always use the implicit array. */
2448 tree
2449 mathfn_built_in (tree type, enum built_in_function fn)
2451 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
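/* A usage sketch (hypothetical): asking for the float variant of sin
   yields the decl for sinf, provided the implicit builtin declaration
   is available.

     tree decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);
     // decl is the decl for BUILT_IN_SINF, or NULL_TREE if the
     // implicit declaration is unavailable.
*/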
2454 /* Return the type associated with a built in function, i.e., the one
2455 to be passed to mathfn_built_in to get the type-specific
2456 function. */
2458 tree
2459 mathfn_built_in_type (combined_fn fn)
2461 #define CASE_MATHFN(MATHFN) \
2462 case CFN_BUILT_IN_##MATHFN: \
2463 return double_type_node; \
2464 case CFN_BUILT_IN_##MATHFN##F: \
2465 return float_type_node; \
2466 case CFN_BUILT_IN_##MATHFN##L: \
2467 return long_double_type_node;
2469 #define CASE_MATHFN_FLOATN(MATHFN) \
2470 CASE_MATHFN(MATHFN) \
2471 case CFN_BUILT_IN_##MATHFN##F16: \
2472 return float16_type_node; \
2473 case CFN_BUILT_IN_##MATHFN##F32: \
2474 return float32_type_node; \
2475 case CFN_BUILT_IN_##MATHFN##F64: \
2476 return float64_type_node; \
2477 case CFN_BUILT_IN_##MATHFN##F128: \
2478 return float128_type_node; \
2479 case CFN_BUILT_IN_##MATHFN##F32X: \
2480 return float32x_type_node; \
2481 case CFN_BUILT_IN_##MATHFN##F64X: \
2482 return float64x_type_node; \
2483 case CFN_BUILT_IN_##MATHFN##F128X: \
2484 return float128x_type_node;
2486 /* Similar to above, but appends _R after any F/L suffix. */
2487 #define CASE_MATHFN_REENT(MATHFN) \
2488 case CFN_BUILT_IN_##MATHFN##_R: \
2489 return double_type_node; \
2490 case CFN_BUILT_IN_##MATHFN##F_R: \
2491 return float_type_node; \
2492 case CFN_BUILT_IN_##MATHFN##L_R: \
2493 return long_double_type_node;
2495 switch (fn)
2497 SEQ_OF_CASE_MATHFN
2499 default:
2500 return NULL_TREE;
2503 #undef CASE_MATHFN
2504 #undef CASE_MATHFN_FLOATN
2505 #undef CASE_MATHFN_REENT
2506 #undef SEQ_OF_CASE_MATHFN
2509 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2510 return its code, otherwise return IFN_LAST. Note that this function
2511 only tests whether the function is defined in internal-fn.def, not whether
2512 it is actually available on the target. */
2514 internal_fn
2515 associated_internal_fn (tree fndecl)
2517 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2518 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2519 switch (DECL_FUNCTION_CODE (fndecl))
2521 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2522 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2523 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2524 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2525 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2526 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2527 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2528 #include "internal-fn.def"
2530 CASE_FLT_FN (BUILT_IN_POW10):
2531 return IFN_EXP10;
2533 CASE_FLT_FN (BUILT_IN_DREM):
2534 return IFN_REMAINDER;
2536 CASE_FLT_FN (BUILT_IN_SCALBN):
2537 CASE_FLT_FN (BUILT_IN_SCALBLN):
2538 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2539 return IFN_LDEXP;
2540 return IFN_LAST;
2542 default:
2543 return IFN_LAST;
2547 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2548 on the current target by a call to an internal function, return the
2549 code of that internal function, otherwise return IFN_LAST. The caller
2550 is responsible for ensuring that any side-effects of the built-in
2551 call are dealt with correctly. E.g. if CALL sets errno, the caller
2552 must decide that the errno result isn't needed or make it available
2553 in some other way. */
2555 internal_fn
2556 replacement_internal_fn (gcall *call)
2558 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2560 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2561 if (ifn != IFN_LAST)
2563 tree_pair types = direct_internal_fn_types (ifn, call);
2564 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2565 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2566 return ifn;
2569 return IFN_LAST;
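/* A usage sketch (hypothetical; it omits actually replacing the
   statement in the IL): given a GIMPLE call to sqrtf, check whether it
   can be rebuilt as the directly-supported internal function.

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         // E.g. a call to sqrtf becomes IFN_SQRT when the target
         // implements the sqrt optab for SFmode.
         gcall *new_call
           = gimple_build_call_internal (ifn, 1, gimple_call_arg (call, 0));
         gimple_call_set_lhs (new_call, gimple_call_lhs (call));
       }
*/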
2572 /* Expand a call to the builtin ternary math functions (fma).
2573 Return NULL_RTX if a normal call should be emitted rather than expanding the
2574 function in-line. EXP is the expression that is a call to the builtin
2575 function; if convenient, the result should be placed in TARGET.
2576 SUBTARGET may be used as the target for computing one of EXP's
2577 operands. */
2579 static rtx
2580 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2582 optab builtin_optab;
2583 rtx op0, op1, op2, result;
2584 rtx_insn *insns;
2585 tree fndecl = get_callee_fndecl (exp);
2586 tree arg0, arg1, arg2;
2587 machine_mode mode;
2589 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2590 return NULL_RTX;
2592 arg0 = CALL_EXPR_ARG (exp, 0);
2593 arg1 = CALL_EXPR_ARG (exp, 1);
2594 arg2 = CALL_EXPR_ARG (exp, 2);
2596 switch (DECL_FUNCTION_CODE (fndecl))
2598 CASE_FLT_FN (BUILT_IN_FMA):
2599 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2600 builtin_optab = fma_optab; break;
2601 default:
2602 gcc_unreachable ();
2605 /* Make a suitable register to place result in. */
2606 mode = TYPE_MODE (TREE_TYPE (exp));
2608 /* Before working hard, check whether the instruction is available. */
2609 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2610 return NULL_RTX;
2612 result = gen_reg_rtx (mode);
2614 /* Always stabilize the argument list. */
2615 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2616 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2617 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2619 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2620 op1 = expand_normal (arg1);
2621 op2 = expand_normal (arg2);
2623 start_sequence ();
2625 /* Compute into RESULT.
2626 Set RESULT to wherever the result comes back. */
2627 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2628 result, 0);
2630 /* If we were unable to expand via the builtin, stop the sequence
2631 (without outputting the insns) and call to the library function
2632 with the stabilized argument list. */
2633 if (result == 0)
2635 end_sequence ();
2636 return expand_call (exp, target, target == const0_rtx);
2639 /* Output the entire sequence. */
2640 insns = get_insns ();
2641 end_sequence ();
2642 emit_insn (insns);
2644 return result;
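/* A usage sketch (hypothetical caller code): when the fma optab is
   available for DFmode, the expansion above emits a single fused
   multiply-add, computing a * b + c with one rounding step.

     double madd (double a, double b, double c)
     {
       return __builtin_fma (a, b, c);
     }
*/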
2647 /* Expand a call to the builtin sin and cos math functions.
2648 Return NULL_RTX if a normal call should be emitted rather than expanding the
2649 function in-line. EXP is the expression that is a call to the builtin
2650 function; if convenient, the result should be placed in TARGET.
2651 SUBTARGET may be used as the target for computing one of EXP's
2652 operands. */
2654 static rtx
2655 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2657 optab builtin_optab;
2658 rtx op0;
2659 rtx_insn *insns;
2660 tree fndecl = get_callee_fndecl (exp);
2661 machine_mode mode;
2662 tree arg;
2664 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2665 return NULL_RTX;
2667 arg = CALL_EXPR_ARG (exp, 0);
2669 switch (DECL_FUNCTION_CODE (fndecl))
2671 CASE_FLT_FN (BUILT_IN_SIN):
2672 CASE_FLT_FN (BUILT_IN_COS):
2673 builtin_optab = sincos_optab; break;
2674 default:
2675 gcc_unreachable ();
2678 /* Make a suitable register to place result in. */
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2681 /* Check if the sincos insn is available; if not, fall back
2682 to the sin or cos insn. */
2683 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2684 switch (DECL_FUNCTION_CODE (fndecl))
2686 CASE_FLT_FN (BUILT_IN_SIN):
2687 builtin_optab = sin_optab; break;
2688 CASE_FLT_FN (BUILT_IN_COS):
2689 builtin_optab = cos_optab; break;
2690 default:
2691 gcc_unreachable ();
2694 /* Before working hard, check whether the instruction is available. */
2695 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2697 rtx result = gen_reg_rtx (mode);
2699 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2700 need to expand the argument again. This way, we will not perform
2701 side-effects more than once. */
2702 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2704 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2706 start_sequence ();
2708 /* Compute into RESULT.
2709 Set RESULT to wherever the result comes back. */
2710 if (builtin_optab == sincos_optab)
2712 int ok;
2714 switch (DECL_FUNCTION_CODE (fndecl))
2716 CASE_FLT_FN (BUILT_IN_SIN):
2717 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2718 break;
2719 CASE_FLT_FN (BUILT_IN_COS):
2720 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2721 break;
2722 default:
2723 gcc_unreachable ();
2725 gcc_assert (ok);
2727 else
2728 result = expand_unop (mode, builtin_optab, op0, result, 0);
2730 if (result != 0)
2732 /* Output the entire sequence. */
2733 insns = get_insns ();
2734 end_sequence ();
2735 emit_insn (insns);
2736 return result;
2739 /* If we were unable to expand via the builtin, stop the sequence
2740 (without outputting the insns) and call to the library function
2741 with the stabilized argument list. */
2742 end_sequence ();
2745 return expand_call (exp, target, target == const0_rtx);
2748 /* Given an interclass math builtin decl FNDECL and its argument ARG
2749 return an RTL instruction code that implements the functionality.
2750 If that isn't possible or available return CODE_FOR_nothing. */
2752 static enum insn_code
2753 interclass_mathfn_icode (tree arg, tree fndecl)
2755 bool errno_set = false;
2756 optab builtin_optab = unknown_optab;
2757 machine_mode mode;
2759 switch (DECL_FUNCTION_CODE (fndecl))
2761 CASE_FLT_FN (BUILT_IN_ILOGB):
2762 errno_set = true; builtin_optab = ilogb_optab; break;
2763 CASE_FLT_FN (BUILT_IN_ISINF):
2764 builtin_optab = isinf_optab; break;
2765 case BUILT_IN_ISNORMAL:
2766 case BUILT_IN_ISFINITE:
2767 CASE_FLT_FN (BUILT_IN_FINITE):
2768 case BUILT_IN_FINITED32:
2769 case BUILT_IN_FINITED64:
2770 case BUILT_IN_FINITED128:
2771 case BUILT_IN_ISINFD32:
2772 case BUILT_IN_ISINFD64:
2773 case BUILT_IN_ISINFD128:
2774 /* These builtins have no optabs (yet). */
2775 break;
2776 default:
2777 gcc_unreachable ();
2780 /* There's no easy way to detect the case we need to set EDOM. */
2781 if (flag_errno_math && errno_set)
2782 return CODE_FOR_nothing;
2784 /* Optab mode depends on the mode of the input argument. */
2785 mode = TYPE_MODE (TREE_TYPE (arg));
2787 if (builtin_optab)
2788 return optab_handler (builtin_optab, mode);
2789 return CODE_FOR_nothing;
2792 /* Expand a call to one of the builtin math functions that operate on
2793 a floating-point argument and produce an integer result (ilogb,
2794 isinf, isnan, etc.).
2795 Return 0 if a normal call should be emitted rather than expanding the
2796 function in-line. EXP is the expression that is a call to the builtin
2797 function; if convenient, the result should be placed in TARGET. */
2799 static rtx
2800 expand_builtin_interclass_mathfn (tree exp, rtx target)
2802 enum insn_code icode = CODE_FOR_nothing;
2803 rtx op0;
2804 tree fndecl = get_callee_fndecl (exp);
2805 machine_mode mode;
2806 tree arg;
2808 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2809 return NULL_RTX;
2811 arg = CALL_EXPR_ARG (exp, 0);
2812 icode = interclass_mathfn_icode (arg, fndecl);
2813 mode = TYPE_MODE (TREE_TYPE (arg));
2815 if (icode != CODE_FOR_nothing)
2817 class expand_operand ops[1];
2818 rtx_insn *last = get_last_insn ();
2819 tree orig_arg = arg;
2821 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2822 need to expand the argument again. This way, we will not perform
2823 side-effects more than once. */
2824 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2826 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2828 if (mode != GET_MODE (op0))
2829 op0 = convert_to_mode (mode, op0, 0);
2831 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2832 if (maybe_legitimize_operands (icode, 0, 1, ops)
2833 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2834 return ops[0].value;
2836 delete_insns_since (last);
2837 CALL_EXPR_ARG (exp, 0) = orig_arg;
2840 return NULL_RTX;
2843 /* Expand a call to the builtin sincos math function.
2844 Return NULL_RTX if a normal call should be emitted rather than expanding the
2845 function in-line. EXP is the expression that is a call to the builtin
2846 function. */
2848 static rtx
2849 expand_builtin_sincos (tree exp)
2851 rtx op0, op1, op2, target1, target2;
2852 machine_mode mode;
2853 tree arg, sinp, cosp;
2854 int result;
2855 location_t loc = EXPR_LOCATION (exp);
2856 tree alias_type, alias_off;
2858 if (!validate_arglist (exp, REAL_TYPE,
2859 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2860 return NULL_RTX;
2862 arg = CALL_EXPR_ARG (exp, 0);
2863 sinp = CALL_EXPR_ARG (exp, 1);
2864 cosp = CALL_EXPR_ARG (exp, 2);
2866 /* Make a suitable register to place result in. */
2867 mode = TYPE_MODE (TREE_TYPE (arg));
2869 /* Check if sincos insn is available, otherwise emit the call. */
2870 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2871 return NULL_RTX;
2873 target1 = gen_reg_rtx (mode);
2874 target2 = gen_reg_rtx (mode);
2876 op0 = expand_normal (arg);
2877 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2878 alias_off = build_int_cst (alias_type, 0);
2879 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2880 sinp, alias_off));
2881 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2882 cosp, alias_off));
2884 /* Compute into target1 and target2.
2885 Set TARGET to wherever the result comes back. */
2886 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2887 gcc_assert (result);
2889 /* Move target1 and target2 to the memory locations indicated
2890 by op1 and op2. */
2891 emit_move_insn (op1, target1);
2892 emit_move_insn (op2, target2);
2894 return const0_rtx;
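/* A usage sketch (hypothetical caller code) matching the argument list
   validated above: one real input and two output pointers.

     double s, c;
     __builtin_sincos (0.5, &s, &c);   // s = sin (0.5), c = cos (0.5)
*/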
2897 /* Expand a call to the internal cexpi builtin to the sincos math function.
2898 EXP is the expression that is a call to the builtin function; if convenient,
2899 the result should be placed in TARGET. */
2901 static rtx
2902 expand_builtin_cexpi (tree exp, rtx target)
2904 tree fndecl = get_callee_fndecl (exp);
2905 tree arg, type;
2906 machine_mode mode;
2907 rtx op0, op1, op2;
2908 location_t loc = EXPR_LOCATION (exp);
2910 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2911 return NULL_RTX;
2913 arg = CALL_EXPR_ARG (exp, 0);
2914 type = TREE_TYPE (arg);
2915 mode = TYPE_MODE (TREE_TYPE (arg));
2917 /* Try expanding via a sincos optab, falling back to emitting a libcall
2918 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2919 is only generated from sincos or cexp, or if we have either of them. */
2920 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2922 op1 = gen_reg_rtx (mode);
2923 op2 = gen_reg_rtx (mode);
2925 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2927 /* Compute into op1 and op2. */
2928 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2930 else if (targetm.libc_has_function (function_sincos, type))
2932 tree call, fn = NULL_TREE;
2933 tree top1, top2;
2934 rtx op1a, op2a;
2936 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2937 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2938 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2939 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2940 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2941 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2942 else
2943 gcc_unreachable ();
2945 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2946 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2947 op1a = copy_addr_to_reg (XEXP (op1, 0));
2948 op2a = copy_addr_to_reg (XEXP (op2, 0));
2949 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2950 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2952 /* Make sure not to fold the sincos call again. */
2953 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2954 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2955 call, 3, arg, top1, top2));
2957 else
2959 tree call, fn = NULL_TREE, narg;
2960 tree ctype = build_complex_type (type);
2962 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2963 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2964 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2965 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2966 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2967 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2968 else
2969 gcc_unreachable ();
2971 /* If we don't have a decl for cexp create one. This is the
2972 friendliest fallback if the user calls __builtin_cexpi
2973 without full target C99 function support. */
2974 if (fn == NULL_TREE)
2976 tree fntype;
2977 const char *name = NULL;
2979 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2980 name = "cexpf";
2981 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2982 name = "cexp";
2983 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2984 name = "cexpl";
2986 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2987 fn = build_fn_decl (name, fntype);
2990 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2991 build_real (type, dconst0), arg);
2993 /* Make sure not to fold the cexp call again. */
2994 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2995 return expand_expr (build_call_nary (ctype, call, 1, narg),
2996 target, VOIDmode, EXPAND_NORMAL);
2999 /* Now build the proper return type. */
3000 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3001 make_tree (TREE_TYPE (arg), op2),
3002 make_tree (TREE_TYPE (arg), op1)),
3003 target, VOIDmode, EXPAND_NORMAL);
3006 /* Conveniently construct a function call expression. FNDECL names the
3007 function to be called, N is the number of arguments, and the "..."
3008 parameters are the argument expressions. Unlike build_call_expr
3009 this doesn't fold the call, so it will always return a CALL_EXPR. */
3011 static tree
3012 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3014 va_list ap;
3015 tree fntype = TREE_TYPE (fndecl);
3016 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3018 va_start (ap, n);
3019 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3020 va_end (ap);
3021 SET_EXPR_LOCATION (fn, loc);
3022 return fn;
3025 /* Expand a call to one of the builtin rounding functions gcc defines
3026 as an extension (lfloor and lceil). As these are gcc extensions we
3027 do not need to worry about setting errno to EDOM.
3028 If expanding via optab fails, lower the expression to (int)(floor(x)).
3029 EXP is the expression that is a call to the builtin function;
3030 if convenient, the result should be placed in TARGET. */
3032 static rtx
3033 expand_builtin_int_roundingfn (tree exp, rtx target)
3035 convert_optab builtin_optab;
3036 rtx op0, tmp;
3037 rtx_insn *insns;
3038 tree fndecl = get_callee_fndecl (exp);
3039 enum built_in_function fallback_fn;
3040 tree fallback_fndecl;
3041 machine_mode mode;
3042 tree arg;
3044 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3045 return NULL_RTX;
3047 arg = CALL_EXPR_ARG (exp, 0);
3049 switch (DECL_FUNCTION_CODE (fndecl))
3051 CASE_FLT_FN (BUILT_IN_ICEIL):
3052 CASE_FLT_FN (BUILT_IN_LCEIL):
3053 CASE_FLT_FN (BUILT_IN_LLCEIL):
3054 builtin_optab = lceil_optab;
3055 fallback_fn = BUILT_IN_CEIL;
3056 break;
3058 CASE_FLT_FN (BUILT_IN_IFLOOR):
3059 CASE_FLT_FN (BUILT_IN_LFLOOR):
3060 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3061 builtin_optab = lfloor_optab;
3062 fallback_fn = BUILT_IN_FLOOR;
3063 break;
3065 default:
3066 gcc_unreachable ();
3069 /* Make a suitable register to place result in. */
3070 mode = TYPE_MODE (TREE_TYPE (exp));
3072 target = gen_reg_rtx (mode);
3074 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3075 need to expand the argument again. This way, we will not perform
3076 side-effects more than once. */
3077 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3079 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3081 start_sequence ();
3083 /* Compute into TARGET. */
3084 if (expand_sfix_optab (target, op0, builtin_optab))
3086 /* Output the entire sequence. */
3087 insns = get_insns ();
3088 end_sequence ();
3089 emit_insn (insns);
3090 return target;
3093 /* If we were unable to expand via the builtin, stop the sequence
3094 (without outputting the insns). */
3095 end_sequence ();
3097 /* Fall back to floating point rounding optab. */
3098 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3100 /* For non-C99 targets we may end up without a fallback fndecl here
3101 if the user called __builtin_lfloor directly. In this case emit
3102 a call to the floor/ceil variants nevertheless. This should give
3103 the best user experience for targets without full C99 support. */
3104 if (fallback_fndecl == NULL_TREE)
3106 tree fntype;
3107 const char *name = NULL;
3109 switch (DECL_FUNCTION_CODE (fndecl))
3111 case BUILT_IN_ICEIL:
3112 case BUILT_IN_LCEIL:
3113 case BUILT_IN_LLCEIL:
3114 name = "ceil";
3115 break;
3116 case BUILT_IN_ICEILF:
3117 case BUILT_IN_LCEILF:
3118 case BUILT_IN_LLCEILF:
3119 name = "ceilf";
3120 break;
3121 case BUILT_IN_ICEILL:
3122 case BUILT_IN_LCEILL:
3123 case BUILT_IN_LLCEILL:
3124 name = "ceill";
3125 break;
3126 case BUILT_IN_IFLOOR:
3127 case BUILT_IN_LFLOOR:
3128 case BUILT_IN_LLFLOOR:
3129 name = "floor";
3130 break;
3131 case BUILT_IN_IFLOORF:
3132 case BUILT_IN_LFLOORF:
3133 case BUILT_IN_LLFLOORF:
3134 name = "floorf";
3135 break;
3136 case BUILT_IN_IFLOORL:
3137 case BUILT_IN_LFLOORL:
3138 case BUILT_IN_LLFLOORL:
3139 name = "floorl";
3140 break;
3141 default:
3142 gcc_unreachable ();
3145 fntype = build_function_type_list (TREE_TYPE (arg),
3146 TREE_TYPE (arg), NULL_TREE);
3147 fallback_fndecl = build_fn_decl (name, fntype);
3150 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3152 tmp = expand_normal (exp);
3153 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3155 /* Truncate the result of floating point optab to integer
3156 via expand_fix (). */
3157 target = gen_reg_rtx (mode);
3158 expand_fix (target, tmp, 0);
3160 return target;
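/* A usage sketch (hypothetical caller code): without an lfloor optab
   the call below is lowered as described above, roughly to
   (long) floor (x).

     long lf (double x)
     {
       return __builtin_lfloor (x);
     }
*/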
3163 /* Expand a call to one of the builtin math functions doing integer
3164 conversion (lrint).
3165 Return 0 if a normal call should be emitted rather than expanding the
3166 function in-line. EXP is the expression that is a call to the builtin
3167 function; if convenient, the result should be placed in TARGET. */
3169 static rtx
3170 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3172 convert_optab builtin_optab;
3173 rtx op0;
3174 rtx_insn *insns;
3175 tree fndecl = get_callee_fndecl (exp);
3176 tree arg;
3177 machine_mode mode;
3178 enum built_in_function fallback_fn = BUILT_IN_NONE;
3180 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3181 return NULL_RTX;
3183 arg = CALL_EXPR_ARG (exp, 0);
3185 switch (DECL_FUNCTION_CODE (fndecl))
3187 CASE_FLT_FN (BUILT_IN_IRINT):
3188 fallback_fn = BUILT_IN_LRINT;
3189 gcc_fallthrough ();
3190 CASE_FLT_FN (BUILT_IN_LRINT):
3191 CASE_FLT_FN (BUILT_IN_LLRINT):
3192 builtin_optab = lrint_optab;
3193 break;
3195 CASE_FLT_FN (BUILT_IN_IROUND):
3196 fallback_fn = BUILT_IN_LROUND;
3197 gcc_fallthrough ();
3198 CASE_FLT_FN (BUILT_IN_LROUND):
3199 CASE_FLT_FN (BUILT_IN_LLROUND):
3200 builtin_optab = lround_optab;
3201 break;
3203 default:
3204 gcc_unreachable ();
3207 /* There's no easy way to detect the case we need to set EDOM. */
3208 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3209 return NULL_RTX;
3211 /* Make a suitable register to place result in. */
3212 mode = TYPE_MODE (TREE_TYPE (exp));
3214 /* There's no easy way to detect the case we need to set EDOM. */
3215 if (!flag_errno_math)
3217 rtx result = gen_reg_rtx (mode);
3219 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3220 need to expand the argument again. This way, we will not perform
3221 side-effects more than once. */
3222 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3224 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3226 start_sequence ();
3228 if (expand_sfix_optab (result, op0, builtin_optab))
3230 /* Output the entire sequence. */
3231 insns = get_insns ();
3232 end_sequence ();
3233 emit_insn (insns);
3234 return result;
3237 /* If we were unable to expand via the builtin, stop the sequence
3238 (without outputting the insns) and call to the library function
3239 with the stabilized argument list. */
3240 end_sequence ();
3243 if (fallback_fn != BUILT_IN_NONE)
3245 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
3246 targets, (int) round (x) should never be transformed into
3247 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
3248 a call to lround in the hope that the target provides at least some
3249 C99 functions. This should give the best user experience for
3250 targets without full C99 support. */
3251 tree fallback_fndecl = mathfn_built_in_1
3252 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3254 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3255 fallback_fndecl, 1, arg);
3257 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3258 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3259 return convert_to_mode (mode, target, 0);
3262 return expand_call (exp, target, target == const0_rtx);
3265 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3266 a normal call should be emitted rather than expanding the function
3267 in-line. EXP is the expression that is a call to the builtin
3268 function; if convenient, the result should be placed in TARGET. */
3270 static rtx
3271 expand_builtin_powi (tree exp, rtx target)
3273 tree arg0, arg1;
3274 rtx op0, op1;
3275 machine_mode mode;
3276 machine_mode mode2;
3278 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3279 return NULL_RTX;
3281 arg0 = CALL_EXPR_ARG (exp, 0);
3282 arg1 = CALL_EXPR_ARG (exp, 1);
3283 mode = TYPE_MODE (TREE_TYPE (exp));
3285 /* Emit a libcall to libgcc. */
3287 /* Mode of the 2nd argument must match that of an int. */
3288 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3290 if (target == NULL_RTX)
3291 target = gen_reg_rtx (mode);
3293 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3294 if (GET_MODE (op0) != mode)
3295 op0 = convert_to_mode (mode, op0, 0);
3296 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3297 if (GET_MODE (op1) != mode2)
3298 op1 = convert_to_mode (mode2, op1, 0);
3300 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3301 target, LCT_CONST, mode,
3302 op0, mode, op1, mode2);
3304 return target;
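/* A usage sketch (hypothetical caller code): absent earlier
   optimization into multiplications, the expander above emits a
   libcall to the libgcc __powidf2 routine for DFmode.

     double p (double x)
     {
       return __builtin_powi (x, 5);   // x to the fifth power
     }
*/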
3307 /* Expand expression EXP which is a call to the strlen builtin. Return
3308 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3309 try to get the result in TARGET, if convenient. */
3311 static rtx
3312 expand_builtin_strlen (tree exp, rtx target,
3313 machine_mode target_mode)
3315 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3316 return NULL_RTX;
3318 tree src = CALL_EXPR_ARG (exp, 0);
3319 if (!check_read_access (exp, src))
3320 return NULL_RTX;
3322 /* If the length can be computed at compile-time, return it. */
3323 if (tree len = c_strlen (src, 0))
3324 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3326 /* If the length can be computed at compile-time and is a constant
3327 integer, but there are side-effects in src, evaluate
3328 src for side-effects, then return len.
3329 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3330 can be optimized into: i++; x = 3; */
3331 tree len = c_strlen (src, 1);
3332 if (len && TREE_CODE (len) == INTEGER_CST)
3334 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3335 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3338 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3340 /* If SRC is not a pointer type, don't do this operation inline. */
3341 if (align == 0)
3342 return NULL_RTX;
3344 /* Bail out if we can't compute strlen in the right mode. */
3345 machine_mode insn_mode;
3346 enum insn_code icode = CODE_FOR_nothing;
3347 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3349 icode = optab_handler (strlen_optab, insn_mode);
3350 if (icode != CODE_FOR_nothing)
3351 break;
3353 if (insn_mode == VOIDmode)
3354 return NULL_RTX;
3356 /* Make a place to hold the source address. We will not expand
3357 the actual source until we are sure that the expansion will
3358 not fail -- there are trees that cannot be expanded twice. */
3359 rtx src_reg = gen_reg_rtx (Pmode);
3361 /* Mark the beginning of the strlen sequence so we can emit the
3362 source operand later. */
3363 rtx_insn *before_strlen = get_last_insn ();
3365 class expand_operand ops[4];
3366 create_output_operand (&ops[0], target, insn_mode);
3367 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3368 create_integer_operand (&ops[2], 0);
3369 create_integer_operand (&ops[3], align);
3370 if (!maybe_expand_insn (icode, 4, ops))
3371 return NULL_RTX;
3373 /* Check to see if the argument was declared attribute nonstring
3374 and if so, issue a warning since at this point it's not known
3375 to be nul-terminated. */
3376 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3378 /* Now that we are assured of success, expand the source. */
3379 start_sequence ();
3380 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3381 if (pat != src_reg)
3383 #ifdef POINTERS_EXTEND_UNSIGNED
3384 if (GET_MODE (pat) != Pmode)
3385 pat = convert_to_mode (Pmode, pat,
3386 POINTERS_EXTEND_UNSIGNED);
3387 #endif
3388 emit_move_insn (src_reg, pat);
3390 pat = get_insns ();
3391 end_sequence ();
3393 if (before_strlen)
3394 emit_insn_after (pat, before_strlen);
3395 else
3396 emit_insn_before (pat, get_insns ());
3398 /* Return the value in the proper mode for this function. */
3399 if (GET_MODE (ops[0].value) == target_mode)
3400 target = ops[0].value;
3401 else if (target != 0)
3402 convert_move (target, ops[0].value, 0);
3403 else
3404 target = convert_to_mode (target_mode, ops[0].value, 0);
3406 return target;
3409 /* Expand call EXP to the strnlen built-in, returning the result
3410 in TARGET if convenient, or NULL_RTX on failure. */
3412 static rtx
3413 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3415 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3416 return NULL_RTX;
3418 tree src = CALL_EXPR_ARG (exp, 0);
3419 tree bound = CALL_EXPR_ARG (exp, 1);
3421 if (!bound)
3422 return NULL_RTX;
3424 check_read_access (exp, src, bound);
3426 location_t loc = UNKNOWN_LOCATION;
3427 if (EXPR_HAS_LOCATION (exp))
3428 loc = EXPR_LOCATION (exp);
3430 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3431 so these conversions aren't necessary. */
3432 c_strlen_data lendata = { };
3433 tree len = c_strlen (src, 0, &lendata, 1);
3434 if (len)
3435 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3437 if (TREE_CODE (bound) == INTEGER_CST)
3439 if (!len)
3440 return NULL_RTX;
3442 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3443 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3446 if (TREE_CODE (bound) != SSA_NAME)
3447 return NULL_RTX;
3449 wide_int min, max;
3450 enum value_range_kind rng = get_range_info (bound, &min, &max);
3451 if (rng != VR_RANGE)
3452 return NULL_RTX;
3454 if (!len || TREE_CODE (len) != INTEGER_CST)
3456 bool exact;
3457 lendata.decl = unterminated_array (src, &len, &exact);
3458 if (!lendata.decl)
3459 return NULL_RTX;
3462 if (lendata.decl)
3463 return NULL_RTX;
3465 if (wi::gtu_p (min, wi::to_wide (len)))
3466 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3468 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3469 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
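/* A folding sketch (hypothetical) of the constant-bound path above:
   with a known string and a constant bound, the result is the smaller
   of the string length and the bound.

     size_t n = __builtin_strnlen ("hello", 3);    // folds to 3
     size_t m = __builtin_strnlen ("hello", 99);   // folds to 5
*/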
3472 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3473 bytes starting at DATA + OFFSET and return them reinterpreted as
3474 a target constant. */
3476 static rtx
3477 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3478 scalar_int_mode mode)
3480 /* The REPresentation pointed to by DATA need not be a nul-terminated
3481 string but the caller guarantees it's large enough for MODE. */
3482 const char *rep = (const char *) data;
3484 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3487 /* LEN specifies the length of the block in a memcpy/memset operation.
3488 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3489 In some cases we can make a very likely guess at the maximum size,
3490 which we store in PROBABLE_MAX_SIZE. */
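/* For example, given

     void f (char *d, const char *s, unsigned n)
     {
       if (n < 32)
         memcpy (d, s, n);
     }

   the value range recorded for N lets us set MIN_SIZE to 0 and both
   MAX_SIZE and PROBABLE_MAX_SIZE to 31 (a sketch; the exact bounds
   depend on the range info available for N). */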
3492 static void
3493 determine_block_size (tree len, rtx len_rtx,
3494 unsigned HOST_WIDE_INT *min_size,
3495 unsigned HOST_WIDE_INT *max_size,
3496 unsigned HOST_WIDE_INT *probable_max_size)
3498 if (CONST_INT_P (len_rtx))
3500 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3501 return;
3503 else
3505 wide_int min, max;
3506 enum value_range_kind range_type = VR_UNDEFINED;
3508 /* Determine bounds from the type. */
3509 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3510 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3511 else
3512 *min_size = 0;
3513 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3514 *probable_max_size = *max_size
3515 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3516 else
3517 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3519 if (TREE_CODE (len) == SSA_NAME)
3520 range_type = get_range_info (len, &min, &max);
3521 if (range_type == VR_RANGE)
3523 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3524 *min_size = min.to_uhwi ();
3525 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3526 *probable_max_size = *max_size = max.to_uhwi ();
3528 else if (range_type == VR_ANTI_RANGE)
3530 /* Code like
3532 int n;
3533 if (n < 100)
3534 memcpy (a, b, n)
3536 produces an anti-range allowing negative values of N. We can
3537 still use the information and guess that N is not negative. */
3539 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3540 *probable_max_size = min.to_uhwi () - 1;
3543 gcc_checking_assert (*max_size <=
3544 (unsigned HOST_WIDE_INT)
3545 GET_MODE_MASK (GET_MODE (len_rtx)));
3548 /* Issue a warning OPT at location LOC for a bounded call EXP to FUNC
3549 with a bound in BNDRNG accessing an object of size SIZE. Return
3550 true if a warning was issued. */
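/* For example, for

     char a[4];
     ... strnlen (a, 12) ...

   the OPT_Wstringop_overread arm below emits "'strnlen' specified
   bound 12 exceeds source size 4" (an illustrative pairing of the
   format strings in this function with a constant bound). */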
3551 static bool
3552 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3553 tree bndrng[2], tree size, const access_data *pad = NULL)
3555 if (!bndrng[0] || TREE_NO_WARNING (exp))
3556 return false;
3558 tree maxobjsize = max_object_size ();
3560 bool warned = false;
3562 if (opt == OPT_Wstringop_overread)
3564 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3566 if (bndrng[0] == bndrng[1])
3567 warned = (func
3568 ? warning_at (loc, opt,
3569 "%K%qD specified bound %E "
3570 "exceeds maximum object size %E",
3571 exp, func, bndrng[0], maxobjsize)
3572 : warning_at (loc, opt,
3573 "%Kspecified bound %E "
3574 "exceeds maximum object size %E",
3575 exp, bndrng[0], maxobjsize));
3576 else
3577 warned = (func
3578 ? warning_at (loc, opt,
3579 "%K%qD specified bound [%E, %E] "
3580 "exceeds maximum object size %E",
3581 exp, func,
3582 bndrng[0], bndrng[1], maxobjsize)
3583 : warning_at (loc, opt,
3584 "%Kspecified bound [%E, %E] "
3585 "exceeds maximum object size %E",
3586 exp, bndrng[0], bndrng[1], maxobjsize));
3588 else if (!size || tree_int_cst_le (bndrng[0], size))
3589 return false;
3590 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3591 warned = (func
3592 ? warning_at (loc, opt,
3593 "%K%qD specified bound %E exceeds "
3594 "source size %E",
3595 exp, func, bndrng[0], size)
3596 : warning_at (loc, opt,
3597 "%Kspecified bound %E exceeds "
3598 "source size %E",
3599 exp, bndrng[0], size));
3600 else
3601 warned = (func
3602 ? warning_at (loc, opt,
3603 "%K%qD specified bound [%E, %E] exceeds "
3604 "source size %E",
3605 exp, func, bndrng[0], bndrng[1], size)
3606 : warning_at (loc, opt,
3607 "%Kspecified bound [%E, %E] exceeds "
3608 "source size %E",
3609 exp, bndrng[0], bndrng[1], size));
3610 if (warned)
3612 if (pad && pad->src.ref)
3614 if (DECL_P (pad->src.ref))
3615 inform (DECL_SOURCE_LOCATION (pad->src.ref),
3616 "source object declared here");
3617 else if (EXPR_HAS_LOCATION (pad->src.ref))
3618 inform (EXPR_LOCATION (pad->src.ref),
3619 "source object allocated here");
3621 TREE_NO_WARNING (exp) = true;
3624 return warned;
3627 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3629 if (bndrng[0] == bndrng[1])
3630 warned = (func
3631 ? warning_at (loc, opt,
3632 "%K%qD specified size %E "
3633 "exceeds maximum object size %E",
3634 exp, func, bndrng[0], maxobjsize)
3635 : warning_at (loc, opt,
3636 "%Kspecified size %E "
3637 "exceeds maximum object size %E",
3638 exp, bndrng[0], maxobjsize));
3639 else
3640 warned = (func
3641 ? warning_at (loc, opt,
3642 "%K%qD specified size between %E and %E "
3643 "exceeds maximum object size %E",
3644 exp, func,
3645 bndrng[0], bndrng[1], maxobjsize)
3646 : warning_at (loc, opt,
3647 "%Kspecified size between %E and %E "
3648 "exceeds maximum object size %E",
3649 exp, bndrng[0], bndrng[1], maxobjsize));
3651 else if (!size || tree_int_cst_le (bndrng[0], size))
3652 return false;
3653 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3654 warned = (func
3655 ? warning_at (loc, OPT_Wstringop_overflow_,
3656 "%K%qD specified bound %E exceeds "
3657 "destination size %E",
3658 exp, func, bndrng[0], size)
3659 : warning_at (loc, OPT_Wstringop_overflow_,
3660 "%Kspecified bound %E exceeds "
3661 "destination size %E",
3662 exp, bndrng[0], size));
3663 else
3664 warned = (func
3665 ? warning_at (loc, OPT_Wstringop_overflow_,
3666 "%K%qD specified bound [%E, %E] exceeds "
3667 "destination size %E",
3668 exp, func, bndrng[0], bndrng[1], size)
3669 : warning_at (loc, OPT_Wstringop_overflow_,
3670 "%Kspecified bound [%E, %E] exceeds "
3671 "destination size %E",
3672 exp, bndrng[0], bndrng[1], size));
3674 if (warned)
3676 if (pad && pad->dst.ref)
3678 if (DECL_P (pad->dst.ref))
3679 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
3680 "destination object declared here");
3681 else if (EXPR_HAS_LOCATION (pad->dst.ref))
3682 inform (EXPR_LOCATION (pad->dst.ref),
3683 "destination object allocated here");
3685 TREE_NO_WARNING (exp) = true;
3688 return warned;
3691 /* For an expression EXP issue an access warning controlled by option OPT
3692 for an access of a size in RANGE to a region of SIZE bytes.
3693 WRITE is true for a write access, READ for a read access, and neither
3694 for a call that may or may not perform an access but for which the
3695 range is expected to be valid.
3696 Returns true when a warning has been issued. */
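/* For example, for

     char d[3];
     ... strcpy (d, "abcd") ...

   the write branch below emits "'strcpy' writing 5 bytes into a
   region of size 3 overflows the destination" (illustrative; the
   wording follows the format strings in this function). */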
3698 static bool
3699 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3700 tree size, bool write, bool read)
3702 bool warned = false;
3704 if (write && read)
3706 if (tree_int_cst_equal (range[0], range[1]))
3707 warned = (func
3708 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3709 "%K%qD accessing %E byte in a region "
3710 "of size %E",
3711 "%K%qD accessing %E bytes in a region "
3712 "of size %E",
3713 exp, func, range[0], size)
3714 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3715 "%Kaccessing %E byte in a region "
3716 "of size %E",
3717 "%Kaccessing %E bytes in a region "
3718 "of size %E",
3719 exp, range[0], size));
3720 else if (tree_int_cst_sign_bit (range[1]))
3722 /* Avoid printing the upper bound if it's invalid. */
3723 warned = (func
3724 ? warning_at (loc, opt,
3725 "%K%qD accessing %E or more bytes in "
3726 "a region of size %E",
3727 exp, func, range[0], size)
3728 : warning_at (loc, opt,
3729 "%Kaccessing %E or more bytes in "
3730 "a region of size %E",
3731 exp, range[0], size));
3733 else
3734 warned = (func
3735 ? warning_at (loc, opt,
3736 "%K%qD accessing between %E and %E bytes "
3737 "in a region of size %E",
3738 exp, func, range[0], range[1],
3739 size)
3740 : warning_at (loc, opt,
3741 "%Kaccessing between %E and %E bytes "
3742 "in a region of size %E",
3743 exp, range[0], range[1],
3744 size));
3745 return warned;
3748 if (write)
3750 if (tree_int_cst_equal (range[0], range[1]))
3751 warned = (func
3752 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3753 "%K%qD writing %E byte into a region "
3754 "of size %E overflows the destination",
3755 "%K%qD writing %E bytes into a region "
3756 "of size %E overflows the destination",
3757 exp, func, range[0], size)
3758 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3759 "%Kwriting %E byte into a region "
3760 "of size %E overflows the destination",
3761 "%Kwriting %E bytes into a region "
3762 "of size %E overflows the destination",
3763 exp, range[0], size));
3764 else if (tree_int_cst_sign_bit (range[1]))
3766 /* Avoid printing the upper bound if it's invalid. */
3767 warned = (func
3768 ? warning_at (loc, opt,
3769 "%K%qD writing %E or more bytes into "
3770 "a region of size %E overflows "
3771 "the destination",
3772 exp, func, range[0], size)
3773 : warning_at (loc, opt,
3774 "%Kwriting %E or more bytes into "
3775 "a region of size %E overflows "
3776 "the destination",
3777 exp, range[0], size));
3779 else
3780 warned = (func
3781 ? warning_at (loc, opt,
3782 "%K%qD writing between %E and %E bytes "
3783 "into a region of size %E overflows "
3784 "the destination",
3785 exp, func, range[0], range[1],
3786 size)
3787 : warning_at (loc, opt,
3788 "%Kwriting between %E and %E bytes "
3789 "into a region of size %E overflows "
3790 "the destination",
3791 exp, range[0], range[1],
3792 size));
3793 return warned;
3796 if (read)
3798 if (tree_int_cst_equal (range[0], range[1]))
3799 warned = (func
3800 ? warning_n (loc, OPT_Wstringop_overread,
3801 tree_to_uhwi (range[0]),
3802 "%K%qD reading %E byte from a region of size %E",
3803 "%K%qD reading %E bytes from a region of size %E", exp, func, range[0], size)
3804 : warning_n (loc, OPT_Wstringop_overread,
3805 tree_to_uhwi (range[0]),
3806 "%Kreading %E byte from a region of size %E",
3807 "%Kreading %E bytes from a region of size %E",
3808 exp, range[0], size));
3809 else if (tree_int_cst_sign_bit (range[1]))
3811 /* Avoid printing the upper bound if it's invalid. */
3812 warned = (func
3813 ? warning_at (loc, OPT_Wstringop_overread,
3814 "%K%qD reading %E or more bytes from "
3815 "a region of size %E",
3816 exp, func, range[0], size)
3817 : warning_at (loc, OPT_Wstringop_overread,
3818 "%Kreading %E or more bytes from a region "
3819 "of size %E",
3820 exp, range[0], size));
3822 else
3823 warned = (func
3824 ? warning_at (loc, OPT_Wstringop_overread,
3825 "%K%qD reading between %E and %E bytes from "
3826 "a region of size %E",
3827 exp, func, range[0], range[1], size)
3828 : warning_at (loc, OPT_Wstringop_overread,
3829 "%Kreading between %E and %E bytes from "
3830 "a region of size %E",
3831 exp, range[0], range[1], size));
3833 if (warned)
3834 TREE_NO_WARNING (exp) = true;
3836 return warned;
3839 if (tree_int_cst_equal (range[0], range[1]))
3841 warned = (func
3842 ? warning_n (loc, OPT_Wstringop_overread,
3843 tree_to_uhwi (range[0]),
3844 "%K%qD epecting %E byte in a region of size %E",
3845 "%K%qD expecting %E bytes in a region of size %E",
3846 exp, func, range[0], size)
3847 : warning_n (loc, OPT_Wstringop_overread,
3848 tree_to_uhwi (range[0]),
3849 "%Kexpecting %E byte in a region of size %E",
3850 "%Kexpecting %E bytes in a region of size %E",
3851 exp, range[0], size));
3852 else if (tree_int_cst_sign_bit (range[1]))
3854 /* Avoid printing the upper bound if it's invalid. */
3855 warned = (func
3856 ? warning_at (loc, OPT_Wstringop_overread,
3857 "%K%qD expecting %E or more bytes in a region "
3858 "of size %E",
3859 exp, func, range[0], size)
3860 : warning_at (loc, OPT_Wstringop_overread,
3861 "%Kexpecting %E or more bytes in a region "
3862 "of size %E",
3863 exp, range[0], size));
3865 else
3866 warned = (func
3867 ? warning_at (loc, OPT_Wstringop_overread,
3868 "%K%qD expecting between %E and %E bytes in "
3869 "a region of size %E",
3870 exp, func, range[0], range[1], size)
3871 : warning_at (loc, OPT_Wstringop_overread,
3872 "%Kexpectting between %E and %E bytes in "
3873 "a region of size %E",
3874 exp, range[0], range[1], size));
3876 if (warned)
3877 TREE_NO_WARNING (exp) = true;
3879 return warned;
3882 /* Issue an inform message describing the target of an access REF.
3883 MODE indicates whether the access is a read, a write, or both. */
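/* For example, following a buffer overflow warning this prints a note
   such as "at offset 4 into destination object 'buf'" for a declared
   object, or "destination object of size 8 allocated by 'malloc'"
   for a dynamic allocation (wording assembled from the inform calls
   below). */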
3885 static void
3886 inform_access (const access_ref &ref, access_mode mode)
3888 if (!ref.ref)
3889 return;
3891 /* Convert offset range and avoid including a zero range since it
3892 isn't necessarily meaningful. */
3893 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
3894 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
3895 HOST_WIDE_INT minoff;
3896 HOST_WIDE_INT maxoff = diff_max;
3897 if (wi::fits_shwi_p (ref.offrng[0]))
3898 minoff = ref.offrng[0].to_shwi ();
3899 else
3900 minoff = ref.offrng[0] < 0 ? diff_min : diff_max;
3902 if (wi::fits_shwi_p (ref.offrng[1]))
3903 maxoff = ref.offrng[1].to_shwi ();
3905 if (maxoff <= diff_min || maxoff >= diff_max)
3906 /* Avoid mentioning an upper bound that's equal to or in excess
3907 of the maximum of ptrdiff_t. */
3908 maxoff = minoff;
3910 /* Convert size range and always include it since all sizes are
3911 meaningful. */
3912 unsigned long long minsize = 0, maxsize = 0;
3913 if (wi::fits_shwi_p (ref.sizrng[0])
3914 && wi::fits_shwi_p (ref.sizrng[1]))
3916 minsize = ref.sizrng[0].to_shwi ();
3917 maxsize = ref.sizrng[1].to_shwi ();
3920 char sizestr[80];
3921 location_t loc;
3922 tree allocfn = NULL_TREE;
3923 if (TREE_CODE (ref.ref) == SSA_NAME)
3925 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3926 gcc_assert (is_gimple_call (stmt));
3927 loc = gimple_location (stmt);
3928 allocfn = gimple_call_fndecl (stmt);
3929 if (!allocfn)
3930 /* Handle calls through pointers to functions. */
3931 allocfn = gimple_call_fn (stmt);
3933 /* SIZRNG doesn't necessarily have the same range as the allocation
3934 size determined by gimple_call_alloc_size (). */
3936 if (minsize == maxsize)
3937 sprintf (sizestr, "%llu", minsize);
3938 else
3939 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3942 else if (DECL_P (ref.ref))
3943 loc = DECL_SOURCE_LOCATION (ref.ref);
3944 else if (EXPR_P (ref.ref) && EXPR_HAS_LOCATION (ref.ref))
3945 loc = EXPR_LOCATION (ref.ref);
3946 else
3947 return;
3949 if (mode == access_read_write || mode == access_write_only)
3951 if (allocfn == NULL_TREE)
3953 if (minoff == maxoff)
3955 if (minoff == 0)
3956 inform (loc, "destination object %qE", ref.ref);
3957 else
3958 inform (loc, "at offset %wi into destination object %qE",
3959 minoff, ref.ref);
3961 else
3962 inform (loc, "at offset [%wi, %wi] into destination object %qE",
3963 minoff, maxoff, ref.ref);
3964 return;
3967 if (minoff == maxoff)
3969 if (minoff == 0)
3970 inform (loc, "destination object of size %s allocated by %qE",
3971 sizestr, allocfn);
3972 else
3973 inform (loc,
3974 "at offset %wi into destination object of size %s "
3975 "allocated by %qE", minoff, sizestr, allocfn);
3977 else
3978 inform (loc,
3979 "at offset [%wi, %wi] into destination object of size %s "
3980 "allocated by %qE",
3981 minoff, maxoff, sizestr, allocfn);
3983 return;
3986 if (DECL_P (ref.ref))
3988 if (minoff == maxoff)
3990 if (minoff == 0)
3991 inform (loc, "source object %qD", ref.ref);
3992 else
3993 inform (loc, "at offset %wi into source object %qD",
3994 minoff, ref.ref);
3996 else
3997 inform (loc, "at offset [%wi, %wi] into source object %qD",
3998 minoff, maxoff, ref.ref);
3999 return;
4002 if (minoff == maxoff)
4004 if (minoff == 0)
4005 inform (loc, "source object of size %s allocated by %qE",
4006 sizestr, allocfn);
4007 else
4008 inform (loc,
4009 "at offset %wi into source object of size %s "
4010 "allocated by %qE", minoff, sizestr, allocfn);
4012 else
4013 inform (loc,
4014 "at offset [%wi, %wi] into source object of size %s "
4015 "allocated by %qE",
4016 minoff, maxoff, sizestr, allocfn);
4019 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4020 by BNDRNG if nonnull and valid. */
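/* For example, if BOUND is an unsigned int with no useful range info,
   RANGE becomes [0, UINT_MAX]; if BNDRNG is [8, 16], RANGE is then
   narrowed to [8, 16] (a sketch of the intersection performed
   below). */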
4022 static void
4023 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4025 if (bound)
4026 get_size_range (bound, range);
4028 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4029 return;
4031 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4033 offset_int r[] =
4034 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4035 if (r[0] < bndrng[0])
4036 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4037 if (bndrng[1] < r[1])
4038 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4040 else
4042 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4043 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4047 /* Try to verify that the sizes and lengths of the arguments to a string
4048 manipulation function given by EXP are within valid bounds and that
4049 the operation does not lead to buffer overflow or read past the end.
4050 Arguments other than EXP may be null. When non-null, the arguments
4051 have the following meaning:
4052 DST is the destination of a copy call or NULL otherwise.
4053 SRC is the source of a copy call or NULL otherwise.
4054 DSTWRITE is the number of bytes written into the destination obtained
4055 from the user-supplied size argument to the function (such as in
4056 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4057 MAXREAD is the user-supplied bound on the length of the source sequence
4058 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4059 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4060 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4061 expression EXP is a string function call (as opposed to a memory call
4062 like memcpy). As an exception, SRCSTR can also be an integer denoting
4063 the precomputed size of the source string or object (for functions like
4064 memcpy).
4065 DSTSIZE is the size of the destination object.
4067 When DSTWRITE is null the bound (MAXREAD) is checked to verify that
4068 it doesn't exceed SIZE_MAX.
4070 WRITE is true for write accesses, READ is true for reads. Both are
4071 false for simple size checks in calls to functions that neither read
4072 from nor write to the region.
4074 When nonnull, PAD points to a more detailed description of the access.
4076 If the call is successfully verified as safe return true, otherwise
4077 return false. */
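/* For example, for

     memcpy (d, s, n);

   DST is D, DSTWRITE is N, SRCSTR is the precomputed size of the
   object S points to, and DSTSIZE is the size of the object D points
   to, while for strncat (d, s, n) the bound N is passed as MAXREAD
   and S itself as SRCSTR (an illustrative mapping; callers pass null
   for values they cannot determine). */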
4079 bool
4080 check_access (tree exp, tree dstwrite,
4081 tree maxread, tree srcstr, tree dstsize,
4082 access_mode mode, const access_data *pad /* = NULL */)
4084 /* The size of the largest object is half the address space, or
4085 PTRDIFF_MAX. (This is way too permissive.) */
4086 tree maxobjsize = max_object_size ();
4088 /* Either an approximate/minimum length of the source string for
4089 string functions or the size of the source object for raw memory
4090 functions. */
4091 tree slen = NULL_TREE;
4093 /* The range of the access in bytes; first set to the write access
4094 for functions that write and then read for those that also (or
4095 just) read. */
4096 tree range[2] = { NULL_TREE, NULL_TREE };
4098 /* Set to true when the exact number of bytes written by a string
4099 function like strcpy is not known and the only thing that is
4100 known is that it must be at least one (for the terminating nul). */
4101 bool at_least_one = false;
4102 if (srcstr)
4104 /* SRCSTR is normally a pointer to string but as a special case
4105 it can be an integer denoting the length of a string. */
4106 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4108 if (!check_nul_terminated_array (exp, srcstr, maxread))
4109 return false;
4110 /* Try to determine the range of lengths the source string
4111 refers to. If it can be determined and is less than
4112 the upper bound given by MAXREAD add one to it for
4113 the terminating nul. Otherwise, set it to one for
4114 the same reason, or to MAXREAD as appropriate. */
4115 c_strlen_data lendata = { };
4116 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4117 range[0] = lendata.minlen;
4118 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4119 if (range[0]
4120 && TREE_CODE (range[0]) == INTEGER_CST
4121 && TREE_CODE (range[1]) == INTEGER_CST
4122 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4124 if (maxread && tree_int_cst_le (maxread, range[0]))
4125 range[0] = range[1] = maxread;
4126 else
4127 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4128 range[0], size_one_node);
4130 if (maxread && tree_int_cst_le (maxread, range[1]))
4131 range[1] = maxread;
4132 else if (!integer_all_onesp (range[1]))
4133 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4134 range[1], size_one_node);
4136 slen = range[0];
4138 else
4140 at_least_one = true;
4141 slen = size_one_node;
4144 else
4145 slen = srcstr;
4148 if (!dstwrite && !maxread)
4150 /* When the only available piece of data is the object size
4151 there is nothing to do. */
4152 if (!slen)
4153 return true;
4155 /* Otherwise, when the length of the source sequence is known
4156 (as with strlen), set DSTWRITE to it. */
4157 if (!range[0])
4158 dstwrite = slen;
4161 if (!dstsize)
4162 dstsize = maxobjsize;
4164 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4165 if valid. */
4166 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4168 tree func = get_callee_fndecl (exp);
4169 /* Read vs write access by built-ins can be determined from the const
4170 qualifiers on the pointer argument. In the absence of attribute
4171 access, non-const qualified pointer arguments to user-defined
4172 functions are assumed to both read and write the objects. */
4173 const bool builtin = func ? fndecl_built_in_p (func) : false;
4175 /* First check the number of bytes to be written against the maximum
4176 object size. */
4177 if (range[0]
4178 && TREE_CODE (range[0]) == INTEGER_CST
4179 && tree_int_cst_lt (maxobjsize, range[0]))
4181 location_t loc = tree_nonartificial_location (exp);
4182 loc = expansion_point_location_if_in_system_header (loc);
4184 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4185 NULL_TREE, pad);
4186 return false;
4189 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4190 constant, and in range of unsigned HOST_WIDE_INT. */
4191 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4193 /* Next check the number of bytes to be written against the destination
4194 object size. */
4195 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4197 if (range[0]
4198 && TREE_CODE (range[0]) == INTEGER_CST
4199 && ((tree_fits_uhwi_p (dstsize)
4200 && tree_int_cst_lt (dstsize, range[0]))
4201 || (dstwrite
4202 && tree_fits_uhwi_p (dstwrite)
4203 && tree_int_cst_lt (dstwrite, range[0]))))
4205 if (TREE_NO_WARNING (exp)
4206 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4207 return false;
4209 location_t loc = tree_nonartificial_location (exp);
4210 loc = expansion_point_location_if_in_system_header (loc);
4212 bool warned = false;
4213 if (dstwrite == slen && at_least_one)
4215 /* This is a call to strcpy with a destination of 0 size
4216 and a source of unknown length. The call will write
4217 at least one byte past the end of the destination. */
4218 warned = (func
4219 ? warning_at (loc, OPT_Wstringop_overflow_,
4220 "%K%qD writing %E or more bytes into "
4221 "a region of size %E overflows "
4222 "the destination",
4223 exp, func, range[0], dstsize)
4224 : warning_at (loc, OPT_Wstringop_overflow_,
4225 "%Kwriting %E or more bytes into "
4226 "a region of size %E overflows "
4227 "the destination",
4228 exp, range[0], dstsize));
4230 else
4232 const bool read
4233 = mode == access_read_only || mode == access_read_write;
4234 const bool write
4235 = mode == access_write_only || mode == access_read_write;
4236 warned = warn_for_access (loc, func, exp,
4237 OPT_Wstringop_overflow_,
4238 range, dstsize,
4239 write, read && !builtin);
4242 if (warned)
4244 TREE_NO_WARNING (exp) = true;
4245 if (pad)
4246 inform_access (pad->dst, pad->mode);
4249 /* Return error when an overflow has been detected. */
4250 return false;
4254 /* Check the maximum length of the source sequence against the size
4255 of the destination object if known, or against the maximum size
4256 of an object. */
4257 if (maxread)
4259 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4260 PAD is nonnull and BNDRNG is valid. */
4261 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4263 location_t loc = tree_nonartificial_location (exp);
4264 loc = expansion_point_location_if_in_system_header (loc);
4266 tree size = dstsize;
4267 if (pad && pad->mode == access_read_only)
4268 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4270 if (range[0] && maxread && tree_fits_uhwi_p (size))
4272 if (tree_int_cst_lt (maxobjsize, range[0]))
4274 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4275 range, size, pad);
4276 return false;
4279 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4281 int opt = (dstwrite || mode != access_read_only
4282 ? OPT_Wstringop_overflow_
4283 : OPT_Wstringop_overread);
4284 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4285 return false;
4289 maybe_warn_nonstring_arg (func, exp);
4292 /* Check for reading past the end of SRC. */
4293 bool overread = (slen
4294 && slen == srcstr
4295 && dstwrite
4296 && range[0]
4297 && TREE_CODE (slen) == INTEGER_CST
4298 && tree_int_cst_lt (slen, range[0]));
4299 /* If no overread is determined, try to get a better answer based on
4300 the details in PAD. */
4301 if (!overread
4302 && pad
4303 && pad->src.sizrng[1] >= 0
4304 && pad->src.offrng[0] >= 0
4305 && (pad->src.offrng[1] < 0
4306 || pad->src.offrng[0] <= pad->src.offrng[1]))
4308 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4309 PAD is nonnull and BNDRNG is valid. */
4310 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4311 /* Set OVERREAD for reads starting just past the end of an object. */
4312 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4313 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4314 slen = size_zero_node;
4317 if (overread)
4319 if (TREE_NO_WARNING (exp)
4320 || (srcstr && TREE_NO_WARNING (srcstr))
4321 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4322 return false;
4324 location_t loc = tree_nonartificial_location (exp);
4325 loc = expansion_point_location_if_in_system_header (loc);
4327 const bool read
4328 = mode == access_read_only || mode == access_read_write;
4329 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4330 slen, false, read))
4332 TREE_NO_WARNING (exp) = true;
4333 if (pad)
4334 inform_access (pad->src, access_read_only);
4336 return false;
4339 return true;
4342 /* A convenience wrapper for check_access above to check access
4343 by a read-only function like puts. */
4345 static bool
4346 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4347 int ost /* = 1 */)
4349 if (!warn_stringop_overread)
4350 return true;
4352 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4353 compute_objsize (src, ost, &data.src);
4354 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4355 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4356 &data);
4359 /* If STMT is a call to an allocation function, returns the constant
4360 maximum size of the object allocated by the call represented as
4361 sizetype. If nonnull, sets RNG1[] to the range of the size.
4362 When nonnull, uses RVALS for range information, otherwise calls
4363 get_range_info to get it.
4364 Returns null when STMT is not a call to a valid allocation function. */
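/* For example, for

     void *p = malloc (n);

   where N has the range [8, 32], this returns the sizetype constant
   32 and sets RNG1 to [8, 32]; for calloc (m, n) the bounds of both
   arguments are multiplied (a sketch relying on the alloc_size
   attribute looked up below). */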
4366 tree
4367 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4368 range_query * /* = NULL */)
4370 if (!stmt)
4371 return NULL_TREE;
4373 tree allocfntype;
4374 if (tree fndecl = gimple_call_fndecl (stmt))
4375 allocfntype = TREE_TYPE (fndecl);
4376 else
4377 allocfntype = gimple_call_fntype (stmt);
4379 if (!allocfntype)
4380 return NULL_TREE;
4382 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4383 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4384 if (!at)
4386 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4387 return NULL_TREE;
4389 argidx1 = 0;
4392 unsigned nargs = gimple_call_num_args (stmt);
4394 if (argidx1 == UINT_MAX)
4396 tree atval = TREE_VALUE (at);
4397 if (!atval)
4398 return NULL_TREE;
4400 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4401 if (nargs <= argidx1)
4402 return NULL_TREE;
4404 atval = TREE_CHAIN (atval);
4405 if (atval)
4407 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4408 if (nargs <= argidx2)
4409 return NULL_TREE;
4413 tree size = gimple_call_arg (stmt, argidx1);
4415 wide_int rng1_buf[2];
4416 /* If RNG1 is not set, use the buffer. */
4417 if (!rng1)
4418 rng1 = rng1_buf;
4420 /* Use maximum precision to avoid overflow below. */
4421 const int prec = ADDR_MAX_PRECISION;
4424 tree r[2];
4425 /* Determine the largest valid range size, including zero. */
4426 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4427 return NULL_TREE;
4428 rng1[0] = wi::to_wide (r[0], prec);
4429 rng1[1] = wi::to_wide (r[1], prec);
4432 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4433 return fold_convert (sizetype, size);
4435 /* To handle ranges do the math in wide_int and return the product
4436 of the upper bounds as a constant. Ignore anti-ranges. */
4437 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4438 wide_int rng2[2];
4440 tree r[2];
4441 /* As above, use the full non-negative range on failure. */
4442 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4443 return NULL_TREE;
4444 rng2[0] = wi::to_wide (r[0], prec);
4445 rng2[1] = wi::to_wide (r[1], prec);
4448 /* Compute products of both bounds for the caller but return the lesser
4449 of SIZE_MAX and the product of the upper bounds as a constant. */
4450 rng1[0] = rng1[0] * rng2[0];
4451 rng1[1] = rng1[1] * rng2[1];
4453 const tree size_max = TYPE_MAX_VALUE (sizetype);
4454 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
4456 rng1[1] = wi::to_wide (size_max, prec);
4457 return size_max;
4460 return wide_int_to_tree (sizetype, rng1[1]);
4463 /* For an access to an object referenced by the function parameter PTR
4464 of pointer type, set RNG[] to the range of sizes of the object
4465 obtained from the attribute access specification for the current
4466 function. Return the function parameter on success and null otherwise. */
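/* For example, given

     void f (int arr[static 8]);

   accesses through ARR get RNG set to [0, 8 * sizeof (int)] (a
   sketch; the bound comes from the attribute access encoding of the
   parameter's array form). */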
4468 tree
4469 gimple_parm_array_size (tree ptr, wide_int rng[2],
4470 range_query * /* = NULL */)
4472 /* For a function argument try to determine the byte size of the array
4473 from the current function declaration (e.g., attribute access or
4474 related). */
4475 tree var = SSA_NAME_VAR (ptr);
4476 if (TREE_CODE (var) != PARM_DECL)
4477 return NULL_TREE;
4479 const unsigned prec = TYPE_PRECISION (sizetype);
4481 rdwr_map rdwr_idx;
4482 attr_access *access = get_parm_access (rdwr_idx, var);
4483 if (!access)
4484 return NULL_TREE;
4486 if (access->sizarg != UINT_MAX)
4488 /* TODO: Try to extract the range from the argument based on
4489 those of subsequent assertions or based on known calls to
4490 the current function. */
4491 return NULL_TREE;
4494 if (!access->minsize)
4495 return NULL_TREE;
4497 /* Only consider ordinary array bound at level 2 (or above if it's
4498 ever added). */
4499 if (warn_array_parameter < 2 && !access->static_p)
4500 return NULL_TREE;
4502 rng[0] = wi::zero (prec);
4503 rng[1] = wi::uhwi (access->minsize, prec);
4504 /* Multiply the array bound encoded in the attribute by the size
4505 of what the pointer argument to which it decays points to. */
4506 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
4507 tree size = TYPE_SIZE_UNIT (eltype);
4508 if (!size || TREE_CODE (size) != INTEGER_CST)
4509 return NULL_TREE;
4511 rng[1] *= wi::to_wide (size, prec);
4512 return var;
4515 /* Wrapper around the wide_int overload of get_range that accepts
4516 offset_int instead. For middle end expressions it returns the same
4517 result. For a subset of nonconstant expressions emitted by the front
4518 end it determines a more precise range than would be possible otherwise. */
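/* For example, for the front end expression (sizetype) n + 8 where N
   is an int parameter, R is set to [INT_MIN + 8, INT_MAX + 8] from
   the type bounds plus the constant addend (a sketch of the
   PLUS_EXPR and NOP_EXPR handling below). */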
4520 static bool
4521 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
4523 offset_int add = 0;
4524 if (TREE_CODE (x) == PLUS_EXPR)
4526 /* Handle constant offsets in pointer addition expressions seen
4527 in the front end IL. */
4528 tree op = TREE_OPERAND (x, 1);
4529 if (TREE_CODE (op) == INTEGER_CST)
4531 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
4532 add = wi::to_offset (op);
4533 x = TREE_OPERAND (x, 0);
4537 if (TREE_CODE (x) == NOP_EXPR)
4538 /* Also handle conversions to sizetype seen in the front end IL. */
4539 x = TREE_OPERAND (x, 0);
4541 tree type = TREE_TYPE (x);
4543 if (TREE_CODE (x) != INTEGER_CST
4544 && TREE_CODE (x) != SSA_NAME)
4546 if (TYPE_UNSIGNED (type)
4547 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
4548 type = signed_type_for (type);
4550 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
4551 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
4552 return x;
4555 wide_int wr[2];
4556 if (!get_range (x, stmt, wr, rvals))
4557 return false;
4559 signop sgn = SIGNED;
4560 /* Only convert signed integers or unsigned sizetype to a signed
4561 offset and avoid converting large positive values in narrower
4562 types to negative offsets. */
4563 if (TYPE_UNSIGNED (type)
4564 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
4565 sgn = UNSIGNED;
4567 r[0] = offset_int::from (wr[0], sgn);
4568 r[1] = offset_int::from (wr[1], sgn);
4569 return true;
4572 /* Return the argument that the call STMT to a built-in function returns
4573 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
4574 from the argument reflected in the value returned by the built-in if it
4575 can be determined, otherwise to 0 and HWI_M1U respectively. */
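/* For example, for

     p = mempcpy (d, s, 4);

   this returns D and sets OFFRNG to [4, 4], whereas for
   p = strchr (s, c) it returns S with OFFRNG set to the maximum
   range since the offset of the match is not known. */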
4577 static tree
4578 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
4579 range_query *rvals)
4581 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4582 || gimple_call_num_args (stmt) < 1)
4583 return NULL_TREE;
4585 tree fn = gimple_call_fndecl (stmt);
4586 switch (DECL_FUNCTION_CODE (fn))
4588 case BUILT_IN_MEMCPY:
4589 case BUILT_IN_MEMCPY_CHK:
4590 case BUILT_IN_MEMMOVE:
4591 case BUILT_IN_MEMMOVE_CHK:
4592 case BUILT_IN_MEMSET:
4593 case BUILT_IN_STPCPY:
4594 case BUILT_IN_STPCPY_CHK:
4595 case BUILT_IN_STPNCPY:
4596 case BUILT_IN_STPNCPY_CHK:
4597 case BUILT_IN_STRCAT:
4598 case BUILT_IN_STRCAT_CHK:
4599 case BUILT_IN_STRCPY:
4600 case BUILT_IN_STRCPY_CHK:
4601 case BUILT_IN_STRNCAT:
4602 case BUILT_IN_STRNCAT_CHK:
4603 case BUILT_IN_STRNCPY:
4604 case BUILT_IN_STRNCPY_CHK:
4605 offrng[0] = offrng[1] = 0;
4606 return gimple_call_arg (stmt, 0);
4608 case BUILT_IN_MEMPCPY:
4609 case BUILT_IN_MEMPCPY_CHK:
4611 tree off = gimple_call_arg (stmt, 2);
4612 if (!get_offset_range (off, stmt, offrng, rvals))
4614 offrng[0] = 0;
4615 offrng[1] = HOST_WIDE_INT_M1U;
4617 return gimple_call_arg (stmt, 0);
4620 case BUILT_IN_MEMCHR:
4622 tree off = gimple_call_arg (stmt, 2);
4623 if (get_offset_range (off, stmt, offrng, rvals))
4624 offrng[0] = 0;
4625 else
4627 offrng[0] = 0;
4628 offrng[1] = HOST_WIDE_INT_M1U;
4630 return gimple_call_arg (stmt, 0);
4633 case BUILT_IN_STRCHR:
4634 case BUILT_IN_STRRCHR:
4635 case BUILT_IN_STRSTR:
4637 offrng[0] = 0;
4638 offrng[1] = HOST_WIDE_INT_M1U;
4640 return gimple_call_arg (stmt, 0);
4642 default:
4643 break;
4646 return NULL_TREE;
4649 /* Helper to compute the size of the object referenced by the PTR
4650 expression which must have pointer type, using Object Size type
4651 OSTYPE (only the least significant 2 bits are used).
4652 On success, sets PREF->REF to the DECL of the referenced object
4653 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4654 offsets into it, and PREF->SIZRNG to the range of sizes of
4655 the object(s).
4656 VISITED is used to avoid visiting the same PHI operand multiple
4657 times, and, when nonnull, RVALS to determine range information.
4658 Returns true on success, false when a meaningful size (or range)
4659 cannot be determined.
4661 The function is intended for diagnostics and should not be used
4662 to influence code generation or optimization. */
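/* For example, for

     char a[7];
     ... compute_objsize (&a[2], 1, &ref, ...) ...

   PREF->REF is set to A, PREF->OFFRNG to [2, 2], and PREF->SIZRNG to
   [7, 7], leaving 5 bytes of the object past the offset (a sketch of
   the DECL and ARRAY_REF cases handled below). */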
4664 static bool
4665 compute_objsize (tree ptr, int ostype, access_ref *pref, bitmap *visited,
4666 range_query *rvals)
4668 STRIP_NOPS (ptr);
4670 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4671 if (addr)
4672 ptr = TREE_OPERAND (ptr, 0);
4674 if (DECL_P (ptr))
4676 pref->ref = ptr;
4678 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4680 /* Set the maximum size if the reference is to the pointer
4681 itself (as opposed to what it points to). */
4682 pref->set_max_size_range ();
4683 return true;
4686 if (tree size = decl_init_size (ptr, false))
4687 if (TREE_CODE (size) == INTEGER_CST)
4689 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4690 return true;
4693 pref->set_max_size_range ();
4694 return true;
4697 const tree_code code = TREE_CODE (ptr);
4699 if (code == BIT_FIELD_REF)
4701 tree ref = TREE_OPERAND (ptr, 0);
4702 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4703 return false;
4705 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
4706 pref->add_offset (off / BITS_PER_UNIT);
4707 return true;
4710 if (code == COMPONENT_REF)
4712 tree ref = TREE_OPERAND (ptr, 0);
4713 tree field = TREE_OPERAND (ptr, 1);
4715 if (ostype == 0)
4717 /* In OSTYPE zero (for raw memory functions like memcpy), use
4718 the maximum size instead if the identity of the enclosing
4719 object cannot be determined. */
4720 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4721 return false;
4723 /* Otherwise, use the size of the enclosing object and add
4724 the offset of the member to the offset computed so far. */
4725 tree offset = byte_position (field);
4726 if (TREE_CODE (offset) == INTEGER_CST)
4727 pref->add_offset (wi::to_offset (offset));
4728 else
4729 pref->add_max_offset ();
4730 return true;
4733 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4735 /* Set maximum size if the reference is to the pointer member
4736 itself (as opposed to what it points to). */
4737 pref->set_max_size_range ();
4738 return true;
4741 pref->ref = field;
4743 /* SAM is set for array members that might need special treatment. */
4744 special_array_member sam;
4745 tree size = component_ref_size (ptr, &sam);
4746 if (sam == special_array_member::int_0)
4747 pref->sizrng[0] = pref->sizrng[1] = 0;
4748 else if (!pref->trail1special && sam == special_array_member::trail_1)
4749 pref->sizrng[0] = pref->sizrng[1] = 1;
4750 else if (size && TREE_CODE (size) == INTEGER_CST)
4751 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4752 else
4754 /* When the size of the member is unknown it's either a flexible
4755 array member or a trailing special array member (either zero
4756 length or one-element). Set the size to the maximum minus
4757 the constant size of the type. */
4758 pref->sizrng[0] = 0;
4759 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4760 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
4761 if (TREE_CODE (recsize) == INTEGER_CST)
4762 pref->sizrng[1] -= wi::to_offset (recsize);
4764 return true;
4767 if (code == ARRAY_REF || code == MEM_REF)
4769 tree ref = TREE_OPERAND (ptr, 0);
4770 tree reftype = TREE_TYPE (ref);
4771 if (code == ARRAY_REF
4772 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4773 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4774 of known bound. */
4775 return false;
4777 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4779 /* Give up for MEM_REFs of vector types; those may be synthesized
4780 from multiple assignments to consecutive data members. See PR
4781 93200.
4782 FIXME: Deal with this more generally, e.g., by marking up such
4783 MEM_REFs at the time they're created. */
4784 reftype = TREE_TYPE (reftype);
4785 if (TREE_CODE (reftype) == VECTOR_TYPE)
4786 return false;
4789 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4790 return false;
4792 offset_int orng[2];
4793 tree off = pref->eval (TREE_OPERAND (ptr, 1));
4794 if (!get_offset_range (off, NULL, orng, rvals))
4796 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
4797 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4798 orng[0] = -orng[1] - 1;
4801 if (TREE_CODE (ptr) == ARRAY_REF)
4803 /* Convert the array index range determined above to a byte
4804 offset. */
4805 tree lowbnd = array_ref_low_bound (ptr);
4806 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4808 /* Adjust the index by the low bound of the array domain
4809 (normally zero but 1 in Fortran). */
4810 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4811 orng[0] -= lb;
4812 orng[1] -= lb;
4815 tree eltype = TREE_TYPE (ptr);
4816 tree tpsize = TYPE_SIZE_UNIT (eltype);
4817 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4819 pref->add_max_offset ();
4820 return true;
4823 offset_int sz = wi::to_offset (tpsize);
4824 orng[0] *= sz;
4825 orng[1] *= sz;
4827 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4829 /* Except for the permissive raw memory functions which use
4830 the size of the whole object determined above, use the size
4831 of the referenced array. Because the overall offset is from
4832 the beginning of the complete array object, add this overall
4833 offset to the size of the array. */
4834 offset_int sizrng[2] =
4836 pref->offrng[0] + orng[0] + sz,
4837 pref->offrng[1] + orng[1] + sz
4839 if (sizrng[1] < sizrng[0])
4840 std::swap (sizrng[0], sizrng[1]);
4841 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
4842 pref->sizrng[0] = sizrng[0];
4843 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
4844 pref->sizrng[1] = sizrng[1];
4848 pref->add_offset (orng[0], orng[1]);
4849 return true;
4852 if (code == TARGET_MEM_REF)
4854 tree ref = TREE_OPERAND (ptr, 0);
4855 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4856 return false;
4858 /* TODO: Handle remaining operands. Until then, add maximum offset. */
4859 pref->ref = ptr;
4860 pref->add_max_offset ();
4861 return true;
4864 if (code == INTEGER_CST)
4866 /* Pointer constants other than null are most likely the result
4867 of erroneous null pointer addition/subtraction. Set size to
4868 zero. For null pointers, set size to the maximum for now
4869 since those may be the result of jump threading. */
4870 if (integer_zerop (ptr))
4871 pref->set_max_size_range ();
4872 else
4873 pref->sizrng[0] = pref->sizrng[1] = 0;
4874 pref->ref = ptr;
4876 return true;
4879 if (code == STRING_CST)
4881 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
4882 return true;
4885 if (code == POINTER_PLUS_EXPR)
4887 tree ref = TREE_OPERAND (ptr, 0);
4888 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4889 return false;
4891 offset_int orng[2];
4892 tree off = pref->eval (TREE_OPERAND (ptr, 1));
4893 if (get_offset_range (off, NULL, orng, rvals))
4894 pref->add_offset (orng[0], orng[1]);
4895 else
4896 pref->add_max_offset ();
4897 return true;
4900 if (code == VIEW_CONVERT_EXPR)
4902 ptr = TREE_OPERAND (ptr, 0);
4903 return compute_objsize (ptr, ostype, pref, visited, rvals);
4906 if (TREE_CODE (ptr) == SSA_NAME)
4908 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4909 if (is_gimple_call (stmt))
4911 /* If STMT is a call to an allocation function get the size
4912 from its argument(s). If successful, also set *PREF->REF
4913 to PTR for the caller to include in diagnostics. */
4914 wide_int wr[2];
4915 if (gimple_call_alloc_size (stmt, wr, rvals))
4917 pref->ref = ptr;
4918 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4919 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4920 /* Constrain both bounds to a valid size. */
4921 offset_int maxsize = wi::to_offset (max_object_size ());
4922 if (pref->sizrng[0] > maxsize)
4923 pref->sizrng[0] = maxsize;
4924 if (pref->sizrng[1] > maxsize)
4925 pref->sizrng[1] = maxsize;
4927 else
4929 /* For functions known to return one of their pointer arguments
4930 try to determine what the returned pointer points to, and on
4931 success add OFFRNG which was set to the offset added by
4932 the function (e.g., memchr) to the overall offset. */
4933 offset_int offrng[2];
4934 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
4936 if (!compute_objsize (ret, ostype, pref, visited, rvals))
4937 return false;
4939 /* Cap OFFRNG[1] to at most the remaining size of
4940 the object. */
4941 offset_int remrng[2];
4942 remrng[1] = pref->size_remaining (remrng);
4943 if (remrng[1] < offrng[1])
4944 offrng[1] = remrng[1];
4945 pref->add_offset (offrng[0], offrng[1]);
4947 else
4949 /* For other calls that might return arbitrary pointers
4950 including into the middle of objects set the size
4951 range to maximum, clear PREF->BASE0, and also set
4952 PREF->REF to include in diagnostics. */
4953 pref->set_max_size_range ();
4954 pref->base0 = false;
4955 pref->ref = ptr;
4958 return true;
4961 if (gimple_nop_p (stmt))
4963 /* For a function argument try to determine the byte size
4964 of the array from the current function declaration
4965 (e.g., attribute access or related). */
4966 wide_int wr[2];
4967 if (tree ref = gimple_parm_array_size (ptr, wr, rvals))
4969 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4970 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4971 pref->ref = ref;
4972 return true;
4975 pref->set_max_size_range ();
4976 pref->base0 = false;
4977 pref->ref = ptr;
4978 if (tree var = SSA_NAME_VAR (ptr))
4979 if (TREE_CODE (var) == PARM_DECL)
4980 pref->ref = var;
4982 return true;
4985 /* TODO: Handle PHI. */
4987 if (!is_gimple_assign (stmt))
4989 /* Clear BASE0 since the assigned pointer might point into
4990 the middle of the object, set the maximum size range and,
4991 if the SSA_NAME refers to a function argument, set
4992 PREF->REF to it. */
4993 pref->base0 = false;
4994 pref->set_max_size_range ();
4995 if (tree var = SSA_NAME_VAR (ptr))
4996 if (TREE_CODE (var) == PARM_DECL)
4997 pref->ref = var;
4998 return true;
5001 ptr = gimple_assign_rhs1 (stmt);
5003 tree_code code = gimple_assign_rhs_code (stmt);
5005 if (code == POINTER_PLUS_EXPR
5006 && TREE_CODE (TREE_TYPE (ptr)) == POINTER_TYPE)
5008 /* Compute the size of the object first. */
5009 if (!compute_objsize (ptr, ostype, pref, visited, rvals))
5010 return false;
5012 offset_int orng[2];
5013 tree off = gimple_assign_rhs2 (stmt);
5014 if (get_offset_range (off, stmt, orng, rvals))
5015 pref->add_offset (orng[0], orng[1]);
5016 else
5017 pref->add_max_offset ();
5018 return true;
5021 if (code == ADDR_EXPR)
5022 return compute_objsize (ptr, ostype, pref, visited, rvals);
5024 /* This could be an assignment from a nonlocal pointer. Save PTR
5025 to mention in diagnostics but otherwise treat it as a pointer
5026 to an unknown object. */
5027 pref->ref = ptr;
5030 /* Assume all other expressions point into an unknown object
5031 of the maximum valid size. */
5032 pref->base0 = false;
5033 pref->set_max_size_range ();
5034 return true;
5037 /* A "public" wrapper around the above. Clients should use this overload
5038 instead. */
5040 tree
5041 compute_objsize (tree ptr, int ostype, access_ref *pref,
5042 range_query *rvals /* = NULL */)
5044 bitmap visited = NULL;
5046 bool success
5047 = compute_objsize (ptr, ostype, pref, &visited, rvals);
5049 if (visited)
5050 BITMAP_FREE (visited);
5052 if (!success)
5053 return NULL_TREE;
5055 offset_int maxsize = pref->size_remaining ();
5056 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5057 pref->offrng[0] = 0;
5058 return wide_int_to_tree (sizetype, maxsize);
5061 /* Transitional wrapper around the above. The function should be removed
5062 once callers transition to one of the two above. */
5064 tree
5065 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5066 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5068 /* Set the initial offsets to zero and size to negative to indicate
5069 none has been computed yet. */
5070 access_ref ref;
5071 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5072 if (!size || !ref.base0)
5073 return NULL_TREE;
5075 if (pdecl)
5076 *pdecl = ref.ref;
5078 if (poff)
5079 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5081 return size;
5084 /* Helper to determine and check the sizes of the source and the
5085 destination of a call to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5086 call expression, DEST is the destination argument, SRC is the source
5087 argument or null, and LEN is the number of bytes. Use Object Size type-0
5088 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5089 (no overflow or invalid sizes), false otherwise. */
5091 static bool
5092 check_memop_access (tree exp, tree dest, tree src, tree size)
5094 /* For functions like memset and memcpy that operate on raw memory
5095 try to determine the size of the largest source and destination
5096 object using type-0 Object Size regardless of the object size
5097 type specified by the option. */
5098 access_data data (exp, access_read_write);
5099 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5100 tree dstsize = compute_objsize (dest, 0, &data.dst);
5102 return check_access (exp, size, /*maxread=*/NULL_TREE,
5103 srcsize, dstsize, data.mode, &data);
5106 /* Validate memchr arguments without performing any expansion.
5107 Return NULL_RTX. */
5109 static rtx
5110 expand_builtin_memchr (tree exp, rtx)
5112 if (!validate_arglist (exp,
5113 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5114 return NULL_RTX;
5116 tree arg1 = CALL_EXPR_ARG (exp, 0);
5117 tree len = CALL_EXPR_ARG (exp, 2);
5119 check_read_access (exp, arg1, len, 0);
5121 return NULL_RTX;
5124 /* Expand a call EXP to the memcpy builtin.
5125 Return NULL_RTX if we failed; the caller should emit a normal call,
5126 otherwise try to get the result in TARGET, if convenient (and in
5127 mode MODE if that's convenient). */
5129 static rtx
5130 expand_builtin_memcpy (tree exp, rtx target)
5132 if (!validate_arglist (exp,
5133 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5134 return NULL_RTX;
5136 tree dest = CALL_EXPR_ARG (exp, 0);
5137 tree src = CALL_EXPR_ARG (exp, 1);
5138 tree len = CALL_EXPR_ARG (exp, 2);
5140 check_memop_access (exp, dest, src, len);
5142 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5143 /*retmode=*/ RETURN_BEGIN, false);
5146 /* Check a call EXP to the memmove built-in for validity.
5147 Return NULL_RTX on both success and failure. */
5149 static rtx
5150 expand_builtin_memmove (tree exp, rtx target)
5152 if (!validate_arglist (exp,
5153 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5154 return NULL_RTX;
5156 tree dest = CALL_EXPR_ARG (exp, 0);
5157 tree src = CALL_EXPR_ARG (exp, 1);
5158 tree len = CALL_EXPR_ARG (exp, 2);
5160 check_memop_access (exp, dest, src, len);
5162 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5163 /*retmode=*/ RETURN_BEGIN, true);
5166 /* Expand a call EXP to the mempcpy builtin.
5167 Return NULL_RTX if we failed; the caller should emit a normal call,
5168 otherwise try to get the result in TARGET, if convenient (and in
5169 mode MODE if that's convenient). */
5171 static rtx
5172 expand_builtin_mempcpy (tree exp, rtx target)
5174 if (!validate_arglist (exp,
5175 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5178 tree dest = CALL_EXPR_ARG (exp, 0);
5179 tree src = CALL_EXPR_ARG (exp, 1);
5180 tree len = CALL_EXPR_ARG (exp, 2);
5182 /* Policy does not generally allow using compute_objsize (which
5183 is used internally by check_memop_access) to change code generation
5184 or drive optimization decisions.
5186 In this instance it is safe because the code we generate has
5187 the same semantics regardless of the return value of
5188 check_memop_access. Exactly the same amount of data is copied
5189 and the return value is exactly the same in both cases.
5191 Furthermore, check_memop_access always uses mode 0 for the call to
5192 compute_objsize, so the imprecise nature of compute_objsize is
5193 avoided. */
5195 /* Avoid expanding mempcpy into memcpy when the call is determined
5196 to overflow the buffer. This also prevents the same overflow
5197 from being diagnosed again when expanding memcpy. */
5198 if (!check_memop_access (exp, dest, src, len))
5199 return NULL_RTX;
5201 return expand_builtin_mempcpy_args (dest, src, len,
5202 target, exp, /*retmode=*/ RETURN_END);
5205 /* Helper function to do the actual work of expanding the memory copy
5206 family of functions (memcpy, mempcpy, stpcpy). The expansion should
5207 assign LEN bytes of memory from SRC to DEST and assign the result to
5208 TARGET if convenient. The return value is based on the RETMODE argument. */
5210 static rtx
5211 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5212 rtx target, tree exp, memop_ret retmode,
5213 bool might_overlap)
5215 unsigned int src_align = get_pointer_alignment (src);
5216 unsigned int dest_align = get_pointer_alignment (dest);
5217 rtx dest_mem, src_mem, dest_addr, len_rtx;
5218 HOST_WIDE_INT expected_size = -1;
5219 unsigned int expected_align = 0;
5220 unsigned HOST_WIDE_INT min_size;
5221 unsigned HOST_WIDE_INT max_size;
5222 unsigned HOST_WIDE_INT probable_max_size;
5224 bool is_move_done;
5226 /* If DEST is not a pointer type, call the normal function. */
5227 if (dest_align == 0)
5228 return NULL_RTX;
5230 /* Likewise, if SRC is not a pointer type, don't do this
5231 operation in-line. */
5232 if (src_align == 0)
5233 return NULL_RTX;
5235 if (currently_expanding_gimple_stmt)
5236 stringop_block_profile (currently_expanding_gimple_stmt,
5237 &expected_align, &expected_size);
5239 if (expected_align < dest_align)
5240 expected_align = dest_align;
5241 dest_mem = get_memory_rtx (dest, len);
5242 set_mem_align (dest_mem, dest_align);
5243 len_rtx = expand_normal (len);
5244 determine_block_size (len, len_rtx, &min_size, &max_size,
5245 &probable_max_size);
5247 /* Try to get the byte representation of the constant SRC points to,
5248 with its byte size in NBYTES. */
5249 unsigned HOST_WIDE_INT nbytes;
5250 const char *rep = getbyterep (src, &nbytes);
5252 /* If the function's constant bound LEN_RTX is less than or equal
5253 to the byte size of the representation of the constant argument,
5254 and if block move would be done by pieces, we can avoid loading
5255 the bytes from memory and only store the computed constant.
5256 This works in the overlap (memmove) case as well because
5257 store_by_pieces just generates a series of stores of constants
5258 from the representation returned by getbyterep(). */
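/* E.g., for memcpy (d, "abcd", 4) the four bytes are emitted as
   immediate stores instead of being loaded from the string constant
   in memory, assuming the target can store by pieces at this
   alignment. */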
5259 if (rep
5260 && CONST_INT_P (len_rtx)
5261 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
5262 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
5263 CONST_CAST (char *, rep),
5264 dest_align, false))
5266 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
5267 builtin_memcpy_read_str,
5268 CONST_CAST (char *, rep),
5269 dest_align, false, retmode);
5270 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5271 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5272 return dest_mem;
5275 src_mem = get_memory_rtx (src, len);
5276 set_mem_align (src_mem, src_align);
5278 /* Copy word part most expediently. */
5279 enum block_op_methods method = BLOCK_OP_NORMAL;
5280 if (CALL_EXPR_TAILCALL (exp)
5281 && (retmode == RETURN_BEGIN || target == const0_rtx))
5282 method = BLOCK_OP_TAILCALL;
5283 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
5284 && retmode == RETURN_END
5285 && !might_overlap
5286 && target != const0_rtx);
5287 if (use_mempcpy_call)
5288 method = BLOCK_OP_NO_LIBCALL_RET;
5289 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
5290 expected_align, expected_size,
5291 min_size, max_size, probable_max_size,
5292 use_mempcpy_call, &is_move_done,
5293 might_overlap);
5295 /* Bail out when a mempcpy call would be expanded as a libcall and
5296 the target provides a fast implementation of the mempcpy
5297 routine. */
5298 if (!is_move_done)
5299 return NULL_RTX;
5301 if (dest_addr == pc_rtx)
5302 return NULL_RTX;
5304 if (dest_addr == 0)
5306 dest_addr = force_operand (XEXP (dest_mem, 0), target);
5307 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5310 if (retmode != RETURN_BEGIN && target != const0_rtx)
5312 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
5313 /* stpcpy returns a pointer to the last byte. */
5314 if (retmode == RETURN_END_MINUS_ONE)
5315 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
5318 return dest_addr;
5321 static rtx
5322 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
5323 rtx target, tree orig_exp, memop_ret retmode)
5325 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
5326 retmode, false);
5329 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
5330 we failed; the caller should emit a normal call. Otherwise try to
5331 get the result in TARGET, if convenient.
5332 The return value is based on the RETMODE argument. */
5334 static rtx
5335 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
5337 class expand_operand ops[3];
5338 rtx dest_mem;
5339 rtx src_mem;
5341 if (!targetm.have_movstr ())
5342 return NULL_RTX;
5344 dest_mem = get_memory_rtx (dest, NULL);
5345 src_mem = get_memory_rtx (src, NULL);
5346 if (retmode == RETURN_BEGIN)
5348 target = force_reg (Pmode, XEXP (dest_mem, 0));
5349 dest_mem = replace_equiv_address (dest_mem, target);
5352 create_output_operand (&ops[0],
5353 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
5354 create_fixed_operand (&ops[1], dest_mem);
5355 create_fixed_operand (&ops[2], src_mem);
5356 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
5357 return NULL_RTX;
5359 if (retmode != RETURN_BEGIN && target != const0_rtx)
5361 target = ops[0].value;
5362 /* movstr is supposed to set end to the address of the NUL
5363 terminator. If the caller requested a mempcpy-like return value,
5364 adjust it. */
5365 if (retmode == RETURN_END)
5367 rtx tem = plus_constant (GET_MODE (target),
5368 gen_lowpart (GET_MODE (target), target), 1);
5369 emit_move_insn (target, force_operand (tem, NULL_RTX));
5372 return target;
5375 /* Do some very basic size validation of a call to the strcat builtin
5376 given by EXP. Return NULL_RTX to have the built-in expand to a call
5377 to the library function. */
5379 static rtx
5380 expand_builtin_strcat (tree exp)
5382 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
5383 || !warn_stringop_overflow)
5384 return NULL_RTX;
5386 tree dest = CALL_EXPR_ARG (exp, 0);
5387 tree src = CALL_EXPR_ARG (exp, 1);
5389 /* There is no way here to determine the length of the string in
5390 the destination to which the SRC string is being appended so
5391 just diagnose cases when the source string is longer than
5392 the destination object. */
5393 access_data data (exp, access_read_write, NULL_TREE, true,
5394 NULL_TREE, true);
5395 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5396 compute_objsize (src, ost, &data.src);
5397 tree destsize = compute_objsize (dest, ost, &data.dst);
5399 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5400 src, destsize, data.mode, &data);
5402 return NULL_RTX;
5405 /* Expand expression EXP, which is a call to the strcpy builtin. Return
5406 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
5407 try to get the result in TARGET, if convenient (and in mode MODE if that's
5408 convenient). */
5410 static rtx
5411 expand_builtin_strcpy (tree exp, rtx target)
5413 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5414 return NULL_RTX;
5416 tree dest = CALL_EXPR_ARG (exp, 0);
5417 tree src = CALL_EXPR_ARG (exp, 1);
5419 if (warn_stringop_overflow)
5421 access_data data (exp, access_read_write, NULL_TREE, true,
5422 NULL_TREE, true);
5423 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5424 compute_objsize (src, ost, &data.src);
5425 tree dstsize = compute_objsize (dest, ost, &data.dst);
5426 check_access (exp, /*dstwrite=*/ NULL_TREE,
5427 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
5428 dstsize, data.mode, &data);
5431 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
5433 /* Check to see if the argument was declared attribute nonstring
5434 and if so, issue a warning since at this point it's not known
5435 to be nul-terminated. */
5436 tree fndecl = get_callee_fndecl (exp);
5437 maybe_warn_nonstring_arg (fndecl, exp);
5438 return ret;
5441 return NULL_RTX;
5444 /* Helper function to do the actual work for expand_builtin_strcpy. The
5445 arguments to the builtin_strcpy call DEST and SRC are broken out
5446 so that this can also be called without constructing an actual CALL_EXPR.
5447 The other arguments and return value are the same as for
5448 expand_builtin_strcpy. */
5450 static rtx
5451 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5453 /* Detect strcpy calls with unterminated arrays. */
5454 tree size;
5455 bool exact;
5456 if (tree nonstr = unterminated_array (src, &size, &exact))
5458 /* NONSTR refers to the non-nul terminated constant array. */
5459 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
5460 size, exact);
5461 return NULL_RTX;
5464 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
5467 /* Expand a call EXP to the stpcpy builtin.
5468 Return NULL_RTX if we failed; the caller should emit a normal call.
5469 Otherwise try to get the result in TARGET, if convenient (and in
5470 mode MODE if that's convenient). */
5472 static rtx
5473 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
5475 tree dst, src;
5476 location_t loc = EXPR_LOCATION (exp);
5478 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5479 return NULL_RTX;
5481 dst = CALL_EXPR_ARG (exp, 0);
5482 src = CALL_EXPR_ARG (exp, 1);
5484 if (warn_stringop_overflow)
5486 access_data data (exp, access_read_write);
5487 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
5488 &data.dst);
5489 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5490 src, destsize, data.mode, &data);
5493 /* If the return value is ignored, transform stpcpy into strcpy. */
5494 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
5496 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
5497 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
5498 return expand_expr (result, target, mode, EXPAND_NORMAL);
5500 else
5502 tree len, lenp1;
5503 rtx ret;
5505 /* Ensure we get an actual string whose length can be evaluated at
5506 compile-time, not an expression containing a string. This is
5507 because the latter will potentially produce pessimized code
5508 when used to produce the return value. */
5509 c_strlen_data lendata = { };
5510 if (!c_getstr (src)
5511 || !(len = c_strlen (src, 0, &lendata, 1)))
5512 return expand_movstr (dst, src, target,
5513 /*retmode=*/ RETURN_END_MINUS_ONE);
5515 if (lendata.decl)
5516 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
5518 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
5519 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
5520 target, exp,
5521 /*retmode=*/ RETURN_END_MINUS_ONE);
5523 if (ret)
5524 return ret;
5526 if (TREE_CODE (len) == INTEGER_CST)
5528 rtx len_rtx = expand_normal (len);
5530 if (CONST_INT_P (len_rtx))
5532 ret = expand_builtin_strcpy_args (exp, dst, src, target);
5534 if (ret)
5536 if (! target)
5538 if (mode != VOIDmode)
5539 target = gen_reg_rtx (mode);
5540 else
5541 target = gen_reg_rtx (GET_MODE (ret));
5543 if (GET_MODE (target) != GET_MODE (ret))
5544 ret = gen_lowpart (GET_MODE (target), ret);
5546 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
5547 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
5548 gcc_assert (ret);
5550 return target;
5555 return expand_movstr (dst, src, target,
5556 /*retmode=*/ RETURN_END_MINUS_ONE);
5560 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
5561 arguments while being careful to avoid duplicate warnings (which could
5562 be issued if the expander were to expand the call, resulting in it
5563 being emitted in expand_call()). */
5565 static rtx
5566 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
5568 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
5570 /* The call has been successfully expanded. Check for nonstring
5571 arguments and issue warnings as appropriate. */
5572 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
5573 return ret;
5576 return NULL_RTX;
5579 /* Check a call EXP to the stpncpy built-in for validity.
5580 Return NULL_RTX on both success and failure. */
5582 static rtx
5583 expand_builtin_stpncpy (tree exp, rtx)
5585 if (!validate_arglist (exp,
5586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5587 || !warn_stringop_overflow)
5588 return NULL_RTX;
5590 /* The source and destination of the call. */
5591 tree dest = CALL_EXPR_ARG (exp, 0);
5592 tree src = CALL_EXPR_ARG (exp, 1);
5594 /* The exact number of bytes to write (not the maximum). */
5595 tree len = CALL_EXPR_ARG (exp, 2);
5596 access_data data (exp, access_read_write);
5597 /* The size of the destination object. */
5598 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5599 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
5600 return NULL_RTX;
5603 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5604 bytes from constant string DATA + OFFSET and return it as target
5605 constant. */
5607 static rtx
5608 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
5609 scalar_int_mode mode)
5611 const char *str = (const char *) data;
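/* Reads that start past the terminating nul yield an all-zero word;
   this supplies the trailing zero padding that expand_builtin_strncpy
   relies on below.  */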
5613 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
5614 return const0_rtx;
5616 return c_readstr (str + offset, mode);
5619 /* Helper to check the sizes of sequences and the destination of calls
5620 to __builtin_strncat and __builtin___strncat_chk. Returns true on
5621 success (no overflow or invalid sizes), false otherwise. */
5623 static bool
5624 check_strncat_sizes (tree exp, tree objsize)
5626 tree dest = CALL_EXPR_ARG (exp, 0);
5627 tree src = CALL_EXPR_ARG (exp, 1);
5628 tree maxread = CALL_EXPR_ARG (exp, 2);
5630 /* Try to determine the range of lengths that the source expression
5631 refers to. */
5632 c_strlen_data lendata = { };
5633 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5635 /* Try to verify that the destination is big enough for the shortest
5636 string. */
5638 access_data data (exp, access_read_write, maxread, true);
5639 if (!objsize && warn_stringop_overflow)
5641 /* If it hasn't been provided by __strncat_chk, try to determine
5642 the size of the destination object into which the source is
5643 being copied. */
5644 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5647 /* Add one for the terminating nul. */
5648 tree srclen = (lendata.minlen
5649 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
5650 size_one_node)
5651 : NULL_TREE);
5653 /* The strncat function copies at most MAXREAD bytes and always appends
5654 the terminating nul so the specified upper bound should never be equal
5655 to (or greater than) the size of the destination. */
5656 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
5657 && tree_int_cst_equal (objsize, maxread))
5659 location_t loc = tree_nonartificial_location (exp);
5660 loc = expansion_point_location_if_in_system_header (loc);
5662 warning_at (loc, OPT_Wstringop_overflow_,
5663 "%K%qD specified bound %E equals destination size",
5664 exp, get_callee_fndecl (exp), maxread);
5666 return false;
5669 if (!srclen
5670 || (maxread && tree_fits_uhwi_p (maxread)
5671 && tree_fits_uhwi_p (srclen)
5672 && tree_int_cst_lt (maxread, srclen)))
5673 srclen = maxread;
5675 /* The number of bytes to write is LEN but check_access will also
5676 check SRCLEN if LEN's value isn't known. */
5677 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5678 objsize, data.mode, &data);
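/* A sketch of the pattern diagnosed above, assuming a destination D and
   an arbitrary string S:
     char d[8];
     strncat (d, s, sizeof d);
   The bound equals the destination size, yet strncat always appends a
   terminating nul beyond the bytes it copies, so the safe idiom is to
   pass sizeof d - strlen (d) - 1 instead.  */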
5681 /* Similar to expand_builtin_strcat, do some very basic size validation
5682 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
5683 the built-in expand to a call to the library function. */
5685 static rtx
5686 expand_builtin_strncat (tree exp, rtx)
5688 if (!validate_arglist (exp,
5689 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5690 || !warn_stringop_overflow)
5691 return NULL_RTX;
5693 tree dest = CALL_EXPR_ARG (exp, 0);
5694 tree src = CALL_EXPR_ARG (exp, 1);
5695 /* The upper bound on the number of bytes to write. */
5696 tree maxread = CALL_EXPR_ARG (exp, 2);
5698 /* Detect unterminated source (only). */
5699 if (!check_nul_terminated_array (exp, src, maxread))
5700 return NULL_RTX;
5702 /* The length of the source sequence. */
5703 tree slen = c_strlen (src, 1);
5705 /* Try to determine the range of lengths that the source expression
5706 refers to. Since the lengths are only used for warning and not
5707 for code generation disable strict mode below. */
5708 tree maxlen = slen;
5709 if (!maxlen)
5711 c_strlen_data lendata = { };
5712 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5713 maxlen = lendata.maxbound;
5716 access_data data (exp, access_read_write);
5717 /* Try to verify that the destination is big enough for the shortest
5718 string. First try to determine the size of the destination object
5719 into which the source is being copied. */
5720 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5722 /* Add one for the terminating nul. */
5723 tree srclen = (maxlen
5724 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
5725 size_one_node)
5726 : NULL_TREE);
5728 /* The strncat function copies at most MAXREAD bytes and always appends
5729 the terminating nul so the specified upper bound should never be equal
5730 to (or greater than) the size of the destination. */
5731 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
5732 && tree_int_cst_equal (destsize, maxread))
5734 location_t loc = tree_nonartificial_location (exp);
5735 loc = expansion_point_location_if_in_system_header (loc);
5737 warning_at (loc, OPT_Wstringop_overflow_,
5738 "%K%qD specified bound %E equals destination size",
5739 exp, get_callee_fndecl (exp), maxread);
5741 return NULL_RTX;
5744 if (!srclen
5745 || (maxread && tree_fits_uhwi_p (maxread)
5746 && tree_fits_uhwi_p (srclen)
5747 && tree_int_cst_lt (maxread, srclen)))
5748 srclen = maxread;
5750 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5751 destsize, data.mode, &data);
5752 return NULL_RTX;
5755 /* Expand expression EXP, which is a call to the strncpy builtin. Return
5756 NULL_RTX if we failed; the caller should emit a normal call. */
5758 static rtx
5759 expand_builtin_strncpy (tree exp, rtx target)
5761 location_t loc = EXPR_LOCATION (exp);
5763 if (!validate_arglist (exp,
5764 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5765 return NULL_RTX;
5766 tree dest = CALL_EXPR_ARG (exp, 0);
5767 tree src = CALL_EXPR_ARG (exp, 1);
5768 /* The number of bytes to write (not the maximum). */
5769 tree len = CALL_EXPR_ARG (exp, 2);
5771 /* The length of the source sequence. */
5772 tree slen = c_strlen (src, 1);
5774 if (warn_stringop_overflow)
5776 access_data data (exp, access_read_write, len, true, len, true);
5777 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5778 compute_objsize (src, ost, &data.src);
5779 tree dstsize = compute_objsize (dest, ost, &data.dst);
5780 /* The number of bytes to write is LEN but check_access will also
5781 check SLEN if LEN's value isn't known. */
5782 check_access (exp, /*dstwrite=*/len,
5783 /*maxread=*/len, src, dstsize, data.mode, &data);
5786 /* We must be passed a constant len and src parameter. */
5787 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5788 return NULL_RTX;
5790 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
5792 /* We're required to pad with trailing zeros if the requested
5793 len is greater than strlen(s2)+1. In that case try to
5794 use store_by_pieces; if that fails, punt.
5795 if (tree_int_cst_lt (slen, len))
5797 unsigned int dest_align = get_pointer_alignment (dest);
5798 const char *p = c_getstr (src);
5799 rtx dest_mem;
5801 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5802 || !can_store_by_pieces (tree_to_uhwi (len),
5803 builtin_strncpy_read_str,
5804 CONST_CAST (char *, p),
5805 dest_align, false))
5806 return NULL_RTX;
5808 dest_mem = get_memory_rtx (dest, len);
5809 store_by_pieces (dest_mem, tree_to_uhwi (len),
5810 builtin_strncpy_read_str,
5811 CONST_CAST (char *, p), dest_align, false,
5812 RETURN_BEGIN);
5813 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5814 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5815 return dest_mem;
5818 return NULL_RTX;
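/* For example, strncpy (d, "ab", 5) with a constant source takes the
   store_by_pieces path above: SLEN is 3 (including the nul), LEN is 5,
   and the expansion stores 'a', 'b' and three trailing nul bytes
   without emitting a library call.  */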
5821 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5822 bytes from constant string DATA + OFFSET and return it as target
5823 constant. */
5825 static rtx
5826 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5827 scalar_int_mode mode)
5829 const char *c = (const char *) data;
5830 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5832 memset (p, *c, GET_MODE_SIZE (mode));
5834 return c_readstr (p, mode);
5837 /* Callback routine for store_by_pieces. Return the RTL of a register
5838 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5839 char value given in the RTL register data. For example, if mode is
5840 4 bytes wide, return the RTL for 0x01010101*data. */
5842 static rtx
5843 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5844 scalar_int_mode mode)
5846 rtx target, coeff;
5847 size_t size;
5848 char *p;
5850 size = GET_MODE_SIZE (mode);
5851 if (size == 1)
5852 return (rtx) data;
5854 p = XALLOCAVEC (char, size);
5855 memset (p, 1, size);
5856 coeff = c_readstr (p, mode);
5858 target = convert_to_mode (mode, (rtx) data, 1);
5859 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5860 return force_reg (mode, target);
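/* A worked example of the broadcast above: for a 4-byte MODE with DATA
   holding the byte value 0xAB, COEFF is 0x01010101 and the multiply
   yields 0xAB * 0x01010101 == 0xABABABAB, i.e. the byte replicated into
   every position of the word.  */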
5863 /* Expand expression EXP, which is a call to the memset builtin. Return
5864 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
5865 try to get the result in TARGET, if convenient (and in mode MODE if that's
5866 convenient). */
5868 static rtx
5869 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5871 if (!validate_arglist (exp,
5872 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5873 return NULL_RTX;
5875 tree dest = CALL_EXPR_ARG (exp, 0);
5876 tree val = CALL_EXPR_ARG (exp, 1);
5877 tree len = CALL_EXPR_ARG (exp, 2);
5879 check_memop_access (exp, dest, NULL_TREE, len);
5881 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5884 /* Helper function to do the actual work for expand_builtin_memset. The
5885 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5886 so that this can also be called without constructing an actual CALL_EXPR.
5887 The other arguments and return value are the same as for
5888 expand_builtin_memset. */
5890 static rtx
5891 expand_builtin_memset_args (tree dest, tree val, tree len,
5892 rtx target, machine_mode mode, tree orig_exp)
5894 tree fndecl, fn;
5895 enum built_in_function fcode;
5896 machine_mode val_mode;
5897 char c;
5898 unsigned int dest_align;
5899 rtx dest_mem, dest_addr, len_rtx;
5900 HOST_WIDE_INT expected_size = -1;
5901 unsigned int expected_align = 0;
5902 unsigned HOST_WIDE_INT min_size;
5903 unsigned HOST_WIDE_INT max_size;
5904 unsigned HOST_WIDE_INT probable_max_size;
5906 dest_align = get_pointer_alignment (dest);
5908 /* If DEST is not a pointer type, don't do this operation in-line. */
5909 if (dest_align == 0)
5910 return NULL_RTX;
5912 if (currently_expanding_gimple_stmt)
5913 stringop_block_profile (currently_expanding_gimple_stmt,
5914 &expected_align, &expected_size);
5916 if (expected_align < dest_align)
5917 expected_align = dest_align;
5919 /* If the LEN parameter is zero, return DEST. */
5920 if (integer_zerop (len))
5922 /* Evaluate and ignore VAL in case it has side-effects. */
5923 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5924 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5927 /* Stabilize the arguments in case we fail. */
5928 dest = builtin_save_expr (dest);
5929 val = builtin_save_expr (val);
5930 len = builtin_save_expr (len);
5932 len_rtx = expand_normal (len);
5933 determine_block_size (len, len_rtx, &min_size, &max_size,
5934 &probable_max_size);
5935 dest_mem = get_memory_rtx (dest, len);
5936 val_mode = TYPE_MODE (unsigned_char_type_node);
5938 if (TREE_CODE (val) != INTEGER_CST)
5940 rtx val_rtx;
5942 val_rtx = expand_normal (val);
5943 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5945 /* Assume that we can memset by pieces if we can store
5946 the coefficients by pieces (in the required modes).
5947 We can't pass builtin_memset_gen_str as that emits RTL. */
5948 c = 1;
5949 if (tree_fits_uhwi_p (len)
5950 && can_store_by_pieces (tree_to_uhwi (len),
5951 builtin_memset_read_str, &c, dest_align,
5952 true))
5954 val_rtx = force_reg (val_mode, val_rtx);
5955 store_by_pieces (dest_mem, tree_to_uhwi (len),
5956 builtin_memset_gen_str, val_rtx, dest_align,
5957 true, RETURN_BEGIN);
5959 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5960 dest_align, expected_align,
5961 expected_size, min_size, max_size,
5962 probable_max_size))
5963 goto do_libcall;
5965 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5966 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5967 return dest_mem;
5970 if (target_char_cast (val, &c))
5971 goto do_libcall;
5973 if (c)
5975 if (tree_fits_uhwi_p (len)
5976 && can_store_by_pieces (tree_to_uhwi (len),
5977 builtin_memset_read_str, &c, dest_align,
5978 true))
5979 store_by_pieces (dest_mem, tree_to_uhwi (len),
5980 builtin_memset_read_str, &c, dest_align, true,
5981 RETURN_BEGIN);
5982 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5983 gen_int_mode (c, val_mode),
5984 dest_align, expected_align,
5985 expected_size, min_size, max_size,
5986 probable_max_size))
5987 goto do_libcall;
5989 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5990 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5991 return dest_mem;
5994 set_mem_align (dest_mem, dest_align);
5995 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5996 CALL_EXPR_TAILCALL (orig_exp)
5997 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5998 expected_align, expected_size,
5999 min_size, max_size,
6000 probable_max_size);
6002 if (dest_addr == 0)
6004 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6005 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6008 return dest_addr;
6010 do_libcall:
6011 fndecl = get_callee_fndecl (orig_exp);
6012 fcode = DECL_FUNCTION_CODE (fndecl);
6013 if (fcode == BUILT_IN_MEMSET)
6014 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6015 dest, val, len);
6016 else if (fcode == BUILT_IN_BZERO)
6017 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6018 dest, len);
6019 else
6020 gcc_unreachable ();
6021 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6022 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6023 return expand_call (fn, target, target == const0_rtx);
6026 /* Expand expression EXP, which is a call to the bzero builtin. Return
6027 NULL_RTX if we failed; the caller should emit a normal call. */
6029 static rtx
6030 expand_builtin_bzero (tree exp)
6032 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6033 return NULL_RTX;
6035 tree dest = CALL_EXPR_ARG (exp, 0);
6036 tree size = CALL_EXPR_ARG (exp, 1);
6038 check_memop_access (exp, dest, NULL_TREE, size);
6040 /* New argument list transforming bzero(ptr x, int y) to
6041 memset(ptr x, int 0, size_t y). This is done this way
6042 so that if it isn't expanded inline, we fall back to
6043 calling bzero instead of memset. */
6045 location_t loc = EXPR_LOCATION (exp);
6047 return expand_builtin_memset_args (dest, integer_zero_node,
6048 fold_convert_loc (loc,
6049 size_type_node, size),
6050 const0_rtx, VOIDmode, exp);
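/* A minimal sketch of the rewrite performed above:
     bzero (p, n)  ==>  memset (p, 0, (size_t) n)
   but with EXP passed through as ORIG_EXP, so that if the expansion
   has to fall back to a library call, expand_builtin_memset_args emits
   a call to bzero rather than to memset.  */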
6053 /* Try to expand cmpstr operation ICODE with the given operands.
6054 Return the result rtx on success, otherwise return null. */
6056 static rtx
6057 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6058 HOST_WIDE_INT align)
6060 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6062 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6063 target = NULL_RTX;
6065 class expand_operand ops[4];
6066 create_output_operand (&ops[0], target, insn_mode);
6067 create_fixed_operand (&ops[1], arg1_rtx);
6068 create_fixed_operand (&ops[2], arg2_rtx);
6069 create_integer_operand (&ops[3], align);
6070 if (maybe_expand_insn (icode, 4, ops))
6071 return ops[0].value;
6072 return NULL_RTX;
6075 /* Expand expression EXP, which is a call to the memcmp built-in function.
6076 Return NULL_RTX if we failed and the caller should emit a normal call,
6077 otherwise try to get the result in TARGET, if convenient.
6078 RESULT_EQ is true if we can relax the returned value to be either zero
6079 or nonzero, without caring about the sign. */
6081 static rtx
6082 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
6084 if (!validate_arglist (exp,
6085 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6086 return NULL_RTX;
6088 tree arg1 = CALL_EXPR_ARG (exp, 0);
6089 tree arg2 = CALL_EXPR_ARG (exp, 1);
6090 tree len = CALL_EXPR_ARG (exp, 2);
6092 /* Diagnose calls where the specified length exceeds the size of either
6093 object. */
6094 if (!check_read_access (exp, arg1, len, 0)
6095 || !check_read_access (exp, arg2, len, 0))
6096 return NULL_RTX;
6098 /* Due to the performance benefit, always inline the calls first
6099 when result_eq is false. */
6100 rtx result = NULL_RTX;
6101 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
6102 if (!result_eq && fcode != BUILT_IN_BCMP)
6104 result = inline_expand_builtin_bytecmp (exp, target);
6105 if (result)
6106 return result;
6109 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6110 location_t loc = EXPR_LOCATION (exp);
6112 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6113 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6115 /* If we don't have POINTER_TYPE, call the function. */
6116 if (arg1_align == 0 || arg2_align == 0)
6117 return NULL_RTX;
6119 rtx arg1_rtx = get_memory_rtx (arg1, len);
6120 rtx arg2_rtx = get_memory_rtx (arg2, len);
6121 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
6123 /* Set MEM_SIZE as appropriate. */
6124 if (CONST_INT_P (len_rtx))
6126 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6127 set_mem_size (arg2_rtx, INTVAL (len_rtx));
6130 by_pieces_constfn constfn = NULL;
6132 /* Try to get the byte representation of the constant ARG2 (or, only
6133 when the function's result is used for equality to zero, ARG1)
6134 points to, with its byte size in NBYTES. */
6135 unsigned HOST_WIDE_INT nbytes;
6136 const char *rep = getbyterep (arg2, &nbytes);
6137 if (result_eq && rep == NULL)
6139 /* For equality to zero the arguments are interchangeable. */
6140 rep = getbyterep (arg1, &nbytes);
6141 if (rep != NULL)
6142 std::swap (arg1_rtx, arg2_rtx);
6145 /* If the function's constant bound LEN_RTX is less than or equal
6146 to the byte size of the representation of the constant argument,
6147 and if block move would be done by pieces, we can avoid loading
6148 the bytes from memory and only store the computed constant result. */
6149 if (rep
6150 && CONST_INT_P (len_rtx)
6151 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
6152 constfn = builtin_memcpy_read_str;
6154 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6155 TREE_TYPE (len), target,
6156 result_eq, constfn,
6157 CONST_CAST (char *, rep));
6159 if (result)
6161 /* Return the value in the proper mode for this function. */
6162 if (GET_MODE (result) == mode)
6163 return result;
6165 if (target != 0)
6167 convert_move (target, result, 0);
6168 return target;
6171 return convert_to_mode (mode, result, 0);
6174 return NULL_RTX;
6177 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
6178 if we failed; the caller should emit a normal call. Otherwise try to get
6179 the result in TARGET, if convenient. */
6181 static rtx
6182 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
6184 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6185 return NULL_RTX;
6187 tree arg1 = CALL_EXPR_ARG (exp, 0);
6188 tree arg2 = CALL_EXPR_ARG (exp, 1);
6190 if (!check_read_access (exp, arg1)
6191 || !check_read_access (exp, arg2))
6192 return NULL_RTX;
6194 /* Due to the performance benefit, always inline the calls first. */
6195 rtx result = NULL_RTX;
6196 result = inline_expand_builtin_bytecmp (exp, target);
6197 if (result)
6198 return result;
6200 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
6201 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6202 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
6203 return NULL_RTX;
6205 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6206 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6208 /* If we don't have POINTER_TYPE, call the function. */
6209 if (arg1_align == 0 || arg2_align == 0)
6210 return NULL_RTX;
6212 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
6213 arg1 = builtin_save_expr (arg1);
6214 arg2 = builtin_save_expr (arg2);
6216 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
6217 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
6219 /* Try to call cmpstrsi. */
6220 if (cmpstr_icode != CODE_FOR_nothing)
6221 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
6222 MIN (arg1_align, arg2_align));
6224 /* Try to determine at least one length and call cmpstrnsi. */
6225 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6227 tree len;
6228 rtx arg3_rtx;
6230 tree len1 = c_strlen (arg1, 1);
6231 tree len2 = c_strlen (arg2, 1);
6233 if (len1)
6234 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
6235 if (len2)
6236 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
6238 /* If we don't have a constant length for the first, use the length
6239 of the second, if we know it. We don't require a constant for
6240 this case; some cost analysis could be done if both are available
6241 but neither is constant. For now, assume they're equally cheap,
6242 unless one has side effects. If both strings have constant lengths,
6243 use the smaller. */
6245 if (!len1)
6246 len = len2;
6247 else if (!len2)
6248 len = len1;
6249 else if (TREE_SIDE_EFFECTS (len1))
6250 len = len2;
6251 else if (TREE_SIDE_EFFECTS (len2))
6252 len = len1;
6253 else if (TREE_CODE (len1) != INTEGER_CST)
6254 len = len2;
6255 else if (TREE_CODE (len2) != INTEGER_CST)
6256 len = len1;
6257 else if (tree_int_cst_lt (len1, len2))
6258 len = len1;
6259 else
6260 len = len2;
6262 /* If both arguments have side effects, we cannot optimize. */
6263 if (len && !TREE_SIDE_EFFECTS (len))
6265 arg3_rtx = expand_normal (len);
6266 result = expand_cmpstrn_or_cmpmem
6267 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
6268 arg3_rtx, MIN (arg1_align, arg2_align));
6272 tree fndecl = get_callee_fndecl (exp);
6273 if (result)
6275 /* Check to see if the argument was declared attribute nonstring
6276 and if so, issue a warning since at this point it's not known
6277 to be nul-terminated. */
6278 maybe_warn_nonstring_arg (fndecl, exp);
6280 /* Return the value in the proper mode for this function. */
6281 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6282 if (GET_MODE (result) == mode)
6283 return result;
6284 if (target == 0)
6285 return convert_to_mode (mode, result, 0);
6286 convert_move (target, result, 0);
6287 return target;
6290 /* Expand the library call ourselves using a stabilized argument
6291 list to avoid re-evaluating the function's arguments twice. */
6292 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
6293 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6294 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6295 return expand_call (fn, target, target == const0_rtx);
6298 /* Expand expression EXP, which is a call to the strncmp builtin. Return
6299 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
6300 try to get the result in TARGET, if convenient. */
6302 static rtx
6303 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
6304 ATTRIBUTE_UNUSED machine_mode mode)
6306 if (!validate_arglist (exp,
6307 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6308 return NULL_RTX;
6310 tree arg1 = CALL_EXPR_ARG (exp, 0);
6311 tree arg2 = CALL_EXPR_ARG (exp, 1);
6312 tree arg3 = CALL_EXPR_ARG (exp, 2);
6314 if (!check_nul_terminated_array (exp, arg1, arg3)
6315 || !check_nul_terminated_array (exp, arg2, arg3))
6316 return NULL_RTX;
6318 location_t loc = tree_nonartificial_location (exp);
6319 loc = expansion_point_location_if_in_system_header (loc);
6321 tree len1 = c_strlen (arg1, 1);
6322 tree len2 = c_strlen (arg2, 1);
6324 if (!len1 || !len2)
6326 /* Check to see if the argument was declared attribute nonstring
6327 and if so, issue a warning since at this point it's not known
6328 to be nul-terminated. */
6329 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
6330 && !len1 && !len2)
6332 /* A strncmp read is constrained not just by the bound but
6333 also by the length of the shorter string. Specifying
6334 a bound that's larger than the size of either array makes
6335 no sense and is likely a bug. When the length of neither
6336 of the two strings is known but the sizes of both of
6337 the arrays they are stored in are, issue a warning if
6338 the bound is larger than the size of the larger
6339 of the two arrays. */
6341 access_ref ref1 (arg3, true);
6342 access_ref ref2 (arg3, true);
6344 tree bndrng[2] = { NULL_TREE, NULL_TREE };
6345 get_size_range (arg3, bndrng, ref1.bndrng);
6347 tree size1 = compute_objsize (arg1, 1, &ref1);
6348 tree size2 = compute_objsize (arg2, 1, &ref2);
6349 tree func = get_callee_fndecl (exp);
6351 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
6353 offset_int rem1 = ref1.size_remaining ();
6354 offset_int rem2 = ref2.size_remaining ();
6355 if (rem1 == 0 || rem2 == 0)
6356 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
6357 bndrng, integer_zero_node);
6358 else
6360 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
6361 if (maxrem < wi::to_offset (bndrng[0]))
6362 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
6363 func, bndrng,
6364 wide_int_to_tree (sizetype, maxrem));
6367 else if (bndrng[0]
6368 && !integer_zerop (bndrng[0])
6369 && ((size1 && integer_zerop (size1))
6370 || (size2 && integer_zerop (size2))))
6371 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
6372 bndrng, integer_zero_node);
6376 /* Due to the performance benefit, always inline the calls first. */
6377 rtx result = NULL_RTX;
6378 result = inline_expand_builtin_bytecmp (exp, target);
6379 if (result)
6380 return result;
6382 /* If c_strlen can determine an expression for one of the string
6383 lengths, and it doesn't have side effects, then emit cmpstrnsi
6384 using length MIN(strlen(string)+1, arg3). */
6385 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6386 if (cmpstrn_icode == CODE_FOR_nothing)
6387 return NULL_RTX;
6389 tree len;
6391 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6392 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6394 if (len1)
6395 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
6396 if (len2)
6397 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
6399 tree len3 = fold_convert_loc (loc, sizetype, arg3);
6401 /* If we don't have a constant length for the first, use the length
6402 of the second, if we know it. If neither string is constant length,
6403 use the given length argument. We don't require a constant for
6404 this case; some cost analysis could be done if both are available
6405 but neither is constant. For now, assume they're equally cheap,
6406 unless one has side effects. If both strings have constant lengths,
6407 use the smaller. */
6409 if (!len1 && !len2)
6410 len = len3;
6411 else if (!len1)
6412 len = len2;
6413 else if (!len2)
6414 len = len1;
6415 else if (TREE_SIDE_EFFECTS (len1))
6416 len = len2;
6417 else if (TREE_SIDE_EFFECTS (len2))
6418 len = len1;
6419 else if (TREE_CODE (len1) != INTEGER_CST)
6420 len = len2;
6421 else if (TREE_CODE (len2) != INTEGER_CST)
6422 len = len1;
6423 else if (tree_int_cst_lt (len1, len2))
6424 len = len1;
6425 else
6426 len = len2;
6428 /* If we are not using the given length, we must incorporate it here.
6429 The actual new length parameter will be MIN(len,arg3) in this case. */
6430 if (len != len3)
6432 len = fold_convert_loc (loc, sizetype, len);
6433 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
6435 rtx arg1_rtx = get_memory_rtx (arg1, len);
6436 rtx arg2_rtx = get_memory_rtx (arg2, len);
6437 rtx arg3_rtx = expand_normal (len);
6438 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
6439 arg2_rtx, TREE_TYPE (len), arg3_rtx,
6440 MIN (arg1_align, arg2_align));
6442 tree fndecl = get_callee_fndecl (exp);
6443 if (result)
6445 /* Return the value in the proper mode for this function. */
6446 mode = TYPE_MODE (TREE_TYPE (exp));
6447 if (GET_MODE (result) == mode)
6448 return result;
6449 if (target == 0)
6450 return convert_to_mode (mode, result, 0);
6451 convert_move (target, result, 0);
6452 return target;
6455 /* Expand the library call ourselves using a stabilized argument
6456 list to avoid re-evaluating the function's arguments twice. */
6457 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
6458 if (TREE_NO_WARNING (exp))
6459 TREE_NO_WARNING (call) = true;
6460 gcc_assert (TREE_CODE (call) == CALL_EXPR);
6461 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
6462 return expand_call (call, target, target == const0_rtx);
6465 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
6466 if that's convenient. */
6468 rtx
6469 expand_builtin_saveregs (void)
6471 rtx val;
6472 rtx_insn *seq;
6474 /* Don't do __builtin_saveregs more than once in a function.
6475 Save the result of the first call and reuse it. */
6476 if (saveregs_value != 0)
6477 return saveregs_value;
6479 /* When this function is called, it means that registers must be
6480 saved on entry to this function. So we migrate the call to the
6481 first insn of this function. */
6483 start_sequence ();
6485 /* Do whatever the machine needs done in this case. */
6486 val = targetm.calls.expand_builtin_saveregs ();
6488 seq = get_insns ();
6489 end_sequence ();
6491 saveregs_value = val;
6493 /* Put the insns after the NOTE that starts the function. If this
6494 is inside a start_sequence, make the outer-level insn chain current, so
6495 the code is placed at the start of the function. */
6496 push_topmost_sequence ();
6497 emit_insn_after (seq, entry_of_function ());
6498 pop_topmost_sequence ();
6500 return val;
6503 /* Expand a call to __builtin_next_arg. */
6505 static rtx
6506 expand_builtin_next_arg (void)
6508 /* Checking arguments is already done in fold_builtin_next_arg
6509 that must be called before this function. */
6510 return expand_binop (ptr_mode, add_optab,
6511 crtl->args.internal_arg_pointer,
6512 crtl->args.arg_offset_rtx,
6513 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6516 /* Make it easier for the backends by protecting the valist argument
6517 from multiple evaluations. */
6519 static tree
6520 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
6522 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
6524 /* The current way of determining the type of valist is completely
6525 bogus. We should have the information on the va builtin instead. */
6526 if (!vatype)
6527 vatype = targetm.fn_abi_va_list (cfun->decl);
6529 if (TREE_CODE (vatype) == ARRAY_TYPE)
6531 if (TREE_SIDE_EFFECTS (valist))
6532 valist = save_expr (valist);
6534 /* For this case, the backends will be expecting a pointer to
6535 vatype, but it's possible we've actually been given an array
6536 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
6537 So fix it. */
6538 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
6540 tree p1 = build_pointer_type (TREE_TYPE (vatype));
6541 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
6544 else
6546 tree pt = build_pointer_type (vatype);
6548 if (! needs_lvalue)
6550 if (! TREE_SIDE_EFFECTS (valist))
6551 return valist;
6553 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
6554 TREE_SIDE_EFFECTS (valist) = 1;
6557 if (TREE_SIDE_EFFECTS (valist))
6558 valist = save_expr (valist);
6559 valist = fold_build2_loc (loc, MEM_REF,
6560 vatype, valist, build_int_cst (pt, 0));
6563 return valist;
6566 /* The "standard" definition of va_list is void*. */
6568 tree
6569 std_build_builtin_va_list (void)
6571 return ptr_type_node;
6574 /* The "standard" abi va_list is va_list_type_node. */
6576 tree
6577 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
6579 return va_list_type_node;
6582 /* The "standard" type of va_list is va_list_type_node. */
6584 tree
6585 std_canonical_va_list_type (tree type)
6587 tree wtype, htype;
6589 wtype = va_list_type_node;
6590 htype = type;
6592 if (TREE_CODE (wtype) == ARRAY_TYPE)
6594 /* If va_list is an array type, the argument may have decayed
6595 to a pointer type, e.g. by being passed to another function.
6596 In that case, unwrap both types so that we can compare the
6597 underlying records. */
6598 if (TREE_CODE (htype) == ARRAY_TYPE
6599 || POINTER_TYPE_P (htype))
6601 wtype = TREE_TYPE (wtype);
6602 htype = TREE_TYPE (htype);
6605 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
6606 return va_list_type_node;
6608 return NULL_TREE;
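/* For instance, on targets where va_list is an array type such as
   struct __va_list_tag[1] (e.g. x86-64), a va_list argument passed to
   another function decays to struct __va_list_tag *; unwrapping both
   types above lets either form match the canonical record.  */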
6611 /* The "standard" implementation of va_start: just assign `nextarg' to
6612 the variable. */
6614 void
6615 std_expand_builtin_va_start (tree valist, rtx nextarg)
6617 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
6618 convert_move (va_r, nextarg, 0);
6621 /* Expand EXP, a call to __builtin_va_start. */
6623 static rtx
6624 expand_builtin_va_start (tree exp)
6626 rtx nextarg;
6627 tree valist;
6628 location_t loc = EXPR_LOCATION (exp);
6630 if (call_expr_nargs (exp) < 2)
6632 error_at (loc, "too few arguments to function %<va_start%>");
6633 return const0_rtx;
6636 if (fold_builtin_next_arg (exp, true))
6637 return const0_rtx;
6639 nextarg = expand_builtin_next_arg ();
6640 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
6642 if (targetm.expand_builtin_va_start)
6643 targetm.expand_builtin_va_start (valist, nextarg);
6644 else
6645 std_expand_builtin_va_start (valist, nextarg);
6647 return const0_rtx;
6650 /* Expand EXP, a call to __builtin_va_end. */
6652 static rtx
6653 expand_builtin_va_end (tree exp)
6655 tree valist = CALL_EXPR_ARG (exp, 0);
6657 /* Evaluate for side effects, if needed. I hate macros that don't
6658 do that. */
6659 if (TREE_SIDE_EFFECTS (valist))
6660 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
6662 return const0_rtx;
6665 /* Expand EXP, a call to __builtin_va_copy. We do this as a
6666 builtin rather than just as an assignment in stdarg.h because of the
6667 nastiness of array-type va_list types. */
6669 static rtx
6670 expand_builtin_va_copy (tree exp)
6672 tree dst, src, t;
6673 location_t loc = EXPR_LOCATION (exp);
6675 dst = CALL_EXPR_ARG (exp, 0);
6676 src = CALL_EXPR_ARG (exp, 1);
6678 dst = stabilize_va_list_loc (loc, dst, 1);
6679 src = stabilize_va_list_loc (loc, src, 0);
6681 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
6683 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
6685 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
6686 TREE_SIDE_EFFECTS (t) = 1;
6687 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6689 else
6691 rtx dstb, srcb, size;
6693 /* Evaluate to pointers. */
6694 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
6695 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
6696 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
6697 NULL_RTX, VOIDmode, EXPAND_NORMAL);
6699 dstb = convert_memory_address (Pmode, dstb);
6700 srcb = convert_memory_address (Pmode, srcb);
6702 /* "Dereference" to BLKmode memories. */
6703 dstb = gen_rtx_MEM (BLKmode, dstb);
6704 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
6705 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6706 srcb = gen_rtx_MEM (BLKmode, srcb);
6707 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
6708 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6710 /* Copy. */
6711 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
6714 return const0_rtx;
6717 /* Expand a call to one of the builtin functions __builtin_frame_address or
6718 __builtin_return_address. */
6720 static rtx
6721 expand_builtin_frame_address (tree fndecl, tree exp)
6723 /* The argument must be a nonnegative integer constant.
6724 It counts the number of frames to scan up the stack.
6725 The value is either the frame pointer value or the return
6726 address saved in that frame. */
6727 if (call_expr_nargs (exp) == 0)
6728 /* Warning about missing arg was already issued. */
6729 return const0_rtx;
6730 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
6732 error ("invalid argument to %qD", fndecl);
6733 return const0_rtx;
6735 else
6737 /* Number of frames to scan up the stack. */
6738 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
6740 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
6742 /* Some ports cannot access arbitrary stack frames. */
6743 if (tem == NULL)
6745 warning (0, "unsupported argument to %qD", fndecl);
6746 return const0_rtx;
6749 if (count)
6751 /* Warn since no effort is made to ensure that any frame
6752 beyond the current one exists or can be safely reached. */
6753 warning (OPT_Wframe_address, "calling %qD with "
6754 "a nonzero argument is unsafe", fndecl);
6757 /* For __builtin_frame_address, return what we've got. */
6758 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6759 return tem;
6761 if (!REG_P (tem)
6762 && ! CONSTANT_P (tem))
6763 tem = copy_addr_to_reg (tem);
6764 return tem;
6768 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
6769 failed and the caller should emit a normal call. */
6771 static rtx
6772 expand_builtin_alloca (tree exp)
6774 rtx op0;
6775 rtx result;
6776 unsigned int align;
6777 tree fndecl = get_callee_fndecl (exp);
6778 HOST_WIDE_INT max_size;
6779 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6780 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
6781 bool valid_arglist
6782 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6783 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
6784 VOID_TYPE)
6785 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
6786 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
6787 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
6789 if (!valid_arglist)
6790 return NULL_RTX;
6792 if ((alloca_for_var
6793 && warn_vla_limit >= HOST_WIDE_INT_MAX
6794 && warn_alloc_size_limit < warn_vla_limit)
6795 || (!alloca_for_var
6796 && warn_alloca_limit >= HOST_WIDE_INT_MAX
6797 && warn_alloc_size_limit < warn_alloca_limit))
6800 /* -Walloca-larger-than and -Wvla-larger-than settings of
6801 less than HOST_WIDE_INT_MAX override the more general
6802 -Walloc-size-larger-than so unless either of the former
6803 options is smaller than the last one (which would imply
6804 that the call was already checked), check the alloca
6805 arguments for overflow. */
6806 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
6807 int idx[] = { 0, -1 };
6808 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
6811 /* Compute the argument. */
6812 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
6814 /* Compute the alignment. */
6815 align = (fcode == BUILT_IN_ALLOCA
6816 ? BIGGEST_ALIGNMENT
6817 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
6819 /* Compute the maximum size. */
6820 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6821 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
6822 : -1);
6824 /* Allocate the desired space. If the allocation stems from the declaration
6825 of a variable-sized object, it cannot accumulate. */
6826 result
6827 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
6828 result = convert_memory_address (ptr_mode, result);
6830 /* Dynamic allocations for variables are recorded during gimplification. */
6831 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
6832 record_dynamic_alloc (exp);
6834 return result;
6837 /* Emit a call to __asan_allocas_unpoison in EXP. Add to the second
6838 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
6839 which is the STACK_DYNAMIC_OFFSET value. See the motivation in the
6840 comment to the handle_builtin_stack_restore function. */
6842 static rtx
6843 expand_asan_emit_allocas_unpoison (tree exp)
6845 tree arg0 = CALL_EXPR_ARG (exp, 0);
6846 tree arg1 = CALL_EXPR_ARG (exp, 1);
6847 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6848 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6849 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6850 stack_pointer_rtx, NULL_RTX, 0,
6851 OPTAB_LIB_WIDEN);
6852 off = convert_modes (ptr_mode, Pmode, off, 0);
6853 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6854 OPTAB_LIB_WIDEN);
6855 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6856 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6857 top, ptr_mode, bot, ptr_mode);
6858 return ret;
6861 /* Expand a call to bswap builtin in EXP.
6862 Return NULL_RTX if a normal call should be emitted rather than expanding the
6863 function in-line. If convenient, the result should be placed in TARGET.
6864 SUBTARGET may be used as the target for computing one of EXP's operands. */
6866 static rtx
6867 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6868 rtx subtarget)
6870 tree arg;
6871 rtx op0;
6873 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6874 return NULL_RTX;
6876 arg = CALL_EXPR_ARG (exp, 0);
6877 op0 = expand_expr (arg,
6878 subtarget && GET_MODE (subtarget) == target_mode
6879 ? subtarget : NULL_RTX,
6880 target_mode, EXPAND_NORMAL);
6881 if (GET_MODE (op0) != target_mode)
6882 op0 = convert_to_mode (target_mode, op0, 1);
6884 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6886 gcc_assert (target);
6888 return convert_to_mode (target_mode, target, 1);
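/* A worked example: expanding __builtin_bswap32 (0x11223344) through
   bswap_optab yields 0x44332211, the four bytes in reverse order.  */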
6891 /* Expand a call to a unary builtin in EXP.
6892 Return NULL_RTX if a normal call should be emitted rather than expanding the
6893 function in-line. If convenient, the result should be placed in TARGET.
6894 SUBTARGET may be used as the target for computing one of EXP's operands. */
6896 static rtx
6897 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6898 rtx subtarget, optab op_optab)
6900 rtx op0;
6902 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6903 return NULL_RTX;
6905 /* Compute the argument. */
6906 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6907 (subtarget
6908 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6909 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6910 VOIDmode, EXPAND_NORMAL);
6911 /* Compute op, into TARGET if possible.
6912 Set TARGET to wherever the result comes back. */
6913 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6914 op_optab, op0, target, op_optab != clrsb_optab);
6915 gcc_assert (target);
6917 return convert_to_mode (target_mode, target, 0);
6920 /* Expand a call to __builtin_expect. We just return our argument
6921 as the builtin_expect semantics should already have been handled by
6922 the tree branch prediction pass. */
6924 static rtx
6925 expand_builtin_expect (tree exp, rtx target)
6927 tree arg;
6929 if (call_expr_nargs (exp) < 2)
6930 return const0_rtx;
6931 arg = CALL_EXPR_ARG (exp, 0);
6933 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6934 /* When guessing was done, the hints should be already stripped away. */
6935 gcc_assert (!flag_guess_branch_prob
6936 || optimize == 0 || seen_error ());
6937 return target;
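/* For example, by this point a use such as
     if (__builtin_expect (p == NULL, 0)) ...
   simply evaluates to P == NULL; the probability hint was consumed
   earlier by the tree branch prediction pass.  */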
6940 /* Expand a call to __builtin_expect_with_probability. We just return our
6941 argument as the builtin_expect semantics should already have been
6942 handled by the tree branch prediction pass. */
6944 static rtx
6945 expand_builtin_expect_with_probability (tree exp, rtx target)
6947 tree arg;
6949 if (call_expr_nargs (exp) < 3)
6950 return const0_rtx;
6951 arg = CALL_EXPR_ARG (exp, 0);
6953 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6954 /* When guessing was done, the hints should be already stripped away. */
6955 gcc_assert (!flag_guess_branch_prob
6956 || optimize == 0 || seen_error ());
6957 return target;
6961 /* Expand a call to __builtin_assume_aligned. We just return our first
6962 argument as the builtin_assume_aligned semantics should already have
6963 been handled by CCP. */
6965 static rtx
6966 expand_builtin_assume_aligned (tree exp, rtx target)
6968 if (call_expr_nargs (exp) < 2)
6969 return const0_rtx;
6970 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6971 EXPAND_NORMAL);
6972 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6973 && (call_expr_nargs (exp) < 3
6974 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6975 return target;
6978 void
6979 expand_builtin_trap (void)
6981 if (targetm.have_trap ())
6983 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6984 /* For trap insns when not accumulating outgoing args force
6985 REG_ARGS_SIZE note to prevent crossjumping of calls with
6986 different args sizes. */
6987 if (!ACCUMULATE_OUTGOING_ARGS)
6988 add_args_size_note (insn, stack_pointer_delta);
6990 else
6992 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6993 tree call_expr = build_call_expr (fn, 0);
6994 expand_call (call_expr, NULL_RTX, false);
6997 emit_barrier ();
7000 /* Expand a call to __builtin_unreachable. We do nothing except emit
7001 a barrier saying that control flow will not pass here.
7003 It is the responsibility of the program being compiled to ensure
7004 that control flow never reaches __builtin_unreachable. */
7005 static void
7006 expand_builtin_unreachable (void)
7008 emit_barrier ();
7011 /* Expand EXP, a call to fabs, fabsf or fabsl.
7012 Return NULL_RTX if a normal call should be emitted rather than expanding
7013 the function inline. If convenient, the result should be placed
7014 in TARGET. SUBTARGET may be used as the target for computing
7015 the operand. */
7017 static rtx
7018 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7020 machine_mode mode;
7021 tree arg;
7022 rtx op0;
7024 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7025 return NULL_RTX;
7027 arg = CALL_EXPR_ARG (exp, 0);
7028 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7029 mode = TYPE_MODE (TREE_TYPE (arg));
7030 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7031 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7034 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7035 Return NULL if a normal call should be emitted rather than expanding the
7036 function inline. If convenient, the result should be placed in TARGET.
7037 SUBTARGET may be used as the target for computing the operand. */
7039 static rtx
7040 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7042 rtx op0, op1;
7043 tree arg;
7045 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7046 return NULL_RTX;
7048 arg = CALL_EXPR_ARG (exp, 0);
7049 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7051 arg = CALL_EXPR_ARG (exp, 1);
7052 op1 = expand_normal (arg);
7054 return expand_copysign (op0, op1, target);
7057 /* Expand a call to __builtin___clear_cache. */
7059 static rtx
7060 expand_builtin___clear_cache (tree exp)
7062 if (!targetm.code_for_clear_cache)
7064 #ifdef CLEAR_INSN_CACHE
7065 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7066 does something. Just do the default expansion to a call to
7067 __clear_cache(). */
7068 return NULL_RTX;
7069 #else
7070 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7071 does nothing. There is no need to call it. Do nothing. */
7072 return const0_rtx;
7073 #endif /* CLEAR_INSN_CACHE */
7076 /* We have a "clear_cache" insn, and it will handle everything. */
7077 tree begin, end;
7078 rtx begin_rtx, end_rtx;
7080 /* We must not expand to a library call. If we did, any
7081 fallback library function in libgcc that might contain a call to
7082 __builtin___clear_cache() would recurse infinitely. */
7083 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7085 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7086 return const0_rtx;
7089 if (targetm.have_clear_cache ())
7091 class expand_operand ops[2];
7093 begin = CALL_EXPR_ARG (exp, 0);
7094 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
7096 end = CALL_EXPR_ARG (exp, 1);
7097 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
7099 create_address_operand (&ops[0], begin_rtx);
7100 create_address_operand (&ops[1], end_rtx);
7101 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
7102 return const0_rtx;
7104 return const0_rtx;
7107 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7109 static rtx
7110 round_trampoline_addr (rtx tramp)
7112 rtx temp, addend, mask;
7114 /* If we don't need too much alignment, we'll have been guaranteed
7115 proper alignment by get_trampoline_type. */
7116 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7117 return tramp;
7119 /* Round address up to desired boundary. */
7120 temp = gen_reg_rtx (Pmode);
7121 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7122 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
7124 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7125 temp, 0, OPTAB_LIB_WIDEN);
7126 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7127 temp, 0, OPTAB_LIB_WIDEN);
7129 return tramp;
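/* A minimal sketch, in plain C, of the PLUS/AND sequence emitted above:
   rounding ADDR up to a power-of-two ALIGN in bytes.  Hypothetical helper,
   shown only to document the addend/mask arithmetic.  */

static inline unsigned long
example_round_up_addr (unsigned long addr, unsigned long align)
{
  return (addr + (align - 1)) & -align;	/* addend = ALIGN-1, mask = -ALIGN.  */
}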
7132 static rtx
7133 expand_builtin_init_trampoline (tree exp, bool onstack)
7135 tree t_tramp, t_func, t_chain;
7136 rtx m_tramp, r_tramp, r_chain, tmp;
7138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
7139 POINTER_TYPE, VOID_TYPE))
7140 return NULL_RTX;
7142 t_tramp = CALL_EXPR_ARG (exp, 0);
7143 t_func = CALL_EXPR_ARG (exp, 1);
7144 t_chain = CALL_EXPR_ARG (exp, 2);
7146 r_tramp = expand_normal (t_tramp);
7147 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
7148 MEM_NOTRAP_P (m_tramp) = 1;
7150 /* If ONSTACK, the TRAMP argument should be the address of a field
7151 within the local function's FRAME decl. Either way, let's see if
7152 we can fill in the MEM_ATTRs for this memory. */
7153 if (TREE_CODE (t_tramp) == ADDR_EXPR)
7154 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
7156 /* Creator of a heap trampoline is responsible for making sure the
7157 address is aligned to at least STACK_BOUNDARY. Normally malloc
7158 will ensure this anyhow. */
7159 tmp = round_trampoline_addr (r_tramp);
7160 if (tmp != r_tramp)
7162 m_tramp = change_address (m_tramp, BLKmode, tmp);
7163 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
7164 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
7167 /* The FUNC argument should be the address of the nested function.
7168 Extract the actual function decl to pass to the hook. */
7169 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
7170 t_func = TREE_OPERAND (t_func, 0);
7171 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
7173 r_chain = expand_normal (t_chain);
7175 /* Generate insns to initialize the trampoline. */
7176 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
7178 if (onstack)
7180 trampolines_created = 1;
7182 if (targetm.calls.custom_function_descriptors != 0)
7183 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
7184 "trampoline generated for nested function %qD", t_func);
7187 return const0_rtx;
7190 static rtx
7191 expand_builtin_adjust_trampoline (tree exp)
7193 rtx tramp;
7195 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7196 return NULL_RTX;
7198 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7199 tramp = round_trampoline_addr (tramp);
7200 if (targetm.calls.trampoline_adjust_address)
7201 tramp = targetm.calls.trampoline_adjust_address (tramp);
7203 return tramp;
7206 /* Expand a call to the builtin descriptor initialization routine.
7207 A descriptor is made up of a couple of pointers to the static
7208 chain and the code entry in this order. */
7210 static rtx
7211 expand_builtin_init_descriptor (tree exp)
7213 tree t_descr, t_func, t_chain;
7214 rtx m_descr, r_descr, r_func, r_chain;
7216 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
7217 VOID_TYPE))
7218 return NULL_RTX;
7220 t_descr = CALL_EXPR_ARG (exp, 0);
7221 t_func = CALL_EXPR_ARG (exp, 1);
7222 t_chain = CALL_EXPR_ARG (exp, 2);
7224 r_descr = expand_normal (t_descr);
7225 m_descr = gen_rtx_MEM (BLKmode, r_descr);
7226 MEM_NOTRAP_P (m_descr) = 1;
7227 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
7229 r_func = expand_normal (t_func);
7230 r_chain = expand_normal (t_chain);
7232 /* Generate insns to initialize the descriptor. */
7233 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
7234 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
7235 POINTER_SIZE / BITS_PER_UNIT), r_func);
7237 return const0_rtx;
7240 /* Expand a call to the builtin descriptor adjustment routine. */
7242 static rtx
7243 expand_builtin_adjust_descriptor (tree exp)
7245 rtx tramp;
7247 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7248 return NULL_RTX;
7250 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7252 /* Unalign the descriptor to allow runtime identification. */
7253 tramp = plus_constant (ptr_mode, tramp,
7254 targetm.calls.custom_function_descriptors);
7256 return force_operand (tramp, NULL_RTX);
7259 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
7260 function. The function first checks whether the back end provides
7261 an insn to implement signbit for the respective mode. If not, it
7262 checks whether the floating point format of the value is such that
7263 the sign bit can be extracted. If that is not the case, error out.
7264 EXP is the expression that is a call to the builtin function; if
7265 convenient, the result should be placed in TARGET. */
7266 static rtx
7267 expand_builtin_signbit (tree exp, rtx target)
7269 const struct real_format *fmt;
7270 scalar_float_mode fmode;
7271 scalar_int_mode rmode, imode;
7272 tree arg;
7273 int word, bitpos;
7274 enum insn_code icode;
7275 rtx temp;
7276 location_t loc = EXPR_LOCATION (exp);
7278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7279 return NULL_RTX;
7281 arg = CALL_EXPR_ARG (exp, 0);
7282 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
7283 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
7284 fmt = REAL_MODE_FORMAT (fmode);
7286 arg = builtin_save_expr (arg);
7288 /* Expand the argument yielding a RTX expression. */
7289 temp = expand_normal (arg);
7291 /* Check if the back end provides an insn that handles signbit for the
7292 argument's mode. */
7293 icode = optab_handler (signbit_optab, fmode);
7294 if (icode != CODE_FOR_nothing)
7296 rtx_insn *last = get_last_insn ();
7297 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7298 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
7299 return target;
7300 delete_insns_since (last);
7303 /* For floating point formats without a sign bit, implement signbit
7304 as "ARG < 0.0". */
7305 bitpos = fmt->signbit_ro;
7306 if (bitpos < 0)
7308 /* But we can't do this if the format supports signed zero. */
7309 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
7311 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
7312 build_real (TREE_TYPE (arg), dconst0));
7313 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7316 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
7318 imode = int_mode_for_mode (fmode).require ();
7319 temp = gen_lowpart (imode, temp);
7321 else
7323 imode = word_mode;
7324 /* Handle targets with different FP word orders. */
7325 if (FLOAT_WORDS_BIG_ENDIAN)
7326 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
7327 else
7328 word = bitpos / BITS_PER_WORD;
7329 temp = operand_subword_force (temp, word, fmode);
7330 bitpos = bitpos % BITS_PER_WORD;
7333 /* Force the intermediate word_mode (or narrower) result into a
7334 register. This avoids attempting to create paradoxical SUBREGs
7335 of floating point modes below. */
7336 temp = force_reg (imode, temp);
7338 /* If the bitpos is within the "result mode" lowpart, the operation
7339 can be implemented with a single bitwise AND. Otherwise, we need
7340 a right shift and an AND. */
7342 if (bitpos < GET_MODE_BITSIZE (rmode))
7344 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
7346 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
7347 temp = gen_lowpart (rmode, temp);
7348 temp = expand_binop (rmode, and_optab, temp,
7349 immed_wide_int_const (mask, rmode),
7350 NULL_RTX, 1, OPTAB_LIB_WIDEN);
7352 else
7354 /* Perform a logical right shift to place the signbit in the least
7355 significant bit, then truncate the result to the desired mode
7356 and mask just this bit. */
7357 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
7358 temp = gen_lowpart (rmode, temp);
7359 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
7360 NULL_RTX, 1, OPTAB_LIB_WIDEN);
7363 return temp;
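/* User-level sketch (not GCC code) of the shift-and-AND fallback above for
   IEEE double, where the sign occupies bit 63 of the representation.
   Equivalent to __builtin_signbit (x) != 0.  */

static int
example_signbit_double (double x)
{
  unsigned long long bits;
  __builtin_memcpy (&bits, &x, sizeof bits);	/* like gen_lowpart.  */
  return (int) ((bits >> 63) & 1);		/* bitpos == 63.  */
}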
7366 /* Expand fork or exec calls. TARGET is the desired target of the
7367 call. EXP is the call. FN is the
7368 identifier of the actual function. IGNORE is nonzero if the
7369 value is to be ignored. */
7371 static rtx
7372 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
7374 tree id, decl;
7375 tree call;
7377 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
7379 tree path = CALL_EXPR_ARG (exp, 0);
7380 /* Detect unterminated path. */
7381 if (!check_read_access (exp, path))
7382 return NULL_RTX;
7384 /* Also detect unterminated first argument. */
7385 switch (DECL_FUNCTION_CODE (fn))
7387 case BUILT_IN_EXECL:
7388 case BUILT_IN_EXECLE:
7389 case BUILT_IN_EXECLP:
7390 if (!check_read_access (exp, path))
7391 return NULL_RTX;
7392 default:
7393 break;
7398 /* If we are not profiling, just call the function. */
7399 if (!profile_arc_flag)
7400 return NULL_RTX;
7402 /* Otherwise call the wrapper. This should be equivalent for the rest of
7403 the compiler, so the code does not diverge, and the wrapper may run the
7404 code necessary for keeping the profiling sane. */
7406 switch (DECL_FUNCTION_CODE (fn))
7408 case BUILT_IN_FORK:
7409 id = get_identifier ("__gcov_fork");
7410 break;
7412 case BUILT_IN_EXECL:
7413 id = get_identifier ("__gcov_execl");
7414 break;
7416 case BUILT_IN_EXECV:
7417 id = get_identifier ("__gcov_execv");
7418 break;
7420 case BUILT_IN_EXECLP:
7421 id = get_identifier ("__gcov_execlp");
7422 break;
7424 case BUILT_IN_EXECLE:
7425 id = get_identifier ("__gcov_execle");
7426 break;
7428 case BUILT_IN_EXECVP:
7429 id = get_identifier ("__gcov_execvp");
7430 break;
7432 case BUILT_IN_EXECVE:
7433 id = get_identifier ("__gcov_execve");
7434 break;
7436 default:
7437 gcc_unreachable ();
7440 decl = build_decl (DECL_SOURCE_LOCATION (fn),
7441 FUNCTION_DECL, id, TREE_TYPE (fn));
7442 DECL_EXTERNAL (decl) = 1;
7443 TREE_PUBLIC (decl) = 1;
7444 DECL_ARTIFICIAL (decl) = 1;
7445 TREE_NOTHROW (decl) = 1;
7446 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
7447 DECL_VISIBILITY_SPECIFIED (decl) = 1;
7448 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
7449 return expand_call (call, target, ignore);
7454 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
7455 the pointer in these functions is void*, the tree optimizers may remove
7456 casts. The mode computed in expand_builtin isn't reliable either, due
7457 to __sync_bool_compare_and_swap.
7459 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
7460 group of builtins. This gives us log2 of the mode size. */
7462 static inline machine_mode
7463 get_builtin_sync_mode (int fcode_diff)
7465 /* The size is not negotiable, so ask not to get BLKmode in return
7466 if the target indicates that a smaller size would be better. */
7467 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
7470 /* Expand the memory expression LOC and return the appropriate memory operand
7471 for the builtin_sync operations. */
7473 static rtx
7474 get_builtin_sync_mem (tree loc, machine_mode mode)
7476 rtx addr, mem;
7477 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
7478 ? TREE_TYPE (TREE_TYPE (loc))
7479 : TREE_TYPE (loc));
7480 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
7482 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
7483 addr = convert_memory_address (addr_mode, addr);
7485 /* Note that we explicitly do not want any alias information for this
7486 memory, so that we kill all other live memories. Otherwise we don't
7487 satisfy the full barrier semantics of the intrinsic. */
7488 mem = gen_rtx_MEM (mode, addr);
7490 set_mem_addr_space (mem, addr_space);
7492 mem = validize_mem (mem);
7494 /* The alignment needs to be at least that of the mode. */
7495 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
7496 get_pointer_alignment (loc)));
7497 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
7498 MEM_VOLATILE_P (mem) = 1;
7500 return mem;
7503 /* Make sure an argument is in the right mode.
7504 EXP is the tree argument.
7505 MODE is the mode it should be in. */
7507 static rtx
7508 expand_expr_force_mode (tree exp, machine_mode mode)
7510 rtx val;
7511 machine_mode old_mode;
7513 if (TREE_CODE (exp) == SSA_NAME
7514 && TYPE_MODE (TREE_TYPE (exp)) != mode)
7516 /* Undo argument promotion if possible, as combine might not
7517 be able to do it later due to MEM_VOLATILE_P uses in the
7518 patterns. */
7519 gimple *g = get_gimple_for_ssa_name (exp);
7520 if (g && gimple_assign_cast_p (g))
7522 tree rhs = gimple_assign_rhs1 (g);
7523 tree_code code = gimple_assign_rhs_code (g);
7524 if (CONVERT_EXPR_CODE_P (code)
7525 && TYPE_MODE (TREE_TYPE (rhs)) == mode
7526 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
7527 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7528 && (TYPE_PRECISION (TREE_TYPE (exp))
7529 > TYPE_PRECISION (TREE_TYPE (rhs))))
7530 exp = rhs;
7534 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
7535 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
7536 of CONST_INTs, where we know the old_mode only from the call argument. */
7538 old_mode = GET_MODE (val);
7539 if (old_mode == VOIDmode)
7540 old_mode = TYPE_MODE (TREE_TYPE (exp));
7541 val = convert_modes (mode, old_mode, val, 1);
7542 return val;
7546 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
7547 EXP is the CALL_EXPR. CODE is the rtx code
7548 that corresponds to the arithmetic or logical operation from the name;
7549 an exception here is that NOT actually means NAND. TARGET is an optional
7550 place for us to store the results; AFTER is true if this is the
7551 fetch_and_xxx form. */
7553 static rtx
7554 expand_builtin_sync_operation (machine_mode mode, tree exp,
7555 enum rtx_code code, bool after,
7556 rtx target)
7558 rtx val, mem;
7559 location_t loc = EXPR_LOCATION (exp);
7561 if (code == NOT && warn_sync_nand)
7563 tree fndecl = get_callee_fndecl (exp);
7564 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7566 static bool warned_f_a_n, warned_n_a_f;
7568 switch (fcode)
7570 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7571 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7572 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7573 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7574 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7575 if (warned_f_a_n)
7576 break;
7578 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
7579 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7580 warned_f_a_n = true;
7581 break;
7583 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7584 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7585 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7586 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7587 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7588 if (warned_n_a_f)
7589 break;
7591 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
7592 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7593 warned_n_a_f = true;
7594 break;
7596 default:
7597 gcc_unreachable ();
7601 /* Expand the operands. */
7602 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7603 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7605 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
7606 after);
7609 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
7610 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
7611 true if this is the boolean form. TARGET is a place for us to store the
7612 results; this is NOT optional if IS_BOOL is true. */
7614 static rtx
7615 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
7616 bool is_bool, rtx target)
7618 rtx old_val, new_val, mem;
7619 rtx *pbool, *poval;
7621 /* Expand the operands. */
7622 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7623 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7624 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7626 pbool = poval = NULL;
7627 if (target != const0_rtx)
7629 if (is_bool)
7630 pbool = &target;
7631 else
7632 poval = &target;
7634 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
7635 false, MEMMODEL_SYNC_SEQ_CST,
7636 MEMMODEL_SYNC_SEQ_CST))
7637 return NULL_RTX;
7639 return target;
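/* User-level sketch of the value form expanded above: the classic CAS
   retry loop.  __sync_val_compare_and_swap returns the prior contents, so
   a mismatch both signals failure and supplies the retry value.  Function
   name is hypothetical.  */

static int
example_increment_if_positive (int *p)
{
  int old = *p;
  while (old > 0)
    {
      int prev = __sync_val_compare_and_swap (p, old, old + 1);
      if (prev == old)
	return 1;	/* Swap succeeded.  */
      old = prev;	/* Lost a race; retry with the value seen.  */
    }
  return 0;
}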
7642 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
7643 general form is actually an atomic exchange, and some targets only
7644 support a reduced form with the second argument being a constant 1.
7645 EXP is the CALL_EXPR; TARGET is an optional place for us to store
7646 the results. */
7648 static rtx
7649 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
7650 rtx target)
7652 rtx val, mem;
7654 /* Expand the operands. */
7655 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7656 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7658 return expand_sync_lock_test_and_set (target, mem, val);
7661 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
7663 static void
7664 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
7666 rtx mem;
7668 /* Expand the operands. */
7669 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7671 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
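/* Sketch of the canonical user-level pairing of the two expanders above:
   a test-and-set spinlock.  Names are hypothetical.  */

static volatile int example_lock_word;

static void
example_spin_lock (void)
{
  while (__sync_lock_test_and_set (&example_lock_word, 1))
    ;	/* Acquire semantics; spin until the prior value was 0.  */
}

static void
example_spin_unlock (void)
{
  __sync_lock_release (&example_lock_word);	/* Release; stores 0.  */
}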
7674 /* Given an integer representing an ``enum memmodel'', verify its
7675 correctness and return the memory model enum. */
7677 static enum memmodel
7678 get_memmodel (tree exp)
7680 rtx op;
7681 unsigned HOST_WIDE_INT val;
7682 location_t loc
7683 = expansion_point_location_if_in_system_header (input_location);
7685 /* If the parameter is not a constant, it's a run time value so we'll just
7686 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
7687 if (TREE_CODE (exp) != INTEGER_CST)
7688 return MEMMODEL_SEQ_CST;
7690 op = expand_normal (exp);
7692 val = INTVAL (op);
7693 if (targetm.memmodel_check)
7694 val = targetm.memmodel_check (val);
7695 else if (val & ~MEMMODEL_MASK)
7697 warning_at (loc, OPT_Winvalid_memory_model,
7698 "unknown architecture specifier in memory model to builtin");
7699 return MEMMODEL_SEQ_CST;
7702 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
7703 if (memmodel_base (val) >= MEMMODEL_LAST)
7705 warning_at (loc, OPT_Winvalid_memory_model,
7706 "invalid memory model argument to builtin");
7707 return MEMMODEL_SEQ_CST;
7710 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
7711 be conservative and promote consume to acquire. */
7712 if (val == MEMMODEL_CONSUME)
7713 val = MEMMODEL_ACQUIRE;
7715 return (enum memmodel) val;
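/* What get_memmodel expects from user code, as a sketch: a constant
   __ATOMIC_* argument.  A run-time-variable model is silently treated as
   __ATOMIC_SEQ_CST, per the comment above.  */

static int
example_load_acquire (int *p)
{
  return __atomic_load_n (p, __ATOMIC_ACQUIRE);	/* INTEGER_CST model.  */
}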
7718 /* Expand the __atomic_exchange intrinsic:
7719 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
7720 EXP is the CALL_EXPR.
7721 TARGET is an optional place for us to store the results. */
7723 static rtx
7724 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
7726 rtx val, mem;
7727 enum memmodel model;
7729 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7731 if (!flag_inline_atomics)
7732 return NULL_RTX;
7734 /* Expand the operands. */
7735 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7736 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7738 return expand_atomic_exchange (target, mem, val, model);
7741 /* Expand the __atomic_compare_exchange intrinsic:
7742 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
7743 TYPE desired, BOOL weak,
7744 enum memmodel success,
7745 enum memmodel failure)
7746 EXP is the CALL_EXPR.
7747 TARGET is an optional place for us to store the results. */
7749 static rtx
7750 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
7751 rtx target)
7753 rtx expect, desired, mem, oldval;
7754 rtx_code_label *label;
7755 enum memmodel success, failure;
7756 tree weak;
7757 bool is_weak;
7758 location_t loc
7759 = expansion_point_location_if_in_system_header (input_location);
7761 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
7762 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
7764 if (failure > success)
7766 warning_at (loc, OPT_Winvalid_memory_model,
7767 "failure memory model cannot be stronger than success "
7768 "memory model for %<__atomic_compare_exchange%>");
7769 success = MEMMODEL_SEQ_CST;
7772 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7774 warning_at (loc, OPT_Winvalid_memory_model,
7775 "invalid failure memory model for "
7776 "%<__atomic_compare_exchange%>");
7777 failure = MEMMODEL_SEQ_CST;
7778 success = MEMMODEL_SEQ_CST;
7782 if (!flag_inline_atomics)
7783 return NULL_RTX;
7785 /* Expand the operands. */
7786 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7788 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
7789 expect = convert_memory_address (Pmode, expect);
7790 expect = gen_rtx_MEM (mode, expect);
7791 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7793 weak = CALL_EXPR_ARG (exp, 3);
7794 is_weak = false;
7795 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
7796 is_weak = true;
7798 if (target == const0_rtx)
7799 target = NULL;
7801 /* Lest the rtl backend create a race condition with an improper store
7802 to memory, always create a new pseudo for OLDVAL. */
7803 oldval = NULL;
7805 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
7806 is_weak, success, failure))
7807 return NULL_RTX;
7809 /* Conditionally store back to EXPECT, lest we create a race condition
7810 with an improper store to memory. */
7811 /* ??? With a rearrangement of atomics at the gimple level, we can handle
7812 the normal case where EXPECT is totally private, i.e. a register. At
7813 which point the store can be unconditional. */
7814 label = gen_label_rtx ();
7815 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
7816 GET_MODE (target), 1, label);
7817 emit_move_insn (expect, oldval);
7818 emit_label (label);
7820 return target;
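/* User-level sketch of the builtin expanded above: the standard
   compare-exchange retry loop.  On failure, EXPECTED is refreshed with the
   observed value, which is why the expander conditionally copies OLDVAL
   back through the EXPECT memory.  */

static void
example_saturating_increment (unsigned *p, unsigned limit)
{
  unsigned expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  unsigned desired;
  do
    desired = expected < limit ? expected + 1 : expected;
  while (!__atomic_compare_exchange_n (p, &expected, desired,
				       /* weak= */ 1,
				       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));
}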
7823 /* Helper function for expand_ifn_atomic_compare_exchange - expand
7824 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
7825 call. The weak parameter must be dropped to match the expected parameter
7826 list and the expected argument changed from value to pointer to memory
7827 slot. */
7829 static void
7830 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
7832 unsigned int z;
7833 vec<tree, va_gc> *vec;
7835 vec_alloc (vec, 5);
7836 vec->quick_push (gimple_call_arg (call, 0));
7837 tree expected = gimple_call_arg (call, 1);
7838 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
7839 TREE_TYPE (expected));
7840 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
7841 if (expd != x)
7842 emit_move_insn (x, expd);
7843 tree v = make_tree (TREE_TYPE (expected), x);
7844 vec->quick_push (build1 (ADDR_EXPR,
7845 build_pointer_type (TREE_TYPE (expected)), v));
7846 vec->quick_push (gimple_call_arg (call, 2));
7847 /* Skip the boolean weak parameter. */
7848 for (z = 4; z < 6; z++)
7849 vec->quick_push (gimple_call_arg (call, z));
7850 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7851 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
7852 gcc_assert (bytes_log2 < 5);
7853 built_in_function fncode
7854 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7855 + bytes_log2);
7856 tree fndecl = builtin_decl_explicit (fncode);
7857 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
7858 fndecl);
7859 tree exp = build_call_vec (boolean_type_node, fn, vec);
7860 tree lhs = gimple_call_lhs (call);
7861 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7862 if (lhs)
7864 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7865 if (GET_MODE (boolret) != mode)
7866 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7867 x = force_reg (mode, x);
7868 write_complex_part (target, boolret, true);
7869 write_complex_part (target, x, false);
7873 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7875 void
7876 expand_ifn_atomic_compare_exchange (gcall *call)
7878 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7879 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
7880 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7881 rtx expect, desired, mem, oldval, boolret;
7882 enum memmodel success, failure;
7883 tree lhs;
7884 bool is_weak;
7885 location_t loc
7886 = expansion_point_location_if_in_system_header (gimple_location (call));
7888 success = get_memmodel (gimple_call_arg (call, 4));
7889 failure = get_memmodel (gimple_call_arg (call, 5));
7891 if (failure > success)
7893 warning_at (loc, OPT_Winvalid_memory_model,
7894 "failure memory model cannot be stronger than success "
7895 "memory model for %<__atomic_compare_exchange%>");
7896 success = MEMMODEL_SEQ_CST;
7899 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7901 warning_at (loc, OPT_Winvalid_memory_model,
7902 "invalid failure memory model for "
7903 "%<__atomic_compare_exchange%>");
7904 failure = MEMMODEL_SEQ_CST;
7905 success = MEMMODEL_SEQ_CST;
7908 if (!flag_inline_atomics)
7910 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7911 return;
7914 /* Expand the operands. */
7915 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7917 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7918 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7920 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7922 boolret = NULL;
7923 oldval = NULL;
7925 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7926 is_weak, success, failure))
7928 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7929 return;
7932 lhs = gimple_call_lhs (call);
7933 if (lhs)
7935 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7936 if (GET_MODE (boolret) != mode)
7937 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7938 write_complex_part (target, boolret, true);
7939 write_complex_part (target, oldval, false);
7943 /* Expand the __atomic_load intrinsic:
7944 TYPE __atomic_load (TYPE *object, enum memmodel)
7945 EXP is the CALL_EXPR.
7946 TARGET is an optional place for us to store the results. */
7948 static rtx
7949 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7951 rtx mem;
7952 enum memmodel model;
7954 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7955 if (is_mm_release (model) || is_mm_acq_rel (model))
7957 location_t loc
7958 = expansion_point_location_if_in_system_header (input_location);
7959 warning_at (loc, OPT_Winvalid_memory_model,
7960 "invalid memory model for %<__atomic_load%>");
7961 model = MEMMODEL_SEQ_CST;
7964 if (!flag_inline_atomics)
7965 return NULL_RTX;
7967 /* Expand the operand. */
7968 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7970 return expand_atomic_load (target, mem, model);
7974 /* Expand the __atomic_store intrinsic:
7975 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7976 EXP is the CALL_EXPR.
7977 TARGET is an optional place for us to store the results. */
7979 static rtx
7980 expand_builtin_atomic_store (machine_mode mode, tree exp)
7982 rtx mem, val;
7983 enum memmodel model;
7985 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7986 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7987 || is_mm_release (model)))
7989 location_t loc
7990 = expansion_point_location_if_in_system_header (input_location);
7991 warning_at (loc, OPT_Winvalid_memory_model,
7992 "invalid memory model for %<__atomic_store%>");
7993 model = MEMMODEL_SEQ_CST;
7996 if (!flag_inline_atomics)
7997 return NULL_RTX;
7999 /* Expand the operands. */
8000 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8001 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8003 return expand_atomic_store (mem, val, model, false);
8006 /* Expand the __atomic_fetch_XXX intrinsic:
8007 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8008 EXP is the CALL_EXPR.
8009 TARGET is an optional place for us to store the results.
8010 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
8011 FETCH_AFTER is true if returning the result of the operation.
8012 FETCH_AFTER is false if returning the value before the operation.
8013 IGNORE is true if the result is not used.
8014 EXT_CALL is the correct builtin for an external call if this cannot be
8015 resolved to an instruction sequence. */
8017 static rtx
8018 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
8019 enum rtx_code code, bool fetch_after,
8020 bool ignore, enum built_in_function ext_call)
8022 rtx val, mem, ret;
8023 enum memmodel model;
8024 tree fndecl;
8025 tree addr;
8027 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8029 /* Expand the operands. */
8030 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8031 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8033 /* Only try generating instructions if inlining is turned on. */
8034 if (flag_inline_atomics)
8036 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
8037 if (ret)
8038 return ret;
8041 /* Return if a different routine isn't needed for the library call. */
8042 if (ext_call == BUILT_IN_NONE)
8043 return NULL_RTX;
8045 /* Change the call to the specified function. */
8046 fndecl = get_callee_fndecl (exp);
8047 addr = CALL_EXPR_FN (exp);
8048 STRIP_NOPS (addr);
8050 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
8051 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
8053 /* If we will emit code after the call, the call cannot be a tail call.
8054 If it is emitted as a tail call, a barrier is emitted after it, and
8055 then all trailing code is removed. */
8056 if (!ignore)
8057 CALL_EXPR_TAILCALL (exp) = 0;
8059 /* Expand the call here so we can emit trailing code. */
8060 ret = expand_call (exp, target, ignore);
8062 /* Replace the original function just in case it matters. */
8063 TREE_OPERAND (addr, 0) = fndecl;
8065 /* Then issue the arithmetic correction to return the right result. */
8066 if (!ignore)
8068 if (code == NOT)
8070 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
8071 OPTAB_LIB_WIDEN);
8072 ret = expand_simple_unop (mode, NOT, ret, target, true);
8074 else
8075 ret = expand_simple_binop (mode, code, ret, val, target, true,
8076 OPTAB_LIB_WIDEN);
8078 return ret;
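/* Sketch of the arithmetic correction above for CODE == NOT (i.e. NAND):
   the library call returns the old value, and the _and_fetch result is
   recomputed as ~(old & val), exactly the AND-then-NOT pair emitted.  */

static unsigned
example_nand_and_fetch_fixup (unsigned old_val, unsigned val)
{
  return ~(old_val & val);
}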
8081 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
8083 void
8084 expand_ifn_atomic_bit_test_and (gcall *call)
8086 tree ptr = gimple_call_arg (call, 0);
8087 tree bit = gimple_call_arg (call, 1);
8088 tree flag = gimple_call_arg (call, 2);
8089 tree lhs = gimple_call_lhs (call);
8090 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
8091 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
8092 enum rtx_code code;
8093 optab optab;
8094 class expand_operand ops[5];
8096 gcc_assert (flag_inline_atomics);
8098 if (gimple_call_num_args (call) == 4)
8099 model = get_memmodel (gimple_call_arg (call, 3));
8101 rtx mem = get_builtin_sync_mem (ptr, mode);
8102 rtx val = expand_expr_force_mode (bit, mode);
8104 switch (gimple_call_internal_fn (call))
8106 case IFN_ATOMIC_BIT_TEST_AND_SET:
8107 code = IOR;
8108 optab = atomic_bit_test_and_set_optab;
8109 break;
8110 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
8111 code = XOR;
8112 optab = atomic_bit_test_and_complement_optab;
8113 break;
8114 case IFN_ATOMIC_BIT_TEST_AND_RESET:
8115 code = AND;
8116 optab = atomic_bit_test_and_reset_optab;
8117 break;
8118 default:
8119 gcc_unreachable ();
8122 if (lhs == NULL_TREE)
8124 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8125 val, NULL_RTX, true, OPTAB_DIRECT);
8126 if (code == AND)
8127 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8128 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
8129 return;
8132 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8133 enum insn_code icode = direct_optab_handler (optab, mode);
8134 gcc_assert (icode != CODE_FOR_nothing);
8135 create_output_operand (&ops[0], target, mode);
8136 create_fixed_operand (&ops[1], mem);
8137 create_convert_operand_to (&ops[2], val, mode, true);
8138 create_integer_operand (&ops[3], model);
8139 create_integer_operand (&ops[4], integer_onep (flag));
8140 if (maybe_expand_insn (icode, 5, ops))
8141 return;
8143 rtx bitval = val;
8144 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8145 val, NULL_RTX, true, OPTAB_DIRECT);
8146 rtx maskval = val;
8147 if (code == AND)
8148 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8149 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
8150 code, model, false);
8151 if (integer_onep (flag))
8153 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
8154 NULL_RTX, true, OPTAB_DIRECT);
8155 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
8156 true, OPTAB_DIRECT);
8158 else
8159 result = expand_simple_binop (mode, AND, result, maskval, target, true,
8160 OPTAB_DIRECT);
8161 if (result != target)
8162 emit_move_insn (target, result);
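/* User-level source (a sketch) of the pattern that gimple folding
   recognizes and routes into IFN_ATOMIC_BIT_TEST_AND_SET, handled
   above.  */

static int
example_atomic_bit_test_and_set (unsigned *p, unsigned bit)
{
  return (__atomic_fetch_or (p, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;
}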
8165 /* Expand an atomic clear operation.
8166 void __atomic_clear (BOOL *obj, enum memmodel)
8167 EXP is the call expression. */
8169 static rtx
8170 expand_builtin_atomic_clear (tree exp)
8172 machine_mode mode;
8173 rtx mem, ret;
8174 enum memmodel model;
8176 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8177 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8178 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8180 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
8182 location_t loc
8183 = expansion_point_location_if_in_system_header (input_location);
8184 warning_at (loc, OPT_Winvalid_memory_model,
8185 "invalid memory model for %<__atomic_store%>");
8186 model = MEMMODEL_SEQ_CST;
8189 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
8190 Failing that, issue a plain store. The only way the expansion can
8191 fail is if the bool type is larger than a word size. Unlikely, but
8192 handle it anyway for completeness. Assume a single threaded model since
8193 there is no atomic support in this case, and no barriers are required. */
8194 ret = expand_atomic_store (mem, const0_rtx, model, true);
8195 if (!ret)
8196 emit_move_insn (mem, const0_rtx);
8197 return const0_rtx;
8200 /* Expand an atomic test_and_set operation.
8201 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
8202 EXP is the call expression. */
8204 static rtx
8205 expand_builtin_atomic_test_and_set (tree exp, rtx target)
8207 rtx mem;
8208 enum memmodel model;
8209 machine_mode mode;
8211 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8212 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8213 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8215 return expand_atomic_test_and_set (target, mem, model);
8219 /* Return true if the (optional) object ARG1 of size ARG0 is always lock free on
8220 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
8222 static tree
8223 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
8225 int size;
8226 machine_mode mode;
8227 unsigned int mode_align, type_align;
8229 if (TREE_CODE (arg0) != INTEGER_CST)
8230 return NULL_TREE;
8232 /* We need a corresponding integer mode for the access to be lock-free. */
8233 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
8234 if (!int_mode_for_size (size, 0).exists (&mode))
8235 return boolean_false_node;
8237 mode_align = GET_MODE_ALIGNMENT (mode);
8239 if (TREE_CODE (arg1) == INTEGER_CST)
8241 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
8243 /* Either this argument is null, or it's a fake pointer encoding
8244 the alignment of the object. */
8245 val = least_bit_hwi (val);
8246 val *= BITS_PER_UNIT;
8248 if (val == 0 || mode_align < val)
8249 type_align = mode_align;
8250 else
8251 type_align = val;
8253 else
8255 tree ttype = TREE_TYPE (arg1);
8257 /* This function is usually invoked and folded immediately by the front
8258 end before anything else has a chance to look at it. The pointer
8259 parameter at this point is usually cast to a void *, so check for that
8260 and look past the cast. */
8261 if (CONVERT_EXPR_P (arg1)
8262 && POINTER_TYPE_P (ttype)
8263 && VOID_TYPE_P (TREE_TYPE (ttype))
8264 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
8265 arg1 = TREE_OPERAND (arg1, 0);
8267 ttype = TREE_TYPE (arg1);
8268 gcc_assert (POINTER_TYPE_P (ttype));
8270 /* Get the underlying type of the object. */
8271 ttype = TREE_TYPE (ttype);
8272 type_align = TYPE_ALIGN (ttype);
8275 /* If the object has smaller alignment, the lock free routines cannot
8276 be used. */
8277 if (type_align < mode_align)
8278 return boolean_false_node;
8280 /* Check if a compare_and_swap pattern exists for the mode which represents
8281 the required size. The pattern is not allowed to fail, so the existence
8282 of the pattern indicates support is present. Also require that an
8283 atomic load exists for the required size. */
8284 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
8285 return boolean_true_node;
8286 else
8287 return boolean_false_node;
8290 /* Return true if the parameters to call EXP represent an object which will
8291 always generate lock free instructions. The first argument represents the
8292 size of the object, and the second parameter is a pointer to the object
8293 itself. If NULL is passed for the object, then the result is based on
8294 typical alignment for an object of the specified size. Otherwise return
8295 false. */
8297 static rtx
8298 expand_builtin_atomic_always_lock_free (tree exp)
8300 tree size;
8301 tree arg0 = CALL_EXPR_ARG (exp, 0);
8302 tree arg1 = CALL_EXPR_ARG (exp, 1);
8304 if (TREE_CODE (arg0) != INTEGER_CST)
8306 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
8307 return const0_rtx;
8310 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
8311 if (size == boolean_true_node)
8312 return const1_rtx;
8313 return const0_rtx;
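/* Sketch of a typical call reaching the expander above: a constant size
   and a null object pointer, so the answer depends only on the typical
   alignment for that size.  */

static int
example_int_always_lock_free (void)
{
  return __atomic_always_lock_free (sizeof (int), (void *) 0);
}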
8316 /* Return one or zero if it can be determined that object ARG1 of size ARG0
8317 is lock free on this architecture. */
8319 static tree
8320 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
8322 if (!flag_inline_atomics)
8323 return NULL_TREE;
8325 /* If it isn't always lock free, don't generate a result. */
8326 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
8327 return boolean_true_node;
8329 return NULL_TREE;
8332 /* Return true if the parameters to call EXP represent an object which will
8333 always generate lock free instructions. The first argument represents the
8334 size of the object, and the second parameter is a pointer to the object
8335 itself. If NULL is passed for the object, then the result is based on
8336 typical alignment for an object of the specified size. Otherwise return
8337 NULL. */
8339 static rtx
8340 expand_builtin_atomic_is_lock_free (tree exp)
8342 tree size;
8343 tree arg0 = CALL_EXPR_ARG (exp, 0);
8344 tree arg1 = CALL_EXPR_ARG (exp, 1);
8346 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
8348 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
8349 return NULL_RTX;
8352 if (!flag_inline_atomics)
8353 return NULL_RTX;
8355 /* If the value is known at compile time, return the RTX for it. */
8356 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
8357 if (size == boolean_true_node)
8358 return const1_rtx;
8360 return NULL_RTX;
8363 /* Expand the __atomic_thread_fence intrinsic:
8364 void __atomic_thread_fence (enum memmodel)
8365 EXP is the CALL_EXPR. */
8367 static void
8368 expand_builtin_atomic_thread_fence (tree exp)
8370 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
8371 expand_mem_thread_fence (model);
8374 /* Expand the __atomic_signal_fence intrinsic:
8375 void __atomic_signal_fence (enum memmodel)
8376 EXP is the CALL_EXPR. */
8378 static void
8379 expand_builtin_atomic_signal_fence (tree exp)
8381 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
8382 expand_mem_signal_fence (model);
8385 /* Expand the __sync_synchronize intrinsic. */
8387 static void
8388 expand_builtin_sync_synchronize (void)
8390 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
8393 static rtx
8394 expand_builtin_thread_pointer (tree exp, rtx target)
8396 enum insn_code icode;
8397 if (!validate_arglist (exp, VOID_TYPE))
8398 return const0_rtx;
8399 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
8400 if (icode != CODE_FOR_nothing)
8402 class expand_operand op;
8403 /* If the target is not suitable then create a new target. */
8404 if (target == NULL_RTX
8405 || !REG_P (target)
8406 || GET_MODE (target) != Pmode)
8407 target = gen_reg_rtx (Pmode);
8408 create_output_operand (&op, target, Pmode);
8409 expand_insn (icode, 1, &op);
8410 return target;
8412 error ("%<__builtin_thread_pointer%> is not supported on this target");
8413 return const0_rtx;
8416 static void
8417 expand_builtin_set_thread_pointer (tree exp)
8419 enum insn_code icode;
8420 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8421 return;
8422 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
8423 if (icode != CODE_FOR_nothing)
8425 class expand_operand op;
8426 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
8427 Pmode, EXPAND_NORMAL);
8428 create_input_operand (&op, val, Pmode);
8429 expand_insn (icode, 1, &op);
8430 return;
8432 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
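/* User-level sketch of the thread-pointer builtin expanded above; it is
   only usable on targets that provide the corresponding optab, e.g. for
   TLS runtime support.  */

static void *
example_read_thread_pointer (void)
{
  return __builtin_thread_pointer ();
}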
8436 /* Emit code to restore the current value of stack. */
8438 static void
8439 expand_stack_restore (tree var)
8441 rtx_insn *prev;
8442 rtx sa = expand_normal (var);
8444 sa = convert_memory_address (Pmode, sa);
8446 prev = get_last_insn ();
8447 emit_stack_restore (SAVE_BLOCK, sa);
8449 record_new_stack_level ();
8451 fixup_args_size_notes (prev, get_last_insn (), 0);
8454 /* Emit code to save the current value of stack. */
8456 static rtx
8457 expand_stack_save (void)
8459 rtx ret = NULL_RTX;
8461 emit_stack_save (SAVE_BLOCK, &ret);
8462 return ret;
8465 /* Emit code to get the openacc gang, worker or vector id or size. */
8467 static rtx
8468 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
8470 const char *name;
8471 rtx fallback_retval;
8472 rtx_insn *(*gen_fn) (rtx, rtx);
8473 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
8475 case BUILT_IN_GOACC_PARLEVEL_ID:
8476 name = "__builtin_goacc_parlevel_id";
8477 fallback_retval = const0_rtx;
8478 gen_fn = targetm.gen_oacc_dim_pos;
8479 break;
8480 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8481 name = "__builtin_goacc_parlevel_size";
8482 fallback_retval = const1_rtx;
8483 gen_fn = targetm.gen_oacc_dim_size;
8484 break;
8485 default:
8486 gcc_unreachable ();
8489 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
8491 error ("%qs only supported in OpenACC code", name);
8492 return const0_rtx;
8495 tree arg = CALL_EXPR_ARG (exp, 0);
8496 if (TREE_CODE (arg) != INTEGER_CST)
8498 error ("non-constant argument 0 to %qs", name);
8499 return const0_rtx;
8502 int dim = TREE_INT_CST_LOW (arg);
8503 switch (dim)
8505 case GOMP_DIM_GANG:
8506 case GOMP_DIM_WORKER:
8507 case GOMP_DIM_VECTOR:
8508 break;
8509 default:
8510 error ("illegal argument 0 to %qs", name);
8511 return const0_rtx;
8514 if (ignore)
8515 return target;
8517 if (target == NULL_RTX)
8518 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8520 if (!targetm.have_oacc_dim_size ())
8522 emit_move_insn (target, fallback_retval);
8523 return target;
8526 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
8527 emit_insn (gen_fn (reg, GEN_INT (dim)));
8528 if (reg != target)
8529 emit_move_insn (target, reg);
8531 return target;
8534 /* Expand a string compare operation using a sequence of char comparison
8535 to get rid of the calling overhead, with result going to TARGET if
8536 that's convenient.
8538 VAR_STR is the variable string source;
8539 CONST_STR is the constant string source;
8540 LENGTH is the number of chars to compare;
8541 CONST_STR_N indicates which source string is the constant string;
8542 IS_MEMCMP indicates whether it's a memcmp or strcmp.
8544 The call is expanded to (assume const_str_n is 2, i.e., arg2 is a constant string):
8546 target = (int) (unsigned char) var_str[0]
8547 - (int) (unsigned char) const_str[0];
8548 if (target != 0)
8549 goto ne_label;
8551 target = (int) (unsigned char) var_str[length - 2]
8552 - (int) (unsigned char) const_str[length - 2];
8553 if (target != 0)
8554 goto ne_label;
8555 target = (int) (unsigned char) var_str[length - 1]
8556 - (int) (unsigned char) const_str[length - 1];
8557 ne_label:
8560 static rtx
8561 inline_string_cmp (rtx target, tree var_str, const char *const_str,
8562 unsigned HOST_WIDE_INT length,
8563 int const_str_n, machine_mode mode)
8565 HOST_WIDE_INT offset = 0;
8566 rtx var_rtx_array
8567 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
8568 rtx var_rtx = NULL_RTX;
8569 rtx const_rtx = NULL_RTX;
8570 rtx result = target ? target : gen_reg_rtx (mode);
8571 rtx_code_label *ne_label = gen_label_rtx ();
8572 tree unit_type_node = unsigned_char_type_node;
8573 scalar_int_mode unit_mode
8574 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
8576 start_sequence ();
8578 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
8580 var_rtx
8581 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
8582 const_rtx = c_readstr (const_str + offset, unit_mode);
8583 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
8584 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
8586 op0 = convert_modes (mode, unit_mode, op0, 1);
8587 op1 = convert_modes (mode, unit_mode, op1, 1);
8588 result = expand_simple_binop (mode, MINUS, op0, op1,
8589 result, 1, OPTAB_WIDEN);
8590 if (i < length - 1)
8591 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
8592 mode, true, ne_label);
8593 offset += GET_MODE_SIZE (unit_mode);
8596 emit_label (ne_label);
8597 rtx_insn *insns = get_insns ();
8598 end_sequence ();
8599 emit_insn (insns);
8601 return result;
8604 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
8605 to TARGET if that's convenient.
8606 If the call cannot be inlined, return NULL_RTX. */
8608 static rtx
8609 inline_expand_builtin_bytecmp (tree exp, rtx target)
8611 tree fndecl = get_callee_fndecl (exp);
8612 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8613 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
8615 /* Do NOT apply this inlining expansion when optimizing for size or
8616 the optimization level is below 2. */
8617 if (optimize < 2 || optimize_insn_for_size_p ())
8618 return NULL_RTX;
8620 gcc_checking_assert (fcode == BUILT_IN_STRCMP
8621 || fcode == BUILT_IN_STRNCMP
8622 || fcode == BUILT_IN_MEMCMP);
8624 /* On a target where the type of the call (int) has the same or narrower precision
8625 than unsigned char, give up the inlining expansion. */
8626 if (TYPE_PRECISION (unsigned_char_type_node)
8627 >= TYPE_PRECISION (TREE_TYPE (exp)))
8628 return NULL_RTX;
8630 tree arg1 = CALL_EXPR_ARG (exp, 0);
8631 tree arg2 = CALL_EXPR_ARG (exp, 1);
8632 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
8634 unsigned HOST_WIDE_INT len1 = 0;
8635 unsigned HOST_WIDE_INT len2 = 0;
8636 unsigned HOST_WIDE_INT len3 = 0;
8638 /* Get the object representation of the initializers of ARG1 and ARG2
8639 as strings, provided they refer to constant objects, with their byte
8640 sizes in LEN1 and LEN2, respectively. */
8641 const char *bytes1 = getbyterep (arg1, &len1);
8642 const char *bytes2 = getbyterep (arg2, &len2);
8644 /* Fail if neither argument refers to an initialized constant. */
8645 if (!bytes1 && !bytes2)
8646 return NULL_RTX;
8648 if (is_ncmp)
8650 /* Fail if the memcmp/strncmp bound is not a constant. */
8651 if (!tree_fits_uhwi_p (len3_tree))
8652 return NULL_RTX;
8654 len3 = tree_to_uhwi (len3_tree);
8656 if (fcode == BUILT_IN_MEMCMP)
8658 /* Fail if the memcmp bound is greater than the size of either
8659 of the two constant objects. */
8660 if ((bytes1 && len1 < len3)
8661 || (bytes2 && len2 < len3))
8662 return NULL_RTX;
8666 if (fcode != BUILT_IN_MEMCMP)
8668 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
8669 and LEN2 to the length of the nul-terminated string stored
8670 in each. */
8671 if (bytes1 != NULL)
8672 len1 = strnlen (bytes1, len1) + 1;
8673 if (bytes2 != NULL)
8674 len2 = strnlen (bytes2, len2) + 1;
8677 /* See inline_string_cmp. */
8678 int const_str_n;
8679 if (!len1)
8680 const_str_n = 2;
8681 else if (!len2)
8682 const_str_n = 1;
8683 else if (len2 > len1)
8684 const_str_n = 1;
8685 else
8686 const_str_n = 2;
8688 /* For strncmp only, compute the new bound as the smallest of
8689 the lengths of the two strings (plus 1) and the bound provided
8690 to the function. */
8691 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
8692 if (is_ncmp && len3 < bound)
8693 bound = len3;
8695 /* If the bound of the comparison is larger than the threshold,
8696 do nothing. */
8697 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
8698 return NULL_RTX;
8700 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8702 /* Now, start inline expansion of the call. */
8703 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
8704 (const_str_n == 1) ? bytes1 : bytes2, bound,
8705 const_str_n, mode);
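/* Example call (a sketch) that qualifies for the inline expansion above:
   at -O2, one argument is a constant string and the effective bound
   (here strlen ("yes") + 1 == 4) is below
   param_builtin_string_cmp_inline_length.  */

static int
example_is_yes (const char *s)
{
  return __builtin_strcmp (s, "yes") == 0;
}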
8708 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
8709 represents the size of the first argument to that call, or VOIDmode
8710 if the argument is a pointer. IGNORE will be true if the result
8711 isn't used. */
8712 static rtx
8713 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
8714 bool ignore)
8716 rtx val, failsafe;
8717 unsigned nargs = call_expr_nargs (exp);
8719 tree arg0 = CALL_EXPR_ARG (exp, 0);
8721 if (mode == VOIDmode)
8723 mode = TYPE_MODE (TREE_TYPE (arg0));
8724 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
8727 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
8729 /* An optional second argument can be used as a failsafe value on
8730 some machines. If it isn't present, then the failsafe value is
8731 assumed to be 0. */
8732 if (nargs > 1)
8734 tree arg1 = CALL_EXPR_ARG (exp, 1);
8735 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
8737 else
8738 failsafe = const0_rtx;
8740 /* If the result isn't used, the behavior is undefined. It would be
8741 nice to emit a warning here, but path splitting means this might
8742 happen with legitimate code. So simply drop the builtin
8743 expansion in that case; we've handled any side-effects above. */
8744 if (ignore)
8745 return const0_rtx;
8747 /* If we don't have a suitable target, create one to hold the result. */
8748 if (target == NULL || GET_MODE (target) != mode)
8749 target = gen_reg_rtx (mode);
8751 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
8752 val = convert_modes (mode, VOIDmode, val, false);
8754 return targetm.speculation_safe_value (mode, target, val, failsafe);
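/* User-level sketch of the Spectre-v1 hardening pattern this expander
   supports: on a misspeculated path the index is forced to the failsafe
   value (0 here), keeping the load in bounds.  */

static int
example_safe_array_read (const int *array, unsigned idx, unsigned n)
{
  if (idx < n)
    return array[__builtin_speculation_safe_value (idx)];
  return 0;
}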
8757 /* Expand an expression EXP that calls a built-in function,
8758 with result going to TARGET if that's convenient
8759 (and in mode MODE if that's convenient).
8760 SUBTARGET may be used as the target for computing one of EXP's operands.
8761 IGNORE is nonzero if the value is to be ignored. */
8764 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
8765 int ignore)
8767 tree fndecl = get_callee_fndecl (exp);
8768 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
8769 int flags;
8771 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8772 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
8774 /* When ASan is enabled, we don't want to expand some memory/string
8775 builtins and rely on libsanitizer's hooks. This allows us to avoid
8776 redundant checks and be sure, that possible overflow will be detected
8777 by ASan. */
8779 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8780 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
8781 return expand_call (exp, target, ignore);
8783 /* When not optimizing, generate calls to library functions for a certain
8784 set of builtins. */
8785 if (!optimize
8786 && !called_as_built_in (fndecl)
8787 && fcode != BUILT_IN_FORK
8788 && fcode != BUILT_IN_EXECL
8789 && fcode != BUILT_IN_EXECV
8790 && fcode != BUILT_IN_EXECLP
8791 && fcode != BUILT_IN_EXECLE
8792 && fcode != BUILT_IN_EXECVP
8793 && fcode != BUILT_IN_EXECVE
8794 && !ALLOCA_FUNCTION_CODE_P (fcode)
8795 && fcode != BUILT_IN_FREE)
8796 return expand_call (exp, target, ignore);
8798 /* The built-in function expanders test for target == const0_rtx
8799 to determine whether the function's result will be ignored. */
8800 if (ignore)
8801 target = const0_rtx;
8803 /* If the result of a pure or const built-in function is ignored, and
8804 none of its arguments are volatile, we can avoid expanding the
8805 built-in call and just evaluate the arguments for side-effects. */
8806 if (target == const0_rtx
8807 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
8808 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8810 bool volatilep = false;
8811 tree arg;
8812 call_expr_arg_iterator iter;
8814 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8815 if (TREE_THIS_VOLATILE (arg))
8817 volatilep = true;
8818 break;
8821 if (! volatilep)
8823 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8824 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8825 return const0_rtx;
8829 switch (fcode)
8831 CASE_FLT_FN (BUILT_IN_FABS):
8832 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8833 case BUILT_IN_FABSD32:
8834 case BUILT_IN_FABSD64:
8835 case BUILT_IN_FABSD128:
8836 target = expand_builtin_fabs (exp, target, subtarget);
8837 if (target)
8838 return target;
8839 break;
8841 CASE_FLT_FN (BUILT_IN_COPYSIGN):
8842 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
8843 target = expand_builtin_copysign (exp, target, subtarget);
8844 if (target)
8845 return target;
8846 break;
8848 /* Just do a normal library call if we were unable to fold
8849 the values. */
8850 CASE_FLT_FN (BUILT_IN_CABS):
8851 break;
8853 CASE_FLT_FN (BUILT_IN_FMA):
8854 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
8855 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
8856 if (target)
8857 return target;
8858 break;
8860 CASE_FLT_FN (BUILT_IN_ILOGB):
8861 if (! flag_unsafe_math_optimizations)
8862 break;
8863 gcc_fallthrough ();
8864 CASE_FLT_FN (BUILT_IN_ISINF):
8865 CASE_FLT_FN (BUILT_IN_FINITE):
8866 case BUILT_IN_ISFINITE:
8867 case BUILT_IN_ISNORMAL:
8868 target = expand_builtin_interclass_mathfn (exp, target);
8869 if (target)
8870 return target;
8871 break;
8873 CASE_FLT_FN (BUILT_IN_ICEIL):
8874 CASE_FLT_FN (BUILT_IN_LCEIL):
8875 CASE_FLT_FN (BUILT_IN_LLCEIL):
8876 CASE_FLT_FN (BUILT_IN_LFLOOR):
8877 CASE_FLT_FN (BUILT_IN_IFLOOR):
8878 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8879 target = expand_builtin_int_roundingfn (exp, target);
8880 if (target)
8881 return target;
8882 break;
8884 CASE_FLT_FN (BUILT_IN_IRINT):
8885 CASE_FLT_FN (BUILT_IN_LRINT):
8886 CASE_FLT_FN (BUILT_IN_LLRINT):
8887 CASE_FLT_FN (BUILT_IN_IROUND):
8888 CASE_FLT_FN (BUILT_IN_LROUND):
8889 CASE_FLT_FN (BUILT_IN_LLROUND):
8890 target = expand_builtin_int_roundingfn_2 (exp, target);
8891 if (target)
8892 return target;
8893 break;
8895 CASE_FLT_FN (BUILT_IN_POWI):
8896 target = expand_builtin_powi (exp, target);
8897 if (target)
8898 return target;
8899 break;
8901 CASE_FLT_FN (BUILT_IN_CEXPI):
8902 target = expand_builtin_cexpi (exp, target);
8903 gcc_assert (target);
8904 return target;
8906 CASE_FLT_FN (BUILT_IN_SIN):
8907 CASE_FLT_FN (BUILT_IN_COS):
8908 if (! flag_unsafe_math_optimizations)
8909 break;
8910 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8911 if (target)
8912 return target;
8913 break;
8915 CASE_FLT_FN (BUILT_IN_SINCOS):
8916 if (! flag_unsafe_math_optimizations)
8917 break;
8918 target = expand_builtin_sincos (exp);
8919 if (target)
8920 return target;
8921 break;
8923 case BUILT_IN_APPLY_ARGS:
8924 return expand_builtin_apply_args ();
8926 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8927 FUNCTION with a copy of the parameters described by
8928 ARGUMENTS, and ARGSIZE. It returns a block of memory
8929 allocated on the stack into which are stored all the registers
8930 that might possibly be used for returning the result of a
8931 function. ARGUMENTS is the value returned by
8932 __builtin_apply_args. ARGSIZE is the number of bytes of
8933 arguments that must be copied. ??? How should this value be
8934 computed? We'll also need a safe worst case value for varargs
8935 functions. */
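/* A minimal forwarding sketch using these builtins; the argument
   size of 64 is a made-up bound for illustration only:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where `target_fn' is a hypothetical function whose return type
   matches the caller's. */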
8936 case BUILT_IN_APPLY:
8937 if (!validate_arglist (exp, POINTER_TYPE,
8938 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8939 && !validate_arglist (exp, REFERENCE_TYPE,
8940 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8941 return const0_rtx;
8942 else
8944 rtx ops[3];
8946 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8947 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8948 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8950 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8953 /* __builtin_return (RESULT) causes the function to return the
8954 value described by RESULT. RESULT is address of the block of
8955 memory returned by __builtin_apply. */
8956 case BUILT_IN_RETURN:
8957 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8958 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8959 return const0_rtx;
8961 case BUILT_IN_SAVEREGS:
8962 return expand_builtin_saveregs ();
8964 case BUILT_IN_VA_ARG_PACK:
8965 /* All valid uses of __builtin_va_arg_pack () are removed during
8966 inlining. */
8967 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8968 return const0_rtx;
8970 case BUILT_IN_VA_ARG_PACK_LEN:
8971 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8972 inlining. */
8973 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8974 return const0_rtx;
8976 /* Return the address of the first anonymous stack arg. */
8977 case BUILT_IN_NEXT_ARG:
8978 if (fold_builtin_next_arg (exp, false))
8979 return const0_rtx;
8980 return expand_builtin_next_arg ();
8982 case BUILT_IN_CLEAR_CACHE:
8983 target = expand_builtin___clear_cache (exp);
8984 if (target)
8985 return target;
8986 break;
8988 case BUILT_IN_CLASSIFY_TYPE:
8989 return expand_builtin_classify_type (exp);
8991 case BUILT_IN_CONSTANT_P:
8992 return const0_rtx;
8994 case BUILT_IN_FRAME_ADDRESS:
8995 case BUILT_IN_RETURN_ADDRESS:
8996 return expand_builtin_frame_address (fndecl, exp);
8998 /* Returns the address of the area where the structure is returned,
8999 or 0 otherwise. */
9000 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9001 if (call_expr_nargs (exp) != 0
9002 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9003 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9004 return const0_rtx;
9005 else
9006 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9008 CASE_BUILT_IN_ALLOCA:
9009 target = expand_builtin_alloca (exp);
9010 if (target)
9011 return target;
9012 break;
9014 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9015 return expand_asan_emit_allocas_unpoison (exp);
9017 case BUILT_IN_STACK_SAVE:
9018 return expand_stack_save ();
9020 case BUILT_IN_STACK_RESTORE:
9021 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
9022 return const0_rtx;
9024 case BUILT_IN_BSWAP16:
9025 case BUILT_IN_BSWAP32:
9026 case BUILT_IN_BSWAP64:
9027 case BUILT_IN_BSWAP128:
9028 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
9029 if (target)
9030 return target;
9031 break;
9033 CASE_INT_FN (BUILT_IN_FFS):
9034 target = expand_builtin_unop (target_mode, exp, target,
9035 subtarget, ffs_optab);
9036 if (target)
9037 return target;
9038 break;
9040 CASE_INT_FN (BUILT_IN_CLZ):
9041 target = expand_builtin_unop (target_mode, exp, target,
9042 subtarget, clz_optab);
9043 if (target)
9044 return target;
9045 break;
9047 CASE_INT_FN (BUILT_IN_CTZ):
9048 target = expand_builtin_unop (target_mode, exp, target,
9049 subtarget, ctz_optab);
9050 if (target)
9051 return target;
9052 break;
9054 CASE_INT_FN (BUILT_IN_CLRSB):
9055 target = expand_builtin_unop (target_mode, exp, target,
9056 subtarget, clrsb_optab);
9057 if (target)
9058 return target;
9059 break;
9061 CASE_INT_FN (BUILT_IN_POPCOUNT):
9062 target = expand_builtin_unop (target_mode, exp, target,
9063 subtarget, popcount_optab);
9064 if (target)
9065 return target;
9066 break;
9068 CASE_INT_FN (BUILT_IN_PARITY):
9069 target = expand_builtin_unop (target_mode, exp, target,
9070 subtarget, parity_optab);
9071 if (target)
9072 return target;
9073 break;
9075 case BUILT_IN_STRLEN:
9076 target = expand_builtin_strlen (exp, target, target_mode);
9077 if (target)
9078 return target;
9079 break;
9081 case BUILT_IN_STRNLEN:
9082 target = expand_builtin_strnlen (exp, target, target_mode);
9083 if (target)
9084 return target;
9085 break;
9087 case BUILT_IN_STRCAT:
9088 target = expand_builtin_strcat (exp);
9089 if (target)
9090 return target;
9091 break;
9093 case BUILT_IN_GETTEXT:
9094 case BUILT_IN_PUTS:
9095 case BUILT_IN_PUTS_UNLOCKED:
9096 case BUILT_IN_STRDUP:
9097 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9098 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9099 break;
9101 case BUILT_IN_INDEX:
9102 case BUILT_IN_RINDEX:
9103 case BUILT_IN_STRCHR:
9104 case BUILT_IN_STRRCHR:
9105 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9106 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9107 break;
9109 case BUILT_IN_FPUTS:
9110 case BUILT_IN_FPUTS_UNLOCKED:
9111 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9112 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9113 break;
9115 case BUILT_IN_STRNDUP:
9116 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9117 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
9118 break;
9120 case BUILT_IN_STRCASECMP:
9121 case BUILT_IN_STRPBRK:
9122 case BUILT_IN_STRSPN:
9123 case BUILT_IN_STRCSPN:
9124 case BUILT_IN_STRSTR:
9125 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9127 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9128 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
9130 break;
9132 case BUILT_IN_STRCPY:
9133 target = expand_builtin_strcpy (exp, target);
9134 if (target)
9135 return target;
9136 break;
9138 case BUILT_IN_STRNCAT:
9139 target = expand_builtin_strncat (exp, target);
9140 if (target)
9141 return target;
9142 break;
9144 case BUILT_IN_STRNCPY:
9145 target = expand_builtin_strncpy (exp, target);
9146 if (target)
9147 return target;
9148 break;
9150 case BUILT_IN_STPCPY:
9151 target = expand_builtin_stpcpy (exp, target, mode);
9152 if (target)
9153 return target;
9154 break;
9156 case BUILT_IN_STPNCPY:
9157 target = expand_builtin_stpncpy (exp, target);
9158 if (target)
9159 return target;
9160 break;
9162 case BUILT_IN_MEMCHR:
9163 target = expand_builtin_memchr (exp, target);
9164 if (target)
9165 return target;
9166 break;
9168 case BUILT_IN_MEMCPY:
9169 target = expand_builtin_memcpy (exp, target);
9170 if (target)
9171 return target;
9172 break;
9174 case BUILT_IN_MEMMOVE:
9175 target = expand_builtin_memmove (exp, target);
9176 if (target)
9177 return target;
9178 break;
9180 case BUILT_IN_MEMPCPY:
9181 target = expand_builtin_mempcpy (exp, target);
9182 if (target)
9183 return target;
9184 break;
9186 case BUILT_IN_MEMSET:
9187 target = expand_builtin_memset (exp, target, mode);
9188 if (target)
9189 return target;
9190 break;
9192 case BUILT_IN_BZERO:
9193 target = expand_builtin_bzero (exp);
9194 if (target)
9195 return target;
9196 break;
9198 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9199 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
9200 when changing it to a strcmp call. */
9201 case BUILT_IN_STRCMP_EQ:
9202 target = expand_builtin_memcmp (exp, target, true);
9203 if (target)
9204 return target;
9206 /* Change this call back to a BUILT_IN_STRCMP. */
9207 TREE_OPERAND (exp, 1)
9208 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9210 /* Delete the last parameter. */
9211 unsigned int i;
9212 vec<tree, va_gc> *arg_vec;
9213 vec_alloc (arg_vec, 2);
9214 for (i = 0; i < 2; i++)
9215 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
9216 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
9217 /* FALLTHROUGH */
9219 case BUILT_IN_STRCMP:
9220 target = expand_builtin_strcmp (exp, target);
9221 if (target)
9222 return target;
9223 break;
9225 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9226 back to a BUILT_IN_STRNCMP. */
9227 case BUILT_IN_STRNCMP_EQ:
9228 target = expand_builtin_memcmp (exp, target, true);
9229 if (target)
9230 return target;
9232 /* Change it back to a BUILT_IN_STRNCMP. */
9233 TREE_OPERAND (exp, 1)
9234 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
9235 /* FALLTHROUGH */
9237 case BUILT_IN_STRNCMP:
9238 target = expand_builtin_strncmp (exp, target, mode);
9239 if (target)
9240 return target;
9241 break;
9243 case BUILT_IN_BCMP:
9244 case BUILT_IN_MEMCMP:
9245 case BUILT_IN_MEMCMP_EQ:
9246 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
9247 if (target)
9248 return target;
9249 if (fcode == BUILT_IN_MEMCMP_EQ)
9251 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
9252 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
9254 break;
9256 case BUILT_IN_SETJMP:
9257 /* This should have been lowered to the builtins below. */
9258 gcc_unreachable ();
9260 case BUILT_IN_SETJMP_SETUP:
9261 /* __builtin_setjmp_setup is passed a pointer to an array of five words
9262 and the receiver label. */
9263 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9265 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
9266 VOIDmode, EXPAND_NORMAL);
9267 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9268 rtx_insn *label_r = label_rtx (label);
9270 /* This is copied from the handling of non-local gotos. */
9271 expand_builtin_setjmp_setup (buf_addr, label_r);
9272 nonlocal_goto_handler_labels
9273 = gen_rtx_INSN_LIST (VOIDmode, label_r,
9274 nonlocal_goto_handler_labels);
9275 /* ??? Do not let expand_label treat us as such since we would
9276 not want to be both on the list of non-local labels and on
9277 the list of forced labels. */
9278 FORCED_LABEL (label) = 0;
9279 return const0_rtx;
9281 break;
9283 case BUILT_IN_SETJMP_RECEIVER:
9284 /* __builtin_setjmp_receiver is passed the receiver label. */
9285 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9287 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9288 rtx_insn *label_r = label_rtx (label);
9290 expand_builtin_setjmp_receiver (label_r);
9291 return const0_rtx;
9293 break;
9295 /* __builtin_longjmp is passed a pointer to an array of five words.
9296 It's similar to the C library longjmp function but works with
9297 __builtin_setjmp above. */
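/* Sketch of the intended pairing, BUF being a five-word buffer and
   `do_work'/`handle_return' hypothetical placeholder functions:

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();   -- may invoke __builtin_longjmp (buf, 1)
     else
       handle_return ();

   The second argument of __builtin_longjmp must be the constant 1,
   which is verified below. */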
9298 case BUILT_IN_LONGJMP:
9299 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9301 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
9302 VOIDmode, EXPAND_NORMAL);
9303 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
9305 if (value != const1_rtx)
9307 error ("%<__builtin_longjmp%> second argument must be 1");
9308 return const0_rtx;
9311 expand_builtin_longjmp (buf_addr, value);
9312 return const0_rtx;
9314 break;
9316 case BUILT_IN_NONLOCAL_GOTO:
9317 target = expand_builtin_nonlocal_goto (exp);
9318 if (target)
9319 return target;
9320 break;
9322 /* This updates the setjmp buffer that is its argument with the value
9323 of the current stack pointer. */
9324 case BUILT_IN_UPDATE_SETJMP_BUF:
9325 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9327 rtx buf_addr
9328 = expand_normal (CALL_EXPR_ARG (exp, 0));
9330 expand_builtin_update_setjmp_buf (buf_addr);
9331 return const0_rtx;
9333 break;
9335 case BUILT_IN_TRAP:
9336 expand_builtin_trap ();
9337 return const0_rtx;
9339 case BUILT_IN_UNREACHABLE:
9340 expand_builtin_unreachable ();
9341 return const0_rtx;
9343 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9344 case BUILT_IN_SIGNBITD32:
9345 case BUILT_IN_SIGNBITD64:
9346 case BUILT_IN_SIGNBITD128:
9347 target = expand_builtin_signbit (exp, target);
9348 if (target)
9349 return target;
9350 break;
9352 /* Various hooks for the DWARF 2 __throw routine. */
9353 case BUILT_IN_UNWIND_INIT:
9354 expand_builtin_unwind_init ();
9355 return const0_rtx;
9356 case BUILT_IN_DWARF_CFA:
9357 return virtual_cfa_rtx;
9358 #ifdef DWARF2_UNWIND_INFO
9359 case BUILT_IN_DWARF_SP_COLUMN:
9360 return expand_builtin_dwarf_sp_column ();
9361 case BUILT_IN_INIT_DWARF_REG_SIZES:
9362 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
9363 return const0_rtx;
9364 #endif
9365 case BUILT_IN_FROB_RETURN_ADDR:
9366 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
9367 case BUILT_IN_EXTRACT_RETURN_ADDR:
9368 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
9369 case BUILT_IN_EH_RETURN:
9370 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
9371 CALL_EXPR_ARG (exp, 1));
9372 return const0_rtx;
9373 case BUILT_IN_EH_RETURN_DATA_REGNO:
9374 return expand_builtin_eh_return_data_regno (exp);
9375 case BUILT_IN_EXTEND_POINTER:
9376 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
9377 case BUILT_IN_EH_POINTER:
9378 return expand_builtin_eh_pointer (exp);
9379 case BUILT_IN_EH_FILTER:
9380 return expand_builtin_eh_filter (exp);
9381 case BUILT_IN_EH_COPY_VALUES:
9382 return expand_builtin_eh_copy_values (exp);
9384 case BUILT_IN_VA_START:
9385 return expand_builtin_va_start (exp);
9386 case BUILT_IN_VA_END:
9387 return expand_builtin_va_end (exp);
9388 case BUILT_IN_VA_COPY:
9389 return expand_builtin_va_copy (exp);
9390 case BUILT_IN_EXPECT:
9391 return expand_builtin_expect (exp, target);
9392 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9393 return expand_builtin_expect_with_probability (exp, target);
9394 case BUILT_IN_ASSUME_ALIGNED:
9395 return expand_builtin_assume_aligned (exp, target);
9396 case BUILT_IN_PREFETCH:
9397 expand_builtin_prefetch (exp);
9398 return const0_rtx;
9400 case BUILT_IN_INIT_TRAMPOLINE:
9401 return expand_builtin_init_trampoline (exp, true);
9402 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
9403 return expand_builtin_init_trampoline (exp, false);
9404 case BUILT_IN_ADJUST_TRAMPOLINE:
9405 return expand_builtin_adjust_trampoline (exp);
9407 case BUILT_IN_INIT_DESCRIPTOR:
9408 return expand_builtin_init_descriptor (exp);
9409 case BUILT_IN_ADJUST_DESCRIPTOR:
9410 return expand_builtin_adjust_descriptor (exp);
9412 case BUILT_IN_FORK:
9413 case BUILT_IN_EXECL:
9414 case BUILT_IN_EXECV:
9415 case BUILT_IN_EXECLP:
9416 case BUILT_IN_EXECLE:
9417 case BUILT_IN_EXECVP:
9418 case BUILT_IN_EXECVE:
9419 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
9420 if (target)
9421 return target;
9422 break;
9424 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
9425 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
9426 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
9427 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
9428 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
9429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
9430 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
9431 if (target)
9432 return target;
9433 break;
9435 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
9436 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
9437 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
9438 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
9439 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
9440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
9441 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
9442 if (target)
9443 return target;
9444 break;
9446 case BUILT_IN_SYNC_FETCH_AND_OR_1:
9447 case BUILT_IN_SYNC_FETCH_AND_OR_2:
9448 case BUILT_IN_SYNC_FETCH_AND_OR_4:
9449 case BUILT_IN_SYNC_FETCH_AND_OR_8:
9450 case BUILT_IN_SYNC_FETCH_AND_OR_16:
9451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
9452 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
9453 if (target)
9454 return target;
9455 break;
9457 case BUILT_IN_SYNC_FETCH_AND_AND_1:
9458 case BUILT_IN_SYNC_FETCH_AND_AND_2:
9459 case BUILT_IN_SYNC_FETCH_AND_AND_4:
9460 case BUILT_IN_SYNC_FETCH_AND_AND_8:
9461 case BUILT_IN_SYNC_FETCH_AND_AND_16:
9462 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
9463 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
9464 if (target)
9465 return target;
9466 break;
9468 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
9469 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
9470 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
9471 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
9472 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
9473 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
9474 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
9475 if (target)
9476 return target;
9477 break;
9479 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
9480 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
9481 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
9482 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
9483 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
9484 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
9485 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
9486 if (target)
9487 return target;
9488 break;
9490 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
9491 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
9492 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
9493 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
9494 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
9495 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
9496 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
9497 if (target)
9498 return target;
9499 break;
9501 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
9502 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
9503 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
9504 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
9505 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
9506 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
9507 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
9508 if (target)
9509 return target;
9510 break;
9512 case BUILT_IN_SYNC_OR_AND_FETCH_1:
9513 case BUILT_IN_SYNC_OR_AND_FETCH_2:
9514 case BUILT_IN_SYNC_OR_AND_FETCH_4:
9515 case BUILT_IN_SYNC_OR_AND_FETCH_8:
9516 case BUILT_IN_SYNC_OR_AND_FETCH_16:
9517 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
9518 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
9519 if (target)
9520 return target;
9521 break;
9523 case BUILT_IN_SYNC_AND_AND_FETCH_1:
9524 case BUILT_IN_SYNC_AND_AND_FETCH_2:
9525 case BUILT_IN_SYNC_AND_AND_FETCH_4:
9526 case BUILT_IN_SYNC_AND_AND_FETCH_8:
9527 case BUILT_IN_SYNC_AND_AND_FETCH_16:
9528 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
9529 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
9530 if (target)
9531 return target;
9532 break;
9534 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
9535 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
9536 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
9537 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
9538 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
9539 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
9540 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
9541 if (target)
9542 return target;
9543 break;
9545 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
9546 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
9547 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
9548 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
9549 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
9550 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
9551 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
9552 if (target)
9553 return target;
9554 break;
9556 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
9557 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
9558 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
9559 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
9560 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
9561 if (mode == VOIDmode)
9562 mode = TYPE_MODE (boolean_type_node);
9563 if (!target || !register_operand (target, mode))
9564 target = gen_reg_rtx (mode);
9566 mode = get_builtin_sync_mode
9567 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
9568 target = expand_builtin_compare_and_swap (mode, exp, true, target);
9569 if (target)
9570 return target;
9571 break;
9573 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
9574 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
9575 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
9576 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
9577 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
9578 mode = get_builtin_sync_mode
9579 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
9580 target = expand_builtin_compare_and_swap (mode, exp, false, target);
9581 if (target)
9582 return target;
9583 break;
9585 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
9586 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
9587 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
9588 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
9589 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
9590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
9591 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
9592 if (target)
9593 return target;
9594 break;
9596 case BUILT_IN_SYNC_LOCK_RELEASE_1:
9597 case BUILT_IN_SYNC_LOCK_RELEASE_2:
9598 case BUILT_IN_SYNC_LOCK_RELEASE_4:
9599 case BUILT_IN_SYNC_LOCK_RELEASE_8:
9600 case BUILT_IN_SYNC_LOCK_RELEASE_16:
9601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
9602 expand_builtin_sync_lock_release (mode, exp);
9603 return const0_rtx;
9605 case BUILT_IN_SYNC_SYNCHRONIZE:
9606 expand_builtin_sync_synchronize ();
9607 return const0_rtx;
9609 case BUILT_IN_ATOMIC_EXCHANGE_1:
9610 case BUILT_IN_ATOMIC_EXCHANGE_2:
9611 case BUILT_IN_ATOMIC_EXCHANGE_4:
9612 case BUILT_IN_ATOMIC_EXCHANGE_8:
9613 case BUILT_IN_ATOMIC_EXCHANGE_16:
9614 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
9615 target = expand_builtin_atomic_exchange (mode, exp, target);
9616 if (target)
9617 return target;
9618 break;
9620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
9621 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
9622 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
9623 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
9624 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
9626 unsigned int nargs, z;
9627 vec<tree, va_gc> *vec;
9629 mode =
9630 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
9631 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
9632 if (target)
9633 return target;
9635 /* If this is turned into an external library call, the weak parameter
9636 must be dropped to match the expected parameter list. */
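/* Arguments 0-2 (the pointer, expected and desired values) and 4-5
   (the success and failure memory orders) are kept; argument 3 is
   the dropped `weak' flag. */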
9637 nargs = call_expr_nargs (exp);
9638 vec_alloc (vec, nargs - 1);
9639 for (z = 0; z < 3; z++)
9640 vec->quick_push (CALL_EXPR_ARG (exp, z));
9641 /* Skip the boolean weak parameter. */
9642 for (z = 4; z < 6; z++)
9643 vec->quick_push (CALL_EXPR_ARG (exp, z));
9644 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
9645 break;
9648 case BUILT_IN_ATOMIC_LOAD_1:
9649 case BUILT_IN_ATOMIC_LOAD_2:
9650 case BUILT_IN_ATOMIC_LOAD_4:
9651 case BUILT_IN_ATOMIC_LOAD_8:
9652 case BUILT_IN_ATOMIC_LOAD_16:
9653 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
9654 target = expand_builtin_atomic_load (mode, exp, target);
9655 if (target)
9656 return target;
9657 break;
9659 case BUILT_IN_ATOMIC_STORE_1:
9660 case BUILT_IN_ATOMIC_STORE_2:
9661 case BUILT_IN_ATOMIC_STORE_4:
9662 case BUILT_IN_ATOMIC_STORE_8:
9663 case BUILT_IN_ATOMIC_STORE_16:
9664 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
9665 target = expand_builtin_atomic_store (mode, exp);
9666 if (target)
9667 return const0_rtx;
9668 break;
9670 case BUILT_IN_ATOMIC_ADD_FETCH_1:
9671 case BUILT_IN_ATOMIC_ADD_FETCH_2:
9672 case BUILT_IN_ATOMIC_ADD_FETCH_4:
9673 case BUILT_IN_ATOMIC_ADD_FETCH_8:
9674 case BUILT_IN_ATOMIC_ADD_FETCH_16:
9676 enum built_in_function lib;
9677 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
9678 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
9679 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
9680 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
9681 ignore, lib);
9682 if (target)
9683 return target;
9684 break;
9686 case BUILT_IN_ATOMIC_SUB_FETCH_1:
9687 case BUILT_IN_ATOMIC_SUB_FETCH_2:
9688 case BUILT_IN_ATOMIC_SUB_FETCH_4:
9689 case BUILT_IN_ATOMIC_SUB_FETCH_8:
9690 case BUILT_IN_ATOMIC_SUB_FETCH_16:
9692 enum built_in_function lib;
9693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
9694 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
9695 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
9696 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
9697 ignore, lib);
9698 if (target)
9699 return target;
9700 break;
9702 case BUILT_IN_ATOMIC_AND_FETCH_1:
9703 case BUILT_IN_ATOMIC_AND_FETCH_2:
9704 case BUILT_IN_ATOMIC_AND_FETCH_4:
9705 case BUILT_IN_ATOMIC_AND_FETCH_8:
9706 case BUILT_IN_ATOMIC_AND_FETCH_16:
9708 enum built_in_function lib;
9709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
9710 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
9711 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
9712 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
9713 ignore, lib);
9714 if (target)
9715 return target;
9716 break;
9718 case BUILT_IN_ATOMIC_NAND_FETCH_1:
9719 case BUILT_IN_ATOMIC_NAND_FETCH_2:
9720 case BUILT_IN_ATOMIC_NAND_FETCH_4:
9721 case BUILT_IN_ATOMIC_NAND_FETCH_8:
9722 case BUILT_IN_ATOMIC_NAND_FETCH_16:
9724 enum built_in_function lib;
9725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
9726 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
9727 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
9728 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
9729 ignore, lib);
9730 if (target)
9731 return target;
9732 break;
9734 case BUILT_IN_ATOMIC_XOR_FETCH_1:
9735 case BUILT_IN_ATOMIC_XOR_FETCH_2:
9736 case BUILT_IN_ATOMIC_XOR_FETCH_4:
9737 case BUILT_IN_ATOMIC_XOR_FETCH_8:
9738 case BUILT_IN_ATOMIC_XOR_FETCH_16:
9740 enum built_in_function lib;
9741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
9742 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
9743 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
9744 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
9745 ignore, lib);
9746 if (target)
9747 return target;
9748 break;
9750 case BUILT_IN_ATOMIC_OR_FETCH_1:
9751 case BUILT_IN_ATOMIC_OR_FETCH_2:
9752 case BUILT_IN_ATOMIC_OR_FETCH_4:
9753 case BUILT_IN_ATOMIC_OR_FETCH_8:
9754 case BUILT_IN_ATOMIC_OR_FETCH_16:
9756 enum built_in_function lib;
9757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
9758 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
9759 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
9760 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
9761 ignore, lib);
9762 if (target)
9763 return target;
9764 break;
9766 case BUILT_IN_ATOMIC_FETCH_ADD_1:
9767 case BUILT_IN_ATOMIC_FETCH_ADD_2:
9768 case BUILT_IN_ATOMIC_FETCH_ADD_4:
9769 case BUILT_IN_ATOMIC_FETCH_ADD_8:
9770 case BUILT_IN_ATOMIC_FETCH_ADD_16:
9771 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
9772 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
9773 ignore, BUILT_IN_NONE);
9774 if (target)
9775 return target;
9776 break;
9778 case BUILT_IN_ATOMIC_FETCH_SUB_1:
9779 case BUILT_IN_ATOMIC_FETCH_SUB_2:
9780 case BUILT_IN_ATOMIC_FETCH_SUB_4:
9781 case BUILT_IN_ATOMIC_FETCH_SUB_8:
9782 case BUILT_IN_ATOMIC_FETCH_SUB_16:
9783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
9784 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
9785 ignore, BUILT_IN_NONE);
9786 if (target)
9787 return target;
9788 break;
9790 case BUILT_IN_ATOMIC_FETCH_AND_1:
9791 case BUILT_IN_ATOMIC_FETCH_AND_2:
9792 case BUILT_IN_ATOMIC_FETCH_AND_4:
9793 case BUILT_IN_ATOMIC_FETCH_AND_8:
9794 case BUILT_IN_ATOMIC_FETCH_AND_16:
9795 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
9796 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
9797 ignore, BUILT_IN_NONE);
9798 if (target)
9799 return target;
9800 break;
9802 case BUILT_IN_ATOMIC_FETCH_NAND_1:
9803 case BUILT_IN_ATOMIC_FETCH_NAND_2:
9804 case BUILT_IN_ATOMIC_FETCH_NAND_4:
9805 case BUILT_IN_ATOMIC_FETCH_NAND_8:
9806 case BUILT_IN_ATOMIC_FETCH_NAND_16:
9807 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
9808 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
9809 ignore, BUILT_IN_NONE);
9810 if (target)
9811 return target;
9812 break;
9814 case BUILT_IN_ATOMIC_FETCH_XOR_1:
9815 case BUILT_IN_ATOMIC_FETCH_XOR_2:
9816 case BUILT_IN_ATOMIC_FETCH_XOR_4:
9817 case BUILT_IN_ATOMIC_FETCH_XOR_8:
9818 case BUILT_IN_ATOMIC_FETCH_XOR_16:
9819 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
9820 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
9821 ignore, BUILT_IN_NONE);
9822 if (target)
9823 return target;
9824 break;
9826 case BUILT_IN_ATOMIC_FETCH_OR_1:
9827 case BUILT_IN_ATOMIC_FETCH_OR_2:
9828 case BUILT_IN_ATOMIC_FETCH_OR_4:
9829 case BUILT_IN_ATOMIC_FETCH_OR_8:
9830 case BUILT_IN_ATOMIC_FETCH_OR_16:
9831 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
9832 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
9833 ignore, BUILT_IN_NONE);
9834 if (target)
9835 return target;
9836 break;
9838 case BUILT_IN_ATOMIC_TEST_AND_SET:
9839 return expand_builtin_atomic_test_and_set (exp, target);
9841 case BUILT_IN_ATOMIC_CLEAR:
9842 return expand_builtin_atomic_clear (exp);
9844 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9845 return expand_builtin_atomic_always_lock_free (exp);
9847 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9848 target = expand_builtin_atomic_is_lock_free (exp);
9849 if (target)
9850 return target;
9851 break;
9853 case BUILT_IN_ATOMIC_THREAD_FENCE:
9854 expand_builtin_atomic_thread_fence (exp);
9855 return const0_rtx;
9857 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
9858 expand_builtin_atomic_signal_fence (exp);
9859 return const0_rtx;
9861 case BUILT_IN_OBJECT_SIZE:
9862 return expand_builtin_object_size (exp);
9864 case BUILT_IN_MEMCPY_CHK:
9865 case BUILT_IN_MEMPCPY_CHK:
9866 case BUILT_IN_MEMMOVE_CHK:
9867 case BUILT_IN_MEMSET_CHK:
9868 target = expand_builtin_memory_chk (exp, target, mode, fcode);
9869 if (target)
9870 return target;
9871 break;
9873 case BUILT_IN_STRCPY_CHK:
9874 case BUILT_IN_STPCPY_CHK:
9875 case BUILT_IN_STRNCPY_CHK:
9876 case BUILT_IN_STPNCPY_CHK:
9877 case BUILT_IN_STRCAT_CHK:
9878 case BUILT_IN_STRNCAT_CHK:
9879 case BUILT_IN_SNPRINTF_CHK:
9880 case BUILT_IN_VSNPRINTF_CHK:
9881 maybe_emit_chk_warning (exp, fcode);
9882 break;
9884 case BUILT_IN_SPRINTF_CHK:
9885 case BUILT_IN_VSPRINTF_CHK:
9886 maybe_emit_sprintf_chk_warning (exp, fcode);
9887 break;
9889 case BUILT_IN_FREE:
9890 if (warn_free_nonheap_object)
9891 maybe_emit_free_warning (exp);
9892 break;
9894 case BUILT_IN_THREAD_POINTER:
9895 return expand_builtin_thread_pointer (exp, target);
9897 case BUILT_IN_SET_THREAD_POINTER:
9898 expand_builtin_set_thread_pointer (exp);
9899 return const0_rtx;
9901 case BUILT_IN_ACC_ON_DEVICE:
9902 /* Do a library call if we failed to expand the builtin when
9903 folding. */
9904 break;
9906 case BUILT_IN_GOACC_PARLEVEL_ID:
9907 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9908 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9910 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9911 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9913 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9914 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9915 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9916 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9917 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9918 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9919 return expand_speculation_safe_value (mode, exp, target, ignore);
9921 default: /* Just do a library call for an unknown builtin. */
9922 break;
9925 /* The switch statement above can drop through to cause the function
9926 to be called normally. */
9927 return expand_call (exp, target, ignore);
9930 /* Determine whether a tree node represents a call to a built-in
9931 function. If the tree T is a call to a built-in function with
9932 the right number of arguments of the appropriate types, return
9933 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9934 Otherwise the return value is END_BUILTINS. */
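/* For example, a well-formed call `sqrt (2.0)' yields BUILT_IN_SQRT,
   while calling sqrt with a pointer argument yields END_BUILTINS
   because the argument fails the scalar-float check below. */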
9936 enum built_in_function
9937 builtin_mathfn_code (const_tree t)
9939 const_tree fndecl, arg, parmlist;
9940 const_tree argtype, parmtype;
9941 const_call_expr_arg_iterator iter;
9943 if (TREE_CODE (t) != CALL_EXPR)
9944 return END_BUILTINS;
9946 fndecl = get_callee_fndecl (t);
9947 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9948 return END_BUILTINS;
9950 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9951 init_const_call_expr_arg_iterator (t, &iter);
9952 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9954 /* If a function doesn't take a variable number of arguments,
9955 the last element in the list will have type `void'. */
9956 parmtype = TREE_VALUE (parmlist);
9957 if (VOID_TYPE_P (parmtype))
9959 if (more_const_call_expr_args_p (&iter))
9960 return END_BUILTINS;
9961 return DECL_FUNCTION_CODE (fndecl);
9964 if (! more_const_call_expr_args_p (&iter))
9965 return END_BUILTINS;
9967 arg = next_const_call_expr_arg (&iter);
9968 argtype = TREE_TYPE (arg);
9970 if (SCALAR_FLOAT_TYPE_P (parmtype))
9972 if (! SCALAR_FLOAT_TYPE_P (argtype))
9973 return END_BUILTINS;
9975 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9977 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9978 return END_BUILTINS;
9980 else if (POINTER_TYPE_P (parmtype))
9982 if (! POINTER_TYPE_P (argtype))
9983 return END_BUILTINS;
9985 else if (INTEGRAL_TYPE_P (parmtype))
9987 if (! INTEGRAL_TYPE_P (argtype))
9988 return END_BUILTINS;
9990 else
9991 return END_BUILTINS;
9994 /* Variable-length argument list. */
9995 return DECL_FUNCTION_CODE (fndecl);
9998 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9999 evaluate to a constant. */
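/* For example, __builtin_constant_p (3) folds to 1 right away, while
   for a plain automatic variable we return NULL_TREE below so the
   decision is deferred until later optimizations have had a chance
   to prove the value constant. */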
10001 static tree
10002 fold_builtin_constant_p (tree arg)
10004 /* We return 1 for a numeric type that's known to be a constant
10005 value at compile-time or for an aggregate type that's a
10006 literal constant. */
10007 STRIP_NOPS (arg);
10009 /* If we know this is a constant, return the constant 1. */
10010 if (CONSTANT_CLASS_P (arg)
10011 || (TREE_CODE (arg) == CONSTRUCTOR
10012 && TREE_CONSTANT (arg)))
10013 return integer_one_node;
10014 if (TREE_CODE (arg) == ADDR_EXPR)
10016 tree op = TREE_OPERAND (arg, 0);
10017 if (TREE_CODE (op) == STRING_CST
10018 || (TREE_CODE (op) == ARRAY_REF
10019 && integer_zerop (TREE_OPERAND (op, 1))
10020 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10021 return integer_one_node;
10024 /* If this expression has side effects, show we don't know it to be a
10025 constant. Likewise if it's a pointer or aggregate type, since in
10026 those cases we only want literals, as those are only optimized
10027 when generating RTL, not later.
10028 And finally, if we are compiling an initializer, not code, we
10029 need to return a definite result now; there's not going to be any
10030 more optimization done. */
10031 if (TREE_SIDE_EFFECTS (arg)
10032 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10033 || POINTER_TYPE_P (TREE_TYPE (arg))
10034 || cfun == 0
10035 || folding_initializer
10036 || force_folding_builtin_constant_p)
10037 return integer_zero_node;
10039 return NULL_TREE;
10042 /* Create builtin_expect or builtin_expect_with_probability
10043 with PRED and EXPECTED as its arguments and return it as a truthvalue.
10044 The Fortran FE can also produce builtin_expect with PREDICTOR as its
10045 third argument; builtin_expect_with_probability instead uses its
10046 third argument as the PROBABILITY value. */
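/* E.g. for PRED `a > b' and EXPECTED 1 this builds the truthvalue
   `__builtin_expect (a > b, 1) != 0'. */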
10048 static tree
10049 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10050 tree predictor, tree probability)
10052 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10054 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10055 : BUILT_IN_EXPECT_WITH_PROBABILITY);
10056 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10057 ret_type = TREE_TYPE (TREE_TYPE (fn));
10058 pred_type = TREE_VALUE (arg_types);
10059 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10061 pred = fold_convert_loc (loc, pred_type, pred);
10062 expected = fold_convert_loc (loc, expected_type, expected);
10064 if (probability)
10065 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10066 else
10067 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10068 predictor);
10070 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10071 build_int_cst (ret_type, 0));
10074 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
10075 NULL_TREE if no simplification is possible. */
10077 tree
10078 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10079 tree arg3)
10081 tree inner, fndecl, inner_arg0;
10082 enum tree_code code;
10084 /* Distribute the expected value over short-circuiting operators.
10085 See through the cast from truthvalue_type_node to long. */
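/* E.g. __builtin_expect (a && b, 1) is rewritten below (as
   truthvalues) into
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   so that each short-circuit operand carries the prediction. */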
10086 inner_arg0 = arg0;
10087 while (CONVERT_EXPR_P (inner_arg0)
10088 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10089 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10090 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10092 /* If this is a builtin_expect within a builtin_expect, keep the
10093 inner one. See through a comparison against a constant; it
10094 might have been added to create a truthvalue. */
10095 inner = inner_arg0;
10097 if (COMPARISON_CLASS_P (inner)
10098 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10099 inner = TREE_OPERAND (inner, 0);
10101 if (TREE_CODE (inner) == CALL_EXPR
10102 && (fndecl = get_callee_fndecl (inner))
10103 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10104 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10105 return arg0;
10107 inner = inner_arg0;
10108 code = TREE_CODE (inner);
10109 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10111 tree op0 = TREE_OPERAND (inner, 0);
10112 tree op1 = TREE_OPERAND (inner, 1);
10113 arg1 = save_expr (arg1);
10115 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10116 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10117 inner = build2 (code, TREE_TYPE (inner), op0, op1);
10119 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10122 /* If the argument isn't invariant then there's nothing else we can do. */
10123 if (!TREE_CONSTANT (inner_arg0))
10124 return NULL_TREE;
10126 /* If we expect that a comparison against the argument will fold to
10127 a constant, return the constant. In practice, this means a true
10128 constant or the address of a non-weak symbol. */
10129 inner = inner_arg0;
10130 STRIP_NOPS (inner);
10131 if (TREE_CODE (inner) == ADDR_EXPR)
10135 inner = TREE_OPERAND (inner, 0);
10137 while (TREE_CODE (inner) == COMPONENT_REF
10138 || TREE_CODE (inner) == ARRAY_REF);
10139 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10140 return NULL_TREE;
10143 /* Otherwise, ARG0 already has the proper type for the return value. */
10144 return arg0;
10147 /* Fold a call to __builtin_classify_type with argument ARG. */
10149 static tree
10150 fold_builtin_classify_type (tree arg)
10152 if (arg == 0)
10153 return build_int_cst (integer_type_node, no_type_class);
10155 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10158 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10159 ARG. */
10161 static tree
10162 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10164 if (!validate_arg (arg, POINTER_TYPE))
10165 return NULL_TREE;
10166 else
10168 c_strlen_data lendata = { };
10169 tree len = c_strlen (arg, 0, &lendata);
10171 if (len)
10172 return fold_convert_loc (loc, type, len);
10174 if (!lendata.decl)
10175 c_strlen (arg, 1, &lendata);
10177 if (lendata.decl)
10179 if (EXPR_HAS_LOCATION (arg))
10180 loc = EXPR_LOCATION (arg);
10181 else if (loc == UNKNOWN_LOCATION)
10182 loc = input_location;
10183 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
10186 return NULL_TREE;
10190 /* Fold a call to __builtin_inf or __builtin_huge_val. */
10192 static tree
10193 fold_builtin_inf (location_t loc, tree type, int warn)
10195 REAL_VALUE_TYPE real;
10197 /* __builtin_inff is intended to be usable to define INFINITY on all
10198 targets. If an infinity is not available, INFINITY expands "to a
10199 positive constant of type float that overflows at translation
10200 time", footnote "In this case, using INFINITY will violate the
10201 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10202 Thus we pedwarn to ensure this constraint violation is
10203 diagnosed. */
10204 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
10205 pedwarn (loc, 0, "target format does not support infinity");
10207 real_inf (&real);
10208 return build_real (type, real);
10211 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
10212 NULL_TREE if no simplification can be made. */
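/* E.g. sincos (x, &s, &c) is rewritten in terms of cexpi as
     t = cexpi (x); s = __imag t; c = __real t;
   since cexpi (x) computes cos (x) + i*sin (x). */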
10214 static tree
10215 fold_builtin_sincos (location_t loc,
10216 tree arg0, tree arg1, tree arg2)
10218 tree type;
10219 tree fndecl, call = NULL_TREE;
10221 if (!validate_arg (arg0, REAL_TYPE)
10222 || !validate_arg (arg1, POINTER_TYPE)
10223 || !validate_arg (arg2, POINTER_TYPE))
10224 return NULL_TREE;
10226 type = TREE_TYPE (arg0);
10228 /* Canonicalize sincos to cexpi. */
10229 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
10230 if (fn == END_BUILTINS)
10231 return NULL_TREE;
10233 /* Calculate the result when the argument is a constant. */
10234 if (TREE_CODE (arg0) == REAL_CST)
10236 tree complex_type = build_complex_type (type);
10237 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
10239 if (!call)
10241 if (!targetm.libc_has_function (function_c99_math_complex, type)
10242 || !builtin_decl_implicit_p (fn))
10243 return NULL_TREE;
10244 fndecl = builtin_decl_explicit (fn);
10245 call = build_call_expr_loc (loc, fndecl, 1, arg0);
10246 call = builtin_save_expr (call);
10249 tree ptype = build_pointer_type (type);
10250 arg1 = fold_convert (ptype, arg1);
10251 arg2 = fold_convert (ptype, arg2);
10252 return build2 (COMPOUND_EXPR, void_type_node,
10253 build2 (MODIFY_EXPR, void_type_node,
10254 build_fold_indirect_ref_loc (loc, arg1),
10255 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
10256 build2 (MODIFY_EXPR, void_type_node,
10257 build_fold_indirect_ref_loc (loc, arg2),
10258 fold_build1_loc (loc, REALPART_EXPR, type, call)));
10261 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
10262 Return NULL_TREE if no simplification can be made. */
10264 static tree
10265 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
10267 if (!validate_arg (arg1, POINTER_TYPE)
10268 || !validate_arg (arg2, POINTER_TYPE)
10269 || !validate_arg (len, INTEGER_TYPE))
10270 return NULL_TREE;
10272 /* If the LEN parameter is zero, return zero. */
10273 if (integer_zerop (len))
10274 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
10275 arg1, arg2);
10277 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
10278 if (operand_equal_p (arg1, arg2, 0))
10279 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
10281 /* If the LEN parameter is one, return an expression corresponding to
10282 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
10283 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
10285 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
10286 tree cst_uchar_ptr_node
10287 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
10289 tree ind1
10290 = fold_convert_loc (loc, integer_type_node,
10291 build1 (INDIRECT_REF, cst_uchar_node,
10292 fold_convert_loc (loc,
10293 cst_uchar_ptr_node,
10294 arg1)));
10295 tree ind2
10296 = fold_convert_loc (loc, integer_type_node,
10297 build1 (INDIRECT_REF, cst_uchar_node,
10298 fold_convert_loc (loc,
10299 cst_uchar_ptr_node,
10300 arg2)));
10301 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
10304 return NULL_TREE;
10307 /* Fold a call to builtin isascii with argument ARG. */
10309 static tree
10310 fold_builtin_isascii (location_t loc, tree arg)
10312 if (!validate_arg (arg, INTEGER_TYPE))
10313 return NULL_TREE;
10314 else
10316 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
10317 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
10318 build_int_cst (integer_type_node,
10319 ~ (unsigned HOST_WIDE_INT) 0x7f));
10320 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
10321 arg, integer_zero_node);
10325 /* Fold a call to builtin toascii with argument ARG. */
10327 static tree
10328 fold_builtin_toascii (location_t loc, tree arg)
10330 if (!validate_arg (arg, INTEGER_TYPE))
10331 return NULL_TREE;
10333 /* Transform toascii(c) -> (c & 0x7f). */
10334 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
10335 build_int_cst (integer_type_node, 0x7f));
10338 /* Fold a call to builtin isdigit with argument ARG. */
10340 static tree
10341 fold_builtin_isdigit (location_t loc, tree arg)
10343 if (!validate_arg (arg, INTEGER_TYPE))
10344 return NULL_TREE;
10345 else
10347 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
10348 /* According to the C standard, isdigit is unaffected by locale.
10349 However, it definitely is affected by the target character set. */
10350 unsigned HOST_WIDE_INT target_digit0
10351 = lang_hooks.to_target_charset ('0');
10353 if (target_digit0 == 0)
10354 return NULL_TREE;
10356 arg = fold_convert_loc (loc, unsigned_type_node, arg);
10357 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
10358 build_int_cst (unsigned_type_node, target_digit0));
10359 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
10360 build_int_cst (unsigned_type_node, 9));
10364 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
10366 static tree
10367 fold_builtin_fabs (location_t loc, tree arg, tree type)
10369 if (!validate_arg (arg, REAL_TYPE))
10370 return NULL_TREE;
10372 arg = fold_convert_loc (loc, type, arg);
10373 return fold_build1_loc (loc, ABS_EXPR, type, arg);
10376 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
10378 static tree
10379 fold_builtin_abs (location_t loc, tree arg, tree type)
10381 if (!validate_arg (arg, INTEGER_TYPE))
10382 return NULL_TREE;
10384 arg = fold_convert_loc (loc, type, arg);
10385 return fold_build1_loc (loc, ABS_EXPR, type, arg);
10388 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
10390 static tree
10391 fold_builtin_carg (location_t loc, tree arg, tree type)
10393 if (validate_arg (arg, COMPLEX_TYPE)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
10396 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
10398 if (atan2_fn)
10400 tree new_arg = builtin_save_expr (arg);
10401 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
10402 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
10403 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
10407 return NULL_TREE;
10410 /* Fold a call to builtin frexp; we can assume the base is 2. */
10412 static tree
10413 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
10415 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10416 return NULL_TREE;
10418 STRIP_NOPS (arg0);
10420 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10421 return NULL_TREE;
10423 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10425 /* Proceed if a valid pointer type was passed in. */
10426 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
10428 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10429 tree frac, exp;
10431 switch (value->cl)
10433 case rvc_zero:
10434 /* For +-0, return (*exp = 0, +-0). */
10435 exp = integer_zero_node;
10436 frac = arg0;
10437 break;
10438 case rvc_nan:
10439 case rvc_inf:
10440 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
10441 return omit_one_operand_loc (loc, rettype, arg0, arg1);
10442 case rvc_normal:
10444 /* Since the frexp function always expects base 2, and in
10445 GCC normalized significands are already in the range
10446 [0.5, 1.0), we have exactly what frexp wants. */
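/* E.g. 8.0 is stored as 0.5 * 2**4, so for it we return frac 0.5
   and set the exponent to 4, matching frexp (8.0, &e) returning
   0.5 with e == 4. */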
10447 REAL_VALUE_TYPE frac_rvt = *value;
10448 SET_REAL_EXP (&frac_rvt, 0);
10449 frac = build_real (rettype, frac_rvt);
10450 exp = build_int_cst (integer_type_node, REAL_EXP (value));
10452 break;
10453 default:
10454 gcc_unreachable ();
10457 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
10458 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
10459 TREE_SIDE_EFFECTS (arg1) = 1;
10460 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
10463 return NULL_TREE;
10466 /* Fold a call to builtin modf. */
10468 static tree
10469 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10471 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10472 return NULL_TREE;
10474 STRIP_NOPS (arg0);
10476 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10477 return NULL_TREE;
10479 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10481 /* Proceed if a valid pointer type was passed in. */
10482 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10484 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10485 REAL_VALUE_TYPE trunc, frac;
10487 switch (value->cl)
10489 case rvc_nan:
10490 case rvc_zero:
10491 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10492 trunc = frac = *value;
10493 break;
10494 case rvc_inf:
10495 /* For +-Inf, return (*arg1 = arg0, +-0). */
10496 frac = dconst0;
10497 frac.sign = value->sign;
10498 trunc = *value;
10499 break;
10500 case rvc_normal:
10501 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10502 real_trunc (&trunc, VOIDmode, value);
10503 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10504 /* If the original number was negative and already
10505 integral, then the fractional part is -0.0. */
10506 if (value->sign && frac.cl == rvc_zero)
10507 frac.sign = value->sign;
10508 break;
10511 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10512 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10513 build_real (rettype, trunc));
10514 TREE_SIDE_EFFECTS (arg1) = 1;
10515 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10516 build_real (rettype, frac));
10519 return NULL_TREE;
10522 /* Given a location LOC, an interclass builtin function decl FNDECL
10523 and its single argument ARG, return a folded expression computing
10524 the same, or NULL_TREE if we either couldn't or didn't want to fold
10525 (the latter happens if there's an RTL instruction available). */
10527 static tree
10528 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
10530 machine_mode mode;
10532 if (!validate_arg (arg, REAL_TYPE))
10533 return NULL_TREE;
10535 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
10536 return NULL_TREE;
10538 mode = TYPE_MODE (TREE_TYPE (arg));
10540 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
10542 /* If there is no optab, try generic code. */
10543 switch (DECL_FUNCTION_CODE (fndecl))
10545 tree result;
10547 CASE_FLT_FN (BUILT_IN_ISINF):
10549 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
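/* Note isgreater, unlike a raw >, is quiet: a NaN argument yields 0
   without raising FE_INVALID, matching isinf's behavior for NaNs. */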
10550 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10551 tree type = TREE_TYPE (arg);
10552 REAL_VALUE_TYPE r;
10553 char buf[128];
10555 if (is_ibm_extended)
10557 /* NaN and Inf are encoded in the high-order double value
10558 only. The low-order value is not significant. */
10559 type = double_type_node;
10560 mode = DFmode;
10561 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10563 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10564 real_from_string (&r, buf);
10565 result = build_call_expr (isgr_fn, 2,
10566 fold_build1_loc (loc, ABS_EXPR, type, arg),
10567 build_real (type, r));
10568 return result;
10570 CASE_FLT_FN (BUILT_IN_FINITE):
10571 case BUILT_IN_ISFINITE:
10573 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10574 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10575 tree type = TREE_TYPE (arg);
10576 REAL_VALUE_TYPE r;
10577 char buf[128];
10579 if (is_ibm_extended)
10581 /* NaN and Inf are encoded in the high-order double value
10582 only. The low-order value is not significant. */
10583 type = double_type_node;
10584 mode = DFmode;
10585 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10587 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10588 real_from_string (&r, buf);
10589 result = build_call_expr (isle_fn, 2,
10590 fold_build1_loc (loc, ABS_EXPR, type, arg),
10591 build_real (type, r));
10592 /*result = fold_build2_loc (loc, UNGT_EXPR,
10593 TREE_TYPE (TREE_TYPE (fndecl)),
10594 fold_build1_loc (loc, ABS_EXPR, type, arg),
10595 build_real (type, r));
10596 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10597 TREE_TYPE (TREE_TYPE (fndecl)),
10598 result);*/
10599 return result;
10601 case BUILT_IN_ISNORMAL:
10603 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10604 islessequal(fabs(x),DBL_MAX). */
10605 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10606 tree type = TREE_TYPE (arg);
10607 tree orig_arg, max_exp, min_exp;
10608 machine_mode orig_mode = mode;
10609 REAL_VALUE_TYPE rmax, rmin;
10610 char buf[128];
10612 orig_arg = arg = builtin_save_expr (arg);
10613 if (is_ibm_extended)
10615 /* Use double to test the normal range of IBM extended
10616 precision. Emin for IBM extended precision is
10617 different from emin for IEEE double, being 53 higher
10618 since the low double exponent is at least 53 lower
10619 than the high double exponent. */
10620 type = double_type_node;
10621 mode = DFmode;
10622 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10624 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
10626 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10627 real_from_string (&rmax, buf);
10628 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
10629 real_from_string (&rmin, buf);
10630 max_exp = build_real (type, rmax);
10631 min_exp = build_real (type, rmin);
10633 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
10634 if (is_ibm_extended)
10636 /* Testing the high end of the range is done just using
10637 the high double, using the same test as isfinite().
10638 For the subnormal end of the range we first test the
10639 high double, then if its magnitude is equal to the
10640 limit of 0x1p-969, we test whether the low double is
10641 non-zero and opposite sign to the high double. */
10642 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
10643 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10644 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
10645 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
10646 arg, min_exp);
10647 tree as_complex = build1 (VIEW_CONVERT_EXPR,
10648 complex_double_type_node, orig_arg);
10649 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
10650 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
10651 tree zero = build_real (type, dconst0);
10652 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
10653 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
10654 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
10655 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
10656 fold_build3 (COND_EXPR,
10657 integer_type_node,
10658 hilt, logt, lolt));
10659 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
10660 eq_min, ok_lo);
10661 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
10662 gt_min, eq_min);
10664 else
10666 tree const isge_fn
10667 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10668 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
10670 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
10671 max_exp, min_exp);
10672 return result;
10674 default:
10675 break;
10678 return NULL_TREE;
10681 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10682 ARG is the argument for the call. */
10684 static tree
10685 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10687 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10689 if (!validate_arg (arg, REAL_TYPE))
10690 return NULL_TREE;
10692 switch (builtin_index)
10694 case BUILT_IN_ISINF:
10695 if (tree_expr_infinite_p (arg))
10696 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10697 if (!tree_expr_maybe_infinite_p (arg))
10698 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10699 return NULL_TREE;
10701 case BUILT_IN_ISINF_SIGN:
10703 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10704 /* In a boolean context, GCC will fold the inner COND_EXPR to
10705 1. So e.g. "if (isinf_sign(x))" would be folded to just
10706 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10707 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
10708 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10709 tree tmp = NULL_TREE;
10711 arg = builtin_save_expr (arg);
10713 if (signbit_fn && isinf_fn)
10715 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10716 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10718 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10719 signbit_call, integer_zero_node);
10720 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10721 isinf_call, integer_zero_node);
10723 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10724 integer_minus_one_node, integer_one_node);
10725 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10726 isinf_call, tmp,
10727 integer_zero_node);
10730 return tmp;
10733 case BUILT_IN_ISFINITE:
10734 if (tree_expr_finite_p (arg))
10735 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10736 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
10737 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10738 return NULL_TREE;
10740 case BUILT_IN_ISNAN:
10741 if (tree_expr_nan_p (arg))
10742 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10743 if (!tree_expr_maybe_nan_p (arg))
10744 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10747 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
10748 if (is_ibm_extended)
10750 /* NaN and Inf are encoded in the high-order double value
10751 only. The low-order value is not significant. */
10752 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
10755 arg = builtin_save_expr (arg);
10756 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10758 default:
10759 gcc_unreachable ();
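/* Editor's sketch (not part of GCC): the BUILT_IN_ISINF_SIGN case above
   builds the tree equivalent of this expression; the function name here
   is illustrative.  */
#include <math.h>

static int
isinf_sign_expanded (double x)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0.  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}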
10763 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10764 This builtin will generate code to return the appropriate floating
10765 point classification depending on the value of the floating point
10766 number passed in. The possible return values must be supplied as
10767 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10768 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10769 one floating point argument which is "type generic". */
10771 static tree
10772 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
10774 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10775 arg, type, res, tmp;
10776 machine_mode mode;
10777 REAL_VALUE_TYPE r;
10778 char buf[128];
10780 /* Verify the required arguments in the original call. */
10781 if (nargs != 6
10782 || !validate_arg (args[0], INTEGER_TYPE)
10783 || !validate_arg (args[1], INTEGER_TYPE)
10784 || !validate_arg (args[2], INTEGER_TYPE)
10785 || !validate_arg (args[3], INTEGER_TYPE)
10786 || !validate_arg (args[4], INTEGER_TYPE)
10787 || !validate_arg (args[5], REAL_TYPE))
10788 return NULL_TREE;
10790 fp_nan = args[0];
10791 fp_infinite = args[1];
10792 fp_normal = args[2];
10793 fp_subnormal = args[3];
10794 fp_zero = args[4];
10795 arg = args[5];
10796 type = TREE_TYPE (arg);
10797 mode = TYPE_MODE (type);
10798 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10800 /* fpclassify(x) ->
10801 isnan(x) ? FP_NAN :
10802 (fabs(x) == Inf ? FP_INFINITE :
10803 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10804 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10806 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10807 build_real (type, dconst0));
10808 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10809 tmp, fp_zero, fp_subnormal);
10811 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10812 real_from_string (&r, buf);
10813 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10814 arg, build_real (type, r));
10815 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10817 if (tree_expr_maybe_infinite_p (arg))
10819 real_inf (&r);
10820 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10821 build_real (type, r));
10822 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10823 fp_infinite, res);
10826 if (tree_expr_maybe_nan_p (arg))
10828 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10829 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10832 return res;
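/* Editor's sketch (not part of GCC): the nested COND_EXPRs constructed
   above correspond to this chain of conditionals for a double argument,
   with DBL_MIN again standing in for 0x1p(emin-1) of the mode.  */
#include <math.h>
#include <float.h>

static int
fpclassify_expanded (double x, int fp_nan, int fp_infinite,
		     int fp_normal, int fp_subnormal, int fp_zero)
{
  double ax = fabs (x);
  return x != x ? fp_nan
	 : ax == (double) INFINITY ? fp_infinite
	 : ax >= DBL_MIN ? fp_normal
	 : ax == 0.0 ? fp_zero
	 : fp_subnormal;
}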
10835 /* Fold a call to an unordered comparison function such as
10836 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10837 being called and ARG0 and ARG1 are the arguments for the call.
10838 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10839 the opposite of the desired result. UNORDERED_CODE is used
10840 for modes that can hold NaNs and ORDERED_CODE is used for
10841 the rest. */
10843 static tree
10844 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10845 enum tree_code unordered_code,
10846 enum tree_code ordered_code)
10848 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10849 enum tree_code code;
10850 tree type0, type1;
10851 enum tree_code code0, code1;
10852 tree cmp_type = NULL_TREE;
10854 type0 = TREE_TYPE (arg0);
10855 type1 = TREE_TYPE (arg1);
10857 code0 = TREE_CODE (type0);
10858 code1 = TREE_CODE (type1);
10860 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10861 /* Choose the wider of two real types. */
10862 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10863 ? type0 : type1;
10864 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10865 cmp_type = type0;
10866 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10867 cmp_type = type1;
10869 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10870 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10872 if (unordered_code == UNORDERED_EXPR)
10874 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
10875 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
10876 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
10877 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10878 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10881 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
10882 ? unordered_code : ordered_code;
10883 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10884 fold_build2_loc (loc, code, type, arg0, arg1));
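/* Editor's sketch (not part of GCC): for BUILT_IN_ISGREATER the codes
   passed in are UNLE_EXPR/LE_EXPR, so the call is folded to the negation
   of the opposite comparison.  At the source level that is roughly:  */
static int
isgreater_expanded (double x, double y)
{
  /* isgreater(x,y) -> !(x UNLE y).  UNLE is true when the operands are
     unordered or x <= y, so the negation is a quiet x > y; the
     short-circuit below keeps the sketch from comparing NaNs.  */
  return !(__builtin_isunordered (x, y) || x <= y);
}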
10887 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10888 arithmetic if it can never overflow, or into internal functions that
10889 return both the result of the arithmetic and an overflow flag in
10890 a complex integer result, or some other check for overflow.
10891 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10892 checking part of that. */
10894 static tree
10895 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10896 tree arg0, tree arg1, tree arg2)
10898 enum internal_fn ifn = IFN_LAST;
10899 /* The code of the expression corresponding to the built-in. */
10900 enum tree_code opcode = ERROR_MARK;
10901 bool ovf_only = false;
10903 switch (fcode)
10905 case BUILT_IN_ADD_OVERFLOW_P:
10906 ovf_only = true;
10907 /* FALLTHRU */
10908 case BUILT_IN_ADD_OVERFLOW:
10909 case BUILT_IN_SADD_OVERFLOW:
10910 case BUILT_IN_SADDL_OVERFLOW:
10911 case BUILT_IN_SADDLL_OVERFLOW:
10912 case BUILT_IN_UADD_OVERFLOW:
10913 case BUILT_IN_UADDL_OVERFLOW:
10914 case BUILT_IN_UADDLL_OVERFLOW:
10915 opcode = PLUS_EXPR;
10916 ifn = IFN_ADD_OVERFLOW;
10917 break;
10918 case BUILT_IN_SUB_OVERFLOW_P:
10919 ovf_only = true;
10920 /* FALLTHRU */
10921 case BUILT_IN_SUB_OVERFLOW:
10922 case BUILT_IN_SSUB_OVERFLOW:
10923 case BUILT_IN_SSUBL_OVERFLOW:
10924 case BUILT_IN_SSUBLL_OVERFLOW:
10925 case BUILT_IN_USUB_OVERFLOW:
10926 case BUILT_IN_USUBL_OVERFLOW:
10927 case BUILT_IN_USUBLL_OVERFLOW:
10928 opcode = MINUS_EXPR;
10929 ifn = IFN_SUB_OVERFLOW;
10930 break;
10931 case BUILT_IN_MUL_OVERFLOW_P:
10932 ovf_only = true;
10933 /* FALLTHRU */
10934 case BUILT_IN_MUL_OVERFLOW:
10935 case BUILT_IN_SMUL_OVERFLOW:
10936 case BUILT_IN_SMULL_OVERFLOW:
10937 case BUILT_IN_SMULLL_OVERFLOW:
10938 case BUILT_IN_UMUL_OVERFLOW:
10939 case BUILT_IN_UMULL_OVERFLOW:
10940 case BUILT_IN_UMULLL_OVERFLOW:
10941 opcode = MULT_EXPR;
10942 ifn = IFN_MUL_OVERFLOW;
10943 break;
10944 default:
10945 gcc_unreachable ();
10948 /* For the "generic" overloads, the first two arguments can have different
10949 types and the last argument determines the target type to use to check
10950 for overflow. The arguments of the other overloads all have the same
10951 type. */
10952 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10954 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10955 arguments are constant, attempt to fold the built-in call into a constant
10956 expression indicating whether or not it detected an overflow. */
10957 if (ovf_only
10958 && TREE_CODE (arg0) == INTEGER_CST
10959 && TREE_CODE (arg1) == INTEGER_CST)
10960 /* Perform the computation in the target type and check for overflow. */
10961 return omit_one_operand_loc (loc, boolean_type_node,
10962 arith_overflowed_p (opcode, type, arg0, arg1)
10963 ? boolean_true_node : boolean_false_node,
10964 arg2);
10966 tree intres, ovfres;
10967 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10969 intres = fold_binary_loc (loc, opcode, type,
10970 fold_convert_loc (loc, type, arg0),
10971 fold_convert_loc (loc, type, arg1));
10972 if (TREE_OVERFLOW (intres))
10973 intres = drop_tree_overflow (intres);
10974 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10975 ? boolean_true_node : boolean_false_node);
10977 else
10979 tree ctype = build_complex_type (type);
10980 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10981 arg0, arg1);
10982 tree tgt = save_expr (call);
10983 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10984 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10985 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10988 if (ovf_only)
10989 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10991 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10992 tree store
10993 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10994 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
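/* Editor's sketch (not part of GCC): with non-constant operands the
   folding above emits IFN_ADD_OVERFLOW returning a complex integer; the
   REALPART_EXPR is stored through the result pointer and the
   IMAGPART_EXPR becomes the boolean return value.  A typical caller:  */
static int
checked_add (int a, int b, int *res)
{
  /* Folds to a constant when A and B are INTEGER_CSTs; the _p variants
     (__builtin_add_overflow_p etc.) keep only the overflow flag.  */
  return __builtin_add_overflow (a, b, res);
}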
10997 /* Fold a call to __builtin_FILE to a constant string. */
10999 static inline tree
11000 fold_builtin_FILE (location_t loc)
11002 if (const char *fname = LOCATION_FILE (loc))
11004 /* The documentation says this builtin is equivalent to the preprocessor
11005 __FILE__ macro so it appears appropriate to use the same file prefix
11006 mappings. */
11007 fname = remap_macro_filename (fname);
11008 return build_string_literal (strlen (fname) + 1, fname);
11011 return build_string_literal (1, "");
11014 /* Fold a call to __builtin_FUNCTION to a constant string. */
11016 static inline tree
11017 fold_builtin_FUNCTION ()
11019 const char *name = "";
11021 if (current_function_decl)
11022 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11024 return build_string_literal (strlen (name) + 1, name);
11027 /* Fold a call to __builtin_LINE to an integer constant. */
11029 static inline tree
11030 fold_builtin_LINE (location_t loc, tree type)
11032 return build_int_cst (type, LOCATION_LINE (loc));
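/* Editor's sketch (not part of GCC): all three builtins above fold to
   constants at the call site, so they can replace the __FILE__ and
   __LINE__ macros and the __func__ identifier without preprocessor
   involvement.  */
static void
log_here (void)
{
  const char *file = __builtin_FILE ();      /* constant string */
  const char *func = __builtin_FUNCTION ();  /* constant string */
  int line = __builtin_LINE ();              /* integer constant */
  __builtin_printf ("%s:%d in %s\n", file, line, func);
}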
11035 /* Fold a call to built-in function FNDECL with 0 arguments.
11036 This function returns NULL_TREE if no simplification was possible. */
11038 static tree
11039 fold_builtin_0 (location_t loc, tree fndecl)
11041 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11042 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11043 switch (fcode)
11045 case BUILT_IN_FILE:
11046 return fold_builtin_FILE (loc);
11048 case BUILT_IN_FUNCTION:
11049 return fold_builtin_FUNCTION ();
11051 case BUILT_IN_LINE:
11052 return fold_builtin_LINE (loc, type);
11054 CASE_FLT_FN (BUILT_IN_INF):
11055 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11056 case BUILT_IN_INFD32:
11057 case BUILT_IN_INFD64:
11058 case BUILT_IN_INFD128:
11059 return fold_builtin_inf (loc, type, true);
11061 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11062 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11063 return fold_builtin_inf (loc, type, false);
11065 case BUILT_IN_CLASSIFY_TYPE:
11066 return fold_builtin_classify_type (NULL_TREE);
11068 default:
11069 break;
11071 return NULL_TREE;
11074 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11075 This function returns NULL_TREE if no simplification was possible. */
11077 static tree
11078 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11080 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11081 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11083 if (TREE_CODE (arg0) == ERROR_MARK)
11084 return NULL_TREE;
11086 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11087 return ret;
11089 switch (fcode)
11091 case BUILT_IN_CONSTANT_P:
11093 tree val = fold_builtin_constant_p (arg0);
11095 /* Gimplification will pull the CALL_EXPR for the builtin out of
11096 an if condition. When not optimizing, we'll not CSE it back.
11097 To avoid regressions such as link errors, return false now. */
11098 if (!val && !optimize)
11099 val = integer_zero_node;
11101 return val;
11104 case BUILT_IN_CLASSIFY_TYPE:
11105 return fold_builtin_classify_type (arg0);
11107 case BUILT_IN_STRLEN:
11108 return fold_builtin_strlen (loc, expr, type, arg0);
11110 CASE_FLT_FN (BUILT_IN_FABS):
11111 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11112 case BUILT_IN_FABSD32:
11113 case BUILT_IN_FABSD64:
11114 case BUILT_IN_FABSD128:
11115 return fold_builtin_fabs (loc, arg0, type);
11117 case BUILT_IN_ABS:
11118 case BUILT_IN_LABS:
11119 case BUILT_IN_LLABS:
11120 case BUILT_IN_IMAXABS:
11121 return fold_builtin_abs (loc, arg0, type);
11123 CASE_FLT_FN (BUILT_IN_CONJ):
11124 if (validate_arg (arg0, COMPLEX_TYPE)
11125 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11126 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11127 break;
11129 CASE_FLT_FN (BUILT_IN_CREAL):
11130 if (validate_arg (arg0, COMPLEX_TYPE)
11131 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11132 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11133 break;
11135 CASE_FLT_FN (BUILT_IN_CIMAG):
11136 if (validate_arg (arg0, COMPLEX_TYPE)
11137 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11138 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11139 break;
11141 CASE_FLT_FN (BUILT_IN_CARG):
11142 return fold_builtin_carg (loc, arg0, type);
11144 case BUILT_IN_ISASCII:
11145 return fold_builtin_isascii (loc, arg0);
11147 case BUILT_IN_TOASCII:
11148 return fold_builtin_toascii (loc, arg0);
11150 case BUILT_IN_ISDIGIT:
11151 return fold_builtin_isdigit (loc, arg0);
11153 CASE_FLT_FN (BUILT_IN_FINITE):
11154 case BUILT_IN_FINITED32:
11155 case BUILT_IN_FINITED64:
11156 case BUILT_IN_FINITED128:
11157 case BUILT_IN_ISFINITE:
11159 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11160 if (ret)
11161 return ret;
11162 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11165 CASE_FLT_FN (BUILT_IN_ISINF):
11166 case BUILT_IN_ISINFD32:
11167 case BUILT_IN_ISINFD64:
11168 case BUILT_IN_ISINFD128:
11170 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11171 if (ret)
11172 return ret;
11173 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11176 case BUILT_IN_ISNORMAL:
11177 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11179 case BUILT_IN_ISINF_SIGN:
11180 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
11182 CASE_FLT_FN (BUILT_IN_ISNAN):
11183 case BUILT_IN_ISNAND32:
11184 case BUILT_IN_ISNAND64:
11185 case BUILT_IN_ISNAND128:
11186 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
11188 case BUILT_IN_FREE:
11189 if (integer_zerop (arg0))
11190 return build_empty_stmt (loc);
11191 break;
11193 default:
11194 break;
11197 return NULL_TREE;
11201 /* Folds a call EXPR (which may be null) to built-in function FNDECL
11202 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
11203 if no simplification was possible. */
11205 static tree
11206 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
11208 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11209 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11211 if (TREE_CODE (arg0) == ERROR_MARK
11212 || TREE_CODE (arg1) == ERROR_MARK)
11213 return NULL_TREE;
11215 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
11216 return ret;
11218 switch (fcode)
11220 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
11221 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
11222 if (validate_arg (arg0, REAL_TYPE)
11223 && validate_arg (arg1, POINTER_TYPE))
11224 return do_mpfr_lgamma_r (arg0, arg1, type);
11225 break;
11227 CASE_FLT_FN (BUILT_IN_FREXP):
11228 return fold_builtin_frexp (loc, arg0, arg1, type);
11230 CASE_FLT_FN (BUILT_IN_MODF):
11231 return fold_builtin_modf (loc, arg0, arg1, type);
11233 case BUILT_IN_STRSPN:
11234 return fold_builtin_strspn (loc, expr, arg0, arg1);
11236 case BUILT_IN_STRCSPN:
11237 return fold_builtin_strcspn (loc, expr, arg0, arg1);
11239 case BUILT_IN_STRPBRK:
11240 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
11242 case BUILT_IN_EXPECT:
11243 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
11245 case BUILT_IN_ISGREATER:
11246 return fold_builtin_unordered_cmp (loc, fndecl,
11247 arg0, arg1, UNLE_EXPR, LE_EXPR);
11248 case BUILT_IN_ISGREATEREQUAL:
11249 return fold_builtin_unordered_cmp (loc, fndecl,
11250 arg0, arg1, UNLT_EXPR, LT_EXPR);
11251 case BUILT_IN_ISLESS:
11252 return fold_builtin_unordered_cmp (loc, fndecl,
11253 arg0, arg1, UNGE_EXPR, GE_EXPR);
11254 case BUILT_IN_ISLESSEQUAL:
11255 return fold_builtin_unordered_cmp (loc, fndecl,
11256 arg0, arg1, UNGT_EXPR, GT_EXPR);
11257 case BUILT_IN_ISLESSGREATER:
11258 return fold_builtin_unordered_cmp (loc, fndecl,
11259 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
11260 case BUILT_IN_ISUNORDERED:
11261 return fold_builtin_unordered_cmp (loc, fndecl,
11262 arg0, arg1, UNORDERED_EXPR,
11263 NOP_EXPR);
11265 /* We do the folding for va_start in the expander. */
11266 case BUILT_IN_VA_START:
11267 break;
11269 case BUILT_IN_OBJECT_SIZE:
11270 return fold_builtin_object_size (arg0, arg1);
11272 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
11273 return fold_builtin_atomic_always_lock_free (arg0, arg1);
11275 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
11276 return fold_builtin_atomic_is_lock_free (arg0, arg1);
11278 default:
11279 break;
11281 return NULL_TREE;
11284 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
11285 and ARG2.
11286 This function returns NULL_TREE if no simplification was possible. */
11288 static tree
11289 fold_builtin_3 (location_t loc, tree fndecl,
11290 tree arg0, tree arg1, tree arg2)
11292 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11293 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11295 if (TREE_CODE (arg0) == ERROR_MARK
11296 || TREE_CODE (arg1) == ERROR_MARK
11297 || TREE_CODE (arg2) == ERROR_MARK)
11298 return NULL_TREE;
11300 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
11301 arg0, arg1, arg2))
11302 return ret;
11304 switch (fcode)
11307 CASE_FLT_FN (BUILT_IN_SINCOS):
11308 return fold_builtin_sincos (loc, arg0, arg1, arg2);
11310 CASE_FLT_FN (BUILT_IN_REMQUO):
11311 if (validate_arg (arg0, REAL_TYPE)
11312 && validate_arg (arg1, REAL_TYPE)
11313 && validate_arg (arg2, POINTER_TYPE))
11314 return do_mpfr_remquo (arg0, arg1, arg2);
11315 break;
11317 case BUILT_IN_MEMCMP:
11318 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
11320 case BUILT_IN_EXPECT:
11321 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
11323 case BUILT_IN_EXPECT_WITH_PROBABILITY:
11324 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
11326 case BUILT_IN_ADD_OVERFLOW:
11327 case BUILT_IN_SUB_OVERFLOW:
11328 case BUILT_IN_MUL_OVERFLOW:
11329 case BUILT_IN_ADD_OVERFLOW_P:
11330 case BUILT_IN_SUB_OVERFLOW_P:
11331 case BUILT_IN_MUL_OVERFLOW_P:
11332 case BUILT_IN_SADD_OVERFLOW:
11333 case BUILT_IN_SADDL_OVERFLOW:
11334 case BUILT_IN_SADDLL_OVERFLOW:
11335 case BUILT_IN_SSUB_OVERFLOW:
11336 case BUILT_IN_SSUBL_OVERFLOW:
11337 case BUILT_IN_SSUBLL_OVERFLOW:
11338 case BUILT_IN_SMUL_OVERFLOW:
11339 case BUILT_IN_SMULL_OVERFLOW:
11340 case BUILT_IN_SMULLL_OVERFLOW:
11341 case BUILT_IN_UADD_OVERFLOW:
11342 case BUILT_IN_UADDL_OVERFLOW:
11343 case BUILT_IN_UADDLL_OVERFLOW:
11344 case BUILT_IN_USUB_OVERFLOW:
11345 case BUILT_IN_USUBL_OVERFLOW:
11346 case BUILT_IN_USUBLL_OVERFLOW:
11347 case BUILT_IN_UMUL_OVERFLOW:
11348 case BUILT_IN_UMULL_OVERFLOW:
11349 case BUILT_IN_UMULLL_OVERFLOW:
11350 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
11352 default:
11353 break;
11355 return NULL_TREE;
11358 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
11359 ARGS is an array of NARGS arguments. IGNORE is true if the result
11360 of the function call is ignored. This function returns NULL_TREE
11361 if no simplification was possible. */
11363 static tree
11364 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
11365 int nargs, bool)
11367 tree ret = NULL_TREE;
11369 switch (nargs)
11371 case 0:
11372 ret = fold_builtin_0 (loc, fndecl);
11373 break;
11374 case 1:
11375 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
11376 break;
11377 case 2:
11378 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
11379 break;
11380 case 3:
11381 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
11382 break;
11383 default:
11384 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
11385 break;
11387 if (ret)
11389 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11390 SET_EXPR_LOCATION (ret, loc);
11391 return ret;
11393 return NULL_TREE;
11396 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11397 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11398 of arguments in ARGS to be omitted. OLDNARGS is the number of
11399 elements in ARGS. */
11401 static tree
11402 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11403 int skip, tree fndecl, int n, va_list newargs)
11405 int nargs = oldnargs - skip + n;
11406 tree *buffer;
11408 if (n > 0)
11410 int i, j;
11412 buffer = XALLOCAVEC (tree, nargs);
11413 for (i = 0; i < n; i++)
11414 buffer[i] = va_arg (newargs, tree);
11415 for (j = skip; j < oldnargs; j++, i++)
11416 buffer[i] = args[j];
11418 else
11419 buffer = args + skip;
11421 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11424 /* Return true if FNDECL shouldn't be folded right now.
11425 If a built-in function has an inline attribute always_inline
11426 wrapper, defer folding it after always_inline functions have
11427 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11428 might not be performed. */
11430 bool
11431 avoid_folding_inline_builtin (tree fndecl)
11433 return (DECL_DECLARED_INLINE_P (fndecl)
11434 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11435 && cfun
11436 && !cfun->always_inline_functions_inlined
11437 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11440 /* A wrapper function for builtin folding that prevents warnings for
11441 "statement without effect" and the like, caused by removing the
11442 call node earlier than the warning is generated. */
11444 tree
11445 fold_call_expr (location_t loc, tree exp, bool ignore)
11447 tree ret = NULL_TREE;
11448 tree fndecl = get_callee_fndecl (exp);
11449 if (fndecl && fndecl_built_in_p (fndecl)
11450 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11451 yet. Defer folding until we see all the arguments
11452 (after inlining). */
11453 && !CALL_EXPR_VA_ARG_PACK (exp))
11455 int nargs = call_expr_nargs (exp);
11457 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11458 instead last argument is __builtin_va_arg_pack (). Defer folding
11459 even in that case, until arguments are finalized. */
11460 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11462 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11463 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11464 return NULL_TREE;
11467 if (avoid_folding_inline_builtin (fndecl))
11468 return NULL_TREE;
11470 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11471 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11472 CALL_EXPR_ARGP (exp), ignore);
11473 else
11475 tree *args = CALL_EXPR_ARGP (exp);
11476 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11477 if (ret)
11478 return ret;
11481 return NULL_TREE;
11484 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11485 N arguments are passed in the array ARGARRAY. Return a folded
11486 expression or NULL_TREE if no simplification was possible. */
11488 tree
11489 fold_builtin_call_array (location_t loc, tree,
11490 tree fn,
11491 int n,
11492 tree *argarray)
11494 if (TREE_CODE (fn) != ADDR_EXPR)
11495 return NULL_TREE;
11497 tree fndecl = TREE_OPERAND (fn, 0);
11498 if (TREE_CODE (fndecl) == FUNCTION_DECL
11499 && fndecl_built_in_p (fndecl))
11501 /* If last argument is __builtin_va_arg_pack (), arguments to this
11502 function are not finalized yet. Defer folding until they are. */
11503 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11505 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11506 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11507 return NULL_TREE;
11509 if (avoid_folding_inline_builtin (fndecl))
11510 return NULL_TREE;
11511 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11512 return targetm.fold_builtin (fndecl, n, argarray, false);
11513 else
11514 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11517 return NULL_TREE;
11520 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11521 along with N new arguments specified as the "..." parameters. SKIP
11522 is the number of arguments in EXP to be omitted. This function is used
11523 to do varargs-to-varargs transformations. */
11525 static tree
11526 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11528 va_list ap;
11529 tree t;
11531 va_start (ap, n);
11532 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11533 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11534 va_end (ap);
11536 return t;
11539 /* Validate a single argument ARG against a tree code CODE representing
11540 a type. Return true when argument is valid. */
11542 static bool
11543 validate_arg (const_tree arg, enum tree_code code)
11545 if (!arg)
11546 return false;
11547 else if (code == POINTER_TYPE)
11548 return POINTER_TYPE_P (TREE_TYPE (arg));
11549 else if (code == INTEGER_TYPE)
11550 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11551 return code == TREE_CODE (TREE_TYPE (arg));
11554 /* This function validates the types of a function call argument list
11555 against a specified list of tree_codes. If the last specifier is a 0,
11556 that represents an ellipsis, otherwise the last specifier must be a
11557 VOID_TYPE.
11559 This is the GIMPLE version of validate_arglist. Eventually we want to
11560 completely convert builtins.c to work from GIMPLEs and the tree based
11561 validate_arglist will then be removed. */
11563 bool
11564 validate_gimple_arglist (const gcall *call, ...)
11566 enum tree_code code;
11567 bool res = false;
11568 va_list ap;
11569 const_tree arg;
11570 size_t i;
11572 va_start (ap, call);
11573 i = 0;
11577 code = (enum tree_code) va_arg (ap, int);
11578 switch (code)
11580 case 0:
11581 /* This signifies an ellipsis; any further arguments are all ok. */
11582 res = true;
11583 goto end;
11584 case VOID_TYPE:
11585 /* This signifies an endlink, if no arguments remain, return
11586 true, otherwise return false. */
11587 res = (i == gimple_call_num_args (call));
11588 goto end;
11589 default:
11590 /* If no parameters remain or the parameter's code does not
11591 match the specified code, return false. Otherwise continue
11592 checking any remaining arguments. */
11593 arg = gimple_call_arg (call, i++);
11594 if (!validate_arg (arg, code))
11595 goto end;
11596 break;
11599 while (1);
11601 /* We need gotos here since we can only have one VA_CLOSE in a
11602 function. */
11603 end: ;
11604 va_end (ap);
11606 return res;
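/* Editor's sketch (not part of GCC proper): how a caller would check a
   gimple call against an expected signature.  VOID_TYPE ends an exact
   argument list; a trailing 0 would instead allow extra arguments.  The
   helper name is illustrative.  */
static bool
example_check_memcpy_args (const gcall *call)
{
  /* memcpy-like signature: two pointers and an integral size.  */
  return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				  INTEGER_TYPE, VOID_TYPE);
}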
11609 /* Default target-specific builtin expander that does nothing. */
11611 rtx
11612 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11613 rtx target ATTRIBUTE_UNUSED,
11614 rtx subtarget ATTRIBUTE_UNUSED,
11615 machine_mode mode ATTRIBUTE_UNUSED,
11616 int ignore ATTRIBUTE_UNUSED)
11618 return NULL_RTX;
11621 /* Returns true if EXP represents data that would potentially reside
11622 in a readonly section. */
11624 bool
11625 readonly_data_expr (tree exp)
11627 STRIP_NOPS (exp);
11629 if (TREE_CODE (exp) != ADDR_EXPR)
11630 return false;
11632 exp = get_base_address (TREE_OPERAND (exp, 0));
11633 if (!exp)
11634 return false;
11636 /* Make sure we call decl_readonly_section only for trees it
11637 can handle (since it returns true for everything it doesn't
11638 understand). */
11639 if (TREE_CODE (exp) == STRING_CST
11640 || TREE_CODE (exp) == CONSTRUCTOR
11641 || (VAR_P (exp) && TREE_STATIC (exp)))
11642 return decl_readonly_section (exp, 0);
11643 else
11644 return false;
11647 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11648 to the call, and TYPE is its return type.
11650 Return NULL_TREE if no simplification was possible, otherwise return the
11651 simplified form of the call as a tree.
11653 The simplified form may be a constant or other expression which
11654 computes the same value, but in a more efficient manner (including
11655 calls to other builtin functions).
11657 The call may contain arguments which need to be evaluated, but
11658 which are not useful to determine the result of the call. In
11659 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11660 COMPOUND_EXPR will be an argument which must be evaluated.
11661 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11662 COMPOUND_EXPR in the chain will contain the tree for the simplified
11663 form of the builtin function call. */
11665 static tree
11666 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11668 if (!validate_arg (s1, POINTER_TYPE)
11669 || !validate_arg (s2, POINTER_TYPE))
11670 return NULL_TREE;
11672 tree fn;
11673 const char *p1, *p2;
11675 p2 = c_getstr (s2);
11676 if (p2 == NULL)
11677 return NULL_TREE;
11679 p1 = c_getstr (s1);
11680 if (p1 != NULL)
11682 const char *r = strpbrk (p1, p2);
11683 tree tem;
11685 if (r == NULL)
11686 return build_int_cst (TREE_TYPE (s1), 0);
11688 /* Return an offset into the constant string argument. */
11689 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11690 return fold_convert_loc (loc, type, tem);
11693 if (p2[0] == '\0')
11694 /* strpbrk(x, "") == NULL.
11695 Evaluate and ignore s1 in case it had side-effects. */
11696 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11698 if (p2[1] != '\0')
11699 return NULL_TREE; /* Really call strpbrk. */
11701 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11702 if (!fn)
11703 return NULL_TREE;
11705 /* New argument list transforming strpbrk(s1, s2) to
11706 strchr(s1, s2[0]). */
11707 return build_call_expr_loc (loc, fn, 2, s1,
11708 build_int_cst (integer_type_node, p2[0]));
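/* Editor's sketch (not part of GCC): the strpbrk foldings above at the
   source level.  */
#include <string.h>

static void
strpbrk_folds (char *s)
{
  char *a = strpbrk (s, "");    /* folds to a null pointer; S is still
				   evaluated for side effects */
  char *b = strpbrk (s, "x");   /* folds to strchr (s, 'x') */
  (void) a; (void) b;
}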
11711 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11712 to the call.
11714 Return NULL_TREE if no simplification was possible, otherwise return the
11715 simplified form of the call as a tree.
11717 The simplified form may be a constant or other expression which
11718 computes the same value, but in a more efficient manner (including
11719 calls to other builtin functions).
11721 The call may contain arguments which need to be evaluated, but
11722 which are not useful to determine the result of the call. In
11723 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11724 COMPOUND_EXPR will be an argument which must be evaluated.
11725 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11726 COMPOUND_EXPR in the chain will contain the tree for the simplified
11727 form of the builtin function call. */
11729 static tree
11730 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
11732 if (!validate_arg (s1, POINTER_TYPE)
11733 || !validate_arg (s2, POINTER_TYPE))
11734 return NULL_TREE;
11736 if (!check_nul_terminated_array (expr, s1)
11737 || !check_nul_terminated_array (expr, s2))
11738 return NULL_TREE;
11740 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11742 /* If either argument is "", return NULL_TREE. */
11743 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11744 /* Evaluate and ignore both arguments in case either one has
11745 side-effects. */
11746 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11747 s1, s2);
11748 return NULL_TREE;
11751 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11752 to the call.
11754 Return NULL_TREE if no simplification was possible, otherwise return the
11755 simplified form of the call as a tree.
11757 The simplified form may be a constant or other expression which
11758 computes the same value, but in a more efficient manner (including
11759 calls to other builtin functions).
11761 The call may contain arguments which need to be evaluated, but
11762 which are not useful to determine the result of the call. In
11763 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11764 COMPOUND_EXPR will be an argument which must be evaluated.
11765 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11766 COMPOUND_EXPR in the chain will contain the tree for the simplified
11767 form of the builtin function call. */
11769 static tree
11770 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
11772 if (!validate_arg (s1, POINTER_TYPE)
11773 || !validate_arg (s2, POINTER_TYPE))
11774 return NULL_TREE;
11776 if (!check_nul_terminated_array (expr, s1)
11777 || !check_nul_terminated_array (expr, s2))
11778 return NULL_TREE;
11780 /* If the first argument is "", return NULL_TREE. */
11781 const char *p1 = c_getstr (s1);
11782 if (p1 && *p1 == '\0')
11784 /* Evaluate and ignore argument s2 in case it has
11785 side-effects. */
11786 return omit_one_operand_loc (loc, size_type_node,
11787 size_zero_node, s2);
11790 /* If the second argument is "", return __builtin_strlen(s1). */
11791 const char *p2 = c_getstr (s2);
11792 if (p2 && *p2 == '\0')
11794 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11796 /* If the replacement _DECL isn't initialized, don't do the
11797 transformation. */
11798 if (!fn)
11799 return NULL_TREE;
11801 return build_call_expr_loc (loc, fn, 1, s1);
11803 return NULL_TREE;
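/* Editor's sketch (not part of GCC): the strspn/strcspn foldings above
   at the source level.  */
#include <string.h>

static void
spn_folds (char *s)
{
  size_t a = strspn (s, "");    /* folds to 0 */
  size_t b = strcspn ("", s);   /* folds to 0 */
  size_t c = strcspn (s, "");   /* folds to strlen (s) */
  (void) a; (void) b; (void) c;
}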
11806 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11807 produced, false otherwise. This is done so that we don't output the
11808 error or warning twice or three times. */
11810 bool
11811 fold_builtin_next_arg (tree exp, bool va_start_p)
11813 tree fntype = TREE_TYPE (current_function_decl);
11814 int nargs = call_expr_nargs (exp);
11815 tree arg;
11816 /* There is a good chance the current input_location points inside the
11817 definition of the va_start macro (perhaps on the token for the
11818 builtin) in a system header, so warnings will not be emitted.
11819 Use the location in real source code. */
11820 location_t current_location =
11821 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11822 NULL);
11824 if (!stdarg_p (fntype))
11826 error ("%<va_start%> used in function with fixed arguments");
11827 return true;
11830 if (va_start_p)
11832 if (va_start_p && (nargs != 2))
11834 error ("wrong number of arguments to function %<va_start%>");
11835 return true;
11837 arg = CALL_EXPR_ARG (exp, 1);
11839 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11840 when we checked the arguments and if needed issued a warning. */
11841 else
11843 if (nargs == 0)
11845 /* Evidently an out of date version of <stdarg.h>; can't validate
11846 va_start's second argument, but can still work as intended. */
11847 warning_at (current_location,
11848 OPT_Wvarargs,
11849 "%<__builtin_next_arg%> called without an argument");
11850 return true;
11852 else if (nargs > 1)
11854 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11855 return true;
11857 arg = CALL_EXPR_ARG (exp, 0);
11860 if (TREE_CODE (arg) == SSA_NAME)
11861 arg = SSA_NAME_VAR (arg);
11863 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11864 or __builtin_next_arg (0) the first time we see it, after checking
11865 the arguments and if needed issuing a warning. */
11866 if (!integer_zerop (arg))
11868 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11870 /* Strip off all nops for the sake of the comparison. This
11871 is not quite the same as STRIP_NOPS. It does more.
11872 We must also strip off INDIRECT_EXPR for C++ reference
11873 parameters. */
11874 while (CONVERT_EXPR_P (arg)
11875 || TREE_CODE (arg) == INDIRECT_REF)
11876 arg = TREE_OPERAND (arg, 0);
11877 if (arg != last_parm)
11879 /* FIXME: Sometimes with the tree optimizers we can end up
11880 seeing an argument other than the last one even though the
11881 user used the last argument. We just warn and set the arg
11882 to be the last argument, so we still get wrong code because
11883 of it. */
11884 warning_at (current_location,
11885 OPT_Wvarargs,
11886 "second parameter of %<va_start%> not last named argument");
11889 /* Undefined by C99 7.15.1.4p4 (va_start):
11890 "If the parameter parmN is declared with the register storage
11891 class, with a function or array type, or with a type that is
11892 not compatible with the type that results after application of
11893 the default argument promotions, the behavior is undefined."
11895 else if (DECL_REGISTER (arg))
11897 warning_at (current_location,
11898 OPT_Wvarargs,
11899 "undefined behavior when second parameter of "
11900 "%<va_start%> is declared with %<register%> storage");
11903 /* We want to verify the second parameter just once before the tree
11904 optimizers are run and then avoid keeping it in the tree,
11905 as otherwise we could warn even for correct code like:
11906 void foo (int i, ...)
11907 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11908 if (va_start_p)
11909 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11910 else
11911 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11913 return false;
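/* Editor's sketch (not part of GCC): a call that passes the checks
   above.  Passing anything other than the last named parameter as the
   second va_start argument triggers the -Wvarargs warning instead.  */
#include <stdarg.h>

static int
sum_ints (int n, ...)
{
  va_list ap;
  int i, total = 0;
  va_start (ap, n);		/* OK: N is the last named parameter */
  for (i = 0; i < n; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}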
11917 /* Expand a call EXP to __builtin_object_size. */
11919 static rtx
11920 expand_builtin_object_size (tree exp)
11922 tree ost;
11923 int object_size_type;
11924 tree fndecl = get_callee_fndecl (exp);
11926 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11928 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11929 exp, fndecl);
11930 expand_builtin_trap ();
11931 return const0_rtx;
11934 ost = CALL_EXPR_ARG (exp, 1);
11935 STRIP_NOPS (ost);
11937 if (TREE_CODE (ost) != INTEGER_CST
11938 || tree_int_cst_sgn (ost) < 0
11939 || compare_tree_int (ost, 3) > 0)
11941 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11942 exp, fndecl);
11943 expand_builtin_trap ();
11944 return const0_rtx;
11947 object_size_type = tree_to_shwi (ost);
11949 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11952 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11953 FCODE is the BUILT_IN_* to use.
11954 Return NULL_RTX if we failed; the caller should emit a normal call,
11955 otherwise try to get the result in TARGET, if convenient (and in
11956 mode MODE if that's convenient). */
11958 static rtx
11959 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11960 enum built_in_function fcode)
11962 if (!validate_arglist (exp,
11963 POINTER_TYPE,
11964 fcode == BUILT_IN_MEMSET_CHK
11965 ? INTEGER_TYPE : POINTER_TYPE,
11966 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11967 return NULL_RTX;
11969 tree dest = CALL_EXPR_ARG (exp, 0);
11970 tree src = CALL_EXPR_ARG (exp, 1);
11971 tree len = CALL_EXPR_ARG (exp, 2);
11972 tree size = CALL_EXPR_ARG (exp, 3);
11974 /* FIXME: Set access mode to write only for memset et al. */
11975 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11976 /*srcstr=*/NULL_TREE, size, access_read_write);
11978 if (!tree_fits_uhwi_p (size))
11979 return NULL_RTX;
11981 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11983 /* Avoid transforming the checking call to an ordinary one when
11984 an overflow has been detected or when the call couldn't be
11985 validated because the size is not constant. */
11986 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11987 return NULL_RTX;
11989 tree fn = NULL_TREE;
11990 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11991 mem{cpy,pcpy,move,set} is available. */
11992 switch (fcode)
11994 case BUILT_IN_MEMCPY_CHK:
11995 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11996 break;
11997 case BUILT_IN_MEMPCPY_CHK:
11998 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11999 break;
12000 case BUILT_IN_MEMMOVE_CHK:
12001 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12002 break;
12003 case BUILT_IN_MEMSET_CHK:
12004 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12005 break;
12006 default:
12007 break;
12010 if (! fn)
12011 return NULL_RTX;
12013 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12014 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12015 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12016 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12018 else if (fcode == BUILT_IN_MEMSET_CHK)
12019 return NULL_RTX;
12020 else
12022 unsigned int dest_align = get_pointer_alignment (dest);
12024 /* If DEST is not a pointer type, call the normal function. */
12025 if (dest_align == 0)
12026 return NULL_RTX;
12028 /* If SRC and DEST are the same (and not volatile), do nothing. */
12029 if (operand_equal_p (src, dest, 0))
12031 tree expr;
12033 if (fcode != BUILT_IN_MEMPCPY_CHK)
12035 /* Evaluate and ignore LEN in case it has side-effects. */
12036 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12037 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12040 expr = fold_build_pointer_plus (dest, len);
12041 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12044 /* __memmove_chk special case. */
12045 if (fcode == BUILT_IN_MEMMOVE_CHK)
12047 unsigned int src_align = get_pointer_alignment (src);
12049 if (src_align == 0)
12050 return NULL_RTX;
12052 /* If src is categorized for a readonly section we can use
12053 normal __memcpy_chk. */
12054 if (readonly_data_expr (src))
12056 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12057 if (!fn)
12058 return NULL_RTX;
12059 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12060 dest, src, len, size);
12061 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12062 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12063 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12066 return NULL_RTX;
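/* Editor's sketch (not part of GCC): a fortified call of the kind glibc
   headers emit with -D_FORTIFY_SOURCE.  When the length is known to fit,
   the expansion above lowers it to a plain memcpy; when it provably does
   not fit, check_access diagnoses the overflow.  */
static void
copy4 (char *d, const char *s)
{
  __builtin___memcpy_chk (d, s, 4, __builtin_object_size (d, 0));
}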
12070 /* Emit warning if a buffer overflow is detected at compile time. */
12072 static void
12073 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12075 /* The source string. */
12076 tree srcstr = NULL_TREE;
12077 /* The size of the destination object returned by __builtin_object_size. */
12078 tree objsize = NULL_TREE;
12079 /* The string that is being concatenated with (as in __strcat_chk)
12080 or null if it isn't. */
12081 tree catstr = NULL_TREE;
12082 /* The maximum length of the source sequence in a bounded operation
12083 (such as __strncat_chk) or null if the operation isn't bounded
12084 (such as __strcat_chk). */
12085 tree maxread = NULL_TREE;
12086 /* The exact size of the access (such as in __strncpy_chk). */
12087 tree size = NULL_TREE;
12088 /* The access by the function that's checked. Except for snprintf
12089 both writing and reading is checked. */
12090 access_mode mode = access_read_write;
12092 switch (fcode)
12094 case BUILT_IN_STRCPY_CHK:
12095 case BUILT_IN_STPCPY_CHK:
12096 srcstr = CALL_EXPR_ARG (exp, 1);
12097 objsize = CALL_EXPR_ARG (exp, 2);
12098 break;
12100 case BUILT_IN_STRCAT_CHK:
12101 /* For __strcat_chk the warning will be emitted only if overflowing
12102 by at least strlen (dest) + 1 bytes. */
12103 catstr = CALL_EXPR_ARG (exp, 0);
12104 srcstr = CALL_EXPR_ARG (exp, 1);
12105 objsize = CALL_EXPR_ARG (exp, 2);
12106 break;
12108 case BUILT_IN_STRNCAT_CHK:
12109 catstr = CALL_EXPR_ARG (exp, 0);
12110 srcstr = CALL_EXPR_ARG (exp, 1);
12111 maxread = CALL_EXPR_ARG (exp, 2);
12112 objsize = CALL_EXPR_ARG (exp, 3);
12113 break;
12115 case BUILT_IN_STRNCPY_CHK:
12116 case BUILT_IN_STPNCPY_CHK:
12117 srcstr = CALL_EXPR_ARG (exp, 1);
12118 size = CALL_EXPR_ARG (exp, 2);
12119 objsize = CALL_EXPR_ARG (exp, 3);
12120 break;
12122 case BUILT_IN_SNPRINTF_CHK:
12123 case BUILT_IN_VSNPRINTF_CHK:
12124 maxread = CALL_EXPR_ARG (exp, 1);
12125 objsize = CALL_EXPR_ARG (exp, 3);
12126 /* The only checked access is the write to the destination. */
12127 mode = access_write_only;
12128 break;
12129 default:
12130 gcc_unreachable ();
12133 if (catstr && maxread)
12135 /* Check __strncat_chk. There is no way to determine the length
12136 of the string to which the source string is being appended so
12137 just warn when the length of the source string is not known. */
12138 check_strncat_sizes (exp, objsize);
12139 return;
12142 check_access (exp, size, maxread, srcstr, objsize, mode);
12145 /* Emit warning if a buffer overflow is detected at compile time
12146 in __sprintf_chk/__vsprintf_chk calls. */
12148 static void
12149 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12151 tree size, len, fmt;
12152 const char *fmt_str;
12153 int nargs = call_expr_nargs (exp);
12155 /* Verify the required arguments in the original call. */
12157 if (nargs < 4)
12158 return;
12159 size = CALL_EXPR_ARG (exp, 2);
12160 fmt = CALL_EXPR_ARG (exp, 3);
12162 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12163 return;
12165 /* Check whether the format is a literal string constant. */
12166 fmt_str = c_getstr (fmt);
12167 if (fmt_str == NULL)
12168 return;
12170 if (!init_target_chars ())
12171 return;
12173 /* If the format doesn't contain % args or %%, we know its size. */
12174 if (strchr (fmt_str, target_percent) == 0)
12175 len = build_int_cstu (size_type_node, strlen (fmt_str));
12176 /* If the format is "%s" and first ... argument is a string literal,
12177 we know it too. */
12178 else if (fcode == BUILT_IN_SPRINTF_CHK
12179 && strcmp (fmt_str, target_percent_s) == 0)
12181 tree arg;
12183 if (nargs < 5)
12184 return;
12185 arg = CALL_EXPR_ARG (exp, 4);
12186 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12187 return;
12189 len = c_strlen (arg, 1);
12190 if (!len || ! tree_fits_uhwi_p (len))
12191 return;
12193 else
12194 return;
12196 /* Add one for the terminating nul. */
12197 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
12199 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12200 access_write_only);
12203 /* Emit warning if a free is called with address of a variable. */
12205 static void
12206 maybe_emit_free_warning (tree exp)
12208 if (call_expr_nargs (exp) != 1)
12209 return;
12211 tree arg = CALL_EXPR_ARG (exp, 0);
12213 STRIP_NOPS (arg);
12214 if (TREE_CODE (arg) != ADDR_EXPR)
12215 return;
12217 arg = get_base_address (TREE_OPERAND (arg, 0));
12218 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12219 return;
12221 if (SSA_VAR_P (arg))
12222 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12223 "%Kattempt to free a non-heap object %qD", exp, arg);
12224 else
12225 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12226 "%Kattempt to free a non-heap object", exp);
12229 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12230 if possible. */
12232 static tree
12233 fold_builtin_object_size (tree ptr, tree ost)
12235 unsigned HOST_WIDE_INT bytes;
12236 int object_size_type;
12238 if (!validate_arg (ptr, POINTER_TYPE)
12239 || !validate_arg (ost, INTEGER_TYPE))
12240 return NULL_TREE;
12242 STRIP_NOPS (ost);
12244 if (TREE_CODE (ost) != INTEGER_CST
12245 || tree_int_cst_sgn (ost) < 0
12246 || compare_tree_int (ost, 3) > 0)
12247 return NULL_TREE;
12249 object_size_type = tree_to_shwi (ost);
12251 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12252 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12253 and (size_t) 0 for types 2 and 3. */
12254 if (TREE_SIDE_EFFECTS (ptr))
12255 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12257 if (TREE_CODE (ptr) == ADDR_EXPR)
12259 compute_builtin_object_size (ptr, object_size_type, &bytes);
12260 if (wi::fits_to_tree_p (bytes, size_type_node))
12261 return build_int_cstu (size_type_node, bytes);
12263 else if (TREE_CODE (ptr) == SSA_NAME)
12265 /* If object size is not known yet, delay folding until
12266 later. Maybe subsequent passes will help determining
12267 it. */
12268 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
12269 && wi::fits_to_tree_p (bytes, size_type_node))
12270 return build_int_cstu (size_type_node, bytes);
12273 return NULL_TREE;
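/* Editor's sketch (not part of GCC): constant foldings performed above
   when the argument is an ADDR_EXPR into a known object.  Types 0 and 1
   are maximum estimates (unknown -> (size_t) -1), types 2 and 3 minimum
   estimates (unknown -> 0).  */
static void
objsize_folds (void)
{
  static char a[10];
  __SIZE_TYPE__ n0 = __builtin_object_size (&a[4], 0);	/* folds to 6 */
  __SIZE_TYPE__ n2 = __builtin_object_size (&a[4], 2);	/* folds to 6 */
  (void) n0; (void) n2;
}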
12276 /* Builtins with folding operations that operate on "..." arguments
12277 need special handling; we need to store the arguments in a convenient
12278 data structure before attempting any folding. Fortunately there are
12279 only a few builtins that fall into this category. FNDECL is the
12280 function, EXP is the CALL_EXPR for the call. */
12282 static tree
12283 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
12285 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12286 tree ret = NULL_TREE;
12288 switch (fcode)
12290 case BUILT_IN_FPCLASSIFY:
12291 ret = fold_builtin_fpclassify (loc, args, nargs);
12292 break;
12294 default:
12295 break;
12297 if (ret)
12299 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12300 SET_EXPR_LOCATION (ret, loc);
12301 TREE_NO_WARNING (ret) = 1;
12302 return ret;
12304 return NULL_TREE;
12307 /* Initialize format string characters in the target charset. */
12309 bool
12310 init_target_chars (void)
12312 static bool init;
12313 if (!init)
12315 target_newline = lang_hooks.to_target_charset ('\n');
12316 target_percent = lang_hooks.to_target_charset ('%');
12317 target_c = lang_hooks.to_target_charset ('c');
12318 target_s = lang_hooks.to_target_charset ('s');
12319 if (target_newline == 0 || target_percent == 0 || target_c == 0
12320 || target_s == 0)
12321 return false;
12323 target_percent_c[0] = target_percent;
12324 target_percent_c[1] = target_c;
12325 target_percent_c[2] = '\0';
12327 target_percent_s[0] = target_percent;
12328 target_percent_s[1] = target_s;
12329 target_percent_s[2] = '\0';
12331 target_percent_s_newline[0] = target_percent;
12332 target_percent_s_newline[1] = target_s;
12333 target_percent_s_newline[2] = target_newline;
12334 target_percent_s_newline[3] = '\0';
12336 init = true;
12338 return true;
12341 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12342 and no overflow/underflow occurred. INEXACT is true if M was not
12343 exactly calculated. TYPE is the tree type for the result. This
12344 function assumes that you cleared the MPFR flags and then
12345 calculated M to see if anything subsequently set a flag prior to
12346 entering this function. Return NULL_TREE if any checks fail. */
12348 static tree
12349 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12351 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12352 overflow/underflow occurred. If -frounding-math, proceed iff the
12353 result of calling FUNC was exact. */
12354 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12355 && (!flag_rounding_math || !inexact))
12357 REAL_VALUE_TYPE rr;
12359 real_from_mpfr (&rr, m, type, MPFR_RNDN);
12360 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12361 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12362 but the mpfr_t is not, then we underflowed in the
12363 conversion. */
12364 if (real_isfinite (&rr)
12365 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12367 REAL_VALUE_TYPE rmode;
12369 real_convert (&rmode, TYPE_MODE (type), &rr);
12370 /* Proceed iff the specified mode can hold the value. */
12371 if (real_identical (&rmode, &rr))
12372 return build_real (type, rmode);
12375 return NULL_TREE;
12378 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12379 number and no overflow/underflow occurred. INEXACT is true if M
12380 was not exactly calculated. TYPE is the tree type for the result.
12381 This function assumes that you cleared the MPFR flags and then
12382 calculated M to see if anything subsequently set a flag prior to
12383 entering this function. Return NULL_TREE if any checks fail, if
12384 FORCE_CONVERT is true, then bypass the checks. */
12386 static tree
12387 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12389 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12390 overflow/underflow occurred. If -frounding-math, proceed iff the
12391 result of calling FUNC was exact. */
12392 if (force_convert
12393 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12394 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12395 && (!flag_rounding_math || !inexact)))
12397 REAL_VALUE_TYPE re, im;
12399 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
12400 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
12401 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12402 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12403 but the mpfr_t is not, then we underflowed in the
12404 conversion. */
12405 if (force_convert
12406 || (real_isfinite (&re) && real_isfinite (&im)
12407 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12408 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12410 REAL_VALUE_TYPE re_mode, im_mode;
12412 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12413 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12414 /* Proceed iff the specified mode can hold the value. */
12415 if (force_convert
12416 || (real_identical (&re_mode, &re)
12417 && real_identical (&im_mode, &im)))
12418 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12419 build_real (TREE_TYPE (type), im_mode));
12422 return NULL_TREE;
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the value pointed to by ARG_QUO and return the remainder as the
   result.  The type is taken from the type of ARG0 and is used for
   setting the precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
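
/* For example, with the default round-to-nearest mode the call
   remquo (5.0, 3.0, &q) folds to the equivalent of "q = 2, -1.0":
   5.0/3.0 rounds to the integer 2, so the IEEE remainder is
   5.0 - 2*3.0 = -1.0, and the quotient bits, here 2, are stored in
   *q via the COMPOUND_EXPR built above.  */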
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
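
/* For example, folding lgamma_r (-0.5, &sg) produces the value
   log(|gamma(-0.5)|) = log(2*sqrt(pi)) ~= 1.2655 and assigns -1 to
   *sg, because gamma(-0.5) = -2*sqrt(pi) is negative.  */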
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
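
/* A sketch of a typical call, folding a constant __builtin_cpow via
   MPC (the actual call sites live in the constant folders):

     result = do_mpc_arg2 (arg0, arg1, type,
			   /*do_nonfinite=*/0, mpc_pow);
*/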
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
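
/* A minimal usage sketch; IGNORE would be true when the call's value
   is unused, e.g. when the gimple call has no LHS:

     gcall *call = ...;
     tree folded
       = fold_call_stmt (call, gimple_call_lhs (call) == NULL_TREE);
     if (folded)
       ... replace the call with FOLDED ...
*/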
/* Look up the function in the builtin table (via builtin_decl_explicit)
   that corresponds to DECL and set ASMSPEC as its user assembler name.
   DECL must be a function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
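
/* This matters because, for instance, a translation unit may rename
   ffs at the assembler level (the name "my_ffs" is just an example):

     extern int ffs (int) __asm__ ("my_ffs");

   Any libcall that the expansion of __builtin_ffs falls back to must
   then reference my_ffs rather than ffs, which is what the
   set_user_assembler_libfunc/set_optab_libfunc pair arranges.  */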
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
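
/* For instance, __builtin_expect (x, 1) ultimately expands to nothing
   more than X itself; the expected value only guides branch
   prediction, so a test such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   costs no more than testing PTR directly.  */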
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */
bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
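
/* A typical use when folding string builtins, sketched after the
   strchr folders (c_getstr returns the host-side string behind a
   constant, if any):

     char c;
     if (target_char_cst_p (arg1, &c))
       {
	 const char *p = c_getstr (arg0);
	 if (p)
	   ... fold using the host strchr (p, c) ...
       }
*/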
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that it is not
   (the list of handled builtins below may therefore be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
      }
  return false;
}
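
/* For example, __builtin_sqrt may end up as a call to sqrt in libm
   and therefore reports true here, whereas something like
   __builtin_stack_save never corresponds to a library symbol and
   falls through to the default.  */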
/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
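
/* For instance, with a 64-bit ptrdiff_t an offset range of
   [-16, 1024] is bounded, while a range that came from wrapped
   unsigned arithmetic, say [0, 2^64 - 1] in the wider offset_int
   representation, exceeds PTRDIFF_MAX and is rejected.  */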
/* Return the fnspec string describing the known side effects of CALLEE,
   or the empty spec if nothing is known.  See
   tree-ssa-structalias.c:find_func_aliases for the list of builtins we
   might need to handle here.  A worked decoding of one of the spec
   strings follows the function.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by the
	 first argument.
	 strcat/strncat additionally read memory pointed to by the
	 first argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
      return "1cW 1 ";
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
      return "1cW 13";
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
      return "1cO 1 ";
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      return ".cO 1 ";
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      return "1cO313";
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      return ".cO313";
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      return ".cO313";
    case BUILT_IN_BCOPY:
      return ".c23O3";
    case BUILT_IN_BZERO:
      return ".cO2";
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:
      return ".cR ";

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:
      return ".cR ";
    case BUILT_IN_STRNLEN:
      return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:
      return "mCR ";
    case BUILT_IN_STRNDUP:
      return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:
      return "mC";
    CASE_BUILT_IN_ALLOCA:
      return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
    case BUILT_IN_MEMCHR:
      return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
      return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly,
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:
      return ".co ";
    case BUILT_IN_VA_END:
      return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
    case BUILT_IN_REALLOC:
      return ".cw ";
    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:
      return ".C. Ot";
    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:
      return ".c. Ot";
    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:
      return ".c. . Ot";
    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:
      return ".c. OtOt";
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:
      return "1cO3";
    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):
      return ".cO ";
    case BUILT_IN_STACK_SAVE:
      return ".c";
    case BUILT_IN_ASSUME_ALIGNED:
      return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN:
      return ".cOt";

    default:
      return "";
    }
}
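
/* As a rough decoding example (attr-fnspec.h is the authoritative
   reference): the strncat spec "1cW 13" above says that the function
   returns its first argument ('1'), is const apart from the described
   side effects ('c'), writes (and, as noted above, also reads) the
   memory pointed to by argument 1 ('W'), and only reads argument 2,
   whose contents are copied to the memory pointed to by argument 1
   ('1') with the access size bounded by argument 3 ('3').  */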