/* Expand builtin functions.
   Copyright (C) 1988-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "demangle.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
			       pointer_query *);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}

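/* As an illustration of the ctor above (an editorial sketch, not part
   of the interface): for a bound N whose range is known to be [3, 8],

     access_ref ref (n, true);    with MINACCESS set, bndrng == { 1, 8 }
     access_ref ref (n, false);   without it, bndrng == { 0, 8 }

   i.e., BNDRNG[0] only records whether a minimum access of at least
   one element is required; the extracted lower bound itself is not
   kept.  */
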
/* Return the PHI node REF refers to or null if it doesn't.  */

gphi *
access_ref::phi () const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL;

  gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return NULL;

  return as_a <gphi *> (def_stmt);
}

/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize (ARG, OSTYPE) for each
   PHI argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* The object returned when all the PHI arguments refer to the same
     object (i.e., have the same REF), with OFFRNG reflecting the range
     of their offsets.  */
  access_ref same_ref;
  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  access_ref phi_ref;
  if (pref)
    {
      phi_ref = *pref;
      same_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  /* The offset of the PHI, not reflecting those of its arguments.  */
  const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Add PREF's offset to that of the argument.  */
      phi_arg_ref.add_offset (orng[0], orng[1]);
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				   || phi_arg_ref.sizrng[1] != maxobjsize);

      parmarray |= phi_arg_ref.parmarray;

      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  if (!nullp)
	    same_ref = phi_arg_ref;
	  phi_ref = phi_arg_ref;
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];
	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      if (phi_arg_ref.ref != same_ref.ref)
	same_ref.ref = NULL_TREE;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;

      /* Set SAME_REF.OFFRNG to the maximum range of all arguments.  */
      if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
	same_ref.offrng[0] = phi_arg_ref.offrng[0];
      if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
	same_ref.offrng[1] = phi_arg_ref.offrng[1];
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  if (!same_ref.ref && same_ref.offrng[0] != 0)
    /* Clear BASE0 if not all the arguments refer to the same object and
       if not all their offsets are zero-based.  This allows the final
       PHI offset to be out of bounds for some arguments but not for
       others (or negative even if all the arguments are BASE0), which
       is overly permissive.  */
    phi_ref.base0 = false;

  if (same_ref.ref)
    phi_ref = same_ref;
  else
    {
      /* Replace the lower bound of the largest argument with the size
	 of the smallest argument, and set PARMARRAY if any argument
	 was one.  */
      phi_ref.sizrng[0] = minsize;
      phi_ref.parmarray = parmarray;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}

/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into the referenced object is zero-based (i.e., it's
	 not referenced by a pointer into the middle of some unknown
	 object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size ()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}

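/* A worked example of the above: for a base0 reference with
   SIZRNG == { 8, 8 } and OFFRNG == { 2, 4 }, size_remaining returns
   8 - 2 == 6 and sets *PMIN to 6 as well.  With OFFRNG == { 8, 8 }
   the offset points just past the end of the object, so the function
   returns 0 and sets *PMIN to -1 to flag that special case.  */
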
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}

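/* For example, adding the ordinary range [2, 6] to a base0 reference
   with SIZRNG == { 8, 8 } and OFFRNG == { 0, 0 } yields
   OFFRNG == { 2, 6 }, already in bounds.  Adding the inverted range
   [4, -4] instead sets OFFRNG[1] to PTRDIFF_MAX and OFFRNG[0] to
   0 + 4 == 4; the in-bounds constraint at the end then caps OFFRNG[1]
   at the size, giving OFFRNG == { 4, 8 }.  */
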
/* Set a bit for the PHI in VISITED and return true if it wasn't
   already set.  */

bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* Return false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Clear the bit for SSA_NAME in VISITED.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Return false if the SSA_NAME chain length counter has reached
   the limit, otherwise increment the counter and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}

/* If the SSA_NAME has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited.  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
	&& !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}

/* Release the bitmap of visited PHIs, if any.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}

/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  */
}

/* Return a pointer to the cached access_ref instance for the SSA_NAME
   PTR if it's there or null otherwise.  */

const access_ref *
pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
{
  if (!var_cache)
    {
      ++misses;
      return NULL;
    }

  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);
  if (var_cache->indices.length () <= idx)
    {
      ++misses;
      return NULL;
    }

  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    {
      ++misses;
      return NULL;
    }

  access_ref &cache_ref = var_cache->access_refs[cache_idx];
  if (cache_ref.ref)
    {
      ++hits;
      return &cache_ref;
    }

  ++misses;
  return NULL;
}

/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.  */

bool
pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
{
  const unsigned version
    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;

  if (var_cache && version)
    {
      unsigned idx = version << 1 | (ostype & 1);
      if (idx < var_cache->indices.length ())
	{
	  unsigned cache_idx = var_cache->indices[idx] - 1;
	  if (cache_idx < var_cache->access_refs.length ()
	      && var_cache->access_refs[cache_idx].ref)
	    {
	      ++hits;
	      *pref = var_cache->access_refs[cache_idx];
	      return true;
	    }
	}

      ++misses;
    }

  if (!compute_objsize (ptr, ostype, pref, this))
    {
      ++failures;
      return false;
    }

  return true;
}

/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   nonnull.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     Its value minus one is the index into ACCESS_REFS.  Not all
     entries are valid.  */
  if (var_cache->indices.length () <= idx)
    var_cache->indices.safe_grow_cleared (idx + 1);

  if (!var_cache->indices[idx])
    var_cache->indices[idx] = var_cache->access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  All entries except for the last two
     are valid.  Once nonnull, the REF value must stay unchanged.  */
  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    var_cache->access_refs.safe_grow_cleared (cache_idx + 1);

  /* Use a reference so the assignment below updates the cache entry
     rather than a transient copy.  */
  access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
  if (cache_ref.ref)
    {
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}

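/* Cache layout example: for an SSA name with version 5 queried with
   OSTYPE 1, IDX is (5 << 1) | 1 == 11.  INDICES[11] then holds the
   one-based position of the corresponding entry in ACCESS_REFS, so a
   zero there means no entry has been created yet.  Keeping the two
   object size types in separate slots lets the same SSA name be
   cached under both.  */
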
/* Flush the cache if it's nonnull.  */

void
pointer_query::flush_cache ()
{
  if (!var_cache)
    return;
  var_cache->indices.release ();
  var_cache->access_refs.release ();
}

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

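/* For example, if get_object_alignment_1 determines ALIGN == 32 and
   BITPOS == 8, the address is only known to satisfy
   ptr & 31 == 8, so the provable alignment is least_bit_hwi (8) == 8
   bits, i.e. a single byte.  */
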
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

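/* For example, string_length ("foo\0bar", 1, 7) returns 3, and for
   the 2-byte wide sequence "a\0b\0\0\0" (three elements) a call with
   ELTSIZE == 2 returns 2, since only the third element consists
   entirely of zero bytes.  */
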
/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to its being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, returns false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull, also issues
   a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is true, but not more.  */
  tree size;
  /* Set to true if the size is exact; false if SRC involves
     a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}

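/* For example, given

     const char a[3] = "abc";   three elements, no terminating NUL

   a call on a reference to A returns A's declaration and sets *SIZE
   to 3 and *EXACT to true, since the array size is constant and no
   variable offset is involved.  */
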
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}

      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}

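/* For example, for the constant "foo\0bar" c_strlen returns 3 at
   offset 0 and 2 at offset 1, but returns NULL_TREE for a
   non-constant offset, since the result would then depend on where
   the search starts relative to the embedded NUL.  */
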
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

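/* For example, reading "ab" in SImode yields 0x00006261 on a
   little-endian target and 0x61620000 on a big-endian one; with
   NULL_TERMINATED_P set, the bytes past the '\0' terminator are
   read as zero.  */
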
/* Cast a target constant CST to target CHAR and if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

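/* For example, for an INTEGER_CST with value 65 this stores 'A' (on
   an ASCII host) in *P and returns zero; a value that does not fit
   in the host char, or a non-INTEGER_CST, makes it return 1
   instead.  */
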
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

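/* In source terms, this implements calls such as

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   where the argument is the number of frames to scan back.  */
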
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

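/* The resulting buffer layout is thus:

     buf[0]    frame pointer
     buf[1]    address of the receiver label
     buf[2..]  machine-dependent stack save area

   which is why __builtin_setjmp requires a buffer of five words (see
   the comment above expand_builtin_longjmp below).  */
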
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

1750 /* __builtin_longjmp is passed a pointer to an array of five words (not
1751 all will be used on all machines). It operates similarly to the C
1752 library function of the same name, but is more efficient. Much of
1753 the code below is copied from the handling of non-local gotos. */
1755 static void
1756 expand_builtin_longjmp (rtx buf_addr, rtx value)
1758 rtx fp, lab, stack;
1759 rtx_insn *insn, *last;
1760 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1762 /* DRAP is needed for stack realign if longjmp is expanded to current
1763 function */
1764 if (SUPPORTS_STACK_ALIGNMENT)
1765 crtl->need_drap = true;
1767 if (setjmp_alias_set == -1)
1768 setjmp_alias_set = new_alias_set ();
1770 buf_addr = convert_memory_address (Pmode, buf_addr);
1772 buf_addr = force_reg (Pmode, buf_addr);
1774 /* We require the user to pass a second argument of 1, because
1775 that is what builtin_setjmp will return. */
1776 gcc_assert (value == const1_rtx);
1778 last = get_last_insn ();
1779 if (targetm.have_builtin_longjmp ())
1780 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1781 else
1783 fp = gen_rtx_MEM (Pmode, buf_addr);
1784 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1785 GET_MODE_SIZE (Pmode)));
1787 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1788 2 * GET_MODE_SIZE (Pmode)));
1789 set_mem_alias_set (fp, setjmp_alias_set);
1790 set_mem_alias_set (lab, setjmp_alias_set);
1791 set_mem_alias_set (stack, setjmp_alias_set);
1793 /* Pick up FP, label, and SP from the block and jump. This code is
1794 from expand_goto in stmt.c; see there for detailed comments. */
1795 if (targetm.have_nonlocal_goto ())
1796 /* We have to pass a value to the nonlocal_goto pattern that will
1797 get copied into the static_chain pointer, but it does not matter
1798 what that value is, because builtin_setjmp does not use it. */
1799 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1800 else
1802 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1803 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1805 lab = copy_to_reg (lab);
1807 /* Restore the frame pointer and stack pointer. We must use a
1808 temporary since the setjmp buffer may be a local. */
1809 fp = copy_to_reg (fp);
1810 emit_stack_restore (SAVE_NONLOCAL, stack);
1812 /* Ensure the frame pointer move is not optimized. */
1813 emit_insn (gen_blockage ());
1814 emit_clobber (hard_frame_pointer_rtx);
1815 emit_clobber (frame_pointer_rtx);
1816 emit_move_insn (hard_frame_pointer_rtx, fp);
1818 emit_use (hard_frame_pointer_rtx);
1819 emit_use (stack_pointer_rtx);
1820 emit_indirect_jump (lab);
1824 /* Search backwards and mark the jump insn as a non-local goto.
1825 Note that this precludes the use of __builtin_longjmp to a
1826 __builtin_setjmp target in the same function. However, we've
1827 already cautioned the user that these functions are for
1828 internal exception handling use only. */
1829 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1831 gcc_assert (insn != last);
1833 if (JUMP_P (insn))
1835 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1836 break;
1838 else if (CALL_P (insn))
1839 break;
1843 static inline bool
1844 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1846 return (iter->i < iter->n);
1849 /* This function validates the types of a function call argument list
1850 against a specified list of tree_codes. If the last specifier is a 0,
1851 that represents an ellipsis; otherwise the last specifier must be a
1852 VOID_TYPE. */
1854 static bool
1855 validate_arglist (const_tree callexpr, ...)
1857 enum tree_code code;
1858 bool res = false;
1859 va_list ap;
1860 const_call_expr_arg_iterator iter;
1861 const_tree arg;
1863 va_start (ap, callexpr);
1864 init_const_call_expr_arg_iterator (callexpr, &iter);
1866 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1867 tree fn = CALL_EXPR_FN (callexpr);
1868 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1870 for (unsigned argno = 1; ; ++argno)
1872 code = (enum tree_code) va_arg (ap, int);
1874 switch (code)
1876 case 0:
1877 /* This signifies an ellipsis; any further arguments are all ok. */
1878 res = true;
1879 goto end;
1880 case VOID_TYPE:
1881 /* This signifies an endlink; if no arguments remain, return
1882 true, otherwise false. */
1883 res = !more_const_call_expr_args_p (&iter);
1884 goto end;
1885 case POINTER_TYPE:
1886 /* The actual argument must be nonnull when either the whole
1887 called function has been declared nonnull, or when the formal
1888 argument corresponding to the actual argument has been. */
1889 if (argmap
1890 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1892 arg = next_const_call_expr_arg (&iter);
1893 if (!validate_arg (arg, code) || integer_zerop (arg))
1894 goto end;
1895 break;
1897 /* FALLTHRU */
1898 default:
1899 /* If no parameters remain or the parameter's code does not
1900 match the specified code, return false. Otherwise continue
1901 checking any remaining arguments. */
1902 arg = next_const_call_expr_arg (&iter);
1903 if (!validate_arg (arg, code))
1904 goto end;
1905 break;
1909 /* Every early exit jumps here so that the single va_end below is
1910 reached on all paths. */
1911 end: ;
1912 va_end (ap);
1914 BITMAP_FREE (argmap);
1916 return res;
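/* Usage sketch (illustrative): a strcmp-like builtin would be checked
   with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which also rejects literal null pointers for arguments covered by a
   nonnull attribute, while a printf-like builtin would end its list
   with 0 so that any trailing variadic arguments are accepted.  */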
1919 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1920 and the address of the save area. */
1922 static rtx
1923 expand_builtin_nonlocal_goto (tree exp)
1925 tree t_label, t_save_area;
1926 rtx r_label, r_save_area, r_fp, r_sp;
1927 rtx_insn *insn;
1929 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1930 return NULL_RTX;
1932 t_label = CALL_EXPR_ARG (exp, 0);
1933 t_save_area = CALL_EXPR_ARG (exp, 1);
1935 r_label = expand_normal (t_label);
1936 r_label = convert_memory_address (Pmode, r_label);
1937 r_save_area = expand_normal (t_save_area);
1938 r_save_area = convert_memory_address (Pmode, r_save_area);
1939 /* Copy the address of the save location to a register just in case it was
1940 based on the frame pointer. */
1941 r_save_area = copy_to_reg (r_save_area);
1942 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1943 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1944 plus_constant (Pmode, r_save_area,
1945 GET_MODE_SIZE (Pmode)));
1947 crtl->has_nonlocal_goto = 1;
1949 /* ??? We no longer need to pass the static chain value, afaik. */
1950 if (targetm.have_nonlocal_goto ())
1951 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1952 else
1954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1957 r_label = copy_to_reg (r_label);
1959 /* Restore the frame pointer and stack pointer. We must use a
1960 temporary since the setjmp buffer may be a local. */
1961 r_fp = copy_to_reg (r_fp);
1962 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1964 /* Ensure the frame pointer move is not optimized. */
1965 emit_insn (gen_blockage ());
1966 emit_clobber (hard_frame_pointer_rtx);
1967 emit_clobber (frame_pointer_rtx);
1968 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1970 /* USE of hard_frame_pointer_rtx added for consistency;
1971 not clear if really needed. */
1972 emit_use (hard_frame_pointer_rtx);
1973 emit_use (stack_pointer_rtx);
1975 /* If the architecture is using a GP register, we must
1976 conservatively assume that the target function makes use of it.
1977 The prologue of functions with nonlocal gotos must therefore
1978 initialize the GP register to the appropriate value, and we
1979 must then make sure that this value is live at the point
1980 of the jump. (Note that this doesn't necessarily apply
1981 to targets with a nonlocal_goto pattern; they are free
1982 to implement it in their own way. Note also that this is
1983 a no-op if the GP register is a global invariant.) */
1984 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1985 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1986 emit_use (pic_offset_table_rtx);
1988 emit_indirect_jump (r_label);
1991 /* Search backwards to the jump insn and mark it as a
1992 non-local goto. */
1993 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1995 if (JUMP_P (insn))
1997 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1998 break;
2000 else if (CALL_P (insn))
2001 break;
2004 return const0_rtx;
2007 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2008 (not all will be used on all machines) that was passed to __builtin_setjmp.
2009 It updates the stack pointer in that block to the current value. This is
2010 also called directly by the SJLJ exception handling code. */
2012 void
2013 expand_builtin_update_setjmp_buf (rtx buf_addr)
2015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2016 buf_addr = convert_memory_address (Pmode, buf_addr);
2017 rtx stack_save
2018 = gen_rtx_MEM (sa_mode,
2019 memory_address
2020 (sa_mode,
2021 plus_constant (Pmode, buf_addr,
2022 2 * GET_MODE_SIZE (Pmode))));
2024 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2027 /* Expand a call to __builtin_prefetch. For a target that does not support
2028 data prefetch, evaluate the memory address argument in case it has side
2029 effects. */
2031 static void
2032 expand_builtin_prefetch (tree exp)
2034 tree arg0, arg1, arg2;
2035 int nargs;
2036 rtx op0, op1, op2;
2038 if (!validate_arglist (exp, POINTER_TYPE, 0))
2039 return;
2041 arg0 = CALL_EXPR_ARG (exp, 0);
2043 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2044 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2045 locality). */
2046 nargs = call_expr_nargs (exp);
2047 if (nargs > 1)
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2049 else
2050 arg1 = integer_zero_node;
2051 if (nargs > 2)
2052 arg2 = CALL_EXPR_ARG (exp, 2);
2053 else
2054 arg2 = integer_three_node;
2056 /* Argument 0 is an address. */
2057 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2059 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2060 if (TREE_CODE (arg1) != INTEGER_CST)
2062 error ("second argument to %<__builtin_prefetch%> must be a constant");
2063 arg1 = integer_zero_node;
2065 op1 = expand_normal (arg1);
2066 /* Argument 1 must be either zero or one. */
2067 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2069 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2070 " using zero");
2071 op1 = const0_rtx;
2074 /* Argument 2 (locality) must be a compile-time constant int. */
2075 if (TREE_CODE (arg2) != INTEGER_CST)
2077 error ("third argument to %<__builtin_prefetch%> must be a constant");
2078 arg2 = integer_zero_node;
2080 op2 = expand_normal (arg2);
2081 /* Argument 2 must be 0, 1, 2, or 3. */
2082 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2084 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2085 op2 = const0_rtx;
2088 if (targetm.have_prefetch ())
2090 class expand_operand ops[3];
2092 create_address_operand (&ops[0], op0);
2093 create_integer_operand (&ops[1], INTVAL (op1));
2094 create_integer_operand (&ops[2], INTVAL (op2));
2095 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2096 return;
2099 /* Don't do anything with direct references to volatile memory, but
2100 generate code to handle other side effects. */
2101 if (!MEM_P (op0) && side_effects_p (op0))
2102 emit_insn (op0);
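/* Usage sketch (hypothetical user code): the defaulting above means
   these calls are equivalent pairs.

     __builtin_prefetch (p);        same as __builtin_prefetch (p, 0, 3)
     __builtin_prefetch (p, 1);     same as __builtin_prefetch (p, 1, 3)

   The second argument selects read (0) or write (1); the third selects
   temporal locality, from 0 (none) up to 3 (high).  */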
2105 /* Get a MEM rtx for expression EXP which is the address of an operand
2106 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
2107 the maximum length of the block of memory that might be accessed or
2108 NULL if unknown. */
2110 static rtx
2111 get_memory_rtx (tree exp, tree len)
2113 tree orig_exp = exp;
2114 rtx addr, mem;
2116 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
2117 from its expression, for expr->a.b only <variable>.a.b is recorded. */
2118 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2119 exp = TREE_OPERAND (exp, 0);
2121 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2122 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2124 /* Get an expression we can use to find the attributes to assign to MEM.
2125 First remove any nops. */
2126 while (CONVERT_EXPR_P (exp)
2127 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2128 exp = TREE_OPERAND (exp, 0);
2130 /* Build a MEM_REF representing the whole accessed area as a byte blob
2131 (as builtin stringops may alias with anything). */
2132 exp = fold_build2 (MEM_REF,
2133 build_array_type (char_type_node,
2134 build_range_type (sizetype,
2135 size_one_node, len)),
2136 exp, build_int_cst (ptr_type_node, 0));
2138 /* If the MEM_REF has no acceptable address, try to get the base object
2139 from the original address we got, and build an all-aliasing
2140 unknown-sized access to that one. */
2141 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2142 set_mem_attributes (mem, exp, 0);
2143 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2144 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2145 0))))
2147 exp = build_fold_addr_expr (exp);
2148 exp = fold_build2 (MEM_REF,
2149 build_array_type (char_type_node,
2150 build_range_type (sizetype,
2151 size_zero_node,
2152 NULL)),
2153 exp, build_int_cst (ptr_type_node, 0));
2154 set_mem_attributes (mem, exp, 0);
2156 set_mem_alias_set (mem, 0);
2157 return mem;
2160 /* Built-in functions to perform an untyped call and return. */
2162 #define apply_args_mode \
2163 (this_target_builtins->x_apply_args_mode)
2164 #define apply_result_mode \
2165 (this_target_builtins->x_apply_result_mode)
2167 /* Return the size required for the block returned by __builtin_apply_args,
2168 and initialize apply_args_mode. */
2170 static int
2171 apply_args_size (void)
2173 static int size = -1;
2174 int align;
2175 unsigned int regno;
2177 /* The values computed by this function never change. */
2178 if (size < 0)
2180 /* The first value is the incoming arg-pointer. */
2181 size = GET_MODE_SIZE (Pmode);
2183 /* The second value is the structure value address unless this is
2184 passed as an "invisible" first argument. */
2185 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2186 size += GET_MODE_SIZE (Pmode);
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 if (FUNCTION_ARG_REGNO_P (regno))
2191 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2193 gcc_assert (mode != VOIDmode);
2195 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2196 if (size % align != 0)
2197 size = CEIL (size, align) * align;
2198 size += GET_MODE_SIZE (mode);
2199 apply_args_mode[regno] = mode;
2201 else
2203 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2206 return size;
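/* Worked example of the rounding above, with assumed numbers: if SIZE
   is 12 and the next register's mode needs 8-byte alignment, then
   12 % 8 != 0 and SIZE becomes CEIL (12, 8) * 8 == 16 before the
   mode's own size is added, so every saved register starts at an
   offset suitably aligned for its mode.  */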
2209 /* Return the size required for the block returned by __builtin_apply,
2210 and initialize apply_result_mode. */
2212 static int
2213 apply_result_size (void)
2215 static int size = -1;
2216 int align, regno;
2218 /* The values computed by this function never change. */
2219 if (size < 0)
2221 size = 0;
2223 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2224 if (targetm.calls.function_value_regno_p (regno))
2226 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2228 gcc_assert (mode != VOIDmode);
2230 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2231 if (size % align != 0)
2232 size = CEIL (size, align) * align;
2233 size += GET_MODE_SIZE (mode);
2234 apply_result_mode[regno] = mode;
2236 else
2237 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2239 /* Allow targets that use untyped_call and untyped_return to override
2240 the size so that machine-specific information can be stored here. */
2241 #ifdef APPLY_RESULT_SIZE
2242 size = APPLY_RESULT_SIZE;
2243 #endif
2245 return size;
2248 /* Create a vector describing the result block RESULT. If SAVEP is true,
2249 the result block is used to save the values; otherwise it is used to
2250 restore the values. */
2252 static rtx
2253 result_vector (int savep, rtx result)
2255 int regno, size, align, nelts;
2256 fixed_size_mode mode;
2257 rtx reg, mem;
2258 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2260 size = nelts = 0;
2261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2262 if ((mode = apply_result_mode[regno]) != VOIDmode)
2264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2265 if (size % align != 0)
2266 size = CEIL (size, align) * align;
2267 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2268 mem = adjust_address (result, mode, size);
2269 savevec[nelts++] = (savep
2270 ? gen_rtx_SET (mem, reg)
2271 : gen_rtx_SET (reg, mem));
2272 size += GET_MODE_SIZE (mode);
2274 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2277 /* Save the state required to perform an untyped call with the same
2278 arguments as were passed to the current function. */
2280 static rtx
2281 expand_builtin_apply_args_1 (void)
2283 rtx registers, tem;
2284 int size, align, regno;
2285 fixed_size_mode mode;
2286 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2288 /* Create a block where the arg-pointer, structure value address,
2289 and argument registers can be saved. */
2290 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2292 /* Walk past the arg-pointer and structure value address. */
2293 size = GET_MODE_SIZE (Pmode);
2294 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2295 size += GET_MODE_SIZE (Pmode);
2297 /* Save each register used in calling a function to the block. */
2298 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2299 if ((mode = apply_args_mode[regno]) != VOIDmode)
2301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2302 if (size % align != 0)
2303 size = CEIL (size, align) * align;
2305 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2307 emit_move_insn (adjust_address (registers, mode, size), tem);
2308 size += GET_MODE_SIZE (mode);
2311 /* Save the arg pointer to the block. */
2312 tem = copy_to_reg (crtl->args.internal_arg_pointer);
2313 /* We need the pointer as the arguments were actually passed to us by the
2314 caller, not as we might have pretended they were passed. Make sure it's a valid
2315 operand, as emit_move_insn isn't expected to handle a PLUS. */
2316 if (STACK_GROWS_DOWNWARD)
2317 tem
2318 = force_operand (plus_constant (Pmode, tem,
2319 crtl->args.pretend_args_size),
2320 NULL_RTX);
2321 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2323 size = GET_MODE_SIZE (Pmode);
2325 /* Save the structure value address unless this is passed as an
2326 "invisible" first argument. */
2327 if (struct_incoming_value)
2328 emit_move_insn (adjust_address (registers, Pmode, size),
2329 copy_to_reg (struct_incoming_value));
2331 /* Return the address of the block. */
2332 return copy_addr_to_reg (XEXP (registers, 0));
2335 /* __builtin_apply_args returns block of memory allocated on
2336 the stack into which is stored the arg pointer, structure
2337 value address, static chain, and all the registers that might
2338 possibly be used in performing a function call. The code is
2339 moved to the start of the function so the incoming values are
2340 saved. */
2342 static rtx
2343 expand_builtin_apply_args (void)
2345 /* Don't do __builtin_apply_args more than once in a function.
2346 Save the result of the first call and reuse it. */
2347 if (apply_args_value != 0)
2348 return apply_args_value;
2350 /* When this function is called, it means that registers must be
2351 saved on entry to this function. So we migrate the
2352 call to the first insn of this function. */
2353 rtx temp;
2355 start_sequence ();
2356 temp = expand_builtin_apply_args_1 ();
2357 rtx_insn *seq = get_insns ();
2358 end_sequence ();
2360 apply_args_value = temp;
2362 /* Put the insns after the NOTE that starts the function.
2363 If this is inside a start_sequence, make the outer-level insn
2364 chain current, so the code is placed at the start of the
2365 function. If internal_arg_pointer is a non-virtual pseudo,
2366 it needs to be placed after the function that initializes
2367 that pseudo. */
2368 push_topmost_sequence ();
2369 if (REG_P (crtl->args.internal_arg_pointer)
2370 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2371 emit_insn_before (seq, parm_birth_insn);
2372 else
2373 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2374 pop_topmost_sequence ();
2375 return temp;
2379 /* Perform an untyped call and save the state required to perform an
2380 untyped return of whatever value was returned by the given function. */
2382 static rtx
2383 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2385 int size, align, regno;
2386 fixed_size_mode mode;
2387 rtx incoming_args, result, reg, dest, src;
2388 rtx_call_insn *call_insn;
2389 rtx old_stack_level = 0;
2390 rtx call_fusage = 0;
2391 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2393 arguments = convert_memory_address (Pmode, arguments);
2395 /* Create a block where the return registers can be saved. */
2396 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2398 /* Fetch the arg pointer from the ARGUMENTS block. */
2399 incoming_args = gen_reg_rtx (Pmode);
2400 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2401 if (!STACK_GROWS_DOWNWARD)
2402 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2403 incoming_args, 0, OPTAB_LIB_WIDEN);
2405 /* Push a new argument block and copy the arguments. Do not allow
2406 the (potential) memcpy call below to interfere with our stack
2407 manipulations. */
2408 do_pending_stack_adjust ();
2409 NO_DEFER_POP;
2411 /* Save the stack with nonlocal if available. */
2412 if (targetm.have_save_stack_nonlocal ())
2413 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2414 else
2415 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2417 /* Allocate a block of memory onto the stack and copy the memory
2418 arguments to the outgoing arguments address. We can pass TRUE
2419 as the 4th argument because we just saved the stack pointer
2420 and will restore it right after the call. */
2421 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2423 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2424 may have already set current_function_calls_alloca to true.
2425 current_function_calls_alloca won't be set if argsize is zero,
2426 so we have to guarantee need_drap is true here. */
2427 if (SUPPORTS_STACK_ALIGNMENT)
2428 crtl->need_drap = true;
2430 dest = virtual_outgoing_args_rtx;
2431 if (!STACK_GROWS_DOWNWARD)
2433 if (CONST_INT_P (argsize))
2434 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2435 else
2436 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2438 dest = gen_rtx_MEM (BLKmode, dest);
2439 set_mem_align (dest, PARM_BOUNDARY);
2440 src = gen_rtx_MEM (BLKmode, incoming_args);
2441 set_mem_align (src, PARM_BOUNDARY);
2442 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2444 /* Refer to the argument block. */
2445 apply_args_size ();
2446 arguments = gen_rtx_MEM (BLKmode, arguments);
2447 set_mem_align (arguments, PARM_BOUNDARY);
2449 /* Walk past the arg-pointer and structure value address. */
2450 size = GET_MODE_SIZE (Pmode);
2451 if (struct_value)
2452 size += GET_MODE_SIZE (Pmode);
2454 /* Restore each of the registers previously saved. Make USE insns
2455 for each of these registers for use in making the call. */
2456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2457 if ((mode = apply_args_mode[regno]) != VOIDmode)
2459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2460 if (size % align != 0)
2461 size = CEIL (size, align) * align;
2462 reg = gen_rtx_REG (mode, regno);
2463 emit_move_insn (reg, adjust_address (arguments, mode, size));
2464 use_reg (&call_fusage, reg);
2465 size += GET_MODE_SIZE (mode);
2468 /* Restore the structure value address unless this is passed as an
2469 "invisible" first argument. */
2470 size = GET_MODE_SIZE (Pmode);
2471 if (struct_value)
2473 rtx value = gen_reg_rtx (Pmode);
2474 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2475 emit_move_insn (struct_value, value);
2476 if (REG_P (struct_value))
2477 use_reg (&call_fusage, struct_value);
2480 /* All arguments and registers used for the call are set up by now! */
2481 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2483 /* Ensure the address is valid. A SYMBOL_REF is already valid, so it
2484 needs no work, and we don't want to load it into a register as an
2485 optimization, because prepare_call_address already did that if it should be done. */
2486 if (GET_CODE (function) != SYMBOL_REF)
2487 function = memory_address (FUNCTION_MODE, function);
2489 /* Generate the actual call instruction and save the return value. */
2490 if (targetm.have_untyped_call ())
2492 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2493 emit_call_insn (targetm.gen_untyped_call (mem, result,
2494 result_vector (1, result)));
2496 else if (targetm.have_call_value ())
2498 rtx valreg = 0;
2500 /* Locate the unique return register. It is not possible to
2501 express a call that sets more than one return register using
2502 call_value; use untyped_call for that. In fact, untyped_call
2503 only needs to save the return registers in the given block. */
2504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2505 if ((mode = apply_result_mode[regno]) != VOIDmode)
2507 gcc_assert (!valreg); /* have_untyped_call required. */
2509 valreg = gen_rtx_REG (mode, regno);
2512 emit_insn (targetm.gen_call_value (valreg,
2513 gen_rtx_MEM (FUNCTION_MODE, function),
2514 const0_rtx, NULL_RTX, const0_rtx));
2516 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2518 else
2519 gcc_unreachable ();
2521 /* Find the CALL insn we just emitted, and attach the register usage
2522 information. */
2523 call_insn = last_call_insn ();
2524 add_function_usage_to (call_insn, call_fusage);
2526 /* Restore the stack. */
2527 if (targetm.have_save_stack_nonlocal ())
2528 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2529 else
2530 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2531 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2533 OK_DEFER_POP;
2535 /* Return the address of the result block. */
2536 result = copy_addr_to_reg (XEXP (result, 0));
2537 return convert_memory_address (ptr_mode, result);
2540 /* Perform an untyped return. */
2542 static void
2543 expand_builtin_return (rtx result)
2545 int size, align, regno;
2546 fixed_size_mode mode;
2547 rtx reg;
2548 rtx_insn *call_fusage = 0;
2550 result = convert_memory_address (Pmode, result);
2552 apply_result_size ();
2553 result = gen_rtx_MEM (BLKmode, result);
2555 if (targetm.have_untyped_return ())
2557 rtx vector = result_vector (0, result);
2558 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2559 emit_barrier ();
2560 return;
2563 /* Restore the return value and note that each value is used. */
2564 size = 0;
2565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2566 if ((mode = apply_result_mode[regno]) != VOIDmode)
2568 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2569 if (size % align != 0)
2570 size = CEIL (size, align) * align;
2571 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2572 emit_move_insn (reg, adjust_address (result, mode, size));
2574 push_to_sequence (call_fusage);
2575 emit_use (reg);
2576 call_fusage = get_insns ();
2577 end_sequence ();
2578 size += GET_MODE_SIZE (mode);
2581 /* Put the USE insns before the return. */
2582 emit_insn (call_fusage);
2584 /* Return whatever value was restored by jumping directly to the end
2585 of the function. */
2586 expand_naked_return ();
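/* Usage sketch tying the three untyped-call builtins together
   (hypothetical user code; target_fn and the 64-byte argument-block
   size are assumptions, a real forwarder would obtain both some other
   way):

     void
     forward (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn,
                                       args, 64);
       __builtin_return (result);
     }
*/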
2589 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2591 static enum type_class
2592 type_to_class (tree type)
2594 switch (TREE_CODE (type))
2596 case VOID_TYPE: return void_type_class;
2597 case INTEGER_TYPE: return integer_type_class;
2598 case ENUMERAL_TYPE: return enumeral_type_class;
2599 case BOOLEAN_TYPE: return boolean_type_class;
2600 case POINTER_TYPE: return pointer_type_class;
2601 case REFERENCE_TYPE: return reference_type_class;
2602 case OFFSET_TYPE: return offset_type_class;
2603 case REAL_TYPE: return real_type_class;
2604 case COMPLEX_TYPE: return complex_type_class;
2605 case FUNCTION_TYPE: return function_type_class;
2606 case METHOD_TYPE: return method_type_class;
2607 case RECORD_TYPE: return record_type_class;
2608 case UNION_TYPE:
2609 case QUAL_UNION_TYPE: return union_type_class;
2610 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2611 ? string_type_class : array_type_class);
2612 case LANG_TYPE: return lang_type_class;
2613 case OPAQUE_TYPE: return opaque_type_class;
2614 default: return no_type_class;
2618 /* Expand a call EXP to __builtin_classify_type. */
2620 static rtx
2621 expand_builtin_classify_type (tree exp)
2623 if (call_expr_nargs (exp))
2624 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2625 return GEN_INT (no_type_class);
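/* Usage sketch (illustrative): after the default argument promotions,
   __builtin_classify_type (1.0) yields real_type_class and
   __builtin_classify_type ("") yields pointer_type_class (the array
   has decayed), while a call with no argument yields no_type_class as
   above.  */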
2628 /* This helper macro, meant to be used in mathfn_built_in below, determines
2629 which among a set of builtin math functions is appropriate for a given type
2630 mode. The `F' (float) and `L' (long double) are automatically generated
2631 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2632 types, there are additional types that are considered with 'F32', 'F64',
2633 'F128', etc. suffixes. */
2634 #define CASE_MATHFN(MATHFN) \
2635 CASE_CFN_##MATHFN: \
2636 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2637 fcodel = BUILT_IN_##MATHFN##L ; break;
2638 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2639 types. */
2640 #define CASE_MATHFN_FLOATN(MATHFN) \
2641 CASE_CFN_##MATHFN: \
2642 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2643 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2644 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2645 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2646 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2647 break;
2648 /* Similar to above, but appends _R after any F/L suffix. */
2649 #define CASE_MATHFN_REENT(MATHFN) \
2650 case CFN_BUILT_IN_##MATHFN##_R: \
2651 case CFN_BUILT_IN_##MATHFN##F_R: \
2652 case CFN_BUILT_IN_##MATHFN##L_R: \
2653 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2654 fcodel = BUILT_IN_##MATHFN##L_R ; break;
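/* Expansion sketch (illustrative): CASE_MATHFN (SQRT) expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each case records the double, float and long double variants that
   the type dispatch at the end of mathfn_built_in_2 selects from.  */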
2656 /* Return a function equivalent to FN but operating on floating-point
2657 values of type TYPE, or END_BUILTINS if no such function exists.
2658 This is purely an operation on function codes; it does not guarantee
2659 that the target actually has an implementation of the function. */
2661 static built_in_function
2662 mathfn_built_in_2 (tree type, combined_fn fn)
2664 tree mtype;
2665 built_in_function fcode, fcodef, fcodel;
2666 built_in_function fcodef16 = END_BUILTINS;
2667 built_in_function fcodef32 = END_BUILTINS;
2668 built_in_function fcodef64 = END_BUILTINS;
2669 built_in_function fcodef128 = END_BUILTINS;
2670 built_in_function fcodef32x = END_BUILTINS;
2671 built_in_function fcodef64x = END_BUILTINS;
2672 built_in_function fcodef128x = END_BUILTINS;
2674 switch (fn)
2676 #define SEQ_OF_CASE_MATHFN \
2677 CASE_MATHFN (ACOS) \
2678 CASE_MATHFN (ACOSH) \
2679 CASE_MATHFN (ASIN) \
2680 CASE_MATHFN (ASINH) \
2681 CASE_MATHFN (ATAN) \
2682 CASE_MATHFN (ATAN2) \
2683 CASE_MATHFN (ATANH) \
2684 CASE_MATHFN (CBRT) \
2685 CASE_MATHFN_FLOATN (CEIL) \
2686 CASE_MATHFN (CEXPI) \
2687 CASE_MATHFN_FLOATN (COPYSIGN) \
2688 CASE_MATHFN (COS) \
2689 CASE_MATHFN (COSH) \
2690 CASE_MATHFN (DREM) \
2691 CASE_MATHFN (ERF) \
2692 CASE_MATHFN (ERFC) \
2693 CASE_MATHFN (EXP) \
2694 CASE_MATHFN (EXP10) \
2695 CASE_MATHFN (EXP2) \
2696 CASE_MATHFN (EXPM1) \
2697 CASE_MATHFN (FABS) \
2698 CASE_MATHFN (FDIM) \
2699 CASE_MATHFN_FLOATN (FLOOR) \
2700 CASE_MATHFN_FLOATN (FMA) \
2701 CASE_MATHFN_FLOATN (FMAX) \
2702 CASE_MATHFN_FLOATN (FMIN) \
2703 CASE_MATHFN (FMOD) \
2704 CASE_MATHFN (FREXP) \
2705 CASE_MATHFN (GAMMA) \
2706 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2707 CASE_MATHFN (HUGE_VAL) \
2708 CASE_MATHFN (HYPOT) \
2709 CASE_MATHFN (ILOGB) \
2710 CASE_MATHFN (ICEIL) \
2711 CASE_MATHFN (IFLOOR) \
2712 CASE_MATHFN (INF) \
2713 CASE_MATHFN (IRINT) \
2714 CASE_MATHFN (IROUND) \
2715 CASE_MATHFN (ISINF) \
2716 CASE_MATHFN (J0) \
2717 CASE_MATHFN (J1) \
2718 CASE_MATHFN (JN) \
2719 CASE_MATHFN (LCEIL) \
2720 CASE_MATHFN (LDEXP) \
2721 CASE_MATHFN (LFLOOR) \
2722 CASE_MATHFN (LGAMMA) \
2723 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2724 CASE_MATHFN (LLCEIL) \
2725 CASE_MATHFN (LLFLOOR) \
2726 CASE_MATHFN (LLRINT) \
2727 CASE_MATHFN (LLROUND) \
2728 CASE_MATHFN (LOG) \
2729 CASE_MATHFN (LOG10) \
2730 CASE_MATHFN (LOG1P) \
2731 CASE_MATHFN (LOG2) \
2732 CASE_MATHFN (LOGB) \
2733 CASE_MATHFN (LRINT) \
2734 CASE_MATHFN (LROUND) \
2735 CASE_MATHFN (MODF) \
2736 CASE_MATHFN (NAN) \
2737 CASE_MATHFN (NANS) \
2738 CASE_MATHFN_FLOATN (NEARBYINT) \
2739 CASE_MATHFN (NEXTAFTER) \
2740 CASE_MATHFN (NEXTTOWARD) \
2741 CASE_MATHFN (POW) \
2742 CASE_MATHFN (POWI) \
2743 CASE_MATHFN (POW10) \
2744 CASE_MATHFN (REMAINDER) \
2745 CASE_MATHFN (REMQUO) \
2746 CASE_MATHFN_FLOATN (RINT) \
2747 CASE_MATHFN_FLOATN (ROUND) \
2748 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2749 CASE_MATHFN (SCALB) \
2750 CASE_MATHFN (SCALBLN) \
2751 CASE_MATHFN (SCALBN) \
2752 CASE_MATHFN (SIGNBIT) \
2753 CASE_MATHFN (SIGNIFICAND) \
2754 CASE_MATHFN (SIN) \
2755 CASE_MATHFN (SINCOS) \
2756 CASE_MATHFN (SINH) \
2757 CASE_MATHFN_FLOATN (SQRT) \
2758 CASE_MATHFN (TAN) \
2759 CASE_MATHFN (TANH) \
2760 CASE_MATHFN (TGAMMA) \
2761 CASE_MATHFN_FLOATN (TRUNC) \
2762 CASE_MATHFN (Y0) \
2763 CASE_MATHFN (Y1) \
2764 CASE_MATHFN (YN)
2766 SEQ_OF_CASE_MATHFN
2768 default:
2769 return END_BUILTINS;
2772 mtype = TYPE_MAIN_VARIANT (type);
2773 if (mtype == double_type_node)
2774 return fcode;
2775 else if (mtype == float_type_node)
2776 return fcodef;
2777 else if (mtype == long_double_type_node)
2778 return fcodel;
2779 else if (mtype == float16_type_node)
2780 return fcodef16;
2781 else if (mtype == float32_type_node)
2782 return fcodef32;
2783 else if (mtype == float64_type_node)
2784 return fcodef64;
2785 else if (mtype == float128_type_node)
2786 return fcodef128;
2787 else if (mtype == float32x_type_node)
2788 return fcodef32x;
2789 else if (mtype == float64x_type_node)
2790 return fcodef64x;
2791 else if (mtype == float128x_type_node)
2792 return fcodef128x;
2793 else
2794 return END_BUILTINS;
2797 #undef CASE_MATHFN
2798 #undef CASE_MATHFN_FLOATN
2799 #undef CASE_MATHFN_REENT
2801 /* Return the math function equivalent to FN but operating directly on TYPE,
2802 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2803 otherwise use the explicit declaration. If we can't do the conversion,
2804 return null. */
2806 static tree
2807 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2809 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2810 if (fcode2 == END_BUILTINS)
2811 return NULL_TREE;
2813 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2814 return NULL_TREE;
2816 return builtin_decl_explicit (fcode2);
2819 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2821 tree
2822 mathfn_built_in (tree type, combined_fn fn)
2824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2827 /* Like mathfn_built_in_1, but take a built_in_function and
2828 always use the implicit builtin declarations. */
2830 tree
2831 mathfn_built_in (tree type, enum built_in_function fn)
2833 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
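/* Usage sketch (illustrative): mathfn_built_in (float_type_node,
   BUILT_IN_SQRT) maps the family to its float variant and returns the
   decl for sqrtf, or NULL_TREE when no implicit declaration is
   available.  */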
2836 /* Return the type associated with a built in function, i.e., the one
2837 to be passed to mathfn_built_in to get the type-specific
2838 function. */
2840 tree
2841 mathfn_built_in_type (combined_fn fn)
2843 #define CASE_MATHFN(MATHFN) \
2844 case CFN_BUILT_IN_##MATHFN: \
2845 return double_type_node; \
2846 case CFN_BUILT_IN_##MATHFN##F: \
2847 return float_type_node; \
2848 case CFN_BUILT_IN_##MATHFN##L: \
2849 return long_double_type_node;
2851 #define CASE_MATHFN_FLOATN(MATHFN) \
2852 CASE_MATHFN(MATHFN) \
2853 case CFN_BUILT_IN_##MATHFN##F16: \
2854 return float16_type_node; \
2855 case CFN_BUILT_IN_##MATHFN##F32: \
2856 return float32_type_node; \
2857 case CFN_BUILT_IN_##MATHFN##F64: \
2858 return float64_type_node; \
2859 case CFN_BUILT_IN_##MATHFN##F128: \
2860 return float128_type_node; \
2861 case CFN_BUILT_IN_##MATHFN##F32X: \
2862 return float32x_type_node; \
2863 case CFN_BUILT_IN_##MATHFN##F64X: \
2864 return float64x_type_node; \
2865 case CFN_BUILT_IN_##MATHFN##F128X: \
2866 return float128x_type_node;
2868 /* Similar to above, but appends _R after any F/L suffix. */
2869 #define CASE_MATHFN_REENT(MATHFN) \
2870 case CFN_BUILT_IN_##MATHFN##_R: \
2871 return double_type_node; \
2872 case CFN_BUILT_IN_##MATHFN##F_R: \
2873 return float_type_node; \
2874 case CFN_BUILT_IN_##MATHFN##L_R: \
2875 return long_double_type_node;
2877 switch (fn)
2879 SEQ_OF_CASE_MATHFN
2881 default:
2882 return NULL_TREE;
2885 #undef CASE_MATHFN
2886 #undef CASE_MATHFN_FLOATN
2887 #undef CASE_MATHFN_REENT
2888 #undef SEQ_OF_CASE_MATHFN
2891 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2892 return its code, otherwise return IFN_LAST. Note that this function
2893 only tests whether the function is defined in internal-fn.def, not whether
2894 it is actually available on the target. */
2896 internal_fn
2897 associated_internal_fn (tree fndecl)
2899 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2900 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2901 switch (DECL_FUNCTION_CODE (fndecl))
2903 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2904 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2905 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2906 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2907 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2908 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2909 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2910 #include "internal-fn.def"
2912 CASE_FLT_FN (BUILT_IN_POW10):
2913 return IFN_EXP10;
2915 CASE_FLT_FN (BUILT_IN_DREM):
2916 return IFN_REMAINDER;
2918 CASE_FLT_FN (BUILT_IN_SCALBN):
2919 CASE_FLT_FN (BUILT_IN_SCALBLN):
2920 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2921 return IFN_LDEXP;
2922 return IFN_LAST;
2924 default:
2925 return IFN_LAST;
2929 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2930 on the current target by a call to an internal function, return the
2931 code of that internal function, otherwise return IFN_LAST. The caller
2932 is responsible for ensuring that any side-effects of the built-in
2933 call are dealt with correctly. E.g. if CALL sets errno, the caller
2934 must decide that the errno result isn't needed or make it available
2935 in some other way. */
2937 internal_fn
2938 replacement_internal_fn (gcall *call)
2940 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2942 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2943 if (ifn != IFN_LAST)
2945 tree_pair types = direct_internal_fn_types (ifn, call);
2946 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2947 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2948 return ifn;
2951 return IFN_LAST;
2954 /* Expand a call to the builtin trinary math functions (fma).
2955 Return NULL_RTX if a normal call should be emitted rather than expanding the
2956 function in-line. EXP is the expression that is a call to the builtin
2957 function; if convenient, the result should be placed in TARGET.
2958 SUBTARGET may be used as the target for computing one of EXP's
2959 operands. */
2961 static rtx
2962 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2964 optab builtin_optab;
2965 rtx op0, op1, op2, result;
2966 rtx_insn *insns;
2967 tree fndecl = get_callee_fndecl (exp);
2968 tree arg0, arg1, arg2;
2969 machine_mode mode;
2971 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2976 arg2 = CALL_EXPR_ARG (exp, 2);
2978 switch (DECL_FUNCTION_CODE (fndecl))
2980 CASE_FLT_FN (BUILT_IN_FMA):
2981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2982 builtin_optab = fma_optab; break;
2983 default:
2984 gcc_unreachable ();
2987 /* Make a suitable register to place result in. */
2988 mode = TYPE_MODE (TREE_TYPE (exp));
2990 /* Before working hard, check whether the instruction is available. */
2991 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2992 return NULL_RTX;
2994 result = gen_reg_rtx (mode);
2996 /* Always stabilize the argument list. */
2997 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2998 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2999 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3001 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3002 op1 = expand_normal (arg1);
3003 op2 = expand_normal (arg2);
3005 start_sequence ();
3007 /* Compute into RESULT.
3008 Set RESULT to wherever the result comes back. */
3009 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3010 result, 0);
3012 /* If we were unable to expand via the builtin, stop the sequence
3013 (without outputting the insns) and call to the library function
3014 with the stabilized argument list. */
3015 if (result == 0)
3017 end_sequence ();
3018 return expand_call (exp, target, target == const0_rtx);
3021 /* Output the entire sequence. */
3022 insns = get_insns ();
3023 end_sequence ();
3024 emit_insn (insns);
3026 return result;
3029 /* Expand a call to the builtin sin and cos math functions.
3030 Return NULL_RTX if a normal call should be emitted rather than expanding the
3031 function in-line. EXP is the expression that is a call to the builtin
3032 function; if convenient, the result should be placed in TARGET.
3033 SUBTARGET may be used as the target for computing one of EXP's
3034 operands. */
3036 static rtx
3037 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3039 optab builtin_optab;
3040 rtx op0;
3041 rtx_insn *insns;
3042 tree fndecl = get_callee_fndecl (exp);
3043 machine_mode mode;
3044 tree arg;
3046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3049 arg = CALL_EXPR_ARG (exp, 0);
3051 switch (DECL_FUNCTION_CODE (fndecl))
3053 CASE_FLT_FN (BUILT_IN_SIN):
3054 CASE_FLT_FN (BUILT_IN_COS):
3055 builtin_optab = sincos_optab; break;
3056 default:
3057 gcc_unreachable ();
3060 /* Make a suitable register to place result in. */
3061 mode = TYPE_MODE (TREE_TYPE (exp));
3063 /* Check if the sincos insn is available; otherwise fall back
3064 to the sin or cos insn. */
3065 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3066 switch (DECL_FUNCTION_CODE (fndecl))
3068 CASE_FLT_FN (BUILT_IN_SIN):
3069 builtin_optab = sin_optab; break;
3070 CASE_FLT_FN (BUILT_IN_COS):
3071 builtin_optab = cos_optab; break;
3072 default:
3073 gcc_unreachable ();
3076 /* Before working hard, check whether the instruction is available. */
3077 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3079 rtx result = gen_reg_rtx (mode);
3081 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3082 need to expand the argument again. This way, we will not perform
3083 side effects more than once. */
3084 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3086 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3088 start_sequence ();
3090 /* Compute into RESULT.
3091 Set RESULT to wherever the result comes back. */
3092 if (builtin_optab == sincos_optab)
3094 int ok;
3096 switch (DECL_FUNCTION_CODE (fndecl))
3098 CASE_FLT_FN (BUILT_IN_SIN):
3099 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3100 break;
3101 CASE_FLT_FN (BUILT_IN_COS):
3102 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3103 break;
3104 default:
3105 gcc_unreachable ();
3107 gcc_assert (ok);
3109 else
3110 result = expand_unop (mode, builtin_optab, op0, result, 0);
3112 if (result != 0)
3114 /* Output the entire sequence. */
3115 insns = get_insns ();
3116 end_sequence ();
3117 emit_insn (insns);
3118 return result;
3121 /* If we were unable to expand via the builtin, stop the sequence
3122 (without outputting the insns) and call to the library function
3123 with the stabilized argument list. */
3124 end_sequence ();
3127 return expand_call (exp, target, target == const0_rtx);
3130 /* Given an interclass math builtin decl FNDECL and its argument ARG
3131 return an RTL instruction code that implements the functionality.
3132 If that isn't possible or available return CODE_FOR_nothing. */
3134 static enum insn_code
3135 interclass_mathfn_icode (tree arg, tree fndecl)
3137 bool errno_set = false;
3138 optab builtin_optab = unknown_optab;
3139 machine_mode mode;
3141 switch (DECL_FUNCTION_CODE (fndecl))
3143 CASE_FLT_FN (BUILT_IN_ILOGB):
3144 errno_set = true; builtin_optab = ilogb_optab; break;
3145 CASE_FLT_FN (BUILT_IN_ISINF):
3146 builtin_optab = isinf_optab; break;
3147 case BUILT_IN_ISNORMAL:
3148 case BUILT_IN_ISFINITE:
3149 CASE_FLT_FN (BUILT_IN_FINITE):
3150 case BUILT_IN_FINITED32:
3151 case BUILT_IN_FINITED64:
3152 case BUILT_IN_FINITED128:
3153 case BUILT_IN_ISINFD32:
3154 case BUILT_IN_ISINFD64:
3155 case BUILT_IN_ISINFD128:
3156 /* These builtins have no optabs (yet). */
3157 break;
3158 default:
3159 gcc_unreachable ();
3162 /* There's no easy way to detect the case we need to set EDOM. */
3163 if (flag_errno_math && errno_set)
3164 return CODE_FOR_nothing;
3166 /* Optab mode depends on the mode of the input argument. */
3167 mode = TYPE_MODE (TREE_TYPE (arg));
3169 if (builtin_optab)
3170 return optab_handler (builtin_optab, mode);
3171 return CODE_FOR_nothing;
3174 /* Expand a call to one of the builtin math functions that operate on
3175 floating point argument and output an integer result (ilogb, isinf,
3176 isnan, etc).
3177 Return 0 if a normal call should be emitted rather than expanding the
3178 function in-line. EXP is the expression that is a call to the builtin
3179 function; if convenient, the result should be placed in TARGET. */
3181 static rtx
3182 expand_builtin_interclass_mathfn (tree exp, rtx target)
3184 enum insn_code icode = CODE_FOR_nothing;
3185 rtx op0;
3186 tree fndecl = get_callee_fndecl (exp);
3187 machine_mode mode;
3188 tree arg;
3190 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3191 return NULL_RTX;
3193 arg = CALL_EXPR_ARG (exp, 0);
3194 icode = interclass_mathfn_icode (arg, fndecl);
3195 mode = TYPE_MODE (TREE_TYPE (arg));
3197 if (icode != CODE_FOR_nothing)
3199 class expand_operand ops[1];
3200 rtx_insn *last = get_last_insn ();
3201 tree orig_arg = arg;
3203 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3204 need to expand the argument again. This way, we will not perform
3205 side effects more than once. */
3206 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3208 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3210 if (mode != GET_MODE (op0))
3211 op0 = convert_to_mode (mode, op0, 0);
3213 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3214 if (maybe_legitimize_operands (icode, 0, 1, ops)
3215 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3216 return ops[0].value;
3218 delete_insns_since (last);
3219 CALL_EXPR_ARG (exp, 0) = orig_arg;
3222 return NULL_RTX;
3225 /* Expand a call to the builtin sincos math function.
3226 Return NULL_RTX if a normal call should be emitted rather than expanding the
3227 function in-line. EXP is the expression that is a call to the builtin
3228 function. */
3230 static rtx
3231 expand_builtin_sincos (tree exp)
3233 rtx op0, op1, op2, target1, target2;
3234 machine_mode mode;
3235 tree arg, sinp, cosp;
3236 int result;
3237 location_t loc = EXPR_LOCATION (exp);
3238 tree alias_type, alias_off;
3240 if (!validate_arglist (exp, REAL_TYPE,
3241 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3244 arg = CALL_EXPR_ARG (exp, 0);
3245 sinp = CALL_EXPR_ARG (exp, 1);
3246 cosp = CALL_EXPR_ARG (exp, 2);
3248 /* Make a suitable register to place result in. */
3249 mode = TYPE_MODE (TREE_TYPE (arg));
3251 /* Check if sincos insn is available, otherwise emit the call. */
3252 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3253 return NULL_RTX;
3255 target1 = gen_reg_rtx (mode);
3256 target2 = gen_reg_rtx (mode);
3258 op0 = expand_normal (arg);
3259 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3260 alias_off = build_int_cst (alias_type, 0);
3261 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3262 sinp, alias_off));
3263 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3264 cosp, alias_off));
3266 /* Compute into target1 and target2.
3267 Set TARGET to wherever the result comes back. */
3268 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3269 gcc_assert (result);
3271 /* Move target1 and target2 to the memory locations indicated
3272 by op1 and op2. */
3273 emit_move_insn (op1, target1);
3274 emit_move_insn (op2, target2);
3276 return const0_rtx;
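/* Usage sketch (hypothetical user code): a source-level call such as

     double s, c;
     sincos (x, &s, &c);

   reaches this expander as BUILT_IN_SINCOS; when the sincos optab is
   unavailable, the NULL_RTX return above lets the ordinary library
   call be emitted instead.  */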
3279 /* Expand a call to the internal cexpi builtin to the sincos math function.
3280 EXP is the expression that is a call to the builtin function; if convenient,
3281 the result should be placed in TARGET. */
3283 static rtx
3284 expand_builtin_cexpi (tree exp, rtx target)
3286 tree fndecl = get_callee_fndecl (exp);
3287 tree arg, type;
3288 machine_mode mode;
3289 rtx op0, op1, op2;
3290 location_t loc = EXPR_LOCATION (exp);
3292 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3293 return NULL_RTX;
3295 arg = CALL_EXPR_ARG (exp, 0);
3296 type = TREE_TYPE (arg);
3297 mode = TYPE_MODE (TREE_TYPE (arg));
3299 /* Try expanding via a sincos optab, fall back to emitting a libcall
3300 to sincos or cexp. We are sure we have sincos or cexp because cexpi
3301 is only generated when one of them is available. */
3302 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3304 op1 = gen_reg_rtx (mode);
3305 op2 = gen_reg_rtx (mode);
3307 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3309 /* Compute into op1 and op2. */
3310 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3312 else if (targetm.libc_has_function (function_sincos, type))
3314 tree call, fn = NULL_TREE;
3315 tree top1, top2;
3316 rtx op1a, op2a;
3318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3319 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3320 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3321 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3322 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3323 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3324 else
3325 gcc_unreachable ();
3327 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3328 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3329 op1a = copy_addr_to_reg (XEXP (op1, 0));
3330 op2a = copy_addr_to_reg (XEXP (op2, 0));
3331 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3332 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3334 /* Make sure not to fold the sincos call again. */
3335 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3336 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3337 call, 3, arg, top1, top2));
3339 else
3341 tree call, fn = NULL_TREE, narg;
3342 tree ctype = build_complex_type (type);
3344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3345 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3346 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3347 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3348 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3349 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3350 else
3351 gcc_unreachable ();
3353 /* If we don't have a decl for cexp create one. This is the
3354 friendliest fallback if the user calls __builtin_cexpi
3355 without full C99 function support on the target. */
3356 if (fn == NULL_TREE)
3358 tree fntype;
3359 const char *name = NULL;
3361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3362 name = "cexpf";
3363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3364 name = "cexp";
3365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3366 name = "cexpl";
3368 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3369 fn = build_fn_decl (name, fntype);
3372 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3373 build_real (type, dconst0), arg);
3375 /* Make sure not to fold the cexp call again. */
3376 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3377 return expand_expr (build_call_nary (ctype, call, 1, narg),
3378 target, VOIDmode, EXPAND_NORMAL);
3381 /* Now build the proper return type. */
3382 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3383 make_tree (TREE_TYPE (arg), op2),
3384 make_tree (TREE_TYPE (arg), op1)),
3385 target, VOIDmode, EXPAND_NORMAL);
3388 /* Conveniently construct a function call expression. FNDECL names the
3389 function to be called, N is the number of arguments, and the "..."
3390 parameters are the argument expressions. Unlike build_call_expr
3391 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3393 static tree
3394 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3396 va_list ap;
3397 tree fntype = TREE_TYPE (fndecl);
3398 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3400 va_start (ap, n);
3401 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3402 va_end (ap);
3403 SET_EXPR_LOCATION (fn, loc);
3404 return fn;
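/* Usage sketch (illustrative): fallback paths in this file use it as

     exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                  fallback_fndecl, 1, arg);

   to obtain a CALL_EXPR that is expanded as a real call instead of
   being folded away.  */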
3407 /* Expand a call to one of the builtin rounding functions gcc defines
3408 as an extension (lfloor and lceil). As these are gcc extensions we
3409 do not need to worry about setting errno to EDOM.
3410 If expanding via optab fails, lower expression to (int)(floor(x)).
3411 EXP is the expression that is a call to the builtin function;
3412 if convenient, the result should be placed in TARGET. */
3414 static rtx
3415 expand_builtin_int_roundingfn (tree exp, rtx target)
3417 convert_optab builtin_optab;
3418 rtx op0, tmp;
3419 rtx_insn *insns;
3420 tree fndecl = get_callee_fndecl (exp);
3421 enum built_in_function fallback_fn;
3422 tree fallback_fndecl;
3423 machine_mode mode;
3424 tree arg;
3426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3427 return NULL_RTX;
3429 arg = CALL_EXPR_ARG (exp, 0);
3431 switch (DECL_FUNCTION_CODE (fndecl))
3433 CASE_FLT_FN (BUILT_IN_ICEIL):
3434 CASE_FLT_FN (BUILT_IN_LCEIL):
3435 CASE_FLT_FN (BUILT_IN_LLCEIL):
3436 builtin_optab = lceil_optab;
3437 fallback_fn = BUILT_IN_CEIL;
3438 break;
3440 CASE_FLT_FN (BUILT_IN_IFLOOR):
3441 CASE_FLT_FN (BUILT_IN_LFLOOR):
3442 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3443 builtin_optab = lfloor_optab;
3444 fallback_fn = BUILT_IN_FLOOR;
3445 break;
3447 default:
3448 gcc_unreachable ();
3451 /* Make a suitable register to place result in. */
3452 mode = TYPE_MODE (TREE_TYPE (exp));
3454 target = gen_reg_rtx (mode);
3456 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3457 need to expand the argument again. This way, we will not perform
3458 side effects more than once. */
3459 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3461 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3463 start_sequence ();
3465 /* Compute into TARGET. */
3466 if (expand_sfix_optab (target, op0, builtin_optab))
3468 /* Output the entire sequence. */
3469 insns = get_insns ();
3470 end_sequence ();
3471 emit_insn (insns);
3472 return target;
3475 /* If we were unable to expand via the builtin, stop the sequence
3476 (without outputting the insns). */
3477 end_sequence ();
3479 /* Fall back to floating point rounding optab. */
3480 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3482 /* For non-C99 targets we may end up without a fallback fndecl here
3483 if the user called __builtin_lfloor directly. In this case emit
3484 a call to the floor/ceil variants nevertheless. This should result
3485 in the best user experience for targets lacking full C99 support. */
3486 if (fallback_fndecl == NULL_TREE)
3488 tree fntype;
3489 const char *name = NULL;
3491 switch (DECL_FUNCTION_CODE (fndecl))
3493 case BUILT_IN_ICEIL:
3494 case BUILT_IN_LCEIL:
3495 case BUILT_IN_LLCEIL:
3496 name = "ceil";
3497 break;
3498 case BUILT_IN_ICEILF:
3499 case BUILT_IN_LCEILF:
3500 case BUILT_IN_LLCEILF:
3501 name = "ceilf";
3502 break;
3503 case BUILT_IN_ICEILL:
3504 case BUILT_IN_LCEILL:
3505 case BUILT_IN_LLCEILL:
3506 name = "ceill";
3507 break;
3508 case BUILT_IN_IFLOOR:
3509 case BUILT_IN_LFLOOR:
3510 case BUILT_IN_LLFLOOR:
3511 name = "floor";
3512 break;
3513 case BUILT_IN_IFLOORF:
3514 case BUILT_IN_LFLOORF:
3515 case BUILT_IN_LLFLOORF:
3516 name = "floorf";
3517 break;
3518 case BUILT_IN_IFLOORL:
3519 case BUILT_IN_LFLOORL:
3520 case BUILT_IN_LLFLOORL:
3521 name = "floorl";
3522 break;
3523 default:
3524 gcc_unreachable ();
3527 fntype = build_function_type_list (TREE_TYPE (arg),
3528 TREE_TYPE (arg), NULL_TREE);
3529 fallback_fndecl = build_fn_decl (name, fntype);
3532 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3534 tmp = expand_normal (exp);
3535 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3537 /* Truncate the result of floating point optab to integer
3538 via expand_fix (). */
3539 target = gen_reg_rtx (mode);
3540 expand_fix (target, tmp, 0);
3542 return target;
3545 /* Expand a call to one of the builtin math functions doing integer
3546 conversion (lrint).
3547 Return 0 if a normal call should be emitted rather than expanding the
3548 function in-line. EXP is the expression that is a call to the builtin
3549 function; if convenient, the result should be placed in TARGET. */
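/* For illustration, a call such as

     long n = __builtin_lround (x);

   expands via the lround optab, but only when -fno-math-errno is in
   effect, since lround may need to set errno to EDOM; otherwise
   NULL_RTX is returned and the caller emits an ordinary library
   call.  */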
3551 static rtx
3552 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3554 convert_optab builtin_optab;
3555 rtx op0;
3556 rtx_insn *insns;
3557 tree fndecl = get_callee_fndecl (exp);
3558 tree arg;
3559 machine_mode mode;
3560 enum built_in_function fallback_fn = BUILT_IN_NONE;
3562 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3563 return NULL_RTX;
3565 arg = CALL_EXPR_ARG (exp, 0);
3567 switch (DECL_FUNCTION_CODE (fndecl))
3569 CASE_FLT_FN (BUILT_IN_IRINT):
3570 fallback_fn = BUILT_IN_LRINT;
3571 gcc_fallthrough ();
3572 CASE_FLT_FN (BUILT_IN_LRINT):
3573 CASE_FLT_FN (BUILT_IN_LLRINT):
3574 builtin_optab = lrint_optab;
3575 break;
3577 CASE_FLT_FN (BUILT_IN_IROUND):
3578 fallback_fn = BUILT_IN_LROUND;
3579 gcc_fallthrough ();
3580 CASE_FLT_FN (BUILT_IN_LROUND):
3581 CASE_FLT_FN (BUILT_IN_LLROUND):
3582 builtin_optab = lround_optab;
3583 break;
3585 default:
3586 gcc_unreachable ();
3589 /* There's no easy way to detect the case we need to set EDOM. */
3590 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3591 return NULL_RTX;
3593 /* Make a suitable register to place result in. */
3594 mode = TYPE_MODE (TREE_TYPE (exp));
3596 /* There's no easy way to detect the case we need to set EDOM. */
3597 if (!flag_errno_math)
3599 rtx result = gen_reg_rtx (mode);
3601 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3602 need to expand the argument again. This way, we will not perform
3603 side-effects more than once. */
3604 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3606 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3608 start_sequence ();
3610 if (expand_sfix_optab (result, op0, builtin_optab))
3612 /* Output the entire sequence. */
3613 insns = get_insns ();
3614 end_sequence ();
3615 emit_insn (insns);
3616 return result;
3619 /* If we were unable to expand via the builtin, stop the sequence
3620 (without outputting the insns) and call to the library function
3621 with the stabilized argument list. */
3622 end_sequence ();
3625 if (fallback_fn != BUILT_IN_NONE)
3627 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
3628 targets, (int) round (x) should never be transformed into
3629 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3630 a call to lround in the hope that the target provides at least some
3631 C99 functions. This should result in the best user experience
3632 on targets lacking full C99 support. */
3633 tree fallback_fndecl = mathfn_built_in_1
3634 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3636 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3637 fallback_fndecl, 1, arg);
3639 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3640 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3641 return convert_to_mode (mode, target, 0);
3644 return expand_call (exp, target, target == const0_rtx);
3647 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3648 a normal call should be emitted rather than expanding the function
3649 in-line. EXP is the expression that is a call to the builtin
3650 function; if convenient, the result should be placed in TARGET. */
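/* For illustration, with the default options a call like

     double y = __builtin_powi (x, k);

   becomes a call to the powi libfunc from libgcc (typically __powidf2
   for double), with X passed in its floating-point mode and K as an
   int.  */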
3652 static rtx
3653 expand_builtin_powi (tree exp, rtx target)
3655 tree arg0, arg1;
3656 rtx op0, op1;
3657 machine_mode mode;
3658 machine_mode mode2;
3660 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3661 return NULL_RTX;
3663 arg0 = CALL_EXPR_ARG (exp, 0);
3664 arg1 = CALL_EXPR_ARG (exp, 1);
3665 mode = TYPE_MODE (TREE_TYPE (exp));
3667 /* Emit a libcall to libgcc. */
3669 /* Mode of the 2nd argument must match that of an int. */
3670 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3672 if (target == NULL_RTX)
3673 target = gen_reg_rtx (mode);
3675 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3676 if (GET_MODE (op0) != mode)
3677 op0 = convert_to_mode (mode, op0, 0);
3678 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3679 if (GET_MODE (op1) != mode2)
3680 op1 = convert_to_mode (mode2, op1, 0);
3682 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3683 target, LCT_CONST, mode,
3684 op0, mode, op1, mode2);
3686 return target;
3689 /* Expand expression EXP which is a call to the strlen builtin. Return
3690 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3691 try to get the result in TARGET, if convenient. */
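/* For example, a call whose argument length is known at compile time,
   such as

     size_t n = strlen ("hello");

   is expanded by the c_strlen check below directly to the constant 5,
   with no strlen insn or library call emitted.  */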
3693 static rtx
3694 expand_builtin_strlen (tree exp, rtx target,
3695 machine_mode target_mode)
3697 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3698 return NULL_RTX;
3700 tree src = CALL_EXPR_ARG (exp, 0);
3701 if (!check_read_access (exp, src))
3702 return NULL_RTX;
3704 /* If the length can be computed at compile-time, return it. */
3705 if (tree len = c_strlen (src, 0))
3706 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3708 /* If the length can be computed at compile-time and is a constant
3709 integer, but there are side-effects in src, evaluate
3710 src for its side effects, then return len.
3711 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3712 can be optimized into: i++; x = 3; */
3713 tree len = c_strlen (src, 1);
3714 if (len && TREE_CODE (len) == INTEGER_CST)
3716 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3717 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3720 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3722 /* If SRC is not a pointer type, don't do this operation inline. */
3723 if (align == 0)
3724 return NULL_RTX;
3726 /* Bail out if we can't compute strlen in the right mode. */
3727 machine_mode insn_mode;
3728 enum insn_code icode = CODE_FOR_nothing;
3729 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3731 icode = optab_handler (strlen_optab, insn_mode);
3732 if (icode != CODE_FOR_nothing)
3733 break;
3735 if (insn_mode == VOIDmode)
3736 return NULL_RTX;
3738 /* Make a place to hold the source address. We will not expand
3739 the actual source until we are sure that the expansion will
3740 not fail -- there are trees that cannot be expanded twice. */
3741 rtx src_reg = gen_reg_rtx (Pmode);
3743 /* Mark the beginning of the strlen sequence so we can emit the
3744 source operand later. */
3745 rtx_insn *before_strlen = get_last_insn ();
3747 class expand_operand ops[4];
3748 create_output_operand (&ops[0], target, insn_mode);
3749 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3750 create_integer_operand (&ops[2], 0);
3751 create_integer_operand (&ops[3], align);
3752 if (!maybe_expand_insn (icode, 4, ops))
3753 return NULL_RTX;
3755 /* Check to see if the argument was declared with attribute nonstring
3756 and if so, issue a warning since at this point it's not known
3757 to be nul-terminated. */
3758 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3760 /* Now that we are assured of success, expand the source. */
3761 start_sequence ();
3762 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3763 if (pat != src_reg)
3765 #ifdef POINTERS_EXTEND_UNSIGNED
3766 if (GET_MODE (pat) != Pmode)
3767 pat = convert_to_mode (Pmode, pat,
3768 POINTERS_EXTEND_UNSIGNED);
3769 #endif
3770 emit_move_insn (src_reg, pat);
3772 pat = get_insns ();
3773 end_sequence ();
3775 if (before_strlen)
3776 emit_insn_after (pat, before_strlen);
3777 else
3778 emit_insn_before (pat, get_insns ());
3780 /* Return the value in the proper mode for this function. */
3781 if (GET_MODE (ops[0].value) == target_mode)
3782 target = ops[0].value;
3783 else if (target != 0)
3784 convert_move (target, ops[0].value, 0);
3785 else
3786 target = convert_to_mode (target_mode, ops[0].value, 0);
3788 return target;
3791 /* Expand call EXP to the strnlen built-in, returning the result
3792 in TARGET if convenient. Otherwise return NULL_RTX on failure. */
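/* E.g., when SRC is known to point to the string "abc",

     strnlen (src, 8)

   expands to the constant 3, the MIN_EXPR of the computed length and
   the bound handled below.  */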
3794 static rtx
3795 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3800 tree src = CALL_EXPR_ARG (exp, 0);
3801 tree bound = CALL_EXPR_ARG (exp, 1);
3803 if (!bound)
3804 return NULL_RTX;
3806 check_read_access (exp, src, bound);
3808 location_t loc = UNKNOWN_LOCATION;
3809 if (EXPR_HAS_LOCATION (exp))
3810 loc = EXPR_LOCATION (exp);
3812 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3813 so these conversions aren't necessary. */
3814 c_strlen_data lendata = { };
3815 tree len = c_strlen (src, 0, &lendata, 1);
3816 if (len)
3817 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3819 if (TREE_CODE (bound) == INTEGER_CST)
3821 if (!len)
3822 return NULL_RTX;
3824 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3825 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3828 if (TREE_CODE (bound) != SSA_NAME)
3829 return NULL_RTX;
3831 wide_int min, max;
3832 enum value_range_kind rng = get_range_info (bound, &min, &max);
3833 if (rng != VR_RANGE)
3834 return NULL_RTX;
3836 if (!len || TREE_CODE (len) != INTEGER_CST)
3838 bool exact;
3839 lendata.decl = unterminated_array (src, &len, &exact);
3840 if (!lendata.decl)
3841 return NULL_RTX;
3844 if (lendata.decl)
3845 return NULL_RTX;
3847 if (wi::gtu_p (min, wi::to_wide (len)))
3848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3850 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3851 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3854 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3855 bytes from the buffer at DATA + OFFSET and return them reinterpreted
3856 as a target constant. */
3858 static rtx
3859 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3860 scalar_int_mode mode)
3862 /* The REPresentation pointed to by DATA need not be a nul-terminated
3863 string but the caller guarantees it's large enough for MODE. */
3864 const char *rep = (const char *) data;
3866 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3869 /* LEN specifies the length of the block of the memcpy/memset operation.
3870 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3871 In some cases we can make a very likely guess about the maximum size,
3872 which we then store in PROBABLE_MAX_SIZE. */
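/* For instance, assuming a 16-bit unsigned short and enough
   optimization for range information to be available, in

     void f (unsigned short n, char *a, const char *b)
     { memcpy (a, b, n); }

   the recorded range of the (widened) length is [0, 65535], so
   MIN_SIZE becomes 0 and MAX_SIZE and PROBABLE_MAX_SIZE 65535.  */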
3874 static void
3875 determine_block_size (tree len, rtx len_rtx,
3876 unsigned HOST_WIDE_INT *min_size,
3877 unsigned HOST_WIDE_INT *max_size,
3878 unsigned HOST_WIDE_INT *probable_max_size)
3880 if (CONST_INT_P (len_rtx))
3882 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3883 return;
3885 else
3887 wide_int min, max;
3888 enum value_range_kind range_type = VR_UNDEFINED;
3890 /* Determine bounds from the type. */
3891 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3892 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3893 else
3894 *min_size = 0;
3895 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3896 *probable_max_size = *max_size
3897 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3898 else
3899 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3901 if (TREE_CODE (len) == SSA_NAME)
3902 range_type = get_range_info (len, &min, &max);
3903 if (range_type == VR_RANGE)
3905 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3906 *min_size = min.to_uhwi ();
3907 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3908 *probable_max_size = *max_size = max.to_uhwi ();
3910 else if (range_type == VR_ANTI_RANGE)
3912 /* Code like
3914 int n;
3915 if (n < 100)
3916 memcpy (a, b, n)
3918 produces an anti-range allowing negative values of N. We can
3919 still use that information to guess that N is not negative.
3921 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3922 *probable_max_size = min.to_uhwi () - 1;
3925 gcc_checking_assert (*max_size <=
3926 (unsigned HOST_WIDE_INT)
3927 GET_MODE_MASK (GET_MODE (len_rtx)));
3930 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3931 accessing an object of SIZE bytes. */
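/* A sketch of a call this diagnoses: given

     char buf[4];
     memchr (buf, 0, 16);

   the bound 16 exceeds the size of the source object, resulting in a
   -Wstringop-overread warning along the lines of "specified bound 16
   exceeds source size 4".  */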
3933 static bool
3934 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3935 tree bndrng[2], tree size, const access_data *pad = NULL)
3937 if (!bndrng[0] || TREE_NO_WARNING (exp))
3938 return false;
3940 tree maxobjsize = max_object_size ();
3942 bool warned = false;
3944 if (opt == OPT_Wstringop_overread)
3946 bool maybe = pad && pad->src.phi ();
3948 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3950 if (bndrng[0] == bndrng[1])
3951 warned = (func
3952 ? warning_at (loc, opt,
3953 (maybe
3954 ? G_("%K%qD specified bound %E may "
3955 "exceed maximum object size %E")
3956 : G_("%K%qD specified bound %E "
3957 "exceeds maximum object size %E")),
3958 exp, func, bndrng[0], maxobjsize)
3959 : warning_at (loc, opt,
3960 (maybe
3961 ? G_("%Kspecified bound %E may "
3962 "exceed maximum object size %E")
3963 : G_("%Kspecified bound %E "
3964 "exceeds maximum object size %E")),
3965 exp, bndrng[0], maxobjsize));
3966 else
3967 warned = (func
3968 ? warning_at (loc, opt,
3969 (maybe
3970 ? G_("%K%qD specified bound [%E, %E] may "
3971 "exceed maximum object size %E")
3972 : G_("%K%qD specified bound [%E, %E] "
3973 "exceeds maximum object size %E")),
3974 exp, func,
3975 bndrng[0], bndrng[1], maxobjsize)
3976 : warning_at (loc, opt,
3977 (maybe
3978 ? G_("%Kspecified bound [%E, %E] may "
3979 "exceed maximum object size %E")
3980 : G_("%Kspecified bound [%E, %E] "
3981 "exceeds maximum object size %E")),
3982 exp, bndrng[0], bndrng[1], maxobjsize));
3984 else if (!size || tree_int_cst_le (bndrng[0], size))
3985 return false;
3986 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3987 warned = (func
3988 ? warning_at (loc, opt,
3989 (maybe
3990 ? G_("%K%qD specified bound %E may exceed "
3991 "source size %E")
3992 : G_("%K%qD specified bound %E exceeds "
3993 "source size %E")),
3994 exp, func, bndrng[0], size)
3995 : warning_at (loc, opt,
3996 (maybe
3997 ? G_("%Kspecified bound %E may exceed "
3998 "source size %E")
3999 : G_("%Kspecified bound %E exceeds "
4000 "source size %E")),
4001 exp, bndrng[0], size));
4002 else
4003 warned = (func
4004 ? warning_at (loc, opt,
4005 (maybe
4006 ? G_("%K%qD specified bound [%E, %E] may "
4007 "exceed source size %E")
4008 : G_("%K%qD specified bound [%E, %E] exceeds "
4009 "source size %E")),
4010 exp, func, bndrng[0], bndrng[1], size)
4011 : warning_at (loc, opt,
4012 (maybe
4013 ? G_("%Kspecified bound [%E, %E] may exceed "
4014 "source size %E")
4015 : G_("%Kspecified bound [%E, %E] exceeds "
4016 "source size %E")),
4017 exp, bndrng[0], bndrng[1], size));
4018 if (warned)
4020 if (pad && pad->src.ref)
4022 if (DECL_P (pad->src.ref))
4023 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4024 "source object declared here");
4025 else if (EXPR_HAS_LOCATION (pad->src.ref))
4026 inform (EXPR_LOCATION (pad->src.ref),
4027 "source object allocated here");
4029 TREE_NO_WARNING (exp) = true;
4032 return warned;
4035 bool maybe = pad && pad->dst.phi ();
4036 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4038 if (bndrng[0] == bndrng[1])
4039 warned = (func
4040 ? warning_at (loc, opt,
4041 (maybe
4042 ? G_("%K%qD specified size %E may "
4043 "exceed maximum object size %E")
4044 : G_("%K%qD specified size %E "
4045 "exceeds maximum object size %E")),
4046 exp, func, bndrng[0], maxobjsize)
4047 : warning_at (loc, opt,
4048 (maybe
4049 ? G_("%Kspecified size %E may exceed "
4050 "maximum object size %E")
4051 : G_("%Kspecified size %E exceeds "
4052 "maximum object size %E")),
4053 exp, bndrng[0], maxobjsize));
4054 else
4055 warned = (func
4056 ? warning_at (loc, opt,
4057 (maybe
4058 ? G_("%K%qD specified size between %E and %E "
4059 "may exceed maximum object size %E")
4060 : G_("%K%qD specified size between %E and %E "
4061 "exceeds maximum object size %E")),
4062 exp, func,
4063 bndrng[0], bndrng[1], maxobjsize)
4064 : warning_at (loc, opt,
4065 (maybe
4066 ? G_("%Kspecified size between %E and %E "
4067 "may exceed maximum object size %E")
4068 : G_("%Kspecified size between %E and %E "
4069 "exceeds maximum object size %E")),
4070 exp, bndrng[0], bndrng[1], maxobjsize));
4072 else if (!size || tree_int_cst_le (bndrng[0], size))
4073 return false;
4074 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4075 warned = (func
4076 ? warning_at (loc, OPT_Wstringop_overflow_,
4077 (maybe
4078 ? G_("%K%qD specified bound %E may exceed "
4079 "destination size %E")
4080 : G_("%K%qD specified bound %E exceeds "
4081 "destination size %E")),
4082 exp, func, bndrng[0], size)
4083 : warning_at (loc, OPT_Wstringop_overflow_,
4084 (maybe
4085 ? G_("%Kspecified bound %E may exceed "
4086 "destination size %E")
4087 : G_("%Kspecified bound %E exceeds "
4088 "destination size %E")),
4089 exp, bndrng[0], size));
4090 else
4091 warned = (func
4092 ? warning_at (loc, OPT_Wstringop_overflow_,
4093 (maybe
4094 ? G_("%K%qD specified bound [%E, %E] may exceed "
4095 "destination size %E")
4096 : G_("%K%qD specified bound [%E, %E] exceeds "
4097 "destination size %E")),
4098 exp, func, bndrng[0], bndrng[1], size)
4099 : warning_at (loc, OPT_Wstringop_overflow_,
4100 (maybe
4101 ? G_("%Kspecified bound [%E, %E] exceeds "
4102 "destination size %E")
4103 : G_("%Kspecified bound [%E, %E] exceeds "
4104 "destination size %E")),
4105 exp, bndrng[0], bndrng[1], size));
4107 if (warned)
4109 if (pad && pad->dst.ref)
4111 if (DECL_P (pad->dst.ref))
4112 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4113 "destination object declared here");
4114 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4115 inform (EXPR_LOCATION (pad->dst.ref),
4116 "destination object allocated here");
4118 TREE_NO_WARNING (exp) = true;
4121 return warned;
4124 /* For an expression EXP issue an access warning controlled by option OPT
4125 for an access of a size in RANGE to a region of SIZE bytes.
4126 WRITE is true for a write access, READ for a read access, and neither
4127 for a call that may or may not perform an access but for which the
4128 range is expected to be valid.
4129 Returns true when a warning has been issued. */
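/* For example, with -Wstringop-overflow enabled,

     char d[4];
     memcpy (d, s, 8);

   is diagnosed by the write branch below with a message along the
   lines of "writing 8 bytes into a region of size 4 overflows the
   destination".  */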
4131 static bool
4132 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4133 tree size, bool write, bool read, bool maybe)
4135 bool warned = false;
4137 if (write && read)
4139 if (tree_int_cst_equal (range[0], range[1]))
4140 warned = (func
4141 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4142 (maybe
4143 ? G_("%K%qD may access %E byte in a region "
4144 "of size %E")
4145 : G_("%K%qD accessing %E byte in a region "
4146 "of size %E")),
4147 (maybe
4148 ? G_ ("%K%qD may access %E bytes in a region "
4149 "of size %E")
4150 : G_ ("%K%qD accessing %E bytes in a region "
4151 "of size %E")),
4152 exp, func, range[0], size)
4153 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4154 (maybe
4155 ? G_("%Kmay access %E byte in a region "
4156 "of size %E")
4157 : G_("%Kaccessing %E byte in a region "
4158 "of size %E")),
4159 (maybe
4160 ? G_("%Kmay access %E bytes in a region "
4161 "of size %E")
4162 : G_("%Kaccessing %E bytes in a region "
4163 "of size %E")),
4164 exp, range[0], size));
4165 else if (tree_int_cst_sign_bit (range[1]))
4167 /* Avoid printing the upper bound if it's invalid. */
4168 warned = (func
4169 ? warning_at (loc, opt,
4170 (maybe
4171 ? G_("%K%qD may access %E or more bytes "
4172 "in a region of size %E")
4173 : G_("%K%qD accessing %E or more bytes "
4174 "in a region of size %E")),
4175 exp, func, range[0], size)
4176 : warning_at (loc, opt,
4177 (maybe
4178 ? G_("%Kmay access %E or more bytes "
4179 "in a region of size %E")
4180 : G_("%Kaccessing %E or more bytes "
4181 "in a region of size %E")),
4182 exp, range[0], size));
4184 else
4185 warned = (func
4186 ? warning_at (loc, opt,
4187 (maybe
4188 ? G_("%K%qD may access between %E and %E "
4189 "bytes in a region of size %E")
4190 : G_("%K%qD accessing between %E and %E "
4191 "bytes in a region of size %E")),
4192 exp, func, range[0], range[1],
4193 size)
4194 : warning_at (loc, opt,
4195 (maybe
4196 ? G_("%Kmay access between %E and %E bytes "
4197 "in a region of size %E")
4198 : G_("%Kaccessing between %E and %E bytes "
4199 "in a region of size %E")),
4200 exp, range[0], range[1],
4201 size));
4202 return warned;
4205 if (write)
4207 if (tree_int_cst_equal (range[0], range[1]))
4208 warned = (func
4209 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4210 (maybe
4211 ? G_("%K%qD may write %E byte into a region "
4212 "of size %E")
4213 : G_("%K%qD writing %E byte into a region "
4214 "of size %E overflows the destination")),
4215 (maybe
4216 ? G_("%K%qD may write %E bytes into a region "
4217 "of size %E")
4218 : G_("%K%qD writing %E bytes into a region "
4219 "of size %E overflows the destination")),
4220 exp, func, range[0], size)
4221 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4222 (maybe
4223 ? G_("%Kmay write %E byte into a region "
4224 "of size %E")
4225 : G_("%Kwriting %E byte into a region "
4226 "of size %E overflows the destination")),
4227 (maybe
4228 ? G_("%Kmay write %E bytes into a region "
4229 "of size %E")
4230 : G_("%Kwriting %E bytes into a region "
4231 "of size %E overflows the destination")),
4232 exp, range[0], size));
4233 else if (tree_int_cst_sign_bit (range[1]))
4235 /* Avoid printing the upper bound if it's invalid. */
4236 warned = (func
4237 ? warning_at (loc, opt,
4238 (maybe
4239 ? G_("%K%qD may write %E or more bytes "
4240 "into a region of size %E")
4241 : G_("%K%qD writing %E or more bytes "
4242 "into a region of size %E overflows "
4243 "the destination")),
4244 exp, func, range[0], size)
4245 : warning_at (loc, opt,
4246 (maybe
4247 ? G_("%Kmay write %E or more bytes into "
4248 "a region of size %E")
4249 : G_("%Kwriting %E or more bytes into "
4250 "a region of size %E overflows "
4251 "the destination")),
4252 exp, range[0], size));
4254 else
4255 warned = (func
4256 ? warning_at (loc, opt,
4257 (maybe
4258 ? G_("%K%qD may write between %E and %E bytes "
4259 "into a region of size %E")
4260 : G_("%K%qD writing between %E and %E bytes "
4261 "into a region of size %E overflows "
4262 "the destination")),
4263 exp, func, range[0], range[1],
4264 size)
4265 : warning_at (loc, opt,
4266 (maybe
4267 ? G_("%Kmay write between %E and %E bytes "
4268 "into a region of size %E")
4269 : G_("%Kwriting between %E and %E bytes "
4270 "into a region of size %E overflows "
4271 "the destination")),
4272 exp, range[0], range[1],
4273 size));
4274 return warned;
4277 if (read)
4279 if (tree_int_cst_equal (range[0], range[1]))
4280 warned = (func
4281 ? warning_n (loc, OPT_Wstringop_overread,
4282 tree_to_uhwi (range[0]),
4283 (maybe
4284 ? G_("%K%qD may read %E byte from a region "
4285 "of size %E")
4286 : G_("%K%qD reading %E byte from a region "
4287 "of size %E")),
4288 (maybe
4289 ? G_("%K%qD may read %E bytes from a region "
4290 "of size %E")
4291 : G_("%K%qD reading %E bytes from a region "
4292 "of size %E")),
4293 exp, func, range[0], size)
4294 : warning_n (loc, OPT_Wstringop_overread,
4295 tree_to_uhwi (range[0]),
4296 (maybe
4297 ? G_("%Kmay read %E byte from a region "
4298 "of size %E")
4299 : G_("%Kreading %E byte from a region "
4300 "of size %E")),
4301 (maybe
4302 ? G_("%Kmay read %E bytes from a region "
4303 "of size %E")
4304 : G_("%Kreading %E bytes from a region "
4305 "of size %E")),
4306 exp, range[0], size));
4307 else if (tree_int_cst_sign_bit (range[1]))
4309 /* Avoid printing the upper bound if it's invalid. */
4310 warned = (func
4311 ? warning_at (loc, OPT_Wstringop_overread,
4312 (maybe
4313 ? G_("%K%qD may read %E or more bytes "
4314 "from a region of size %E")
4315 : G_("%K%qD reading %E or more bytes "
4316 "from a region of size %E")),
4317 exp, func, range[0], size)
4318 : warning_at (loc, OPT_Wstringop_overread,
4319 (maybe
4320 ? G_("%Kmay read %E or more bytes "
4321 "from a region of size %E")
4322 : G_("%Kreading %E or more bytes "
4323 "from a region of size %E")),
4324 exp, range[0], size));
4326 else
4327 warned = (func
4328 ? warning_at (loc, OPT_Wstringop_overread,
4329 (maybe
4330 ? G_("%K%qD may read between %E and %E bytes "
4331 "from a region of size %E")
4332 : G_("%K%qD reading between %E and %E bytes "
4333 "from a region of size %E")),
4334 exp, func, range[0], range[1], size)
4335 : warning_at (loc, OPT_Wstringop_overread,
4336 (maybe
4337 ? G_("%Kmay read between %E and %E bytes "
4338 "from a region of size %E")
4339 : G_("%Kreading between %E and %E bytes "
4340 "from a region of size %E")),
4341 exp, range[0], range[1], size));
4343 if (warned)
4344 TREE_NO_WARNING (exp) = true;
4346 return warned;
4349 if (tree_int_cst_equal (range[0], range[1]))
4351 warned = (func
4352 ? warning_n (loc, OPT_Wstringop_overread,
4353 tree_to_uhwi (range[0]),
4354 "%K%qD expecting %E byte in a region of size %E",
4355 "%K%qD expecting %E bytes in a region of size %E",
4356 exp, func, range[0], size)
4357 : warning_n (loc, OPT_Wstringop_overread,
4358 tree_to_uhwi (range[0]),
4359 "%Kexpecting %E byte in a region of size %E",
4360 "%Kexpecting %E bytes in a region of size %E",
4361 exp, range[0], size));
4362 else if (tree_int_cst_sign_bit (range[1]))
4364 /* Avoid printing the upper bound if it's invalid. */
4365 warned = (func
4366 ? warning_at (loc, OPT_Wstringop_overread,
4367 "%K%qD expecting %E or more bytes in a region "
4368 "of size %E",
4369 exp, func, range[0], size)
4370 : warning_at (loc, OPT_Wstringop_overread,
4371 "%Kexpecting %E or more bytes in a region "
4372 "of size %E",
4373 exp, range[0], size));
4375 else
4376 warned = (func
4377 ? warning_at (loc, OPT_Wstringop_overread,
4378 "%K%qD expecting between %E and %E bytes in "
4379 "a region of size %E",
4380 exp, func, range[0], range[1], size)
4381 : warning_at (loc, OPT_Wstringop_overread,
4382 "%Kexpecting between %E and %E bytes in "
4383 "a region of size %E",
4384 exp, range[0], range[1], size));
4386 if (warned)
4387 TREE_NO_WARNING (exp) = true;
4389 return warned;
4392 /* Issue one inform message describing each target of an access REF.
4393 MODE indicates whether the access is a read, a write, or both. */
4395 void
4396 access_ref::inform_access (access_mode mode) const
4398 const access_ref &aref = *this;
4399 if (!aref.ref)
4400 return;
4402 if (aref.phi ())
4404 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4405 with data for all objects referenced by the PHI arguments. */
4406 access_ref maxref;
4407 auto_vec<access_ref> all_refs;
4408 if (!get_ref (&all_refs, &maxref))
4409 return;
4411 /* Except for MAXREF, the rest of the arguments' offsets need not
4412 reflect one added to the PHI itself. Determine the latter from
4413 MAXREF on which the result is based. */
4414 const offset_int orng[] =
4416 offrng[0] - maxref.offrng[0],
4417 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4420 /* Add the final PHI's offset to that of each of the arguments
4421 and recurse to issue an inform message for it. */
4422 for (unsigned i = 0; i != all_refs.length (); ++i)
4424 /* Skip any PHIs; those could lead to infinite recursion. */
4425 if (all_refs[i].phi ())
4426 continue;
4428 all_refs[i].add_offset (orng[0], orng[1]);
4429 all_refs[i].inform_access (mode);
4431 return;
4434 /* Convert offset range and avoid including a zero range since it
4435 isn't necessarily meaningful. */
4436 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4437 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4438 HOST_WIDE_INT minoff;
4439 HOST_WIDE_INT maxoff = diff_max;
4440 if (wi::fits_shwi_p (aref.offrng[0]))
4441 minoff = aref.offrng[0].to_shwi ();
4442 else
4443 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4445 if (wi::fits_shwi_p (aref.offrng[1]))
4446 maxoff = aref.offrng[1].to_shwi ();
4448 if (maxoff <= diff_min || maxoff >= diff_max)
4449 /* Avoid mentioning an upper bound that's equal to or in excess
4450 of the maximum of ptrdiff_t. */
4451 maxoff = minoff;
4453 /* Convert size range and always include it since all sizes are
4454 meaningful. */
4455 unsigned long long minsize = 0, maxsize = 0;
4456 if (wi::fits_shwi_p (aref.sizrng[0])
4457 && wi::fits_shwi_p (aref.sizrng[1]))
4459 minsize = aref.sizrng[0].to_shwi ();
4460 maxsize = aref.sizrng[1].to_shwi ();
4463 /* SIZRNG doesn't necessarily have the same range as the allocation
4464 size determined by gimple_call_alloc_size (). */
4465 char sizestr[80];
4466 if (minsize == maxsize)
4467 sprintf (sizestr, "%llu", minsize);
4468 else
4469 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4471 char offstr[80];
4472 if (minoff == 0
4473 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4474 offstr[0] = '\0';
4475 else if (minoff == maxoff)
4476 sprintf (offstr, "%lli", (long long) minoff);
4477 else
4478 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4480 location_t loc = UNKNOWN_LOCATION;
4482 tree ref = this->ref;
4483 tree allocfn = NULL_TREE;
4484 if (TREE_CODE (ref) == SSA_NAME)
4486 gimple *stmt = SSA_NAME_DEF_STMT (ref);
4487 if (is_gimple_call (stmt))
4489 loc = gimple_location (stmt);
4490 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4492 /* Strip the SSA_NAME suffix from the variable name and
4493 recreate an identifier with the VLA's original name. */
4494 ref = gimple_call_lhs (stmt);
4495 if (SSA_NAME_IDENTIFIER (ref))
4497 ref = SSA_NAME_IDENTIFIER (ref);
4498 const char *id = IDENTIFIER_POINTER (ref);
4499 size_t len = strcspn (id, ".$");
4500 if (!len)
4501 len = strlen (id);
4502 ref = get_identifier_with_length (id, len);
4505 else
4507 /* Except for VLAs, retrieve the allocation function. */
4508 allocfn = gimple_call_fndecl (stmt);
4509 if (!allocfn)
4510 allocfn = gimple_call_fn (stmt);
4511 if (TREE_CODE (allocfn) == SSA_NAME)
4513 /* For an ALLOC_CALL via a function pointer make a small
4514 effort to determine the destination of the pointer. */
4515 gimple *def = SSA_NAME_DEF_STMT (allocfn);
4516 if (gimple_assign_single_p (def))
4518 tree rhs = gimple_assign_rhs1 (def);
4519 if (DECL_P (rhs))
4520 allocfn = rhs;
4521 else if (TREE_CODE (rhs) == COMPONENT_REF)
4522 allocfn = TREE_OPERAND (rhs, 1);
4527 else if (gimple_nop_p (stmt))
4528 /* Handle the PARM_DECL below. */
4529 ref = SSA_NAME_VAR (ref);
4532 if (DECL_P (ref))
4533 loc = DECL_SOURCE_LOCATION (ref);
4534 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4535 loc = EXPR_LOCATION (ref);
4536 else if (TREE_CODE (ref) != IDENTIFIER_NODE
4537 && TREE_CODE (ref) != SSA_NAME)
4538 return;
4540 if (mode == access_read_write || mode == access_write_only)
4542 if (allocfn == NULL_TREE)
4544 if (*offstr)
4545 inform (loc, "at offset %s into destination object %qE of size %s",
4546 offstr, ref, sizestr);
4547 else
4548 inform (loc, "destination object %qE of size %s", ref, sizestr);
4549 return;
4552 if (*offstr)
4553 inform (loc,
4554 "at offset %s into destination object of size %s "
4555 "allocated by %qE", offstr, sizestr, allocfn);
4556 else
4557 inform (loc, "destination object of size %s allocated by %qE",
4558 sizestr, allocfn);
4559 return;
4562 if (allocfn == NULL_TREE)
4564 if (*offstr)
4565 inform (loc, "at offset %s into source object %qE of size %s",
4566 offstr, ref, sizestr);
4567 else
4568 inform (loc, "source object %qE of size %s", ref, sizestr);
4570 return;
4573 if (*offstr)
4574 inform (loc,
4575 "at offset %s into source object of size %s allocated by %qE",
4576 offstr, sizestr, allocfn);
4577 else
4578 inform (loc, "source object of size %s allocated by %qE",
4579 sizestr, allocfn);
4582 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4583 by BNDRNG if nonnull and valid. */
4585 static void
4586 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4588 if (bound)
4589 get_size_range (bound, range);
4591 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4592 return;
4594 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4596 offset_int r[] =
4597 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4598 if (r[0] < bndrng[0])
4599 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4600 if (bndrng[1] < r[1])
4601 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4603 else
4605 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4606 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4610 /* Try to verify that the sizes and lengths of the arguments to a string
4611 manipulation function given by EXP are within valid bounds and that
4612 the operation does not lead to buffer overflow or read past the end.
4613 Arguments other than EXP may be null. When non-null, the arguments
4614 have the following meaning:
4615 DST is the destination of a copy call or NULL otherwise.
4616 SRC is the source of a copy call or NULL otherwise.
4617 DSTWRITE is the number of bytes written into the destination obtained
4618 from the user-supplied size argument to the function (such as in
4619 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4620 MAXREAD is the user-supplied bound on the length of the source sequence
4621 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4622 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4623 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4624 expression EXP is a string function call (as opposed to a memory call
4625 like memcpy). As an exception, SRCSTR can also be an integer denoting
4626 the precomputed size of the source string or object (for functions like
4627 memcpy).
4628 DSTSIZE is the size of the destination object.
4630 When DSTWRITE is null MAXREAD is checked to verify that it doesn't exceed
4631 SIZE_MAX.
4633 WRITE is true for write accesses, READ is true for reads. Both are
4634 false for simple size checks in calls to functions that neither read
4635 from nor write to the region.
4637 When nonnull, PAD points to a more detailed description of the access.
4639 If the call is successfully verified as safe return true, otherwise
4640 return false. */
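/* A minimal sketch of a checked call: for

     char d[8];
     strcpy (d, s);

   DSTWRITE is null, SRCSTR is S, and DSTSIZE is 8; the range of
   possible lengths of S, plus one for the terminating nul, is
   compared against DSTSIZE and a -Wstringop-overflow warning is
   issued when the copy cannot fit.  */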
4642 bool
4643 check_access (tree exp, tree dstwrite,
4644 tree maxread, tree srcstr, tree dstsize,
4645 access_mode mode, const access_data *pad /* = NULL */)
4647 /* The size of the largest object is half the address space, or
4648 PTRDIFF_MAX. (This is way too permissive.) */
4649 tree maxobjsize = max_object_size ();
4651 /* Either an approximate/minimum length of the source string for
4652 string functions or the size of the source object for raw memory
4653 functions. */
4654 tree slen = NULL_TREE;
4656 /* The range of the access in bytes; first set to the write access
4657 for functions that write and then read for those that also (or
4658 just) read. */
4659 tree range[2] = { NULL_TREE, NULL_TREE };
4661 /* Set to true when the exact number of bytes written by a string
4662 function like strcpy is not known and the only thing that is
4663 known is that it must be at least one (for the terminating nul). */
4664 bool at_least_one = false;
4665 if (srcstr)
4667 /* SRCSTR is normally a pointer to string but as a special case
4668 it can be an integer denoting the length of a string. */
4669 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4671 if (!check_nul_terminated_array (exp, srcstr, maxread))
4672 return false;
4673 /* Try to determine the range of lengths the source string
4674 refers to. If it can be determined and is less than
4675 the upper bound given by MAXREAD add one to it for
4676 the terminating nul. Otherwise, set it to one for
4677 the same reason, or to MAXREAD as appropriate. */
4678 c_strlen_data lendata = { };
4679 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4680 range[0] = lendata.minlen;
4681 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4682 if (range[0]
4683 && TREE_CODE (range[0]) == INTEGER_CST
4684 && TREE_CODE (range[1]) == INTEGER_CST
4685 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4687 if (maxread && tree_int_cst_le (maxread, range[0]))
4688 range[0] = range[1] = maxread;
4689 else
4690 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4691 range[0], size_one_node);
4693 if (maxread && tree_int_cst_le (maxread, range[1]))
4694 range[1] = maxread;
4695 else if (!integer_all_onesp (range[1]))
4696 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4697 range[1], size_one_node);
4699 slen = range[0];
4701 else
4703 at_least_one = true;
4704 slen = size_one_node;
4707 else
4708 slen = srcstr;
4711 if (!dstwrite && !maxread)
4713 /* When the only available piece of data is the object size
4714 there is nothing to do. */
4715 if (!slen)
4716 return true;
4718 /* Otherwise, when the length of the source sequence is known
4719 (as with strlen), set DSTWRITE to it. */
4720 if (!range[0])
4721 dstwrite = slen;
4724 if (!dstsize)
4725 dstsize = maxobjsize;
4727 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4728 if valid. */
4729 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4731 tree func = get_callee_fndecl (exp);
4732 /* Read vs write access by built-ins can be determined from the const
4733 qualifiers on the pointer argument. In the absence of attribute
4734 access, non-const qualified pointer arguments to user-defined
4735 functions are assumed to both read and write the objects. */
4736 const bool builtin = func ? fndecl_built_in_p (func) : false;
4738 /* First check the number of bytes to be written against the maximum
4739 object size. */
4740 if (range[0]
4741 && TREE_CODE (range[0]) == INTEGER_CST
4742 && tree_int_cst_lt (maxobjsize, range[0]))
4744 location_t loc = tree_inlined_location (exp);
4745 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4746 NULL_TREE, pad);
4747 return false;
4750 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4751 constant, and in range of unsigned HOST_WIDE_INT. */
4752 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4754 /* Next check the number of bytes to be written against the destination
4755 object size. */
4756 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4758 if (range[0]
4759 && TREE_CODE (range[0]) == INTEGER_CST
4760 && ((tree_fits_uhwi_p (dstsize)
4761 && tree_int_cst_lt (dstsize, range[0]))
4762 || (dstwrite
4763 && tree_fits_uhwi_p (dstwrite)
4764 && tree_int_cst_lt (dstwrite, range[0]))))
4766 if (TREE_NO_WARNING (exp)
4767 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4768 return false;
4770 location_t loc = tree_inlined_location (exp);
4771 bool warned = false;
4772 if (dstwrite == slen && at_least_one)
4774 /* This is a call to strcpy with a destination of 0 size
4775 and a source of unknown length. The call will write
4776 at least one byte past the end of the destination. */
4777 warned = (func
4778 ? warning_at (loc, OPT_Wstringop_overflow_,
4779 "%K%qD writing %E or more bytes into "
4780 "a region of size %E overflows "
4781 "the destination",
4782 exp, func, range[0], dstsize)
4783 : warning_at (loc, OPT_Wstringop_overflow_,
4784 "%Kwriting %E or more bytes into "
4785 "a region of size %E overflows "
4786 "the destination",
4787 exp, range[0], dstsize));
4789 else
4791 const bool read
4792 = mode == access_read_only || mode == access_read_write;
4793 const bool write
4794 = mode == access_write_only || mode == access_read_write;
4795 const bool maybe = pad && pad->dst.parmarray;
4796 warned = warn_for_access (loc, func, exp,
4797 OPT_Wstringop_overflow_,
4798 range, dstsize,
4799 write, read && !builtin, maybe);
4802 if (warned)
4804 TREE_NO_WARNING (exp) = true;
4805 if (pad)
4806 pad->dst.inform_access (pad->mode);
4809 /* Return error when an overflow has been detected. */
4810 return false;
4814 /* Check the maximum length of the source sequence against the size
4815 of the destination object if known, or against the maximum size
4816 of an object. */
4817 if (maxread)
4819 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4820 PAD is nonnull and BNDRNG is valid. */
4821 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4823 location_t loc = tree_inlined_location (exp);
4824 tree size = dstsize;
4825 if (pad && pad->mode == access_read_only)
4826 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4828 if (range[0] && maxread && tree_fits_uhwi_p (size))
4830 if (tree_int_cst_lt (maxobjsize, range[0]))
4832 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4833 range, size, pad);
4834 return false;
4837 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4839 int opt = (dstwrite || mode != access_read_only
4840 ? OPT_Wstringop_overflow_
4841 : OPT_Wstringop_overread);
4842 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4843 return false;
4847 maybe_warn_nonstring_arg (func, exp);
4850 /* Check for reading past the end of SRC. */
4851 bool overread = (slen
4852 && slen == srcstr
4853 && dstwrite
4854 && range[0]
4855 && TREE_CODE (slen) == INTEGER_CST
4856 && tree_int_cst_lt (slen, range[0]));
4857 /* If none is determined try to get a better answer based on the details
4858 in PAD. */
4859 if (!overread
4860 && pad
4861 && pad->src.sizrng[1] >= 0
4862 && pad->src.offrng[0] >= 0
4863 && (pad->src.offrng[1] < 0
4864 || pad->src.offrng[0] <= pad->src.offrng[1]))
4866 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4867 PAD is nonnull and BNDRNG is valid. */
4868 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4869 /* Set OVERREAD for reads starting just past the end of an object. */
4870 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4871 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4872 slen = size_zero_node;
4875 if (overread)
4877 if (TREE_NO_WARNING (exp)
4878 || (srcstr && TREE_NO_WARNING (srcstr))
4879 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4880 return false;
4882 location_t loc = tree_inlined_location (exp);
4883 const bool read
4884 = mode == access_read_only || mode == access_read_write;
4885 const bool maybe = pad && pad->dst.parmarray;
4886 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4887 slen, false, read, maybe))
4889 TREE_NO_WARNING (exp) = true;
4890 if (pad)
4891 pad->src.inform_access (access_read_only);
4893 return false;
4896 return true;
4899 /* A convenience wrapper for check_access above to check access
4900 by a read-only function like puts. */
4902 static bool
4903 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4904 int ost /* = 1 */)
4906 if (!warn_stringop_overread)
4907 return true;
4909 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4910 compute_objsize (src, ost, &data.src);
4911 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4912 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4913 &data);
4916 /* If STMT is a call to an allocation function, returns the constant
4917 maximum size of the object allocated by the call represented as
4918 sizetype. If nonnull, sets RNG1[] to the range of the size.
4919 When nonnull, uses RVALS for range information, otherwise calls
4920 get_range_info to get it.
4921 Returns null when STMT is not a call to a valid allocation function. */
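/* For example, for a statement like

     p = calloc (n, 8);

   where N has the range [2, 5], the alloc_size (1, 2) attribute on
   calloc yields RNG1 of [16, 40] and a return value of 40 represented
   in sizetype.  */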
4923 tree
4924 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4925 range_query * /* = NULL */)
4927 if (!stmt)
4928 return NULL_TREE;
4930 tree allocfntype;
4931 if (tree fndecl = gimple_call_fndecl (stmt))
4932 allocfntype = TREE_TYPE (fndecl);
4933 else
4934 allocfntype = gimple_call_fntype (stmt);
4936 if (!allocfntype)
4937 return NULL_TREE;
4939 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4940 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4941 if (!at)
4943 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4944 return NULL_TREE;
4946 argidx1 = 0;
4949 unsigned nargs = gimple_call_num_args (stmt);
4951 if (argidx1 == UINT_MAX)
4953 tree atval = TREE_VALUE (at);
4954 if (!atval)
4955 return NULL_TREE;
4957 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4958 if (nargs <= argidx1)
4959 return NULL_TREE;
4961 atval = TREE_CHAIN (atval);
4962 if (atval)
4964 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4965 if (nargs <= argidx2)
4966 return NULL_TREE;
4970 tree size = gimple_call_arg (stmt, argidx1);
4972 wide_int rng1_buf[2];
4973 /* If RNG1 is not set, use the buffer. */
4974 if (!rng1)
4975 rng1 = rng1_buf;
4977 /* Use maximum precision to avoid overflow below. */
4978 const int prec = ADDR_MAX_PRECISION;
4981 tree r[2];
4982 /* Determine the largest valid range size, including zero. */
4983 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4984 return NULL_TREE;
4985 rng1[0] = wi::to_wide (r[0], prec);
4986 rng1[1] = wi::to_wide (r[1], prec);
4989 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4990 return fold_convert (sizetype, size);
4992 /* To handle ranges do the math in wide_int and return the product
4993 of the upper bounds as a constant. Ignore anti-ranges. */
4994 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4995 wide_int rng2[2];
4997 tree r[2];
4998 /* As above, use the full non-negative range on failure. */
4999 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5000 return NULL_TREE;
5001 rng2[0] = wi::to_wide (r[0], prec);
5002 rng2[1] = wi::to_wide (r[1], prec);
5005 /* Compute products of both bounds for the caller but return the lesser
5006 of SIZE_MAX and the product of the upper bounds as a constant. */
5007 rng1[0] = rng1[0] * rng2[0];
5008 rng1[1] = rng1[1] * rng2[1];
5010 const tree size_max = TYPE_MAX_VALUE (sizetype);
5011 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5013 rng1[1] = wi::to_wide (size_max, prec);
5014 return size_max;
5017 return wide_int_to_tree (sizetype, rng1[1]);
5020 /* For an access to an object referenced by the function parameter PTR
5021 of pointer type, set RNG[] to the range of sizes of the object
5022 obtained from the attribute access specification for the current function.
5023 Set STATIC_ARRAY if the array parameter has been declared [static].
5024 Return the function parameter on success and null otherwise. */
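/* For example, for a parameter declared

     void f (int a[static 8]);

   a query for A yields RNG of [0, 8 * sizeof (int)] and sets
   *STATIC_ARRAY, the bound having been recorded in the function's
   attribute access specification.  */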
5026 tree
5027 gimple_parm_array_size (tree ptr, wide_int rng[2],
5028 bool *static_array /* = NULL */)
5030 /* For a function argument try to determine the byte size of the array
5031 from the current function declaration (e.g., attribute access or
5032 related). */
5033 tree var = SSA_NAME_VAR (ptr);
5034 if (TREE_CODE (var) != PARM_DECL)
5035 return NULL_TREE;
5037 const unsigned prec = TYPE_PRECISION (sizetype);
5039 rdwr_map rdwr_idx;
5040 attr_access *access = get_parm_access (rdwr_idx, var);
5041 if (!access)
5042 return NULL_TREE;
5044 if (access->sizarg != UINT_MAX)
5046 /* TODO: Try to extract the range from the argument based on
5047 those of subsequent assertions or based on known calls to
5048 the current function. */
5049 return NULL_TREE;
5052 if (!access->minsize)
5053 return NULL_TREE;
5055 /* Only consider ordinary array bound at level 2 (or above if it's
5056 ever added). */
5057 if (warn_array_parameter < 2 && !access->static_p)
5058 return NULL_TREE;
5060 if (static_array)
5061 *static_array = access->static_p;
5063 rng[0] = wi::zero (prec);
5064 rng[1] = wi::uhwi (access->minsize, prec);
5065 /* Multiply the array bound encoded in the attribute by the size
5066 of what the pointer argument to which it decays points to. */
5067 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5068 tree size = TYPE_SIZE_UNIT (eltype);
5069 if (!size || TREE_CODE (size) != INTEGER_CST)
5070 return NULL_TREE;
5072 rng[1] *= wi::to_wide (size, prec);
5073 return var;
5076 /* Wrapper around the wide_int overload of get_range that accepts
5077 offset_int instead. For middle end expressions returns the same
5078 result. For a subset of nonconstant expressions emitted by the front
5079 end it determines a more precise range than would be possible otherwise. */
5081 static bool
5082 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5084 offset_int add = 0;
5085 if (TREE_CODE (x) == PLUS_EXPR)
5087 /* Handle constant offsets in pointer addition expressions seen
5088 in the front end IL. */
5089 tree op = TREE_OPERAND (x, 1);
5090 if (TREE_CODE (op) == INTEGER_CST)
5092 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5093 add = wi::to_offset (op);
5094 x = TREE_OPERAND (x, 0);
5098 if (TREE_CODE (x) == NOP_EXPR)
5099 /* Also handle conversions to sizetype seen in the front end IL. */
5100 x = TREE_OPERAND (x, 0);
5102 tree type = TREE_TYPE (x);
5103 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5104 return false;
5106 if (TREE_CODE (x) != INTEGER_CST
5107 && TREE_CODE (x) != SSA_NAME)
5109 if (TYPE_UNSIGNED (type)
5110 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5111 type = signed_type_for (type);
5113 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5114 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5115 return x;
5118 wide_int wr[2];
5119 if (!get_range (x, stmt, wr, rvals))
5120 return false;
5122 signop sgn = SIGNED;
5123 /* Only convert signed integers or unsigned sizetype to a signed
5124 offset and avoid converting large positive values in narrower
5125 types to negative offsets. */
5126 if (TYPE_UNSIGNED (type)
5127 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5128 sgn = UNSIGNED;
5130 r[0] = offset_int::from (wr[0], sgn);
5131 r[1] = offset_int::from (wr[1], sgn);
5132 return true;
5135 /* Return the argument that the call STMT to a built-in function returns
5136 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5137 from the argument reflected in the value returned by the built-in if it
5138 can be determined, otherwise to 0 and HWI_M1U respectively. */
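/* For instance, for

     q = mempcpy (d, s, n);

   the function returns D and sets OFFRNG to the range of N, since
   mempcpy returns D + N; for memcpy, which returns D itself, OFFRNG
   is [0, 0].  */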
5140 static tree
5141 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5142 range_query *rvals)
5144 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5145 || gimple_call_num_args (stmt) < 1)
5146 return NULL_TREE;
5148 tree fn = gimple_call_fndecl (stmt);
5149 switch (DECL_FUNCTION_CODE (fn))
5151 case BUILT_IN_MEMCPY:
5152 case BUILT_IN_MEMCPY_CHK:
5153 case BUILT_IN_MEMMOVE:
5154 case BUILT_IN_MEMMOVE_CHK:
5155 case BUILT_IN_MEMSET:
5156 case BUILT_IN_STPCPY:
5157 case BUILT_IN_STPCPY_CHK:
5158 case BUILT_IN_STPNCPY:
5159 case BUILT_IN_STPNCPY_CHK:
5160 case BUILT_IN_STRCAT:
5161 case BUILT_IN_STRCAT_CHK:
5162 case BUILT_IN_STRCPY:
5163 case BUILT_IN_STRCPY_CHK:
5164 case BUILT_IN_STRNCAT:
5165 case BUILT_IN_STRNCAT_CHK:
5166 case BUILT_IN_STRNCPY:
5167 case BUILT_IN_STRNCPY_CHK:
5168 offrng[0] = offrng[1] = 0;
5169 return gimple_call_arg (stmt, 0);
5171 case BUILT_IN_MEMPCPY:
5172 case BUILT_IN_MEMPCPY_CHK:
5174 tree off = gimple_call_arg (stmt, 2);
5175 if (!get_offset_range (off, stmt, offrng, rvals))
5177 offrng[0] = 0;
5178 offrng[1] = HOST_WIDE_INT_M1U;
5180 return gimple_call_arg (stmt, 0);
5183 case BUILT_IN_MEMCHR:
5185 tree off = gimple_call_arg (stmt, 2);
5186 if (get_offset_range (off, stmt, offrng, rvals))
5187 offrng[0] = 0;
5188 else
5190 offrng[0] = 0;
5191 offrng[1] = HOST_WIDE_INT_M1U;
5193 return gimple_call_arg (stmt, 0);
5196 case BUILT_IN_STRCHR:
5197 case BUILT_IN_STRRCHR:
5198 case BUILT_IN_STRSTR:
5200 offrng[0] = 0;
5201 offrng[1] = HOST_WIDE_INT_M1U;
5203 return gimple_call_arg (stmt, 0);
5205 default:
5206 break;
5209 return NULL_TREE;
5212 /* A helper of compute_objsize_r() to determine the size from an assignment
5213 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5215 static bool
5216 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5217 ssa_name_limit_t &snlim, pointer_query *qry)
5219 tree_code code = gimple_assign_rhs_code (stmt);
5221 tree ptr = gimple_assign_rhs1 (stmt);
5223 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5224 Determine the size/offset of each and use the one with more or less
5225 space remaining, respectively. If either fails, use the information
5226 determined from the other instead, adjusted up or down as appropriate
5227 for the expression. */
5228 access_ref aref[2] = { *pref, *pref };
5229 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5231 aref[0].base0 = false;
5232 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5233 aref[0].add_max_offset ();
5234 aref[0].set_max_size_range ();
5237 ptr = gimple_assign_rhs2 (stmt);
5238 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5240 aref[1].base0 = false;
5241 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5242 aref[1].add_max_offset ();
5243 aref[1].set_max_size_range ();
5246 if (!aref[0].ref && !aref[1].ref)
5247 /* Fail if the identity of neither argument could be determined. */
5248 return false;
5250 bool i0 = false;
5251 if (aref[0].ref && aref[0].base0)
5253 if (aref[1].ref && aref[1].base0)
5255 /* If the object referenced by both arguments has been determined
5256 set *PREF to the one with more or less space remaining, whichever
5257 is appropriate for CODE.
5258 TODO: Indicate when the objects are distinct so it can be
5259 diagnosed. */
5260 i0 = code == MAX_EXPR;
5261 const bool i1 = !i0;
5263 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5264 *pref = aref[i1];
5265 else
5266 *pref = aref[i0];
5267 return true;
5270 /* If only the object referenced by one of the arguments could be
5271 determined, use it and... */
5272 *pref = aref[0];
5273 i0 = true;
5275 else
5276 *pref = aref[1];
5278 const bool i1 = !i0;
5279 /* ...see if the offset obtained from the other pointer can be used
5280 to tighten up the bound on the offset obtained from the first. */
5281 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5282 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5284 pref->offrng[0] = aref[i0].offrng[0];
5285 pref->offrng[1] = aref[i0].offrng[1];
5287 return true;
5290 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
5291 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
5292 on success and false on failure. */
5294 static bool
5295 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
5296 ssa_name_limit_t &snlim, pointer_query *qry)
5298 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
5300 ++pref->deref;
5302 tree arefop = TREE_OPERAND (aref, 0);
5303 tree reftype = TREE_TYPE (arefop);
5304 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5305 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
5306 of known bound. */
5307 return false;
5309 if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
5310 return false;
5312 offset_int orng[2];
5313 tree off = pref->eval (TREE_OPERAND (aref, 1));
5314 range_query *const rvals = qry ? qry->rvals : NULL;
5315 if (!get_offset_range (off, NULL, orng, rvals))
5317 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5318 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5319 orng[0] = -orng[1] - 1;
5322 /* Convert the array index range determined above to a byte
5323 offset. */
5324 tree lowbnd = array_ref_low_bound (aref);
5325 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5327 /* Adjust the index by the low bound of the array domain
5328 (normally zero but 1 in Fortran). */
5329 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5330 orng[0] -= lb;
5331 orng[1] -= lb;
5334 tree eltype = TREE_TYPE (aref);
5335 tree tpsize = TYPE_SIZE_UNIT (eltype);
5336 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5338 pref->add_max_offset ();
5339 return true;
5342 offset_int sz = wi::to_offset (tpsize);
5343 orng[0] *= sz;
5344 orng[1] *= sz;
5346 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5348 /* Except for the permissive raw memory functions which use
5349 the size of the whole object determined above, use the size
5350 of the referenced array. Because the overall offset is from
5351 the beginning of the complete array object, add this overall
5352 offset to the size of the array. */
5353 offset_int sizrng[2] =
5355 pref->offrng[0] + orng[0] + sz,
5356 pref->offrng[1] + orng[1] + sz
5358 if (sizrng[1] < sizrng[0])
5359 std::swap (sizrng[0], sizrng[1]);
5360 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5361 pref->sizrng[0] = sizrng[0];
5362 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5363 pref->sizrng[1] = sizrng[1];
5366 pref->add_offset (orng[0], orng[1]);
5367 return true;
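/* For example, for a hypothetical access A[I] into int A[10] with
   I in [2, 5] and a 4-byte int, ORNG is converted to the byte
   range [8, 20]; for a Fortran-style array whose domain starts
   at 1 the index range is first shifted down by the low bound.  */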
5370 /* A helper of compute_objsize_r() to determine the size from MEM_REF
5371 MREF. Return true on success and false on failure. */
5373 static bool
5374 handle_mem_ref (tree mref, int ostype, access_ref *pref,
5375 ssa_name_limit_t &snlim, pointer_query *qry)
5377 gcc_assert (TREE_CODE (mref) == MEM_REF);
5379 ++pref->deref;
5381 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
5383 /* Hack: Give up for MEM_REFs of vector types; those may be
5384 synthesized from multiple assignments to consecutive data
5385 members (see PR 93200 and 96963).
5386 FIXME: Vectorized assignments should only be present after
5387 vectorization so this hack is only necessary after it has
5388 run and could be avoided in calls from prior passes (e.g.,
5389 tree-ssa-strlen.c).
5390 FIXME: Deal with this more generally, e.g., by marking up
5391 such MEM_REFs at the time they're created. */
5392 return false;
5395 tree mrefop = TREE_OPERAND (mref, 0);
5396 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
5397 return false;
5399 offset_int orng[2];
5400 tree off = pref->eval (TREE_OPERAND (mref, 1));
5401 range_query *const rvals = qry ? qry->rvals : NULL;
5402 if (!get_offset_range (off, NULL, orng, rvals))
5404 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5405 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5406 orng[0] = -orng[1] - 1;
5409 pref->add_offset (orng[0], orng[1]);
5410 return true;
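/* E.g., for MEM_REF <P, 4> the constant offset 4 is added to the
   offset range already computed for P; a variable offset widens
   the range to all of ptrdiff_t instead.  */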
5413 /* Helper to compute the size of the object referenced by the PTR
5414 expression which must have pointer type, using Object Size type
5415 OSTYPE (only the least significant 2 bits are used).
5416 On success, sets PREF->REF to the DECL of the referenced object
5417 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5418 offsets into it, and PREF->SIZRNG to the range of sizes of
5419 the object(s).
5420 SNLIM is used to avoid visiting the same PHI operand multiple
5421 times, and, when nonnull, RVALS to determine range information.
5422 Returns true on success, false when a meaningful size (or range)
5423 cannot be determined.
5425 The function is intended for diagnostics and should not be used
5426 to influence code generation or optimization. */
5428 static bool
5429 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5430 ssa_name_limit_t &snlim, pointer_query *qry)
5432 STRIP_NOPS (ptr);
5434 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5435 if (addr)
5437 --pref->deref;
5438 ptr = TREE_OPERAND (ptr, 0);
5441 if (DECL_P (ptr))
5443 pref->ref = ptr;
5445 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5447 /* Set the maximum size if the reference is to the pointer
5448 itself (as opposed to what it points to). */
5449 pref->set_max_size_range ();
5450 return true;
5453 if (tree size = decl_init_size (ptr, false))
5454 if (TREE_CODE (size) == INTEGER_CST)
5456 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5457 return true;
5460 pref->set_max_size_range ();
5461 return true;
5464 const tree_code code = TREE_CODE (ptr);
5465 range_query *const rvals = qry ? qry->rvals : NULL;
5467 if (code == BIT_FIELD_REF)
5469 tree ref = TREE_OPERAND (ptr, 0);
5470 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5471 return false;
5473 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5474 pref->add_offset (off / BITS_PER_UNIT);
5475 return true;
5478 if (code == COMPONENT_REF)
5480 tree ref = TREE_OPERAND (ptr, 0);
5481 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5482 /* In accesses through union types consider the entire unions
5483 rather than just their members. */
5484 ostype = 0;
5485 tree field = TREE_OPERAND (ptr, 1);
5487 if (ostype == 0)
5489 /* In OSTYPE zero (for raw memory functions like memcpy), use
5490 the maximum size instead if the identity of the enclosing
5491 object cannot be determined. */
5492 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5493 return false;
5495 /* Otherwise, use the size of the enclosing object and add
5496 the offset of the member to the offset computed so far. */
5497 tree offset = byte_position (field);
5498 if (TREE_CODE (offset) == INTEGER_CST)
5499 pref->add_offset (wi::to_offset (offset));
5500 else
5501 pref->add_max_offset ();
5503 if (!pref->ref)
5504 /* REF may already have been set to an SSA_NAME earlier
5505 to provide better context for diagnostics. In that case,
5506 leave it unchanged. */
5507 pref->ref = ref;
5508 return true;
5511 pref->ref = field;
5513 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5515 /* Set maximum size if the reference is to the pointer member
5516 itself (as opposed to what it points to). */
5517 pref->set_max_size_range ();
5518 return true;
5521 /* SAM is set for array members that might need special treatment. */
5522 special_array_member sam;
5523 tree size = component_ref_size (ptr, &sam);
5524 if (sam == special_array_member::int_0)
5525 pref->sizrng[0] = pref->sizrng[1] = 0;
5526 else if (!pref->trail1special && sam == special_array_member::trail_1)
5527 pref->sizrng[0] = pref->sizrng[1] = 1;
5528 else if (size && TREE_CODE (size) == INTEGER_CST)
5529 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5530 else
5532 /* When the size of the member is unknown it's either a flexible
5533 array member or a trailing special array member (either zero
5534 length or one-element). Set the size to the maximum minus
5535 the constant size of the type. */
5536 pref->sizrng[0] = 0;
5537 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5538 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5539 if (TREE_CODE (recsize) == INTEGER_CST)
5540 pref->sizrng[1] -= wi::to_offset (recsize);
5542 return true;
5545 if (code == ARRAY_REF)
5546 return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
5548 if (code == MEM_REF)
5549 return handle_mem_ref (ptr, ostype, pref, snlim, qry);
5551 if (code == TARGET_MEM_REF)
5553 tree ref = TREE_OPERAND (ptr, 0);
5554 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5555 return false;
5557 /* TODO: Handle remaining operands. Until then, add maximum offset. */
5558 pref->ref = ptr;
5559 pref->add_max_offset ();
5560 return true;
5563 if (code == INTEGER_CST)
5565 /* Pointer constants other than null are most likely the result
5566 of erroneous null pointer addition/subtraction. Set size to
5567 zero. For null pointers, set size to the maximum for now
5568 since those may be the result of jump threading. */
5569 if (integer_zerop (ptr))
5570 pref->set_max_size_range ();
5571 else
5572 pref->sizrng[0] = pref->sizrng[1] = 0;
5573 pref->ref = ptr;
5575 return true;
5578 if (code == STRING_CST)
5580 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5581 pref->ref = ptr;
5582 return true;
5585 if (code == POINTER_PLUS_EXPR)
5587 tree ref = TREE_OPERAND (ptr, 0);
5588 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5589 return false;
5591 /* Clear DEREF since the offset is being applied to the target
5592 of the dereference. */
5593 pref->deref = 0;
5595 offset_int orng[2];
5596 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5597 if (get_offset_range (off, NULL, orng, rvals))
5598 pref->add_offset (orng[0], orng[1]);
5599 else
5600 pref->add_max_offset ();
5601 return true;
5604 if (code == VIEW_CONVERT_EXPR)
5606 ptr = TREE_OPERAND (ptr, 0);
5607 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5610 if (code == SSA_NAME)
5612 if (!snlim.next ())
5613 return false;
5615 /* Only process an SSA_NAME if the recursion limit has not yet
5616 been reached. */
5617 if (qry)
5619 if (++qry->depth > qry->max_depth)
5620 qry->max_depth = qry->depth;
5621 if (const access_ref *cache_ref = qry->get_ref (ptr))
5623 /* If the pointer is in the cache set *PREF to what it refers
5624 to and return success. */
5625 *pref = *cache_ref;
5626 return true;
5630 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5631 if (is_gimple_call (stmt))
5633 /* If STMT is a call to an allocation function get the size
5634 from its argument(s). If successful, also set *PREF->REF
5635 to PTR for the caller to include in diagnostics. */
5636 wide_int wr[2];
5637 if (gimple_call_alloc_size (stmt, wr, rvals))
5639 pref->ref = ptr;
5640 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5641 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5642 /* Constrain both bounds to a valid size. */
5643 offset_int maxsize = wi::to_offset (max_object_size ());
5644 if (pref->sizrng[0] > maxsize)
5645 pref->sizrng[0] = maxsize;
5646 if (pref->sizrng[1] > maxsize)
5647 pref->sizrng[1] = maxsize;
5649 else
5651 /* For functions known to return one of their pointer arguments
5652 try to determine what the returned pointer points to, and on
5653 success add OFFRNG which was set to the offset added by
5654 the function (e.g., memchr) to the overall offset. */
5655 offset_int offrng[2];
5656 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5658 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5659 return false;
5661 /* Cap OFFRNG[1] to at most the remaining size of
5662 the object. */
5663 offset_int remrng[2];
5664 remrng[1] = pref->size_remaining (remrng);
5665 if (remrng[1] < offrng[1])
5666 offrng[1] = remrng[1];
5667 pref->add_offset (offrng[0], offrng[1]);
5669 else
5671 /* For other calls that might return arbitrary pointers
5672 including into the middle of objects set the size
5673 range to maximum, clear PREF->BASE0, and also set
5674 PREF->REF to include in diagnostics. */
5675 pref->set_max_size_range ();
5676 pref->base0 = false;
5677 pref->ref = ptr;
5680 qry->put_ref (ptr, *pref);
5681 return true;
5684 if (gimple_nop_p (stmt))
5686 /* For a function argument try to determine the byte size
5687 of the array from the current function declaration
5688 (e.g., attribute access or related). */
5689 wide_int wr[2];
5690 bool static_array = false;
5691 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5693 pref->parmarray = !static_array;
5694 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5695 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5696 pref->ref = ref;
5697 qry->put_ref (ptr, *pref);
5698 return true;
5701 pref->set_max_size_range ();
5702 pref->base0 = false;
5703 pref->ref = ptr;
5704 qry->put_ref (ptr, *pref);
5705 return true;
5708 if (gimple_code (stmt) == GIMPLE_PHI)
5710 pref->ref = ptr;
5711 access_ref phi_ref = *pref;
5712 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5713 return false;
5714 *pref = phi_ref;
5715 pref->ref = ptr;
5716 qry->put_ref (ptr, *pref);
5717 return true;
5720 if (!is_gimple_assign (stmt))
5722 /* Clear BASE0 since the assigned pointer might point into
5723 the middle of the object, set the maximum size range and,
5724 if the SSA_NAME refers to a function argument, set
5725 PREF->REF to it. */
5726 pref->base0 = false;
5727 pref->set_max_size_range ();
5728 pref->ref = ptr;
5729 return true;
5732 tree_code code = gimple_assign_rhs_code (stmt);
5734 if (code == MAX_EXPR || code == MIN_EXPR)
5736 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5737 return false;
5738 qry->put_ref (ptr, *pref);
5739 return true;
5742 tree rhs = gimple_assign_rhs1 (stmt);
5744 if (code == POINTER_PLUS_EXPR
5745 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5747 /* Compute the size of the object first. */
5748 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5749 return false;
5751 offset_int orng[2];
5752 tree off = gimple_assign_rhs2 (stmt);
5753 if (get_offset_range (off, stmt, orng, rvals))
5754 pref->add_offset (orng[0], orng[1]);
5755 else
5756 pref->add_max_offset ();
5757 qry->put_ref (ptr, *pref);
5758 return true;
5761 if (code == ADDR_EXPR
5762 || code == SSA_NAME)
5763 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5765 /* (This could also be an assignment from a nonlocal pointer.) Save
5766 PTR to mention in diagnostics but otherwise treat it as a pointer
5767 to an unknown object. */
5768 pref->ref = rhs;
5769 pref->base0 = false;
5770 pref->set_max_size_range ();
5771 return true;
5774 /* Assume all other expressions point into an unknown object
5775 of the maximum valid size. */
5776 pref->ref = ptr;
5777 pref->base0 = false;
5778 pref->set_max_size_range ();
5779 if (TREE_CODE (ptr) == SSA_NAME)
5780 qry->put_ref (ptr, *pref);
5781 return true;
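/* A worked example with hypothetical names: for
     char a[8];
     char *p = &a[2];
   the function sets PREF->REF to A, PREF->SIZRNG to [8, 8] and
   PREF->OFFRNG to [2, 2], so PREF->size_remaining () yields 6.  */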
5784 /* A "public" wrapper around the above. Clients should use this overload
5785 instead. */
5787 tree
5788 compute_objsize (tree ptr, int ostype, access_ref *pref,
5789 range_query *rvals /* = NULL */)
5791 pointer_query qry;
5792 qry.rvals = rvals;
5793 ssa_name_limit_t snlim;
5794 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5795 return NULL_TREE;
5797 offset_int maxsize = pref->size_remaining ();
5798 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5799 pref->offrng[0] = 0;
5800 return wide_int_to_tree (sizetype, maxsize);
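/* A minimal usage sketch, mirroring calls elsewhere in this file
   (PTR and USE are hypothetical):

     access_ref aref;
     if (tree size = compute_objsize (ptr, 1, &aref))
       // SIZE is the number of bytes remaining at PTR and AREF
       // describes the referenced object, offset, and size.
       use (size, aref);
*/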
5803 /* Transitional wrapper. The function should be removed once callers
5804 transition to the pointer_query API. */
5806 tree
5807 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5809 pointer_query qry;
5810 if (ptr_qry)
5811 ptr_qry->depth = 0;
5812 else
5813 ptr_qry = &qry;
5815 ssa_name_limit_t snlim;
5816 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5817 return NULL_TREE;
5819 offset_int maxsize = pref->size_remaining ();
5820 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5821 pref->offrng[0] = 0;
5822 return wide_int_to_tree (sizetype, maxsize);
5825 /* Legacy wrapper around the above. The function should be removed
5826 once callers transition to one of the two above. */
5828 tree
5829 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5830 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5832 /* Set the initial offsets to zero and size to negative to indicate
5833 none has been computed yet. */
5834 access_ref ref;
5835 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5836 if (!size || !ref.base0)
5837 return NULL_TREE;
5839 if (pdecl)
5840 *pdecl = ref.ref;
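/* Use the lower bound of the offset range unless it's negative,
   in which case use the upper bound instead.  */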
5842 if (poff)
5843 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5845 return size;
5848 /* Helper to determine and check the sizes of the source and the destination
5849 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
5850 call expression, DEST is the destination argument, SRC is the source
5851 argument or null, and LEN is the number of bytes. Use Object Size type-0
5852 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5853 (no overflow or invalid sizes), false otherwise. */
5855 static bool
5856 check_memop_access (tree exp, tree dest, tree src, tree size)
5858 /* For functions like memset and memcpy that operate on raw memory
5859 try to determine the size of the largest source and destination
5860 object using type-0 Object Size regardless of the object size
5861 type specified by the option. */
5862 access_data data (exp, access_read_write);
5863 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5864 tree dstsize = compute_objsize (dest, 0, &data.dst);
5866 return check_access (exp, size, /*maxread=*/NULL_TREE,
5867 srcsize, dstsize, data.mode, &data);
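/* E.g., for a hypothetical
     char d[4];
     memcpy (d, s, 8);
   DSTSIZE above is 4 and check_access diagnoses the overflow
   and returns false.  */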
5870 /* Validate memchr arguments without performing any expansion.
5871 Return NULL_RTX. */
5873 static rtx
5874 expand_builtin_memchr (tree exp, rtx)
5876 if (!validate_arglist (exp,
5877 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5878 return NULL_RTX;
5880 tree arg1 = CALL_EXPR_ARG (exp, 0);
5881 tree len = CALL_EXPR_ARG (exp, 2);
5883 check_read_access (exp, arg1, len, 0);
5885 return NULL_RTX;
5888 /* Expand a call EXP to the memcpy builtin.
5889 Return NULL_RTX if we failed; the caller should emit a normal call,
5890 otherwise try to get the result in TARGET, if convenient (and in
5891 mode MODE if that's convenient). */
5893 static rtx
5894 expand_builtin_memcpy (tree exp, rtx target)
5896 if (!validate_arglist (exp,
5897 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5898 return NULL_RTX;
5900 tree dest = CALL_EXPR_ARG (exp, 0);
5901 tree src = CALL_EXPR_ARG (exp, 1);
5902 tree len = CALL_EXPR_ARG (exp, 2);
5904 check_memop_access (exp, dest, src, len);
5906 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5907 /*retmode=*/ RETURN_BEGIN, false);
5910 /* Check a call EXP to the memmove built-in for validity.
5911 Return NULL_RTX on both success and failure. */
5913 static rtx
5914 expand_builtin_memmove (tree exp, rtx target)
5916 if (!validate_arglist (exp,
5917 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5918 return NULL_RTX;
5920 tree dest = CALL_EXPR_ARG (exp, 0);
5921 tree src = CALL_EXPR_ARG (exp, 1);
5922 tree len = CALL_EXPR_ARG (exp, 2);
5924 check_memop_access (exp, dest, src, len);
5926 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5927 /*retmode=*/ RETURN_BEGIN, true);
5930 /* Expand a call EXP to the mempcpy builtin.
5931 Return NULL_RTX if we failed; the caller should emit a normal call,
5932 otherwise try to get the result in TARGET, if convenient (and in
5933 mode MODE if that's convenient). */
5935 static rtx
5936 expand_builtin_mempcpy (tree exp, rtx target)
5938 if (!validate_arglist (exp,
5939 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5940 return NULL_RTX;
5942 tree dest = CALL_EXPR_ARG (exp, 0);
5943 tree src = CALL_EXPR_ARG (exp, 1);
5944 tree len = CALL_EXPR_ARG (exp, 2);
5946 /* Policy does not generally allow using compute_objsize (which
5947 is used internally by check_memop_access) to change code generation
5948 or drive optimization decisions.
5950 In this instance it is safe because the code we generate has
5951 the same semantics regardless of the return value of
5952 check_memop_access. Exactly the same amount of data is copied
5953 and the return value is exactly the same in both cases.
5955 Furthermore, check_memop_access always uses Object Size type 0
5956 for the call to compute_objsize, so the imprecise nature of
5957 compute_objsize is avoided. */
5959 /* Avoid expanding mempcpy into memcpy when the call is determined
5960 to overflow the buffer. This also prevents the same overflow
5961 from being diagnosed again when expanding memcpy. */
5962 if (!check_memop_access (exp, dest, src, len))
5963 return NULL_RTX;
5965 return expand_builtin_mempcpy_args (dest, src, len,
5966 target, exp, /*retmode=*/ RETURN_END);
5969 /* Helper function to do the actual work for expand of memory copy family
5970 functions (memcpy, mempcpy, stpcpy). The expansion copies LEN bytes
5971 of memory from SRC to DEST and assigns to TARGET if convenient. Return
5972 value is based on RETMODE argument. */
5974 static rtx
5975 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5976 rtx target, tree exp, memop_ret retmode,
5977 bool might_overlap)
5979 unsigned int src_align = get_pointer_alignment (src);
5980 unsigned int dest_align = get_pointer_alignment (dest);
5981 rtx dest_mem, src_mem, dest_addr, len_rtx;
5982 HOST_WIDE_INT expected_size = -1;
5983 unsigned int expected_align = 0;
5984 unsigned HOST_WIDE_INT min_size;
5985 unsigned HOST_WIDE_INT max_size;
5986 unsigned HOST_WIDE_INT probable_max_size;
5988 bool is_move_done;
5990 /* If DEST is not a pointer type, call the normal function. */
5991 if (dest_align == 0)
5992 return NULL_RTX;
5994 /* If SRC is not a pointer type, don't do this
5995 operation in-line. */
5996 if (src_align == 0)
5997 return NULL_RTX;
5999 if (currently_expanding_gimple_stmt)
6000 stringop_block_profile (currently_expanding_gimple_stmt,
6001 &expected_align, &expected_size);
6003 if (expected_align < dest_align)
6004 expected_align = dest_align;
6005 dest_mem = get_memory_rtx (dest, len);
6006 set_mem_align (dest_mem, dest_align);
6007 len_rtx = expand_normal (len);
6008 determine_block_size (len, len_rtx, &min_size, &max_size,
6009 &probable_max_size);
6011 /* Try to get the byte representation of the constant SRC points to,
6012 with its byte size in NBYTES. */
6013 unsigned HOST_WIDE_INT nbytes;
6014 const char *rep = getbyterep (src, &nbytes);
6016 /* If the function's constant bound LEN_RTX is less than or equal
6017 to the byte size of the representation of the constant argument,
6018 and if block move would be done by pieces, we can avoid loading
6019 the bytes from memory and only store the computed constant.
6020 This works in the overlap (memmove) case as well because
6021 store_by_pieces just generates a series of stores of constants
6022 from the representation returned by getbyterep(). */
6023 if (rep
6024 && CONST_INT_P (len_rtx)
6025 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
6026 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
6027 CONST_CAST (char *, rep),
6028 dest_align, false))
6030 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
6031 builtin_memcpy_read_str,
6032 CONST_CAST (char *, rep),
6033 dest_align, false, retmode);
6034 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6035 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6036 return dest_mem;
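/* E.g., a hypothetical memcpy (d, "abc", 4) with a sufficiently
   aligned destination is emitted above as immediate stores of
   the four constant bytes without ever loading them from the
   source.  */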
6039 src_mem = get_memory_rtx (src, len);
6040 set_mem_align (src_mem, src_align);
6042 /* Copy word part most expediently. */
6043 enum block_op_methods method = BLOCK_OP_NORMAL;
6044 if (CALL_EXPR_TAILCALL (exp)
6045 && (retmode == RETURN_BEGIN || target == const0_rtx))
6046 method = BLOCK_OP_TAILCALL;
6047 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6048 && retmode == RETURN_END
6049 && !might_overlap
6050 && target != const0_rtx);
6051 if (use_mempcpy_call)
6052 method = BLOCK_OP_NO_LIBCALL_RET;
6053 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6054 expected_align, expected_size,
6055 min_size, max_size, probable_max_size,
6056 use_mempcpy_call, &is_move_done,
6057 might_overlap);
6059 /* Bail out when a mempcpy call would be expanded as libcall and when
6060 we have a target that provides a fast implementation
6061 of the mempcpy routine. */
6062 if (!is_move_done)
6063 return NULL_RTX;
6065 if (dest_addr == pc_rtx)
6066 return NULL_RTX;
6068 if (dest_addr == 0)
6070 dest_addr = force_operand (XEXP (dest_mem, 0), target);
6071 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6074 if (retmode != RETURN_BEGIN && target != const0_rtx)
6076 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6077 /* stpcpy returns a pointer to the last byte, the terminating nul. */
6078 if (retmode == RETURN_END_MINUS_ONE)
6079 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6082 return dest_addr;
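/* The RETMODE values thus yield, for a copy of LEN bytes to DEST:
   RETURN_BEGIN returns DEST, RETURN_END returns DEST + LEN, and
   RETURN_END_MINUS_ONE returns DEST + LEN - 1 (the stpcpy
   convention).  */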
6085 static rtx
6086 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
6087 rtx target, tree orig_exp, memop_ret retmode)
6089 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
6090 retmode, false);
6093 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
6094 we failed, the caller should emit a normal call, otherwise try to
6095 get the result in TARGET, if convenient.
6096 Return value is based on RETMODE argument. */
6098 static rtx
6099 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
6101 class expand_operand ops[3];
6102 rtx dest_mem;
6103 rtx src_mem;
6105 if (!targetm.have_movstr ())
6106 return NULL_RTX;
6108 dest_mem = get_memory_rtx (dest, NULL);
6109 src_mem = get_memory_rtx (src, NULL);
6110 if (retmode == RETURN_BEGIN)
6112 target = force_reg (Pmode, XEXP (dest_mem, 0));
6113 dest_mem = replace_equiv_address (dest_mem, target);
6116 create_output_operand (&ops[0],
6117 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
6118 create_fixed_operand (&ops[1], dest_mem);
6119 create_fixed_operand (&ops[2], src_mem);
6120 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
6121 return NULL_RTX;
6123 if (retmode != RETURN_BEGIN && target != const0_rtx)
6125 target = ops[0].value;
6126 /* movstr is supposed to set end to the address of the NUL
6127 terminator. If the caller requested a mempcpy-like return value,
6128 adjust it. */
6129 if (retmode == RETURN_END)
6131 rtx tem = plus_constant (GET_MODE (target),
6132 gen_lowpart (GET_MODE (target), target), 1);
6133 emit_move_insn (target, force_operand (tem, NULL_RTX));
6136 return target;
6139 /* Do some very basic size validation of a call to the strcat builtin
6140 given by EXP. Return NULL_RTX to have the built-in expand to a call
6141 to the library function. */
6143 static rtx
6144 expand_builtin_strcat (tree exp)
6146 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6147 || !warn_stringop_overflow)
6148 return NULL_RTX;
6150 tree dest = CALL_EXPR_ARG (exp, 0);
6151 tree src = CALL_EXPR_ARG (exp, 1);
6153 /* There is no way here to determine the length of the string in
6154 the destination to which the SRC string is being appended so
6155 just diagnose cases when the source string is longer than
6156 the destination object. */
6157 access_data data (exp, access_read_write, NULL_TREE, true,
6158 NULL_TREE, true);
6159 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6160 compute_objsize (src, ost, &data.src);
6161 tree destsize = compute_objsize (dest, ost, &data.dst);
6163 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6164 src, destsize, data.mode, &data);
6166 return NULL_RTX;
6169 /* Expand expression EXP, which is a call to the strcpy builtin. Return
6170 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6171 try to get the result in TARGET, if convenient (and in mode MODE if that's
6172 convenient). */
6174 static rtx
6175 expand_builtin_strcpy (tree exp, rtx target)
6177 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6178 return NULL_RTX;
6180 tree dest = CALL_EXPR_ARG (exp, 0);
6181 tree src = CALL_EXPR_ARG (exp, 1);
6183 if (warn_stringop_overflow)
6185 access_data data (exp, access_read_write, NULL_TREE, true,
6186 NULL_TREE, true);
6187 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6188 compute_objsize (src, ost, &data.src);
6189 tree dstsize = compute_objsize (dest, ost, &data.dst);
6190 check_access (exp, /*dstwrite=*/ NULL_TREE,
6191 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6192 dstsize, data.mode, &data);
6195 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
6197 /* Check to see if the argument was declared attribute nonstring
6198 and if so, issue a warning since at this point it's not known
6199 to be nul-terminated. */
6200 tree fndecl = get_callee_fndecl (exp);
6201 maybe_warn_nonstring_arg (fndecl, exp);
6202 return ret;
6205 return NULL_RTX;
6208 /* Helper function to do the actual work for expand_builtin_strcpy. The
6209 arguments to the builtin_strcpy call DEST and SRC are broken out
6210 so that this can also be called without constructing an actual CALL_EXPR.
6211 The other arguments and return value are the same as for
6212 expand_builtin_strcpy. */
6214 static rtx
6215 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
6217 /* Detect strcpy calls with unterminated arrays. */
6218 tree size;
6219 bool exact;
6220 if (tree nonstr = unterminated_array (src, &size, &exact))
6222 /* NONSTR refers to the non-nul terminated constant array. */
6223 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6224 size, exact);
6225 return NULL_RTX;
6228 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
6231 /* Expand a call EXP to the stpcpy builtin.
6232 Return NULL_RTX if we failed; the caller should emit a normal call,
6233 otherwise try to get the result in TARGET, if convenient (and in
6234 mode MODE if that's convenient). */
6236 static rtx
6237 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
6239 tree dst, src;
6240 location_t loc = EXPR_LOCATION (exp);
6242 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6243 return NULL_RTX;
6245 dst = CALL_EXPR_ARG (exp, 0);
6246 src = CALL_EXPR_ARG (exp, 1);
6248 if (warn_stringop_overflow)
6250 access_data data (exp, access_read_write);
6251 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6252 &data.dst);
6253 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6254 src, destsize, data.mode, &data);
6257 /* If return value is ignored, transform stpcpy into strcpy. */
6258 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
6260 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
6261 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
6262 return expand_expr (result, target, mode, EXPAND_NORMAL);
6264 else
6266 tree len, lenp1;
6267 rtx ret;
6269 /* Ensure we get an actual string whose length can be evaluated at
6270 compile-time, not an expression containing a string. This is
6271 because the latter will potentially produce pessimized code
6272 when used to produce the return value. */
6273 c_strlen_data lendata = { };
6274 if (!c_getstr (src)
6275 || !(len = c_strlen (src, 0, &lendata, 1)))
6276 return expand_movstr (dst, src, target,
6277 /*retmode=*/ RETURN_END_MINUS_ONE);
6279 if (lendata.decl)
6280 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
6282 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
6283 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
6284 target, exp,
6285 /*retmode=*/ RETURN_END_MINUS_ONE);
6287 if (ret)
6288 return ret;
6290 if (TREE_CODE (len) == INTEGER_CST)
6292 rtx len_rtx = expand_normal (len);
6294 if (CONST_INT_P (len_rtx))
6296 ret = expand_builtin_strcpy_args (exp, dst, src, target);
6298 if (ret)
6300 if (! target)
6302 if (mode != VOIDmode)
6303 target = gen_reg_rtx (mode);
6304 else
6305 target = gen_reg_rtx (GET_MODE (ret));
6307 if (GET_MODE (target) != GET_MODE (ret))
6308 ret = gen_lowpart (GET_MODE (target), ret);
6310 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
6311 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
6312 gcc_assert (ret);
6314 return target;
6319 return expand_movstr (dst, src, target,
6320 /*retmode=*/ RETURN_END_MINUS_ONE);
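/* E.g., a hypothetical stpcpy (d, "hi") is expanded above as
   mempcpy (d, "hi", 3) with RETURN_END_MINUS_ONE, so the result
   is d + 2, the address of the copied terminating nul.  */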
6324 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6325 arguments while being careful to avoid duplicate warnings (which could
6326 be issued if the expander were to expand the call, resulting in it
6327 being emitted in expand_call()). */
6329 static rtx
6330 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6332 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6334 /* The call has been successfully expanded. Check for nonstring
6335 arguments and issue warnings as appropriate. */
6336 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6337 return ret;
6340 return NULL_RTX;
6343 /* Check a call EXP to the stpncpy built-in for validity.
6344 Return NULL_RTX on both success and failure. */
6346 static rtx
6347 expand_builtin_stpncpy (tree exp, rtx)
6349 if (!validate_arglist (exp,
6350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6351 || !warn_stringop_overflow)
6352 return NULL_RTX;
6354 /* The source and destination of the call. */
6355 tree dest = CALL_EXPR_ARG (exp, 0);
6356 tree src = CALL_EXPR_ARG (exp, 1);
6358 /* The exact number of bytes to write (not the maximum). */
6359 tree len = CALL_EXPR_ARG (exp, 2);
6360 access_data data (exp, access_read_write);
6361 /* The size of the destination object. */
6362 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6363 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
6364 return NULL_RTX;
6367 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6368 bytes from constant string DATA + OFFSET and return it as target
6369 constant. */
6371 static rtx
6372 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
6373 scalar_int_mode mode)
6375 const char *str = (const char *) data;
6377 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6378 return const0_rtx;
6380 return c_readstr (str + offset, mode);
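/* E.g., reading 4 bytes at offset 0 from DATA == "ab" produces
   the constant { 'a', 'b', 0, 0 }; any offset past the end of
   the string yields all zeros, matching strncpy's trailing nul
   padding.  */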
6383 /* Helper to check the sizes of sequences and the destination of calls
6384 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6385 success (no overflow or invalid sizes), false otherwise. */
6387 static bool
6388 check_strncat_sizes (tree exp, tree objsize)
6390 tree dest = CALL_EXPR_ARG (exp, 0);
6391 tree src = CALL_EXPR_ARG (exp, 1);
6392 tree maxread = CALL_EXPR_ARG (exp, 2);
6394 /* Try to determine the range of lengths that the source expression
6395 refers to. */
6396 c_strlen_data lendata = { };
6397 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6399 /* Try to verify that the destination is big enough for the shortest
6400 string. */
6402 access_data data (exp, access_read_write, maxread, true);
6403 if (!objsize && warn_stringop_overflow)
6405 /* If it hasn't been provided by __strncat_chk, try to determine
6406 the size of the destination object into which the source is
6407 being copied. */
6408 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6411 /* Add one for the terminating nul. */
6412 tree srclen = (lendata.minlen
6413 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
6414 size_one_node)
6415 : NULL_TREE);
6417 /* The strncat function copies at most MAXREAD bytes and always appends
6418 the terminating nul so the specified upper bound should never be equal
6419 to (or greater than) the size of the destination. */
6420 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6421 && tree_int_cst_equal (objsize, maxread))
6423 location_t loc = tree_inlined_location (exp);
6424 warning_at (loc, OPT_Wstringop_overflow_,
6425 "%K%qD specified bound %E equals destination size",
6426 exp, get_callee_fndecl (exp), maxread);
6428 return false;
6431 if (!srclen
6432 || (maxread && tree_fits_uhwi_p (maxread)
6433 && tree_fits_uhwi_p (srclen)
6434 && tree_int_cst_lt (maxread, srclen)))
6435 srclen = maxread;
6437 /* The number of bytes to write is LEN but check_access will also
6438 check SRCLEN if LEN's value isn't known. */
6439 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6440 objsize, data.mode, &data);
6443 /* Similar to expand_builtin_strcat, do some very basic size validation
6444 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
6445 the built-in expand to a call to the library function. */
6447 static rtx
6448 expand_builtin_strncat (tree exp, rtx)
6450 if (!validate_arglist (exp,
6451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6452 || !warn_stringop_overflow)
6453 return NULL_RTX;
6455 tree dest = CALL_EXPR_ARG (exp, 0);
6456 tree src = CALL_EXPR_ARG (exp, 1);
6457 /* The upper bound on the number of bytes to write. */
6458 tree maxread = CALL_EXPR_ARG (exp, 2);
6460 /* Detect unterminated source (only). */
6461 if (!check_nul_terminated_array (exp, src, maxread))
6462 return NULL_RTX;
6464 /* The length of the source sequence. */
6465 tree slen = c_strlen (src, 1);
6467 /* Try to determine the range of lengths that the source expression
6468 refers to. Since the lengths are only used for warning and not
6469 for code generation, disable strict mode below. */
6470 tree maxlen = slen;
6471 if (!maxlen)
6473 c_strlen_data lendata = { };
6474 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6475 maxlen = lendata.maxbound;
6478 access_data data (exp, access_read_write);
6479 /* Try to verify that the destination is big enough for the shortest
6480 string. First try to determine the size of the destination object
6481 into which the source is being copied. */
6482 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6484 /* Add one for the terminating nul. */
6485 tree srclen = (maxlen
6486 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
6487 size_one_node)
6488 : NULL_TREE);
6490 /* The strncat function copies at most MAXREAD bytes and always appends
6491 the terminating nul so the specified upper bound should never be equal
6492 to (or greater than) the size of the destination. */
6493 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6494 && tree_int_cst_equal (destsize, maxread))
6496 location_t loc = tree_inlined_location (exp);
6497 warning_at (loc, OPT_Wstringop_overflow_,
6498 "%K%qD specified bound %E equals destination size",
6499 exp, get_callee_fndecl (exp), maxread);
6501 return NULL_RTX;
6504 if (!srclen
6505 || (maxread && tree_fits_uhwi_p (maxread)
6506 && tree_fits_uhwi_p (srclen)
6507 && tree_int_cst_lt (maxread, srclen)))
6508 srclen = maxread;
6510 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6511 destsize, data.mode, &data);
6512 return NULL_RTX;
6515 /* Expand expression EXP, which is a call to the strncpy builtin. Return
6516 NULL_RTX if we failed; the caller should emit a normal call. */
6518 static rtx
6519 expand_builtin_strncpy (tree exp, rtx target)
6521 location_t loc = EXPR_LOCATION (exp);
6523 if (!validate_arglist (exp,
6524 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6525 return NULL_RTX;
6526 tree dest = CALL_EXPR_ARG (exp, 0);
6527 tree src = CALL_EXPR_ARG (exp, 1);
6528 /* The number of bytes to write (not the maximum). */
6529 tree len = CALL_EXPR_ARG (exp, 2);
6531 /* The length of the source sequence. */
6532 tree slen = c_strlen (src, 1);
6534 if (warn_stringop_overflow)
6536 access_data data (exp, access_read_write, len, true, len, true);
6537 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6538 compute_objsize (src, ost, &data.src);
6539 tree dstsize = compute_objsize (dest, ost, &data.dst);
6540 /* The number of bytes to write is LEN but check_access will also
6541 check SLEN if LEN's value isn't known. */
6542 check_access (exp, /*dstwrite=*/len,
6543 /*maxread=*/len, src, dstsize, data.mode, &data);
6546 /* We must be passed a constant len and src parameter. */
6547 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6548 return NULL_RTX;
6550 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6552 /* We're required to pad with trailing zeros if the requested
6553 len is greater than strlen(s2)+1. In that case try to
6554 use store_by_pieces; if it fails, punt. */
6555 if (tree_int_cst_lt (slen, len))
6557 unsigned int dest_align = get_pointer_alignment (dest);
6558 const char *p = c_getstr (src);
6559 rtx dest_mem;
6561 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6562 || !can_store_by_pieces (tree_to_uhwi (len),
6563 builtin_strncpy_read_str,
6564 CONST_CAST (char *, p),
6565 dest_align, false))
6566 return NULL_RTX;
6568 dest_mem = get_memory_rtx (dest, len);
6569 store_by_pieces (dest_mem, tree_to_uhwi (len),
6570 builtin_strncpy_read_str,
6571 CONST_CAST (char *, p), dest_align, false,
6572 RETURN_BEGIN);
6573 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6574 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6575 return dest_mem;
6578 return NULL_RTX;
6581 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6582 bytes from constant string DATA + OFFSET and return it as target
6583 constant. */
6585 static rtx
6586 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6587 scalar_int_mode mode)
6589 const char *c = (const char *) data;
6590 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
6592 memset (p, *c, GET_MODE_SIZE (mode));
6594 return c_readstr (p, mode);
6597 /* Callback routine for store_by_pieces. Return the RTL of a register
6598 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6599 char value given in the RTL register data. For example, if mode is
6600 4 bytes wide, return the RTL for 0x01010101*data. */
6602 static rtx
6603 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6604 scalar_int_mode mode)
6606 rtx target, coeff;
6607 size_t size;
6608 char *p;
6610 size = GET_MODE_SIZE (mode);
6611 if (size == 1)
6612 return (rtx) data;
6614 p = XALLOCAVEC (char, size);
6615 memset (p, 1, size);
6616 coeff = c_readstr (p, mode);
6618 target = convert_to_mode (mode, (rtx) data, 1);
6619 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6620 return force_reg (mode, target);
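/* E.g., with a 4-byte MODE and DATA holding the value 0xab, the
   result is 0xab * 0x01010101 == 0xabababab, i.e., one copy of
   the byte in each position.  */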
6623 /* Expand expression EXP, which is a call to the memset builtin. Return
6624 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6625 try to get the result in TARGET, if convenient (and in mode MODE if that's
6626 convenient). */
6628 static rtx
6629 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
6631 if (!validate_arglist (exp,
6632 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6633 return NULL_RTX;
6635 tree dest = CALL_EXPR_ARG (exp, 0);
6636 tree val = CALL_EXPR_ARG (exp, 1);
6637 tree len = CALL_EXPR_ARG (exp, 2);
6639 check_memop_access (exp, dest, NULL_TREE, len);
6641 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
6644 /* Helper function to do the actual work for expand_builtin_memset. The
6645 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6646 so that this can also be called without constructing an actual CALL_EXPR.
6647 The other arguments and return value are the same as for
6648 expand_builtin_memset. */
6650 static rtx
6651 expand_builtin_memset_args (tree dest, tree val, tree len,
6652 rtx target, machine_mode mode, tree orig_exp)
6654 tree fndecl, fn;
6655 enum built_in_function fcode;
6656 machine_mode val_mode;
6657 char c;
6658 unsigned int dest_align;
6659 rtx dest_mem, dest_addr, len_rtx;
6660 HOST_WIDE_INT expected_size = -1;
6661 unsigned int expected_align = 0;
6662 unsigned HOST_WIDE_INT min_size;
6663 unsigned HOST_WIDE_INT max_size;
6664 unsigned HOST_WIDE_INT probable_max_size;
6666 dest_align = get_pointer_alignment (dest);
6668 /* If DEST is not a pointer type, don't do this operation in-line. */
6669 if (dest_align == 0)
6670 return NULL_RTX;
6672 if (currently_expanding_gimple_stmt)
6673 stringop_block_profile (currently_expanding_gimple_stmt,
6674 &expected_align, &expected_size);
6676 if (expected_align < dest_align)
6677 expected_align = dest_align;
6679 /* If the LEN parameter is zero, return DEST. */
6680 if (integer_zerop (len))
6682 /* Evaluate and ignore VAL in case it has side-effects. */
6683 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6684 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6687 /* Stabilize the arguments in case we fail. */
6688 dest = builtin_save_expr (dest);
6689 val = builtin_save_expr (val);
6690 len = builtin_save_expr (len);
6692 len_rtx = expand_normal (len);
6693 determine_block_size (len, len_rtx, &min_size, &max_size,
6694 &probable_max_size);
6695 dest_mem = get_memory_rtx (dest, len);
6696 val_mode = TYPE_MODE (unsigned_char_type_node);
6698 if (TREE_CODE (val) != INTEGER_CST)
6700 rtx val_rtx;
6702 val_rtx = expand_normal (val);
6703 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
6705 /* Assume that we can memset by pieces if we can store
6706 the coefficients by pieces (in the required modes).
6707 We can't pass builtin_memset_gen_str as that emits RTL. */
6708 c = 1;
6709 if (tree_fits_uhwi_p (len)
6710 && can_store_by_pieces (tree_to_uhwi (len),
6711 builtin_memset_read_str, &c, dest_align,
6712 true))
6714 val_rtx = force_reg (val_mode, val_rtx);
6715 store_by_pieces (dest_mem, tree_to_uhwi (len),
6716 builtin_memset_gen_str, val_rtx, dest_align,
6717 true, RETURN_BEGIN);
6719 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
6720 dest_align, expected_align,
6721 expected_size, min_size, max_size,
6722 probable_max_size))
6723 goto do_libcall;
6725 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6726 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6727 return dest_mem;
6730 if (target_char_cast (val, &c))
6731 goto do_libcall;
6733 if (c)
6735 if (tree_fits_uhwi_p (len)
6736 && can_store_by_pieces (tree_to_uhwi (len),
6737 builtin_memset_read_str, &c, dest_align,
6738 true))
6739 store_by_pieces (dest_mem, tree_to_uhwi (len),
6740 builtin_memset_read_str, &c, dest_align, true,
6741 RETURN_BEGIN);
6742 else if (!set_storage_via_setmem (dest_mem, len_rtx,
6743 gen_int_mode (c, val_mode),
6744 dest_align, expected_align,
6745 expected_size, min_size, max_size,
6746 probable_max_size))
6747 goto do_libcall;
6749 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6750 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6751 return dest_mem;
6754 set_mem_align (dest_mem, dest_align);
6755 dest_addr = clear_storage_hints (dest_mem, len_rtx,
6756 CALL_EXPR_TAILCALL (orig_exp)
6757 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
6758 expected_align, expected_size,
6759 min_size, max_size,
6760 probable_max_size);
6762 if (dest_addr == 0)
6764 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6765 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6768 return dest_addr;
6770 do_libcall:
6771 fndecl = get_callee_fndecl (orig_exp);
6772 fcode = DECL_FUNCTION_CODE (fndecl);
6773 if (fcode == BUILT_IN_MEMSET)
6774 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6775 dest, val, len);
6776 else if (fcode == BUILT_IN_BZERO)
6777 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6778 dest, len);
6779 else
6780 gcc_unreachable ();
6781 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6782 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6783 return expand_call (fn, target, target == const0_rtx);
6786 /* Expand expression EXP, which is a call to the bzero builtin. Return
6787 NULL_RTX if we failed; the caller should emit a normal call. */
6789 static rtx
6790 expand_builtin_bzero (tree exp)
6792 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6793 return NULL_RTX;
6795 tree dest = CALL_EXPR_ARG (exp, 0);
6796 tree size = CALL_EXPR_ARG (exp, 1);
6798 check_memop_access (exp, dest, NULL_TREE, size);
6800 /* New argument list transforming bzero(ptr x, int y) to
6801 memset(ptr x, int 0, size_t y). This is done this way
6802 so that if it isn't expanded inline, we fall back to
6803 calling bzero instead of memset. */
6805 location_t loc = EXPR_LOCATION (exp);
6807 return expand_builtin_memset_args (dest, integer_zero_node,
6808 fold_convert_loc (loc,
6809 size_type_node, size),
6810 const0_rtx, VOIDmode, exp);
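/* Thus a hypothetical bzero (p, n) is expanded exactly like
   memset (p, 0, (size_t) n), except that the original bzero
   call is emitted if inline expansion fails.  */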
6813 /* Try to expand cmpstr operation ICODE with the given operands.
6814 Return the result rtx on success, otherwise return null. */
6816 static rtx
6817 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6818 HOST_WIDE_INT align)
6820 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6822 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6823 target = NULL_RTX;
6825 class expand_operand ops[4];
6826 create_output_operand (&ops[0], target, insn_mode);
6827 create_fixed_operand (&ops[1], arg1_rtx);
6828 create_fixed_operand (&ops[2], arg2_rtx);
6829 create_integer_operand (&ops[3], align);
6830 if (maybe_expand_insn (icode, 4, ops))
6831 return ops[0].value;
6832 return NULL_RTX;
6835 /* Expand expression EXP, which is a call to the memcmp built-in function.
6836 Return NULL_RTX if we failed and the caller should emit a normal call,
6837 otherwise try to get the result in TARGET, if convenient.
6838 RESULT_EQ is true if we can relax the returned value to be either zero
6839 or nonzero, without caring about the sign. */
6841 static rtx
6842 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
6844 if (!validate_arglist (exp,
6845 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6846 return NULL_RTX;
6848 tree arg1 = CALL_EXPR_ARG (exp, 0);
6849 tree arg2 = CALL_EXPR_ARG (exp, 1);
6850 tree len = CALL_EXPR_ARG (exp, 2);
6852 /* Diagnose calls where the specified length exceeds the size of either
6853 object. */
6854 if (!check_read_access (exp, arg1, len, 0)
6855 || !check_read_access (exp, arg2, len, 0))
6856 return NULL_RTX;
6858 /* Due to the performance benefit, always inline the calls first
6859 when result_eq is false. */
6860 rtx result = NULL_RTX;
6861 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
6862 if (!result_eq && fcode != BUILT_IN_BCMP)
6864 result = inline_expand_builtin_bytecmp (exp, target);
6865 if (result)
6866 return result;
6869 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6870 location_t loc = EXPR_LOCATION (exp);
6872 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6873 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6875 /* If we don't have POINTER_TYPE, call the function. */
6876 if (arg1_align == 0 || arg2_align == 0)
6877 return NULL_RTX;
6879 rtx arg1_rtx = get_memory_rtx (arg1, len);
6880 rtx arg2_rtx = get_memory_rtx (arg2, len);
6881 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
6883 /* Set MEM_SIZE as appropriate. */
6884 if (CONST_INT_P (len_rtx))
6886 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6887 set_mem_size (arg2_rtx, INTVAL (len_rtx));
6890 by_pieces_constfn constfn = NULL;
6892 /* Try to get the byte representation of the constant ARG2 (or, only
6893 when the function's result is used for equality to zero, ARG1)
6894 points to, with its byte size in NBYTES. */
6895 unsigned HOST_WIDE_INT nbytes;
6896 const char *rep = getbyterep (arg2, &nbytes);
6897 if (result_eq && rep == NULL)
6899 /* For equality to zero the arguments are interchangeable. */
6900 rep = getbyterep (arg1, &nbytes);
6901 if (rep != NULL)
6902 std::swap (arg1_rtx, arg2_rtx);
6905 /* If the function's constant bound LEN_RTX is less than or equal
6906 to the byte size of the representation of the constant argument,
6907 and if block move would be done by pieces, we can avoid loading
6908 the bytes from memory and only store the computed constant result. */
6909 if (rep
6910 && CONST_INT_P (len_rtx)
6911 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
6912 constfn = builtin_memcpy_read_str;
6914 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6915 TREE_TYPE (len), target,
6916 result_eq, constfn,
6917 CONST_CAST (char *, rep));
6919 if (result)
6921 /* Return the value in the proper mode for this function. */
6922 if (GET_MODE (result) == mode)
6923 return result;
6925 if (target != 0)
6927 convert_move (target, result, 0);
6928 return target;
6931 return convert_to_mode (mode, result, 0);
6934 return NULL_RTX;
6937 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
6938 if we failed; the caller should emit a normal call, otherwise try to get
6939 the result in TARGET, if convenient. */
6941 static rtx
6942 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
6944 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6945 return NULL_RTX;
6947 tree arg1 = CALL_EXPR_ARG (exp, 0);
6948 tree arg2 = CALL_EXPR_ARG (exp, 1);
6950 if (!check_read_access (exp, arg1)
6951 || !check_read_access (exp, arg2))
6952 return NULL_RTX;
6954 /* Due to the performance benefit, always inline the calls first. */
6955 rtx result = NULL_RTX;
6956 result = inline_expand_builtin_bytecmp (exp, target);
6957 if (result)
6958 return result;
6960 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
6961 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6962 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
6963 return NULL_RTX;
6965 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6966 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6968 /* If we don't have POINTER_TYPE, call the function. */
6969 if (arg1_align == 0 || arg2_align == 0)
6970 return NULL_RTX;
6972 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
6973 arg1 = builtin_save_expr (arg1);
6974 arg2 = builtin_save_expr (arg2);
6976 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
6977 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
6979 /* Try to call cmpstrsi. */
6980 if (cmpstr_icode != CODE_FOR_nothing)
6981 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
6982 MIN (arg1_align, arg2_align));
6984 /* Try to determine at least one length and call cmpstrnsi. */
6985 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6987 tree len;
6988 rtx arg3_rtx;
6990 tree len1 = c_strlen (arg1, 1);
6991 tree len2 = c_strlen (arg2, 1);
6993 if (len1)
6994 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
6995 if (len2)
6996 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
6998 /* If we don't have a constant length for the first, use the length
6999 of the second, if we know it. We don't require a constant for
7000 this case; some cost analysis could be done if both are available
7001 but neither is constant. For now, assume they're equally cheap,
7002 unless one has side effects. If both strings have constant lengths,
7003 use the smaller. */
7005 if (!len1)
7006 len = len2;
7007 else if (!len2)
7008 len = len1;
7009 else if (TREE_SIDE_EFFECTS (len1))
7010 len = len2;
7011 else if (TREE_SIDE_EFFECTS (len2))
7012 len = len1;
7013 else if (TREE_CODE (len1) != INTEGER_CST)
7014 len = len2;
7015 else if (TREE_CODE (len2) != INTEGER_CST)
7016 len = len1;
7017 else if (tree_int_cst_lt (len1, len2))
7018 len = len1;
7019 else
7020 len = len2;
7022 /* If both arguments have side effects, we cannot optimize. */
7023 if (len && !TREE_SIDE_EFFECTS (len))
7025 arg3_rtx = expand_normal (len);
7026 result = expand_cmpstrn_or_cmpmem
7027 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
7028 arg3_rtx, MIN (arg1_align, arg2_align));
7032 tree fndecl = get_callee_fndecl (exp);
7033 if (result)
7035 /* Check to see if the argument was declared attribute nonstring
7036 and if so, issue a warning since at this point it's not known
7037 to be nul-terminated. */
7038 maybe_warn_nonstring_arg (fndecl, exp);
7040 /* Return the value in the proper mode for this function. */
7041 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7042 if (GET_MODE (result) == mode)
7043 return result;
7044 if (target == 0)
7045 return convert_to_mode (mode, result, 0);
7046 convert_move (target, result, 0);
7047 return target;
7050 /* Expand the library call ourselves using a stabilized argument
7051 list to avoid re-evaluating the function's arguments twice. */
7052 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
7053 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7054 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
7055 return expand_call (fn, target, target == const0_rtx);
7058 /* Expand expression EXP, which is a call to the strncmp builtin. Return
7059 NULL_RTX if we failed; the caller should emit a normal call, otherwise
7060 try to get the result in TARGET, if convenient. */
7062 static rtx
7063 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
7064 ATTRIBUTE_UNUSED machine_mode mode)
7066 if (!validate_arglist (exp,
7067 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7068 return NULL_RTX;
7070 tree arg1 = CALL_EXPR_ARG (exp, 0);
7071 tree arg2 = CALL_EXPR_ARG (exp, 1);
7072 tree arg3 = CALL_EXPR_ARG (exp, 2);
7074 if (!check_nul_terminated_array (exp, arg1, arg3)
7075 || !check_nul_terminated_array (exp, arg2, arg3))
7076 return NULL_RTX;
7078 location_t loc = tree_inlined_location (exp);
7079 tree len1 = c_strlen (arg1, 1);
7080 tree len2 = c_strlen (arg2, 1);
7082 if (!len1 || !len2)
7084 /* Check to see if the argument was declared attribute nonstring
7085 and if so, issue a warning since at this point it's not known
7086 to be nul-terminated. */
7087 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
7088 && !len1 && !len2)
7090 /* A strncmp read is constrained not just by the bound but
7091 also by the length of the shorter string. Specifying
7092 a bound that's larger than the size of either array makes
7093 no sense and is likely a bug. When the length of neither
7094 of the two strings is known but the sizes of both of
7095 the arrays they are stored in is, issue a warning if
7096 the bound is larger than the size of the larger
7097 of the two arrays. */
7099 access_ref ref1 (arg3, true);
7100 access_ref ref2 (arg3, true);
7102 tree bndrng[2] = { NULL_TREE, NULL_TREE };
7103 get_size_range (arg3, bndrng, ref1.bndrng);
7105 tree size1 = compute_objsize (arg1, 1, &ref1);
7106 tree size2 = compute_objsize (arg2, 1, &ref2);
7107 tree func = get_callee_fndecl (exp);
7109 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
7111 offset_int rem1 = ref1.size_remaining ();
7112 offset_int rem2 = ref2.size_remaining ();
7113 if (rem1 == 0 || rem2 == 0)
7114 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7115 bndrng, integer_zero_node);
7116 else
7118 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
7119 if (maxrem < wi::to_offset (bndrng[0]))
7120 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
7121 func, bndrng,
7122 wide_int_to_tree (sizetype, maxrem));
7125 else if (bndrng[0]
7126 && !integer_zerop (bndrng[0])
7127 && ((size1 && integer_zerop (size1))
7128 || (size2 && integer_zerop (size2))))
7129 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7130 bndrng, integer_zero_node);
7134 /* Due to the performance benefit, always try to inline these calls first. */
7135 rtx result = NULL_RTX;
7136 result = inline_expand_builtin_bytecmp (exp, target);
7137 if (result)
7138 return result;
7140 /* If c_strlen can determine an expression for one of the string
7141 lengths, and it doesn't have side effects, then emit cmpstrnsi
7142 using length MIN(strlen(string)+1, arg3). */
7143 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7144 if (cmpstrn_icode == CODE_FOR_nothing)
7145 return NULL_RTX;
7147 tree len;
7149 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7150 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7152 if (len1)
7153 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7154 if (len2)
7155 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7157 tree len3 = fold_convert_loc (loc, sizetype, arg3);
7159 /* If we don't have a constant length for the first, use the length
7160 of the second, if we know it. If neither string is constant length,
7161 use the given length argument. We don't require a constant for
7162 this case; some cost analysis could be done if both are available
7163 but neither is constant. For now, assume they're equally cheap,
7164 unless one has side effects. If both strings have constant lengths,
7165 use the smaller. */
7167 if (!len1 && !len2)
7168 len = len3;
7169 else if (!len1)
7170 len = len2;
7171 else if (!len2)
7172 len = len1;
7173 else if (TREE_SIDE_EFFECTS (len1))
7174 len = len2;
7175 else if (TREE_SIDE_EFFECTS (len2))
7176 len = len1;
7177 else if (TREE_CODE (len1) != INTEGER_CST)
7178 len = len2;
7179 else if (TREE_CODE (len2) != INTEGER_CST)
7180 len = len1;
7181 else if (tree_int_cst_lt (len1, len2))
7182 len = len1;
7183 else
7184 len = len2;
7186 /* If we are not using the given length, we must incorporate it here.
7187 The actual new length parameter will be MIN(len,arg3) in this case. */
7188 if (len != len3)
7190 len = fold_convert_loc (loc, sizetype, len);
7191 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
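/* For illustration (an instance of the logic above, not extra code):
   given a call such as

     strncmp (s, "abc", 100)

   with S non-constant, c_strlen yields LEN2 == 3, the adjustment above
   makes LEN2 == 4, and since LEN != LEN3 the final bound becomes
   MIN (4, 100) == 4, so the comparison never reads past the terminating
   nul of the constant string.  */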
7193 rtx arg1_rtx = get_memory_rtx (arg1, len);
7194 rtx arg2_rtx = get_memory_rtx (arg2, len);
7195 rtx arg3_rtx = expand_normal (len);
7196 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
7197 arg2_rtx, TREE_TYPE (len), arg3_rtx,
7198 MIN (arg1_align, arg2_align));
7200 tree fndecl = get_callee_fndecl (exp);
7201 if (result)
7203 /* Return the value in the proper mode for this function. */
7204 mode = TYPE_MODE (TREE_TYPE (exp));
7205 if (GET_MODE (result) == mode)
7206 return result;
7207 if (target == 0)
7208 return convert_to_mode (mode, result, 0);
7209 convert_move (target, result, 0);
7210 return target;
7213 /* Expand the library call ourselves using a stabilized argument
7214 list to avoid re-evaluating the function's arguments twice. */
7215 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
7216 if (TREE_NO_WARNING (exp))
7217 TREE_NO_WARNING (call) = true;
7218 gcc_assert (TREE_CODE (call) == CALL_EXPR);
7219 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
7220 return expand_call (call, target, target == const0_rtx);
7223 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
7224 if that's convenient. */
7227 expand_builtin_saveregs (void)
7229 rtx val;
7230 rtx_insn *seq;
7232 /* Don't do __builtin_saveregs more than once in a function.
7233 Save the result of the first call and reuse it. */
7234 if (saveregs_value != 0)
7235 return saveregs_value;
7237 /* When this function is called, it means that registers must be
7238 saved on entry to this function. So we migrate the call to the
7239 first insn of this function. */
7241 start_sequence ();
7243 /* Do whatever the machine needs done in this case. */
7244 val = targetm.calls.expand_builtin_saveregs ();
7246 seq = get_insns ();
7247 end_sequence ();
7249 saveregs_value = val;
7251 /* Put the insns after the NOTE that starts the function. If this
7252 is inside a start_sequence, make the outer-level insn chain current, so
7253 the code is placed at the start of the function. */
7254 push_topmost_sequence ();
7255 emit_insn_after (seq, entry_of_function ());
7256 pop_topmost_sequence ();
7258 return val;
7261 /* Expand a call to __builtin_next_arg. */
7263 static rtx
7264 expand_builtin_next_arg (void)
7266 /* Checking arguments is already done in fold_builtin_next_arg
7267 that must be called before this function. */
7268 return expand_binop (ptr_mode, add_optab,
7269 crtl->args.internal_arg_pointer,
7270 crtl->args.arg_offset_rtx,
7271 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7274 /* Make it easier for the backends by protecting the valist argument
7275 from multiple evaluations. */
7277 static tree
7278 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
7280 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
7282 /* The current way of determining the type of valist is completely
7283 bogus. We should have the information on the va builtin instead. */
7284 if (!vatype)
7285 vatype = targetm.fn_abi_va_list (cfun->decl);
7287 if (TREE_CODE (vatype) == ARRAY_TYPE)
7289 if (TREE_SIDE_EFFECTS (valist))
7290 valist = save_expr (valist);
7292 /* For this case, the backends will be expecting a pointer to
7293 vatype, but it's possible we've actually been given an array
7294 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
7295 So fix it. */
7296 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
7298 tree p1 = build_pointer_type (TREE_TYPE (vatype));
7299 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
7302 else
7304 tree pt = build_pointer_type (vatype);
7306 if (! needs_lvalue)
7308 if (! TREE_SIDE_EFFECTS (valist))
7309 return valist;
7311 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
7312 TREE_SIDE_EFFECTS (valist) = 1;
7315 if (TREE_SIDE_EFFECTS (valist))
7316 valist = save_expr (valist);
7317 valist = fold_build2_loc (loc, MEM_REF,
7318 vatype, valist, build_int_cst (pt, 0));
7321 return valist;
7324 /* The "standard" definition of va_list is void*. */
7326 tree
7327 std_build_builtin_va_list (void)
7329 return ptr_type_node;
7332 /* The "standard" abi va_list is va_list_type_node. */
7334 tree
7335 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
7337 return va_list_type_node;
7340 /* The "standard" type of va_list is va_list_type_node. */
7342 tree
7343 std_canonical_va_list_type (tree type)
7345 tree wtype, htype;
7347 wtype = va_list_type_node;
7348 htype = type;
7350 if (TREE_CODE (wtype) == ARRAY_TYPE)
7352 /* If va_list is an array type, the argument may have decayed
7353 to a pointer type, e.g. by being passed to another function.
7354 In that case, unwrap both types so that we can compare the
7355 underlying records. */
7356 if (TREE_CODE (htype) == ARRAY_TYPE
7357 || POINTER_TYPE_P (htype))
7359 wtype = TREE_TYPE (wtype);
7360 htype = TREE_TYPE (htype);
7363 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7364 return va_list_type_node;
7366 return NULL_TREE;
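/* For example, on targets whose va_list is an array type, such as the
   x86-64 SysV ABI where the builtin type is effectively

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list object passed to another function decays to a pointer to
   struct __va_list_tag.  The unwrapping above lets both the array form
   and the decayed pointer form match the canonical type.  */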
7369 /* The "standard" implementation of va_start: just assign `nextarg' to
7370 the variable. */
7372 void
7373 std_expand_builtin_va_start (tree valist, rtx nextarg)
7375 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
7376 convert_move (va_r, nextarg, 0);
7379 /* Expand EXP, a call to __builtin_va_start. */
7381 static rtx
7382 expand_builtin_va_start (tree exp)
7384 rtx nextarg;
7385 tree valist;
7386 location_t loc = EXPR_LOCATION (exp);
7388 if (call_expr_nargs (exp) < 2)
7390 error_at (loc, "too few arguments to function %<va_start%>");
7391 return const0_rtx;
7394 if (fold_builtin_next_arg (exp, true))
7395 return const0_rtx;
7397 nextarg = expand_builtin_next_arg ();
7398 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
7400 if (targetm.expand_builtin_va_start)
7401 targetm.expand_builtin_va_start (valist, nextarg);
7402 else
7403 std_expand_builtin_va_start (valist, nextarg);
7405 return const0_rtx;
7408 /* Expand EXP, a call to __builtin_va_end. */
7410 static rtx
7411 expand_builtin_va_end (tree exp)
7413 tree valist = CALL_EXPR_ARG (exp, 0);
7415 /* Evaluate for side effects, if needed. I hate macros that don't
7416 do that. */
7417 if (TREE_SIDE_EFFECTS (valist))
7418 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
7420 return const0_rtx;
7423 /* Expand EXP, a call to __builtin_va_copy. We do this as a
7424 builtin rather than just as an assignment in stdarg.h because of the
7425 nastiness of array-type va_list types. */
7427 static rtx
7428 expand_builtin_va_copy (tree exp)
7430 tree dst, src, t;
7431 location_t loc = EXPR_LOCATION (exp);
7433 dst = CALL_EXPR_ARG (exp, 0);
7434 src = CALL_EXPR_ARG (exp, 1);
7436 dst = stabilize_va_list_loc (loc, dst, 1);
7437 src = stabilize_va_list_loc (loc, src, 0);
7439 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
7441 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
7443 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
7444 TREE_SIDE_EFFECTS (t) = 1;
7445 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7447 else
7449 rtx dstb, srcb, size;
7451 /* Evaluate to pointers. */
7452 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
7453 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
7454 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
7455 NULL_RTX, VOIDmode, EXPAND_NORMAL);
7457 dstb = convert_memory_address (Pmode, dstb);
7458 srcb = convert_memory_address (Pmode, srcb);
7460 /* "Dereference" to BLKmode memories. */
7461 dstb = gen_rtx_MEM (BLKmode, dstb);
7462 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
7463 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7464 srcb = gen_rtx_MEM (BLKmode, srcb);
7465 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
7466 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7468 /* Copy. */
7469 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
7472 return const0_rtx;
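/* In the array-type case the simple assignment above is not usable, so
   the copy is done as a block move of the whole record; e.g. on the
   x86-64 SysV ABI this copies the full __va_list_tag structure
   (24 bytes there) from *SRC to *DST.  */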
7475 /* Expand a call to one of the builtin functions __builtin_frame_address or
7476 __builtin_return_address. */
7478 static rtx
7479 expand_builtin_frame_address (tree fndecl, tree exp)
7481 /* The argument must be a nonnegative integer constant.
7482 It counts the number of frames to scan up the stack.
7483 The value is either the frame pointer value or the return
7484 address saved in that frame. */
7485 if (call_expr_nargs (exp) == 0)
7486 /* Warning about missing arg was already issued. */
7487 return const0_rtx;
7488 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
7490 error ("invalid argument to %qD", fndecl);
7491 return const0_rtx;
7493 else
7495 /* Number of frames to scan up the stack. */
7496 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
7498 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
7500 /* Some ports cannot access arbitrary stack frames. */
7501 if (tem == NULL)
7503 warning (0, "unsupported argument to %qD", fndecl);
7504 return const0_rtx;
7507 if (count)
7509 /* Warn since no effort is made to ensure that any frame
7510 beyond the current one exists or can be safely reached. */
7511 warning (OPT_Wframe_address, "calling %qD with "
7512 "a nonzero argument is unsafe", fndecl);
7515 /* For __builtin_frame_address, return what we've got. */
7516 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7517 return tem;
7519 if (!REG_P (tem)
7520 && ! CONSTANT_P (tem))
7521 tem = copy_addr_to_reg (tem);
7522 return tem;
7526 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
7527 failed and the caller should emit a normal call. */
7529 static rtx
7530 expand_builtin_alloca (tree exp)
7532 rtx op0;
7533 rtx result;
7534 unsigned int align;
7535 tree fndecl = get_callee_fndecl (exp);
7536 HOST_WIDE_INT max_size;
7537 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7538 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
7539 bool valid_arglist
7540 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7541 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
7542 VOID_TYPE)
7543 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
7544 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
7545 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
7547 if (!valid_arglist)
7548 return NULL_RTX;
7550 if ((alloca_for_var
7551 && warn_vla_limit >= HOST_WIDE_INT_MAX
7552 && warn_alloc_size_limit < warn_vla_limit)
7553 || (!alloca_for_var
7554 && warn_alloca_limit >= HOST_WIDE_INT_MAX
7555 && warn_alloc_size_limit < warn_alloca_limit
7558 /* -Walloca-larger-than and -Wvla-larger-than settings of
7559 less than HOST_WIDE_INT_MAX override the more general
7560 -Walloc-size-larger-than so unless either of the former
7561 options is smaller than the last one (which would imply
7562 that the call was already checked), check the alloca
7563 arguments for overflow. */
7564 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
7565 int idx[] = { 0, -1 };
7566 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
7569 /* Compute the argument. */
7570 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
7572 /* Compute the alignment. */
7573 align = (fcode == BUILT_IN_ALLOCA
7574 ? BIGGEST_ALIGNMENT
7575 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
7577 /* Compute the maximum size. */
7578 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7579 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
7580 : -1);
7582 /* Allocate the desired space. If the allocation stems from the declaration
7583 of a variable-sized object, it cannot accumulate. */
7584 result
7585 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
7586 result = convert_memory_address (ptr_mode, result);
7588 /* Dynamic allocations for variables are recorded during gimplification. */
7589 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
7590 record_dynamic_alloc (exp);
7592 return result;
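/* As a concrete instance, a call such as

     __builtin_alloca_with_align (n, 128)

   arrives here with FCODE == BUILT_IN_ALLOCA_WITH_ALIGN; the second
   argument is the requested alignment in bits, so ALIGN above becomes
   128 and MAX_SIZE stays -1 (no bound).  */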
7595 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
7596 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
7597 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
7598 handle_builtin_stack_restore function. */
7600 static rtx
7601 expand_asan_emit_allocas_unpoison (tree exp)
7603 tree arg0 = CALL_EXPR_ARG (exp, 0);
7604 tree arg1 = CALL_EXPR_ARG (exp, 1);
7605 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7606 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7607 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
7608 stack_pointer_rtx, NULL_RTX, 0,
7609 OPTAB_LIB_WIDEN);
7610 off = convert_modes (ptr_mode, Pmode, off, 0);
7611 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
7612 OPTAB_LIB_WIDEN);
7613 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
7614 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
7615 top, ptr_mode, bot, ptr_mode);
7616 return ret;
7619 /* Expand a call to bswap builtin in EXP.
7620 Return NULL_RTX if a normal call should be emitted rather than expanding the
7621 function in-line. If convenient, the result should be placed in TARGET.
7622 SUBTARGET may be used as the target for computing one of EXP's operands. */
7624 static rtx
7625 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
7626 rtx subtarget)
7628 tree arg;
7629 rtx op0;
7631 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7632 return NULL_RTX;
7634 arg = CALL_EXPR_ARG (exp, 0);
7635 op0 = expand_expr (arg,
7636 subtarget && GET_MODE (subtarget) == target_mode
7637 ? subtarget : NULL_RTX,
7638 target_mode, EXPAND_NORMAL);
7639 if (GET_MODE (op0) != target_mode)
7640 op0 = convert_to_mode (target_mode, op0, 1);
7642 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
7644 gcc_assert (target);
7646 return convert_to_mode (target_mode, target, 1);
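/* E.g. __builtin_bswap32 (0x11223344) yields 0x44332211.  When the
   target provides a bswapsi2 pattern this becomes a single instruction;
   the gcc_assert above relies on expand_unop finding some expansion.  */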
7649 /* Expand a call to a unary builtin in EXP.
7650 Return NULL_RTX if a normal call should be emitted rather than expanding the
7651 function in-line. If convenient, the result should be placed in TARGET.
7652 SUBTARGET may be used as the target for computing one of EXP's operands. */
7654 static rtx
7655 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
7656 rtx subtarget, optab op_optab)
7658 rtx op0;
7660 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7661 return NULL_RTX;
7663 /* Compute the argument. */
7664 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
7665 (subtarget
7666 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
7667 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
7668 VOIDmode, EXPAND_NORMAL);
7669 /* Compute op, into TARGET if possible.
7670 Set TARGET to wherever the result comes back. */
7671 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
7672 op_optab, op0, target, op_optab != clrsb_optab);
7673 gcc_assert (target);
7675 return convert_to_mode (target_mode, target, 0);
7678 /* Expand a call to __builtin_expect. We just return our argument
7679 as the builtin_expect semantics should already have been applied by
7680 the tree branch prediction pass. */
7682 static rtx
7683 expand_builtin_expect (tree exp, rtx target)
7685 tree arg;
7687 if (call_expr_nargs (exp) < 2)
7688 return const0_rtx;
7689 arg = CALL_EXPR_ARG (exp, 0);
7691 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7692 /* When guessing was done, the hints should be already stripped away. */
7693 gcc_assert (!flag_guess_branch_prob
7694 || optimize == 0 || seen_error ());
7695 return target;
7698 /* Expand a call to __builtin_expect_with_probability. We just return our
7699 argument as the builtin_expect semantics should already have been applied by
7700 the tree branch prediction pass. */
7702 static rtx
7703 expand_builtin_expect_with_probability (tree exp, rtx target)
7705 tree arg;
7707 if (call_expr_nargs (exp) < 3)
7708 return const0_rtx;
7709 arg = CALL_EXPR_ARG (exp, 0);
7711 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7712 /* When guessing was done, the hints should be already stripped away. */
7713 gcc_assert (!flag_guess_branch_prob
7714 || optimize == 0 || seen_error ());
7715 return target;
7719 /* Expand a call to __builtin_assume_aligned. We just return our first
7720 argument as the builtin_assume_aligned semantic should've been already
7721 executed by CCP. */
7723 static rtx
7724 expand_builtin_assume_aligned (tree exp, rtx target)
7726 if (call_expr_nargs (exp) < 2)
7727 return const0_rtx;
7728 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
7729 EXPAND_NORMAL);
7730 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
7731 && (call_expr_nargs (exp) < 3
7732 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
7733 return target;
7736 void
7737 expand_builtin_trap (void)
7739 if (targetm.have_trap ())
7741 rtx_insn *insn = emit_insn (targetm.gen_trap ());
7742 /* For trap insns when not accumulating outgoing args force
7743 REG_ARGS_SIZE note to prevent crossjumping of calls with
7744 different args sizes. */
7745 if (!ACCUMULATE_OUTGOING_ARGS)
7746 add_args_size_note (insn, stack_pointer_delta);
7748 else
7750 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
7751 tree call_expr = build_call_expr (fn, 0);
7752 expand_call (call_expr, NULL_RTX, false);
7755 emit_barrier ();
7758 /* Expand a call to __builtin_unreachable. We do nothing except emit
7759 a barrier saying that control flow will not pass here.
7761 It is the responsibility of the program being compiled to ensure
7762 that control flow never reaches __builtin_unreachable. */
7763 static void
7764 expand_builtin_unreachable (void)
7766 emit_barrier ();
7769 /* Expand EXP, a call to fabs, fabsf or fabsl.
7770 Return NULL_RTX if a normal call should be emitted rather than expanding
7771 the function inline. If convenient, the result should be placed
7772 in TARGET. SUBTARGET may be used as the target for computing
7773 the operand. */
7775 static rtx
7776 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7778 machine_mode mode;
7779 tree arg;
7780 rtx op0;
7782 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7783 return NULL_RTX;
7785 arg = CALL_EXPR_ARG (exp, 0);
7786 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7787 mode = TYPE_MODE (TREE_TYPE (arg));
7788 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7789 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7792 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7793 Return NULL if a normal call should be emitted rather than expanding the
7794 function inline. If convenient, the result should be placed in TARGET.
7795 SUBTARGET may be used as the target for computing the operand. */
7797 static rtx
7798 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7800 rtx op0, op1;
7801 tree arg;
7803 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7804 return NULL_RTX;
7806 arg = CALL_EXPR_ARG (exp, 0);
7807 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7809 arg = CALL_EXPR_ARG (exp, 1);
7810 op1 = expand_normal (arg);
7812 return expand_copysign (op0, op1, target);
7815 /* Emit a call to __builtin___clear_cache. */
7817 void
7818 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
7820 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
7821 BUILTIN_ASM_NAME_PTR
7822 (BUILT_IN_CLEAR_CACHE));
7824 emit_library_call (callee,
7825 LCT_NORMAL, VOIDmode,
7826 convert_memory_address (ptr_mode, begin), ptr_mode,
7827 convert_memory_address (ptr_mode, end), ptr_mode);
7830 /* Emit a call to __builtin___clear_cache, unless the target specifies
7831 it as do-nothing. This function can be used by trampoline
7832 finalizers to duplicate the effects of expanding a call to the
7833 clear_cache builtin. */
7835 void
7836 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
7838 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
7839 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
7841 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7842 return;
7845 if (targetm.have_clear_cache ())
7847 /* We have a "clear_cache" insn, and it will handle everything. */
7848 class expand_operand ops[2];
7850 create_address_operand (&ops[0], begin);
7851 create_address_operand (&ops[1], end);
7853 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
7854 return;
7856 else
7858 #ifndef CLEAR_INSN_CACHE
7859 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7860 does nothing. There is no need to call it. Do nothing. */
7861 return;
7862 #endif /* CLEAR_INSN_CACHE */
7865 targetm.calls.emit_call_builtin___clear_cache (begin, end);
7868 /* Expand a call to __builtin___clear_cache. */
7870 static void
7871 expand_builtin___clear_cache (tree exp)
7873 tree begin, end;
7874 rtx begin_rtx, end_rtx;
7876 /* We must not expand to a library call. If we did, any
7877 fallback library function in libgcc that might contain a call to
7878 __builtin___clear_cache() would recurse infinitely. */
7879 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7881 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7882 return;
7885 begin = CALL_EXPR_ARG (exp, 0);
7886 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
7888 end = CALL_EXPR_ARG (exp, 1);
7889 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
7891 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
7894 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7896 static rtx
7897 round_trampoline_addr (rtx tramp)
7899 rtx temp, addend, mask;
7901 /* If we don't need too much alignment, we'll have been guaranteed
7902 proper alignment by get_trampoline_type. */
7903 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7904 return tramp;
7906 /* Round address up to desired boundary. */
7907 temp = gen_reg_rtx (Pmode);
7908 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7909 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
7911 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7912 temp, 0, OPTAB_LIB_WIDEN);
7913 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7914 temp, 0, OPTAB_LIB_WIDEN);
7916 return tramp;
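/* Worked example: with TRAMPOLINE_ALIGNMENT == 64 (bits), ADDEND is 7
   and MASK is -8, so a trampoline at 0x1003 is rounded up to
   (0x1003 + 7) & -8 == 0x1008.  */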
7919 static rtx
7920 expand_builtin_init_trampoline (tree exp, bool onstack)
7922 tree t_tramp, t_func, t_chain;
7923 rtx m_tramp, r_tramp, r_chain, tmp;
7925 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
7926 POINTER_TYPE, VOID_TYPE))
7927 return NULL_RTX;
7929 t_tramp = CALL_EXPR_ARG (exp, 0);
7930 t_func = CALL_EXPR_ARG (exp, 1);
7931 t_chain = CALL_EXPR_ARG (exp, 2);
7933 r_tramp = expand_normal (t_tramp);
7934 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
7935 MEM_NOTRAP_P (m_tramp) = 1;
7937 /* If ONSTACK, the TRAMP argument should be the address of a field
7938 within the local function's FRAME decl. Either way, let's see if
7939 we can fill in the MEM_ATTRs for this memory. */
7940 if (TREE_CODE (t_tramp) == ADDR_EXPR)
7941 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
7943 /* Creator of a heap trampoline is responsible for making sure the
7944 address is aligned to at least STACK_BOUNDARY. Normally malloc
7945 will ensure this anyhow. */
7946 tmp = round_trampoline_addr (r_tramp);
7947 if (tmp != r_tramp)
7949 m_tramp = change_address (m_tramp, BLKmode, tmp);
7950 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
7951 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
7954 /* The FUNC argument should be the address of the nested function.
7955 Extract the actual function decl to pass to the hook. */
7956 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
7957 t_func = TREE_OPERAND (t_func, 0);
7958 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
7960 r_chain = expand_normal (t_chain);
7962 /* Generate insns to initialize the trampoline. */
7963 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
7965 if (onstack)
7967 trampolines_created = 1;
7969 if (targetm.calls.custom_function_descriptors != 0)
7970 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
7971 "trampoline generated for nested function %qD", t_func);
7974 return const0_rtx;
7977 static rtx
7978 expand_builtin_adjust_trampoline (tree exp)
7980 rtx tramp;
7982 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7983 return NULL_RTX;
7985 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7986 tramp = round_trampoline_addr (tramp);
7987 if (targetm.calls.trampoline_adjust_address)
7988 tramp = targetm.calls.trampoline_adjust_address (tramp);
7990 return tramp;
7993 /* Expand a call to the builtin descriptor initialization routine.
7994 A descriptor is made up of a couple of pointers to the static
7995 chain and the code entry in this order. */
7997 static rtx
7998 expand_builtin_init_descriptor (tree exp)
8000 tree t_descr, t_func, t_chain;
8001 rtx m_descr, r_descr, r_func, r_chain;
8003 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
8004 VOID_TYPE))
8005 return NULL_RTX;
8007 t_descr = CALL_EXPR_ARG (exp, 0);
8008 t_func = CALL_EXPR_ARG (exp, 1);
8009 t_chain = CALL_EXPR_ARG (exp, 2);
8011 r_descr = expand_normal (t_descr);
8012 m_descr = gen_rtx_MEM (BLKmode, r_descr);
8013 MEM_NOTRAP_P (m_descr) = 1;
8014 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
8016 r_func = expand_normal (t_func);
8017 r_chain = expand_normal (t_chain);
8019 /* Generate insns to initialize the descriptor. */
8020 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
8021 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
8022 POINTER_SIZE / BITS_PER_UNIT), r_func);
8024 return const0_rtx;
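/* The descriptor therefore ends up as two consecutive pointer-sized
   words in memory:

     descr[0] = static chain value
     descr[1] = code entry point

   in the order stated in the comment above.  */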
8027 /* Expand a call to the builtin descriptor adjustment routine. */
8029 static rtx
8030 expand_builtin_adjust_descriptor (tree exp)
8032 rtx tramp;
8034 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8035 return NULL_RTX;
8037 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
8039 /* Unalign the descriptor to allow runtime identification. */
8040 tramp = plus_constant (ptr_mode, tramp,
8041 targetm.calls.custom_function_descriptors);
8043 return force_operand (tramp, NULL_RTX);
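/* TARGETM.CALLS.CUSTOM_FUNCTION_DESCRIPTORS is a small nonzero constant
   (e.g. 1) on targets using this scheme.  Adding it leaves the
   descriptor address misaligned; since real code entry points are more
   strictly aligned, an indirect call can test the low bits to decide
   whether it holds a plain function address or a descriptor.  */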
8046 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
8047 function. The function first checks whether the back end provides
8048 an insn to implement signbit for the respective mode. If not, it
8049 checks whether the floating point format of the value is such that
8050 the sign bit can be extracted. If that is not the case, error out.
8051 EXP is the expression that is a call to the builtin function; if
8052 convenient, the result should be placed in TARGET. */
8053 static rtx
8054 expand_builtin_signbit (tree exp, rtx target)
8056 const struct real_format *fmt;
8057 scalar_float_mode fmode;
8058 scalar_int_mode rmode, imode;
8059 tree arg;
8060 int word, bitpos;
8061 enum insn_code icode;
8062 rtx temp;
8063 location_t loc = EXPR_LOCATION (exp);
8065 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8066 return NULL_RTX;
8068 arg = CALL_EXPR_ARG (exp, 0);
8069 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
8070 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
8071 fmt = REAL_MODE_FORMAT (fmode);
8073 arg = builtin_save_expr (arg);
8075 /* Expand the argument yielding a RTX expression. */
8076 temp = expand_normal (arg);
8078 /* Check if the back end provides an insn that handles signbit for the
8079 argument's mode. */
8080 icode = optab_handler (signbit_optab, fmode);
8081 if (icode != CODE_FOR_nothing)
8083 rtx_insn *last = get_last_insn ();
8084 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8085 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
8086 return target;
8087 delete_insns_since (last);
8090 /* For floating point formats without a sign bit, implement signbit
8091 as "ARG < 0.0". */
8092 bitpos = fmt->signbit_ro;
8093 if (bitpos < 0)
8095 /* But we can't do this if the format supports signed zero. */
8096 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
8098 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
8099 build_real (TREE_TYPE (arg), dconst0));
8100 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8103 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
8105 imode = int_mode_for_mode (fmode).require ();
8106 temp = gen_lowpart (imode, temp);
8108 else
8110 imode = word_mode;
8111 /* Handle targets with different FP word orders. */
8112 if (FLOAT_WORDS_BIG_ENDIAN)
8113 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
8114 else
8115 word = bitpos / BITS_PER_WORD;
8116 temp = operand_subword_force (temp, word, fmode);
8117 bitpos = bitpos % BITS_PER_WORD;
8120 /* Force the intermediate word_mode (or narrower) result into a
8121 register. This avoids attempting to create paradoxical SUBREGs
8122 of floating point modes below. */
8123 temp = force_reg (imode, temp);
8125 /* If the bitpos is within the "result mode" lowpart, the operation
8126 can be implemented with a single bitwise AND. Otherwise, we need
8127 a right shift and an AND. */
8129 if (bitpos < GET_MODE_BITSIZE (rmode))
8131 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
8133 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
8134 temp = gen_lowpart (rmode, temp);
8135 temp = expand_binop (rmode, and_optab, temp,
8136 immed_wide_int_const (mask, rmode),
8137 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8139 else
8141 /* Perform a logical right shift to place the signbit in the least
8142 significant bit, then truncate the result to the desired mode
8143 and mask just this bit. */
8144 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
8145 temp = gen_lowpart (rmode, temp);
8146 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
8147 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8150 return temp;
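/* Concretely, for IEEE single precision with a 32-bit result mode,
   FMT->signbit_ro is 31, which fits in RMODE, so the first branch above
   computes

     temp & 0x80000000

   whereas a sign bit that does not fit in RMODE takes the second
   branch, (temp >> bitpos) & 1.  */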
8153 /* Expand fork or exec calls. TARGET is the desired target of the
8154 call. EXP is the call. FN is the
8155 identifier of the actual function. IGNORE is nonzero if the
8156 value is to be ignored. */
8158 static rtx
8159 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
8161 tree id, decl;
8162 tree call;
8164 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8166 tree path = CALL_EXPR_ARG (exp, 0);
8167 /* Detect unterminated path. */
8168 if (!check_read_access (exp, path))
8169 return NULL_RTX;
8171 /* Also detect unterminated first argument. */
8172 switch (DECL_FUNCTION_CODE (fn))
8174 case BUILT_IN_EXECL:
8175 case BUILT_IN_EXECLE:
8176 case BUILT_IN_EXECLP:
8177 if (!check_read_access (exp, path))
8178 return NULL_RTX;
8179 default:
8180 break;
8185 /* If we are not profiling, just call the function. */
8186 if (!profile_arc_flag)
8187 return NULL_RTX;
8189 /* Otherwise call the wrapper. This should be equivalent for the rest of
8190 the compiler, so the code does not diverge, and the wrapper may run the
8191 code necessary for keeping the profiling sane. */
8193 switch (DECL_FUNCTION_CODE (fn))
8195 case BUILT_IN_FORK:
8196 id = get_identifier ("__gcov_fork");
8197 break;
8199 case BUILT_IN_EXECL:
8200 id = get_identifier ("__gcov_execl");
8201 break;
8203 case BUILT_IN_EXECV:
8204 id = get_identifier ("__gcov_execv");
8205 break;
8207 case BUILT_IN_EXECLP:
8208 id = get_identifier ("__gcov_execlp");
8209 break;
8211 case BUILT_IN_EXECLE:
8212 id = get_identifier ("__gcov_execle");
8213 break;
8215 case BUILT_IN_EXECVP:
8216 id = get_identifier ("__gcov_execvp");
8217 break;
8219 case BUILT_IN_EXECVE:
8220 id = get_identifier ("__gcov_execve");
8221 break;
8223 default:
8224 gcc_unreachable ();
8227 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8228 FUNCTION_DECL, id, TREE_TYPE (fn));
8229 DECL_EXTERNAL (decl) = 1;
8230 TREE_PUBLIC (decl) = 1;
8231 DECL_ARTIFICIAL (decl) = 1;
8232 TREE_NOTHROW (decl) = 1;
8233 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8234 DECL_VISIBILITY_SPECIFIED (decl) = 1;
8235 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
8236 return expand_call (call, target, ignore);
8241 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
8242 the pointer in these functions is void*, the tree optimizers may remove
8243 casts. The mode computed in expand_builtin isn't reliable either, due
8244 to __sync_bool_compare_and_swap.
8246 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
8247 group of builtins. This gives us log2 of the mode size. */
8249 static inline machine_mode
8250 get_builtin_sync_mode (int fcode_diff)
8252 /* The size is not negotiable, so ask not to get BLKmode in return
8253 if the target indicates that a smaller size would be better. */
8254 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
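/* E.g. __sync_fetch_and_add_4 has FCODE_DIFF == 2 relative to the _1
   variant, giving BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on
   typical targets.  */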
8257 /* Expand the memory expression LOC and return the appropriate memory operand
8258 for the builtin_sync operations. */
8260 static rtx
8261 get_builtin_sync_mem (tree loc, machine_mode mode)
8263 rtx addr, mem;
8264 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8265 ? TREE_TYPE (TREE_TYPE (loc))
8266 : TREE_TYPE (loc));
8267 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
8269 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
8270 addr = convert_memory_address (addr_mode, addr);
8272 /* Note that we explicitly do not want any alias information for this
8273 memory, so that we kill all other live memories. Otherwise we don't
8274 satisfy the full barrier semantics of the intrinsic. */
8275 mem = gen_rtx_MEM (mode, addr);
8277 set_mem_addr_space (mem, addr_space);
8279 mem = validize_mem (mem);
8281 /* The alignment needs to be at least that of the mode. */
8282 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
8283 get_pointer_alignment (loc)));
8284 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
8285 MEM_VOLATILE_P (mem) = 1;
8287 return mem;
8290 /* Make sure an argument is in the right mode.
8291 EXP is the tree argument.
8292 MODE is the mode it should be in. */
8294 static rtx
8295 expand_expr_force_mode (tree exp, machine_mode mode)
8297 rtx val;
8298 machine_mode old_mode;
8300 if (TREE_CODE (exp) == SSA_NAME
8301 && TYPE_MODE (TREE_TYPE (exp)) != mode)
8303 /* Undo argument promotion if possible, as combine might not
8304 be able to do it later due to MEM_VOLATILE_P uses in the
8305 patterns. */
8306 gimple *g = get_gimple_for_ssa_name (exp);
8307 if (g && gimple_assign_cast_p (g))
8309 tree rhs = gimple_assign_rhs1 (g);
8310 tree_code code = gimple_assign_rhs_code (g);
8311 if (CONVERT_EXPR_CODE_P (code)
8312 && TYPE_MODE (TREE_TYPE (rhs)) == mode
8313 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
8314 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
8315 && (TYPE_PRECISION (TREE_TYPE (exp))
8316 > TYPE_PRECISION (TREE_TYPE (rhs))))
8317 exp = rhs;
8321 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
8322 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
8323 of CONST_INTs, where we know the old_mode only from the call argument. */
8325 old_mode = GET_MODE (val);
8326 if (old_mode == VOIDmode)
8327 old_mode = TYPE_MODE (TREE_TYPE (exp));
8328 val = convert_modes (mode, old_mode, val, 1);
8329 return val;
8333 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
8334 EXP is the CALL_EXPR. CODE is the rtx code
8335 that corresponds to the arithmetic or logical operation from the name;
8336 an exception here is that NOT actually means NAND. TARGET is an optional
8337 place for us to store the results; AFTER is true if this is the
8338 fetch_and_xxx form. */
8340 static rtx
8341 expand_builtin_sync_operation (machine_mode mode, tree exp,
8342 enum rtx_code code, bool after,
8343 rtx target)
8345 rtx val, mem;
8346 location_t loc = EXPR_LOCATION (exp);
8348 if (code == NOT && warn_sync_nand)
8350 tree fndecl = get_callee_fndecl (exp);
8351 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8353 static bool warned_f_a_n, warned_n_a_f;
8355 switch (fcode)
8357 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8358 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8359 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8360 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8361 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8362 if (warned_f_a_n)
8363 break;
8365 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
8366 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8367 warned_f_a_n = true;
8368 break;
8370 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8371 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8372 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8373 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8374 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8375 if (warned_n_a_f)
8376 break;
8378 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
8379 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8380 warned_n_a_f = true;
8381 break;
8383 default:
8384 gcc_unreachable ();
8388 /* Expand the operands. */
8389 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8390 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8392 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
8393 after);
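/* The GCC 4.4 change mentioned above: before 4.4,

     __sync_fetch_and_nand (p, v)

   behaved as *p = ~*p & v; from 4.4 onward the operation is the
   documented NAND, *p = ~(*p & v), with the fetch_and form returning
   the old value and the nand_and_fetch form the new one.  */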
8396 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
8397 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
8398 true if this is the boolean form. TARGET is a place for us to store the
8399 results; this is NOT optional if IS_BOOL is true. */
8401 static rtx
8402 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
8403 bool is_bool, rtx target)
8405 rtx old_val, new_val, mem;
8406 rtx *pbool, *poval;
8408 /* Expand the operands. */
8409 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8410 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8411 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8413 pbool = poval = NULL;
8414 if (target != const0_rtx)
8416 if (is_bool)
8417 pbool = &target;
8418 else
8419 poval = &target;
8421 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
8422 false, MEMMODEL_SYNC_SEQ_CST,
8423 MEMMODEL_SYNC_SEQ_CST))
8424 return NULL_RTX;
8426 return target;
8429 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8430 general form is actually an atomic exchange, and some targets only
8431 support a reduced form with the second argument being a constant 1.
8432 EXP is the CALL_EXPR; TARGET is an optional place for us to store
8433 the results. */
8435 static rtx
8436 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
8437 rtx target)
8439 rtx val, mem;
8441 /* Expand the operands. */
8442 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8443 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8445 return expand_sync_lock_test_and_set (target, mem, val);
8448 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8450 static void
8451 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
8453 rtx mem;
8455 /* Expand the operands. */
8456 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8458 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
8461 /* Given an integer representing an ``enum memmodel'', verify its
8462 correctness and return the memory model enum. */
8464 static enum memmodel
8465 get_memmodel (tree exp)
8467 rtx op;
8468 unsigned HOST_WIDE_INT val;
8469 location_t loc
8470 = expansion_point_location_if_in_system_header (input_location);
8472 /* If the parameter is not a constant, it's a run time value so we'll just
8473 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
8474 if (TREE_CODE (exp) != INTEGER_CST)
8475 return MEMMODEL_SEQ_CST;
8477 op = expand_normal (exp);
8479 val = INTVAL (op);
8480 if (targetm.memmodel_check)
8481 val = targetm.memmodel_check (val);
8482 else if (val & ~MEMMODEL_MASK)
8484 warning_at (loc, OPT_Winvalid_memory_model,
8485 "unknown architecture specifier in memory model to builtin");
8486 return MEMMODEL_SEQ_CST;
8489 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
8490 if (memmodel_base (val) >= MEMMODEL_LAST)
8492 warning_at (loc, OPT_Winvalid_memory_model,
8493 "invalid memory model argument to builtin");
8494 return MEMMODEL_SEQ_CST;
8497 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
8498 be conservative and promote consume to acquire. */
8499 if (val == MEMMODEL_CONSUME)
8500 val = MEMMODEL_ACQUIRE;
8502 return (enum memmodel) val;
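/* The values accepted here are the C11-style constants __ATOMIC_RELAXED
   (0), __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3),
   __ATOMIC_ACQ_REL (4) and __ATOMIC_SEQ_CST (5); per the code above,
   CONSUME is conservatively promoted to ACQUIRE and anything
   unrecognized is treated as SEQ_CST.  */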
8505 /* Expand the __atomic_exchange intrinsic:
8506 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8507 EXP is the CALL_EXPR.
8508 TARGET is an optional place for us to store the results. */
8510 static rtx
8511 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
8513 rtx val, mem;
8514 enum memmodel model;
8516 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8518 if (!flag_inline_atomics)
8519 return NULL_RTX;
8521 /* Expand the operands. */
8522 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8523 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8525 return expand_atomic_exchange (target, mem, val, model);
8528 /* Expand the __atomic_compare_exchange intrinsic:
8529 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
8530 TYPE desired, BOOL weak,
8531 enum memmodel success,
8532 enum memmodel failure)
8533 EXP is the CALL_EXPR.
8534 TARGET is an optional place for us to store the results. */
8536 static rtx
8537 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
8538 rtx target)
8540 rtx expect, desired, mem, oldval;
8541 rtx_code_label *label;
8542 enum memmodel success, failure;
8543 tree weak;
8544 bool is_weak;
8545 location_t loc
8546 = expansion_point_location_if_in_system_header (input_location);
8548 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
8549 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
8551 if (failure > success)
8553 warning_at (loc, OPT_Winvalid_memory_model,
8554 "failure memory model cannot be stronger than success "
8555 "memory model for %<__atomic_compare_exchange%>");
8556 success = MEMMODEL_SEQ_CST;
8559 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8561 warning_at (loc, OPT_Winvalid_memory_model,
8562 "invalid failure memory model for "
8563 "%<__atomic_compare_exchange%>");
8564 failure = MEMMODEL_SEQ_CST;
8565 success = MEMMODEL_SEQ_CST;
8569 if (!flag_inline_atomics)
8570 return NULL_RTX;
8572 /* Expand the operands. */
8573 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8575 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
8576 expect = convert_memory_address (Pmode, expect);
8577 expect = gen_rtx_MEM (mode, expect);
8578 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8580 weak = CALL_EXPR_ARG (exp, 3);
8581 is_weak = false;
8582 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
8583 is_weak = true;
8585 if (target == const0_rtx)
8586 target = NULL;
8588 /* Lest the rtl backend create a race condition with an improper store
8589 to memory, always create a new pseudo for OLDVAL. */
8590 oldval = NULL;
8592 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
8593 is_weak, success, failure))
8594 return NULL_RTX;
8596 /* Conditionally store back to EXPECT, lest we create a race condition
8597 with an improper store to memory. */
8598 /* ??? With a rearrangement of atomics at the gimple level, we can handle
8599 the normal case where EXPECT is totally private, i.e. a register. At
8600 which point the store can be unconditional. */
8601 label = gen_label_rtx ();
8602 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
8603 GET_MODE (target), 1, label);
8604 emit_move_insn (expect, oldval);
8605 emit_label (label);
8607 return target;
8610 /* Helper function for expand_ifn_atomic_compare_exchange - expand
8611 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
8612 call. The weak parameter must be dropped to match the expected parameter
8613 list and the expected argument changed from value to pointer to memory
8614 slot. */
8616 static void
8617 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
8619 unsigned int z;
8620 vec<tree, va_gc> *vec;
8622 vec_alloc (vec, 5);
8623 vec->quick_push (gimple_call_arg (call, 0));
8624 tree expected = gimple_call_arg (call, 1);
8625 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
8626 TREE_TYPE (expected));
8627 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
8628 if (expd != x)
8629 emit_move_insn (x, expd);
8630 tree v = make_tree (TREE_TYPE (expected), x);
8631 vec->quick_push (build1 (ADDR_EXPR,
8632 build_pointer_type (TREE_TYPE (expected)), v));
8633 vec->quick_push (gimple_call_arg (call, 2));
8634 /* Skip the boolean weak parameter. */
8635 for (z = 4; z < 6; z++)
8636 vec->quick_push (gimple_call_arg (call, z));
8637 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
8638 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
8639 gcc_assert (bytes_log2 < 5);
8640 built_in_function fncode
8641 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
8642 + bytes_log2);
8643 tree fndecl = builtin_decl_explicit (fncode);
8644 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
8645 fndecl);
8646 tree exp = build_call_vec (boolean_type_node, fn, vec);
8647 tree lhs = gimple_call_lhs (call);
8648 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
8649 if (lhs)
8651 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8652 if (GET_MODE (boolret) != mode)
8653 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8654 x = force_reg (mode, x);
8655 write_complex_part (target, boolret, true);
8656 write_complex_part (target, x, false);
8660 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
8662 void
8663 expand_ifn_atomic_compare_exchange (gcall *call)
8665 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
8666 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
8667 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
8668 rtx expect, desired, mem, oldval, boolret;
8669 enum memmodel success, failure;
8670 tree lhs;
8671 bool is_weak;
8672 location_t loc
8673 = expansion_point_location_if_in_system_header (gimple_location (call));
8675 success = get_memmodel (gimple_call_arg (call, 4));
8676 failure = get_memmodel (gimple_call_arg (call, 5));
8678 if (failure > success)
8680 warning_at (loc, OPT_Winvalid_memory_model,
8681 "failure memory model cannot be stronger than success "
8682 "memory model for %<__atomic_compare_exchange%>");
8683 success = MEMMODEL_SEQ_CST;
8686 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8688 warning_at (loc, OPT_Winvalid_memory_model,
8689 "invalid failure memory model for "
8690 "%<__atomic_compare_exchange%>");
8691 failure = MEMMODEL_SEQ_CST;
8692 success = MEMMODEL_SEQ_CST;
8695 if (!flag_inline_atomics)
8697 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8698 return;
8701 /* Expand the operands. */
8702 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
8704 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
8705 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
8707 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
8709 boolret = NULL;
8710 oldval = NULL;
8712 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
8713 is_weak, success, failure))
8715 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8716 return;
8719 lhs = gimple_call_lhs (call);
8720 if (lhs)
8722 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8723 if (GET_MODE (boolret) != mode)
8724 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8725 write_complex_part (target, boolret, true);
8726 write_complex_part (target, oldval, false);
8730 /* Expand the __atomic_load intrinsic:
8731 TYPE __atomic_load (TYPE *object, enum memmodel)
8732 EXP is the CALL_EXPR.
8733 TARGET is an optional place for us to store the results. */
8735 static rtx
8736 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
8738 rtx mem;
8739 enum memmodel model;
8741 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8742 if (is_mm_release (model) || is_mm_acq_rel (model))
8744 location_t loc
8745 = expansion_point_location_if_in_system_header (input_location);
8746 warning_at (loc, OPT_Winvalid_memory_model,
8747 "invalid memory model for %<__atomic_load%>");
8748 model = MEMMODEL_SEQ_CST;
8751 if (!flag_inline_atomics)
8752 return NULL_RTX;
8754 /* Expand the operand. */
8755 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8757 return expand_atomic_load (target, mem, model);
8761 /* Expand the __atomic_store intrinsic:
8762 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
8763 EXP is the CALL_EXPR.
8764 The builtin returns void, so there is no TARGET. */
8766 static rtx
8767 expand_builtin_atomic_store (machine_mode mode, tree exp)
8769 rtx mem, val;
8770 enum memmodel model;
8772 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8773 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
8774 || is_mm_release (model)))
8776 location_t loc
8777 = expansion_point_location_if_in_system_header (input_location);
8778 warning_at (loc, OPT_Winvalid_memory_model,
8779 "invalid memory model for %<__atomic_store%>");
8780 model = MEMMODEL_SEQ_CST;
8783 if (!flag_inline_atomics)
8784 return NULL_RTX;
8786 /* Expand the operands. */
8787 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8788 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8790 return expand_atomic_store (mem, val, model, false);
8793 /* Expand the __atomic_fetch_XXX intrinsic:
8794 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8795 EXP is the CALL_EXPR.
8796 TARGET is an optional place for us to store the results.
8797 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
8798 FETCH_AFTER is true if returning the result of the operation.
8799 FETCH_AFTER is false if returning the value before the operation.
8800 IGNORE is true if the result is not used.
8801 EXT_CALL is the correct builtin for an external call if this cannot be
8802 resolved to an instruction sequence. */
8804 static rtx
8805 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
8806 enum rtx_code code, bool fetch_after,
8807 bool ignore, enum built_in_function ext_call)
8809 rtx val, mem, ret;
8810 enum memmodel model;
8811 tree fndecl;
8812 tree addr;
8814 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8816 /* Expand the operands. */
8817 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8818 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8820 /* Only try generating instructions if inlining is turned on. */
8821 if (flag_inline_atomics)
8823 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
8824 if (ret)
8825 return ret;
8828 /* Return if a different routine isn't needed for the library call. */
8829 if (ext_call == BUILT_IN_NONE)
8830 return NULL_RTX;
8832 /* Change the call to the specified function. */
8833 fndecl = get_callee_fndecl (exp);
8834 addr = CALL_EXPR_FN (exp);
8835 STRIP_NOPS (addr);
8837 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
8838 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
8840 /* If we will emit code after the call, the call cannot be a tail call.
8841 If it is emitted as a tail call, a barrier is emitted after it, and
8842 then all trailing code is removed. */
8843 if (!ignore)
8844 CALL_EXPR_TAILCALL (exp) = 0;
8846 /* Expand the call here so we can emit trailing code. */
8847 ret = expand_call (exp, target, ignore);
8849 /* Replace the original function just in case it matters. */
8850 TREE_OPERAND (addr, 0) = fndecl;
8852 /* Then issue the arithmetic correction to return the right result. */
8853 if (!ignore)
8855 if (code == NOT)
8857 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
8858 OPTAB_LIB_WIDEN);
8859 ret = expand_simple_unop (mode, NOT, ret, target, true);
8861 else
8862 ret = expand_simple_binop (mode, code, ret, val, target, true,
8863 OPTAB_LIB_WIDEN);
8865 return ret;
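/* Example of the correction above: if __atomic_add_fetch_4 cannot be
   inlined, it is rewritten as a call to __atomic_fetch_add_4, which
   returns the value *before* the addition, and the addition is then
   replayed on the return value so the caller still sees the
   post-operation result; for NAND the replay is ret = ~(ret & val).  */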
8868 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
8870 void
8871 expand_ifn_atomic_bit_test_and (gcall *call)
8873 tree ptr = gimple_call_arg (call, 0);
8874 tree bit = gimple_call_arg (call, 1);
8875 tree flag = gimple_call_arg (call, 2);
8876 tree lhs = gimple_call_lhs (call);
8877 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
8878 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
8879 enum rtx_code code;
8880 optab optab;
8881 class expand_operand ops[5];
8883 gcc_assert (flag_inline_atomics);
8885 if (gimple_call_num_args (call) == 4)
8886 model = get_memmodel (gimple_call_arg (call, 3));
8888 rtx mem = get_builtin_sync_mem (ptr, mode);
8889 rtx val = expand_expr_force_mode (bit, mode);
8891 switch (gimple_call_internal_fn (call))
8893 case IFN_ATOMIC_BIT_TEST_AND_SET:
8894 code = IOR;
8895 optab = atomic_bit_test_and_set_optab;
8896 break;
8897 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
8898 code = XOR;
8899 optab = atomic_bit_test_and_complement_optab;
8900 break;
8901 case IFN_ATOMIC_BIT_TEST_AND_RESET:
8902 code = AND;
8903 optab = atomic_bit_test_and_reset_optab;
8904 break;
8905 default:
8906 gcc_unreachable ();
8909 if (lhs == NULL_TREE)
8911 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8912 val, NULL_RTX, true, OPTAB_DIRECT);
8913 if (code == AND)
8914 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8915 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
8916 return;
8919 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8920 enum insn_code icode = direct_optab_handler (optab, mode);
8921 gcc_assert (icode != CODE_FOR_nothing);
8922 create_output_operand (&ops[0], target, mode);
8923 create_fixed_operand (&ops[1], mem);
8924 create_convert_operand_to (&ops[2], val, mode, true);
8925 create_integer_operand (&ops[3], model);
8926 create_integer_operand (&ops[4], integer_onep (flag));
8927 if (maybe_expand_insn (icode, 5, ops))
8928 return;
8930 rtx bitval = val;
8931 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8932 val, NULL_RTX, true, OPTAB_DIRECT);
8933 rtx maskval = val;
8934 if (code == AND)
8935 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8936 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
8937 code, model, false);
8938 if (integer_onep (flag))
8940 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
8941 NULL_RTX, true, OPTAB_DIRECT);
8942 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
8943 true, OPTAB_DIRECT);
8945 else
8946 result = expand_simple_binop (mode, AND, result, maskval, target, true,
8947 OPTAB_DIRECT);
8948 if (result != target)
8949 emit_move_insn (target, result);
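/* A sketch of the source-level idiom this internal function represents
   (recognized by earlier tree-level passes); the optab lets targets emit
   a single bit-test-and-modify instruction instead of a full atomic
   fetch:

     bool was_set
       = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
          >> bit) & 1;
*/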
8952 /* Expand an atomic clear operation.
8953 void __atomic_clear (BOOL *obj, enum memmodel)
8954 EXP is the call expression. */
8956 static rtx
8957 expand_builtin_atomic_clear (tree exp)
8959 machine_mode mode;
8960 rtx mem, ret;
8961 enum memmodel model;
8963 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8964 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8965 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8967 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
8969 location_t loc
8970 = expansion_point_location_if_in_system_header (input_location);
8971 warning_at (loc, OPT_Winvalid_memory_model,
8972 "invalid memory model for %<__atomic_store%>");
8973 model = MEMMODEL_SEQ_CST;
8976 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
8977 Failing that, fall through to the plain store below. The only way this can
8978 fail is if the bool type is larger than a word size. Unlikely, but
8979 handle it anyway for completeness. Assume a single threaded model since
8980 there is no atomic support in this case, and no barriers are required. */
8981 ret = expand_atomic_store (mem, const0_rtx, model, true);
8982 if (!ret)
8983 emit_move_insn (mem, const0_rtx);
8984 return const0_rtx;
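/* For reference, a call such as

     __atomic_clear (&flag, __ATOMIC_RELEASE);

   therefore amounts to an atomic store of zero, or to the plain store
   above under the single-threaded fallback assumption.  */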
8987 /* Expand an atomic test_and_set operation.
8988 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
8989 EXP is the call expression. */
8991 static rtx
8992 expand_builtin_atomic_test_and_set (tree exp, rtx target)
8994 rtx mem;
8995 enum memmodel model;
8996 machine_mode mode;
8998 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9000 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9002 return expand_atomic_test_and_set (target, mem, model);
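/* Typical use of the builtin expanded above (a sketch): a simple
   byte-sized spin lock.

     while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&lock, __ATOMIC_RELEASE);
*/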
9006 /* Return true if an object of size ARG0, with the alignment given by the
9007 optional pointer ARG1, is always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
9009 static tree
9010 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
9012 int size;
9013 machine_mode mode;
9014 unsigned int mode_align, type_align;
9016 if (TREE_CODE (arg0) != INTEGER_CST)
9017 return NULL_TREE;
9019 /* We need a corresponding integer mode for the access to be lock-free. */
9020 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
9021 if (!int_mode_for_size (size, 0).exists (&mode))
9022 return boolean_false_node;
9024 mode_align = GET_MODE_ALIGNMENT (mode);
9026 if (TREE_CODE (arg1) == INTEGER_CST)
9028 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
9030 /* Either this argument is null, or it's a fake pointer encoding
9031 the alignment of the object. */
9032 val = least_bit_hwi (val);
9033 val *= BITS_PER_UNIT;
9035 if (val == 0 || mode_align < val)
9036 type_align = mode_align;
9037 else
9038 type_align = val;
9040 else
9042 tree ttype = TREE_TYPE (arg1);
9044 /* This function is usually invoked and folded immediately by the front
9045 end before anything else has a chance to look at it. The pointer
9046 parameter at this point is usually cast to a void *, so check for that
9047 and look past the cast. */
9048 if (CONVERT_EXPR_P (arg1)
9049 && POINTER_TYPE_P (ttype)
9050 && VOID_TYPE_P (TREE_TYPE (ttype))
9051 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
9052 arg1 = TREE_OPERAND (arg1, 0);
9054 ttype = TREE_TYPE (arg1);
9055 gcc_assert (POINTER_TYPE_P (ttype));
9057 /* Get the underlying type of the object. */
9058 ttype = TREE_TYPE (ttype);
9059 type_align = TYPE_ALIGN (ttype);
9062 /* If the object has smaller alignment, the lock free routines cannot
9063 be used. */
9064 if (type_align < mode_align)
9065 return boolean_false_node;
9067 /* Check if a compare_and_swap pattern exists for the mode which represents
9068 the required size. The pattern is not allowed to fail, so the existence
9069 of the pattern indicates support is present. Also require that an
9070 atomic load exists for the required size. */
9071 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
9072 return boolean_true_node;
9073 else
9074 return boolean_false_node;
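/* Examples of the folding above (a sketch; the results are target
   dependent).  On a typical 32- or 64-bit target:

     __atomic_always_lock_free (sizeof (int), 0)   folds to true;
     __atomic_always_lock_free (sizeof (int), p)   folds to false for
                                                   char *p, since the
                                                   pointed-to alignment
                                                   is below the mode
                                                   alignment.  */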
9077 /* Return true if the parameters to call EXP represent an object which will
9078 always generate lock free instructions. The first argument represents the
9079 size of the object, and the second parameter is a pointer to the object
9080 itself. If NULL is passed for the object, then the result is based on
9081 typical alignment for an object of the specified size. Otherwise return
9082 false. */
9084 static rtx
9085 expand_builtin_atomic_always_lock_free (tree exp)
9087 tree size;
9088 tree arg0 = CALL_EXPR_ARG (exp, 0);
9089 tree arg1 = CALL_EXPR_ARG (exp, 1);
9091 if (TREE_CODE (arg0) != INTEGER_CST)
9093 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
9094 return const0_rtx;
9097 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
9098 if (size == boolean_true_node)
9099 return const1_rtx;
9100 return const0_rtx;
9103 /* Return one if it can be determined that the object ARG1 of size ARG0 is
9104 always lock free on this architecture; otherwise return NULL_TREE. */
9106 static tree
9107 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9109 if (!flag_inline_atomics)
9110 return NULL_TREE;
9112 /* If it isn't always lock free, don't generate a result. */
9113 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9114 return boolean_true_node;
9116 return NULL_TREE;
9119 /* Return true if the parameters to call EXP represent an object which will
9120 always generate lock free instructions. The first argument represents the
9121 size of the object, and the second parameter is a pointer to the object
9122 itself. If NULL is passed for the object, then the result is based on
9123 typical alignment for an object of the specified size. Otherwise return
9124 NULL_RTX. */
9126 static rtx
9127 expand_builtin_atomic_is_lock_free (tree exp)
9129 tree size;
9130 tree arg0 = CALL_EXPR_ARG (exp, 0);
9131 tree arg1 = CALL_EXPR_ARG (exp, 1);
9133 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9135 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
9136 return NULL_RTX;
9139 if (!flag_inline_atomics)
9140 return NULL_RTX;
9142 /* If the value is known at compile time, return the RTX for it. */
9143 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
9144 if (size == boolean_true_node)
9145 return const1_rtx;
9147 return NULL_RTX;
9150 /* Expand the __atomic_thread_fence intrinsic:
9151 void __atomic_thread_fence (enum memmodel)
9152 EXP is the CALL_EXPR. */
9154 static void
9155 expand_builtin_atomic_thread_fence (tree exp)
9157 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9158 expand_mem_thread_fence (model);
9161 /* Expand the __atomic_signal_fence intrinsic:
9162 void __atomic_signal_fence (enum memmodel)
9163 EXP is the CALL_EXPR. */
9165 static void
9166 expand_builtin_atomic_signal_fence (tree exp)
9168 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9169 expand_mem_signal_fence (model);
9172 /* Expand the __sync_synchronize intrinsic. */
9174 static void
9175 expand_builtin_sync_synchronize (void)
9177 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
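/* How the three fences above relate (a sketch):

     __sync_synchronize ();                      full barrier
     __atomic_thread_fence (__ATOMIC_SEQ_CST);   similar, C11 semantics
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   compiler barrier only,
                                                 no machine fence emitted
*/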
9180 static rtx
9181 expand_builtin_thread_pointer (tree exp, rtx target)
9183 enum insn_code icode;
9184 if (!validate_arglist (exp, VOID_TYPE))
9185 return const0_rtx;
9186 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9187 if (icode != CODE_FOR_nothing)
9189 class expand_operand op;
9190 /* If the target is not suitable then create a new target. */
9191 if (target == NULL_RTX
9192 || !REG_P (target)
9193 || GET_MODE (target) != Pmode)
9194 target = gen_reg_rtx (Pmode);
9195 create_output_operand (&op, target, Pmode);
9196 expand_insn (icode, 1, &op);
9197 return target;
9199 error ("%<__builtin_thread_pointer%> is not supported on this target");
9200 return const0_rtx;
9203 static void
9204 expand_builtin_set_thread_pointer (tree exp)
9206 enum insn_code icode;
9207 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9208 return;
9209 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9210 if (icode != CODE_FOR_nothing)
9212 class expand_operand op;
9213 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9214 Pmode, EXPAND_NORMAL);
9215 create_input_operand (&op, val, Pmode);
9216 expand_insn (icode, 1, &op);
9217 return;
9219 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
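/* Both expanders above simply expose the target's thread-pointer
   register when the corresponding optab exists, e.g. (a sketch):

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);
*/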
9223 /* Emit code to restore the stack to a previously saved value. */
9225 static void
9226 expand_stack_restore (tree var)
9228 rtx_insn *prev;
9229 rtx sa = expand_normal (var);
9231 sa = convert_memory_address (Pmode, sa);
9233 prev = get_last_insn ();
9234 emit_stack_restore (SAVE_BLOCK, sa);
9236 record_new_stack_level ();
9238 fixup_args_size_notes (prev, get_last_insn (), 0);
9241 /* Emit code to save the current value of the stack. */
9243 static rtx
9244 expand_stack_save (void)
9246 rtx ret = NULL_RTX;
9248 emit_stack_save (SAVE_BLOCK, &ret);
9249 return ret;
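/* These two expanders implement the save/restore pairing that the
   gimplifier places around scopes containing variable-sized objects,
   conceptually (a sketch):

     void *sp = __builtin_stack_save ();
     { char vla[n]; ... }
     __builtin_stack_restore (sp);
*/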
9252 /* Emit code to get the OpenACC gang, worker or vector id or size. */
9254 static rtx
9255 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9257 const char *name;
9258 rtx fallback_retval;
9259 rtx_insn *(*gen_fn) (rtx, rtx);
9260 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9262 case BUILT_IN_GOACC_PARLEVEL_ID:
9263 name = "__builtin_goacc_parlevel_id";
9264 fallback_retval = const0_rtx;
9265 gen_fn = targetm.gen_oacc_dim_pos;
9266 break;
9267 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9268 name = "__builtin_goacc_parlevel_size";
9269 fallback_retval = const1_rtx;
9270 gen_fn = targetm.gen_oacc_dim_size;
9271 break;
9272 default:
9273 gcc_unreachable ();
9276 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9278 error ("%qs only supported in OpenACC code", name);
9279 return const0_rtx;
9282 tree arg = CALL_EXPR_ARG (exp, 0);
9283 if (TREE_CODE (arg) != INTEGER_CST)
9285 error ("non-constant argument 0 to %qs", name);
9286 return const0_rtx;
9289 int dim = TREE_INT_CST_LOW (arg);
9290 switch (dim)
9292 case GOMP_DIM_GANG:
9293 case GOMP_DIM_WORKER:
9294 case GOMP_DIM_VECTOR:
9295 break;
9296 default:
9297 error ("illegal argument 0 to %qs", name);
9298 return const0_rtx;
9301 if (ignore)
9302 return target;
9304 if (target == NULL_RTX)
9305 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9307 if (!targetm.have_oacc_dim_size ())
9309 emit_move_insn (target, fallback_retval);
9310 return target;
9313 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9314 emit_insn (gen_fn (reg, GEN_INT (dim)));
9315 if (reg != target)
9316 emit_move_insn (target, reg);
9318 return target;
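/* Example uses inside an OpenACC offloaded region (a sketch):

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
     int vlen = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   On targets without the oacc_dim_pos/oacc_dim_size patterns these
   fall back to the constants 0 and 1 respectively, as set above.  */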
9321 /* Expand a string compare operation using a sequence of char comparisons
9322 to avoid the call overhead, with result going to TARGET if
9323 that's convenient.
9325 VAR_STR is the variable string source;
9326 CONST_STR is the constant string source;
9327 LENGTH is the number of chars to compare;
9328 CONST_STR_N indicates which source string is the constant string;
9329 IS_MEMCMP indicates whether it's a memcmp or strcmp.
9331 The expansion looks like this (assume CONST_STR_N is 2, i.e., arg2 is the constant string):
9333 target = (int) (unsigned char) var_str[0]
9334 - (int) (unsigned char) const_str[0];
9335 if (target != 0)
9336 goto ne_label;
9338 target = (int) (unsigned char) var_str[length - 2]
9339 - (int) (unsigned char) const_str[length - 2];
9340 if (target != 0)
9341 goto ne_label;
9342 target = (int) (unsigned char) var_str[length - 1]
9343 - (int) (unsigned char) const_str[length - 1];
9344 ne_label:
9347 static rtx
9348 inline_string_cmp (rtx target, tree var_str, const char *const_str,
9349 unsigned HOST_WIDE_INT length,
9350 int const_str_n, machine_mode mode)
9352 HOST_WIDE_INT offset = 0;
9353 rtx var_rtx_array
9354 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
9355 rtx var_rtx = NULL_RTX;
9356 rtx const_rtx = NULL_RTX;
9357 rtx result = target ? target : gen_reg_rtx (mode);
9358 rtx_code_label *ne_label = gen_label_rtx ();
9359 tree unit_type_node = unsigned_char_type_node;
9360 scalar_int_mode unit_mode
9361 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
9363 start_sequence ();
9365 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
9367 var_rtx
9368 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
9369 const_rtx = c_readstr (const_str + offset, unit_mode);
9370 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
9371 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
9373 op0 = convert_modes (mode, unit_mode, op0, 1);
9374 op1 = convert_modes (mode, unit_mode, op1, 1);
9375 result = expand_simple_binop (mode, MINUS, op0, op1,
9376 result, 1, OPTAB_WIDEN);
9377 if (i < length - 1)
9378 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
9379 mode, true, ne_label);
9380 offset += GET_MODE_SIZE (unit_mode);
9383 emit_label (ne_label);
9384 rtx_insn *insns = get_insns ();
9385 end_sequence ();
9386 emit_insn (insns);
9388 return result;
9391 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
9392 to TARGET if that's convenient.
9393 If the call has not been inlined, return NULL_RTX. */
9395 static rtx
9396 inline_expand_builtin_bytecmp (tree exp, rtx target)
9398 tree fndecl = get_callee_fndecl (exp);
9399 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9400 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9402 /* Do NOT apply this inlining expansion when optimizing for size or
9403 at optimization levels below 2. */
9404 if (optimize < 2 || optimize_insn_for_size_p ())
9405 return NULL_RTX;
9407 gcc_checking_assert (fcode == BUILT_IN_STRCMP
9408 || fcode == BUILT_IN_STRNCMP
9409 || fcode == BUILT_IN_MEMCMP);
9411 /* On a target where the type of the call (int) has the same or narrower
9412 precision than unsigned char, give up on the inline expansion. */
9413 if (TYPE_PRECISION (unsigned_char_type_node)
9414 >= TYPE_PRECISION (TREE_TYPE (exp)))
9415 return NULL_RTX;
9417 tree arg1 = CALL_EXPR_ARG (exp, 0);
9418 tree arg2 = CALL_EXPR_ARG (exp, 1);
9419 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9421 unsigned HOST_WIDE_INT len1 = 0;
9422 unsigned HOST_WIDE_INT len2 = 0;
9423 unsigned HOST_WIDE_INT len3 = 0;
9425 /* Get the object representation of the initializers of ARG1 and ARG2
9426 as strings, provided they refer to constant objects, with their byte
9427 sizes in LEN1 and LEN2, respectively. */
9428 const char *bytes1 = getbyterep (arg1, &len1);
9429 const char *bytes2 = getbyterep (arg2, &len2);
9431 /* Fail if neither argument refers to an initialized constant. */
9432 if (!bytes1 && !bytes2)
9433 return NULL_RTX;
9435 if (is_ncmp)
9437 /* Fail if the memcmp/strncmp bound is not a constant. */
9438 if (!tree_fits_uhwi_p (len3_tree))
9439 return NULL_RTX;
9441 len3 = tree_to_uhwi (len3_tree);
9443 if (fcode == BUILT_IN_MEMCMP)
9445 /* Fail if the memcmp bound is greater than the size of either
9446 of the two constant objects. */
9447 if ((bytes1 && len1 < len3)
9448 || (bytes2 && len2 < len3))
9449 return NULL_RTX;
9453 if (fcode != BUILT_IN_MEMCMP)
9455 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9456 and LEN2 to the length of the nul-terminated string stored
9457 in each. */
9458 if (bytes1 != NULL)
9459 len1 = strnlen (bytes1, len1) + 1;
9460 if (bytes2 != NULL)
9461 len2 = strnlen (bytes2, len2) + 1;
9464 /* See inline_string_cmp. */
9465 int const_str_n;
9466 if (!len1)
9467 const_str_n = 2;
9468 else if (!len2)
9469 const_str_n = 1;
9470 else if (len2 > len1)
9471 const_str_n = 1;
9472 else
9473 const_str_n = 2;
9475 /* For strncmp only, compute the new bound as the smallest of
9476 the lengths of the two strings (plus 1) and the bound provided
9477 to the function. */
9478 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9479 if (is_ncmp && len3 < bound)
9480 bound = len3;
9482 /* If the bound of the comparison is larger than the threshold,
9483 do nothing. */
9484 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
9485 return NULL_RTX;
9487 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9489 /* Now, start the inline expansion of the call. */
9490 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
9491 (const_str_n == 1) ? bytes1 : bytes2, bound,
9492 const_str_n, mode);
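/* A worked example of the decision above (a sketch; the inlining
   threshold is the target-tunable param_builtin_string_cmp_inline_length):

     strncmp (s, "hello", 8)

   has len2 = 6 (including the terminating nul), so bound
   = MIN (6, 8) = 6, and the call is expanded into at most six byte
   comparisons when 6 does not exceed the threshold.  */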
9495 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9496 represents the size of the first argument to that call, or VOIDmode
9497 if the argument is a pointer. IGNORE will be true if the result
9498 isn't used. */
9499 static rtx
9500 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9501 bool ignore)
9503 rtx val, failsafe;
9504 unsigned nargs = call_expr_nargs (exp);
9506 tree arg0 = CALL_EXPR_ARG (exp, 0);
9508 if (mode == VOIDmode)
9510 mode = TYPE_MODE (TREE_TYPE (arg0));
9511 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9514 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9516 /* An optional second argument can be used as a failsafe value on
9517 some machines. If it isn't present, then the failsafe value is
9518 assumed to be 0. */
9519 if (nargs > 1)
9521 tree arg1 = CALL_EXPR_ARG (exp, 1);
9522 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9524 else
9525 failsafe = const0_rtx;
9527 /* If the result isn't used, the behavior is undefined. It would be
9528 nice to emit a warning here, but path splitting means this might
9529 happen with legitimate code. So simply drop the builtin
9530 expansion in that case; we've handled any side-effects above. */
9531 if (ignore)
9532 return const0_rtx;
9534 /* If we don't have a suitable target, create one to hold the result. */
9535 if (target == NULL || GET_MODE (target) != mode)
9536 target = gen_reg_rtx (mode);
9538 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9539 val = convert_modes (mode, VOIDmode, val, false);
9541 return targetm.speculation_safe_value (mode, target, val, failsafe);
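/* Typical use of the builtin expanded above: clamping an index that
   may still be used under misspeculation (a sketch):

     if (i < len)
       val = array[__builtin_speculation_safe_value (i)];

   targetm.speculation_safe_value decides how, or whether, the value
   needs to be laundered on the target.  */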
9544 /* Expand an expression EXP that calls a built-in function,
9545 with result going to TARGET if that's convenient
9546 (and in mode MODE if that's convenient).
9547 SUBTARGET may be used as the target for computing one of EXP's operands.
9548 IGNORE is nonzero if the value is to be ignored. */
9550 rtx
9551 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
9552 int ignore)
9554 tree fndecl = get_callee_fndecl (exp);
9555 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9556 int flags;
9558 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9559 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9561 /* When ASan is enabled, we don't want to expand some memory/string
9562 builtins and rely on libsanitizer's hooks. This allows us to avoid
9563 redundant checks and be sure that possible overflow will be detected
9564 by ASan. */
9566 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9567 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9568 return expand_call (exp, target, ignore);
9570 /* When not optimizing, generate calls to library functions for a certain
9571 set of builtins. */
9572 if (!optimize
9573 && !called_as_built_in (fndecl)
9574 && fcode != BUILT_IN_FORK
9575 && fcode != BUILT_IN_EXECL
9576 && fcode != BUILT_IN_EXECV
9577 && fcode != BUILT_IN_EXECLP
9578 && fcode != BUILT_IN_EXECLE
9579 && fcode != BUILT_IN_EXECVP
9580 && fcode != BUILT_IN_EXECVE
9581 && fcode != BUILT_IN_CLEAR_CACHE
9582 && !ALLOCA_FUNCTION_CODE_P (fcode)
9583 && fcode != BUILT_IN_FREE)
9584 return expand_call (exp, target, ignore);
9586 /* The built-in function expanders test for target == const0_rtx
9587 to determine whether the function's result will be ignored. */
9588 if (ignore)
9589 target = const0_rtx;
9591 /* If the result of a pure or const built-in function is ignored, and
9592 none of its arguments are volatile, we can avoid expanding the
9593 built-in call and just evaluate the arguments for side-effects. */
9594 if (target == const0_rtx
9595 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9596 && !(flags & ECF_LOOPING_CONST_OR_PURE))
9598 bool volatilep = false;
9599 tree arg;
9600 call_expr_arg_iterator iter;
9602 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9603 if (TREE_THIS_VOLATILE (arg))
9605 volatilep = true;
9606 break;
9609 if (! volatilep)
9611 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9612 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
9613 return const0_rtx;
9617 switch (fcode)
9619 CASE_FLT_FN (BUILT_IN_FABS):
9620 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9621 case BUILT_IN_FABSD32:
9622 case BUILT_IN_FABSD64:
9623 case BUILT_IN_FABSD128:
9624 target = expand_builtin_fabs (exp, target, subtarget);
9625 if (target)
9626 return target;
9627 break;
9629 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9630 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
9631 target = expand_builtin_copysign (exp, target, subtarget);
9632 if (target)
9633 return target;
9634 break;
9636 /* Just do a normal library call if we were unable to fold
9637 the values. */
9638 CASE_FLT_FN (BUILT_IN_CABS):
9639 break;
9641 CASE_FLT_FN (BUILT_IN_FMA):
9642 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9643 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9644 if (target)
9645 return target;
9646 break;
9648 CASE_FLT_FN (BUILT_IN_ILOGB):
9649 if (! flag_unsafe_math_optimizations)
9650 break;
9651 gcc_fallthrough ();
9652 CASE_FLT_FN (BUILT_IN_ISINF):
9653 CASE_FLT_FN (BUILT_IN_FINITE):
9654 case BUILT_IN_ISFINITE:
9655 case BUILT_IN_ISNORMAL:
9656 target = expand_builtin_interclass_mathfn (exp, target);
9657 if (target)
9658 return target;
9659 break;
9661 CASE_FLT_FN (BUILT_IN_ICEIL):
9662 CASE_FLT_FN (BUILT_IN_LCEIL):
9663 CASE_FLT_FN (BUILT_IN_LLCEIL):
9664 CASE_FLT_FN (BUILT_IN_LFLOOR):
9665 CASE_FLT_FN (BUILT_IN_IFLOOR):
9666 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9667 target = expand_builtin_int_roundingfn (exp, target);
9668 if (target)
9669 return target;
9670 break;
9672 CASE_FLT_FN (BUILT_IN_IRINT):
9673 CASE_FLT_FN (BUILT_IN_LRINT):
9674 CASE_FLT_FN (BUILT_IN_LLRINT):
9675 CASE_FLT_FN (BUILT_IN_IROUND):
9676 CASE_FLT_FN (BUILT_IN_LROUND):
9677 CASE_FLT_FN (BUILT_IN_LLROUND):
9678 target = expand_builtin_int_roundingfn_2 (exp, target);
9679 if (target)
9680 return target;
9681 break;
9683 CASE_FLT_FN (BUILT_IN_POWI):
9684 target = expand_builtin_powi (exp, target);
9685 if (target)
9686 return target;
9687 break;
9689 CASE_FLT_FN (BUILT_IN_CEXPI):
9690 target = expand_builtin_cexpi (exp, target);
9691 gcc_assert (target);
9692 return target;
9694 CASE_FLT_FN (BUILT_IN_SIN):
9695 CASE_FLT_FN (BUILT_IN_COS):
9696 if (! flag_unsafe_math_optimizations)
9697 break;
9698 target = expand_builtin_mathfn_3 (exp, target, subtarget);
9699 if (target)
9700 return target;
9701 break;
9703 CASE_FLT_FN (BUILT_IN_SINCOS):
9704 if (! flag_unsafe_math_optimizations)
9705 break;
9706 target = expand_builtin_sincos (exp);
9707 if (target)
9708 return target;
9709 break;
9711 case BUILT_IN_APPLY_ARGS:
9712 return expand_builtin_apply_args ();
9714 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9715 FUNCTION with a copy of the parameters described by
9716 ARGUMENTS, and ARGSIZE. It returns a block of memory
9717 allocated on the stack into which is stored all the registers
9718 that might possibly be used for returning the result of a
9719 function. ARGUMENTS is the value returned by
9720 __builtin_apply_args. ARGSIZE is the number of bytes of
9721 arguments that must be copied. ??? How should this value be
9722 computed? We'll also need a safe worst case value for varargs
9723 functions. */
9724 case BUILT_IN_APPLY:
9725 if (!validate_arglist (exp, POINTER_TYPE,
9726 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
9727 && !validate_arglist (exp, REFERENCE_TYPE,
9728 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9729 return const0_rtx;
9730 else
9732 rtx ops[3];
9734 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
9735 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
9736 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
9738 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9741 /* __builtin_return (RESULT) causes the function to return the
9742 value described by RESULT. RESULT is address of the block of
9743 memory returned by __builtin_apply. */
9744 case BUILT_IN_RETURN:
9745 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9746 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
9747 return const0_rtx;
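/* Sketch of how the three builtins above cooperate in a
   call-forwarding wrapper (the 64 is a hypothetical worst-case
   argument size):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
     __builtin_return (ret);
*/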
9749 case BUILT_IN_SAVEREGS:
9750 return expand_builtin_saveregs ();
9752 case BUILT_IN_VA_ARG_PACK:
9753 /* All valid uses of __builtin_va_arg_pack () are removed during
9754 inlining. */
9755 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9756 return const0_rtx;
9758 case BUILT_IN_VA_ARG_PACK_LEN:
9759 /* All valid uses of __builtin_va_arg_pack_len () are removed during
9760 inlining. */
9761 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
9762 return const0_rtx;
9764 /* Return the address of the first anonymous stack arg. */
9765 case BUILT_IN_NEXT_ARG:
9766 if (fold_builtin_next_arg (exp, false))
9767 return const0_rtx;
9768 return expand_builtin_next_arg ();
9770 case BUILT_IN_CLEAR_CACHE:
9771 expand_builtin___clear_cache (exp);
9772 return const0_rtx;
9774 case BUILT_IN_CLASSIFY_TYPE:
9775 return expand_builtin_classify_type (exp);
9777 case BUILT_IN_CONSTANT_P:
9778 return const0_rtx;
9780 case BUILT_IN_FRAME_ADDRESS:
9781 case BUILT_IN_RETURN_ADDRESS:
9782 return expand_builtin_frame_address (fndecl, exp);
9784 /* Return the address of the area where the structure is returned,
9785 or 0 otherwise. */
9786 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9787 if (call_expr_nargs (exp) != 0
9788 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9789 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9790 return const0_rtx;
9791 else
9792 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9794 CASE_BUILT_IN_ALLOCA:
9795 target = expand_builtin_alloca (exp);
9796 if (target)
9797 return target;
9798 break;
9800 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9801 return expand_asan_emit_allocas_unpoison (exp);
9803 case BUILT_IN_STACK_SAVE:
9804 return expand_stack_save ();
9806 case BUILT_IN_STACK_RESTORE:
9807 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
9808 return const0_rtx;
9810 case BUILT_IN_BSWAP16:
9811 case BUILT_IN_BSWAP32:
9812 case BUILT_IN_BSWAP64:
9813 case BUILT_IN_BSWAP128:
9814 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
9815 if (target)
9816 return target;
9817 break;
9819 CASE_INT_FN (BUILT_IN_FFS):
9820 target = expand_builtin_unop (target_mode, exp, target,
9821 subtarget, ffs_optab);
9822 if (target)
9823 return target;
9824 break;
9826 CASE_INT_FN (BUILT_IN_CLZ):
9827 target = expand_builtin_unop (target_mode, exp, target,
9828 subtarget, clz_optab);
9829 if (target)
9830 return target;
9831 break;
9833 CASE_INT_FN (BUILT_IN_CTZ):
9834 target = expand_builtin_unop (target_mode, exp, target,
9835 subtarget, ctz_optab);
9836 if (target)
9837 return target;
9838 break;
9840 CASE_INT_FN (BUILT_IN_CLRSB):
9841 target = expand_builtin_unop (target_mode, exp, target,
9842 subtarget, clrsb_optab);
9843 if (target)
9844 return target;
9845 break;
9847 CASE_INT_FN (BUILT_IN_POPCOUNT):
9848 target = expand_builtin_unop (target_mode, exp, target,
9849 subtarget, popcount_optab);
9850 if (target)
9851 return target;
9852 break;
9854 CASE_INT_FN (BUILT_IN_PARITY):
9855 target = expand_builtin_unop (target_mode, exp, target,
9856 subtarget, parity_optab);
9857 if (target)
9858 return target;
9859 break;
9861 case BUILT_IN_STRLEN:
9862 target = expand_builtin_strlen (exp, target, target_mode);
9863 if (target)
9864 return target;
9865 break;
9867 case BUILT_IN_STRNLEN:
9868 target = expand_builtin_strnlen (exp, target, target_mode);
9869 if (target)
9870 return target;
9871 break;
9873 case BUILT_IN_STRCAT:
9874 target = expand_builtin_strcat (exp);
9875 if (target)
9876 return target;
9877 break;
9879 case BUILT_IN_GETTEXT:
9880 case BUILT_IN_PUTS:
9881 case BUILT_IN_PUTS_UNLOCKED:
9882 case BUILT_IN_STRDUP:
9883 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9884 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9885 break;
9887 case BUILT_IN_INDEX:
9888 case BUILT_IN_RINDEX:
9889 case BUILT_IN_STRCHR:
9890 case BUILT_IN_STRRCHR:
9891 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9892 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9893 break;
9895 case BUILT_IN_FPUTS:
9896 case BUILT_IN_FPUTS_UNLOCKED:
9897 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9898 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9899 break;
9901 case BUILT_IN_STRNDUP:
9902 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9903 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
9904 break;
9906 case BUILT_IN_STRCASECMP:
9907 case BUILT_IN_STRPBRK:
9908 case BUILT_IN_STRSPN:
9909 case BUILT_IN_STRCSPN:
9910 case BUILT_IN_STRSTR:
9911 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9913 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9914 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
9916 break;
9918 case BUILT_IN_STRCPY:
9919 target = expand_builtin_strcpy (exp, target);
9920 if (target)
9921 return target;
9922 break;
9924 case BUILT_IN_STRNCAT:
9925 target = expand_builtin_strncat (exp, target);
9926 if (target)
9927 return target;
9928 break;
9930 case BUILT_IN_STRNCPY:
9931 target = expand_builtin_strncpy (exp, target);
9932 if (target)
9933 return target;
9934 break;
9936 case BUILT_IN_STPCPY:
9937 target = expand_builtin_stpcpy (exp, target, mode);
9938 if (target)
9939 return target;
9940 break;
9942 case BUILT_IN_STPNCPY:
9943 target = expand_builtin_stpncpy (exp, target);
9944 if (target)
9945 return target;
9946 break;
9948 case BUILT_IN_MEMCHR:
9949 target = expand_builtin_memchr (exp, target);
9950 if (target)
9951 return target;
9952 break;
9954 case BUILT_IN_MEMCPY:
9955 target = expand_builtin_memcpy (exp, target);
9956 if (target)
9957 return target;
9958 break;
9960 case BUILT_IN_MEMMOVE:
9961 target = expand_builtin_memmove (exp, target);
9962 if (target)
9963 return target;
9964 break;
9966 case BUILT_IN_MEMPCPY:
9967 target = expand_builtin_mempcpy (exp, target);
9968 if (target)
9969 return target;
9970 break;
9972 case BUILT_IN_MEMSET:
9973 target = expand_builtin_memset (exp, target, mode);
9974 if (target)
9975 return target;
9976 break;
9978 case BUILT_IN_BZERO:
9979 target = expand_builtin_bzero (exp);
9980 if (target)
9981 return target;
9982 break;
9984 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9985 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
9986 when changing it to a strcmp call. */
9987 case BUILT_IN_STRCMP_EQ:
9988 target = expand_builtin_memcmp (exp, target, true);
9989 if (target)
9990 return target;
9992 /* Change this call back to a BUILT_IN_STRCMP. */
9993 TREE_OPERAND (exp, 1)
9994 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9996 /* Delete the last parameter. */
9997 unsigned int i;
9998 vec<tree, va_gc> *arg_vec;
9999 vec_alloc (arg_vec, 2);
10000 for (i = 0; i < 2; i++)
10001 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
10002 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
10003 /* FALLTHROUGH */
10005 case BUILT_IN_STRCMP:
10006 target = expand_builtin_strcmp (exp, target);
10007 if (target)
10008 return target;
10009 break;
10011 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
10012 back to a BUILT_IN_STRNCMP. */
10013 case BUILT_IN_STRNCMP_EQ:
10014 target = expand_builtin_memcmp (exp, target, true);
10015 if (target)
10016 return target;
10018 /* Change it back to a BUILT_IN_STRNCMP. */
10019 TREE_OPERAND (exp, 1)
10020 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
10021 /* FALLTHROUGH */
10023 case BUILT_IN_STRNCMP:
10024 target = expand_builtin_strncmp (exp, target, mode);
10025 if (target)
10026 return target;
10027 break;
10029 case BUILT_IN_BCMP:
10030 case BUILT_IN_MEMCMP:
10031 case BUILT_IN_MEMCMP_EQ:
10032 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
10033 if (target)
10034 return target;
10035 if (fcode == BUILT_IN_MEMCMP_EQ)
10037 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
10038 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
10040 break;
10042 case BUILT_IN_SETJMP:
10043 /* This should have been lowered to the builtins below. */
10044 gcc_unreachable ();
10046 case BUILT_IN_SETJMP_SETUP:
10047 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10048 and the receiver label. */
10049 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10051 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10052 VOIDmode, EXPAND_NORMAL);
10053 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
10054 rtx_insn *label_r = label_rtx (label);
10056 /* This is copied from the handling of non-local gotos. */
10057 expand_builtin_setjmp_setup (buf_addr, label_r);
10058 nonlocal_goto_handler_labels
10059 = gen_rtx_INSN_LIST (VOIDmode, label_r,
10060 nonlocal_goto_handler_labels);
10061 /* ??? Do not let expand_label treat us as such since we would
10062 not want to be both on the list of non-local labels and on
10063 the list of forced labels. */
10064 FORCED_LABEL (label) = 0;
10065 return const0_rtx;
10067 break;
10069 case BUILT_IN_SETJMP_RECEIVER:
10070 /* __builtin_setjmp_receiver is passed the receiver label. */
10071 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10073 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
10074 rtx_insn *label_r = label_rtx (label);
10076 expand_builtin_setjmp_receiver (label_r);
10077 return const0_rtx;
10079 break;
10081 /* __builtin_longjmp is passed a pointer to an array of five words.
10082 It's similar to the C library longjmp function but works with
10083 __builtin_setjmp above. */
10084 case BUILT_IN_LONGJMP:
10085 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10087 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10088 VOIDmode, EXPAND_NORMAL);
10089 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
10091 if (value != const1_rtx)
10093 error ("%<__builtin_longjmp%> second argument must be 1");
10094 return const0_rtx;
10097 expand_builtin_longjmp (buf_addr, value);
10098 return const0_rtx;
10100 break;
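/* The documented pairing of the two builtins handled above (a sketch;
   note the five-word buffer and the mandatory second argument of 1):

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);
*/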
10102 case BUILT_IN_NONLOCAL_GOTO:
10103 target = expand_builtin_nonlocal_goto (exp);
10104 if (target)
10105 return target;
10106 break;
10108 /* This updates the setjmp buffer that is its argument with the value
10109 of the current stack pointer. */
10110 case BUILT_IN_UPDATE_SETJMP_BUF:
10111 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10113 rtx buf_addr
10114 = expand_normal (CALL_EXPR_ARG (exp, 0));
10116 expand_builtin_update_setjmp_buf (buf_addr);
10117 return const0_rtx;
10119 break;
10121 case BUILT_IN_TRAP:
10122 expand_builtin_trap ();
10123 return const0_rtx;
10125 case BUILT_IN_UNREACHABLE:
10126 expand_builtin_unreachable ();
10127 return const0_rtx;
10129 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10130 case BUILT_IN_SIGNBITD32:
10131 case BUILT_IN_SIGNBITD64:
10132 case BUILT_IN_SIGNBITD128:
10133 target = expand_builtin_signbit (exp, target);
10134 if (target)
10135 return target;
10136 break;
10138 /* Various hooks for the DWARF 2 __throw routine. */
10139 case BUILT_IN_UNWIND_INIT:
10140 expand_builtin_unwind_init ();
10141 return const0_rtx;
10142 case BUILT_IN_DWARF_CFA:
10143 return virtual_cfa_rtx;
10144 #ifdef DWARF2_UNWIND_INFO
10145 case BUILT_IN_DWARF_SP_COLUMN:
10146 return expand_builtin_dwarf_sp_column ();
10147 case BUILT_IN_INIT_DWARF_REG_SIZES:
10148 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
10149 return const0_rtx;
10150 #endif
10151 case BUILT_IN_FROB_RETURN_ADDR:
10152 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
10153 case BUILT_IN_EXTRACT_RETURN_ADDR:
10154 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
10155 case BUILT_IN_EH_RETURN:
10156 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10157 CALL_EXPR_ARG (exp, 1));
10158 return const0_rtx;
10159 case BUILT_IN_EH_RETURN_DATA_REGNO:
10160 return expand_builtin_eh_return_data_regno (exp);
10161 case BUILT_IN_EXTEND_POINTER:
10162 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
10163 case BUILT_IN_EH_POINTER:
10164 return expand_builtin_eh_pointer (exp);
10165 case BUILT_IN_EH_FILTER:
10166 return expand_builtin_eh_filter (exp);
10167 case BUILT_IN_EH_COPY_VALUES:
10168 return expand_builtin_eh_copy_values (exp);
10170 case BUILT_IN_VA_START:
10171 return expand_builtin_va_start (exp);
10172 case BUILT_IN_VA_END:
10173 return expand_builtin_va_end (exp);
10174 case BUILT_IN_VA_COPY:
10175 return expand_builtin_va_copy (exp);
10176 case BUILT_IN_EXPECT:
10177 return expand_builtin_expect (exp, target);
10178 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10179 return expand_builtin_expect_with_probability (exp, target);
10180 case BUILT_IN_ASSUME_ALIGNED:
10181 return expand_builtin_assume_aligned (exp, target);
10182 case BUILT_IN_PREFETCH:
10183 expand_builtin_prefetch (exp);
10184 return const0_rtx;
10186 case BUILT_IN_INIT_TRAMPOLINE:
10187 return expand_builtin_init_trampoline (exp, true);
10188 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10189 return expand_builtin_init_trampoline (exp, false);
10190 case BUILT_IN_ADJUST_TRAMPOLINE:
10191 return expand_builtin_adjust_trampoline (exp);
10193 case BUILT_IN_INIT_DESCRIPTOR:
10194 return expand_builtin_init_descriptor (exp);
10195 case BUILT_IN_ADJUST_DESCRIPTOR:
10196 return expand_builtin_adjust_descriptor (exp);
10198 case BUILT_IN_FORK:
10199 case BUILT_IN_EXECL:
10200 case BUILT_IN_EXECV:
10201 case BUILT_IN_EXECLP:
10202 case BUILT_IN_EXECLE:
10203 case BUILT_IN_EXECVP:
10204 case BUILT_IN_EXECVE:
10205 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
10206 if (target)
10207 return target;
10208 break;
10210 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10211 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10212 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10213 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10214 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10215 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
10216 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
10217 if (target)
10218 return target;
10219 break;
10221 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10222 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10223 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10224 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10225 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10226 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
10227 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
10228 if (target)
10229 return target;
10230 break;
10232 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10233 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10234 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10235 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10236 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10237 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
10238 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
10239 if (target)
10240 return target;
10241 break;
10243 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10244 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10245 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10246 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10247 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10248 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
10249 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
10250 if (target)
10251 return target;
10252 break;
10254 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10255 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10256 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10257 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10258 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10259 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
10260 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
10261 if (target)
10262 return target;
10263 break;
10265 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10266 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10267 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10268 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10269 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
10270 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
10271 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
10272 if (target)
10273 return target;
10274 break;
10276 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10277 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10278 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10279 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10280 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10281 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
10282 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
10283 if (target)
10284 return target;
10285 break;
10287 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10288 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10289 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10290 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10291 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10292 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
10293 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
10294 if (target)
10295 return target;
10296 break;
10298 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10299 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10300 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10301 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10302 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10303 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
10304 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
10305 if (target)
10306 return target;
10307 break;
10309 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10310 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10311 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10312 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10313 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10314 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
10315 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
10316 if (target)
10317 return target;
10318 break;
10320 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10321 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10322 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10323 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10324 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10325 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
10326 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
10327 if (target)
10328 return target;
10329 break;
10331 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10332 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10333 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10334 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10335 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10336 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
10337 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
10338 if (target)
10339 return target;
10340 break;
10342 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10343 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10344 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10345 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10346 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
10347 if (mode == VOIDmode)
10348 mode = TYPE_MODE (boolean_type_node);
10349 if (!target || !register_operand (target, mode))
10350 target = gen_reg_rtx (mode);
10352 mode = get_builtin_sync_mode
10353 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
10354 target = expand_builtin_compare_and_swap (mode, exp, true, target);
10355 if (target)
10356 return target;
10357 break;
10359 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10360 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10361 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10362 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10363 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10364 mode = get_builtin_sync_mode
10365 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
10366 target = expand_builtin_compare_and_swap (mode, exp, false, target);
10367 if (target)
10368 return target;
10369 break;
10371 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10372 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10373 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10374 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10375 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10376 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10377 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
10378 if (target)
10379 return target;
10380 break;
10382 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10383 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10384 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10385 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10386 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10387 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10388 expand_builtin_sync_lock_release (mode, exp);
10389 return const0_rtx;
10391 case BUILT_IN_SYNC_SYNCHRONIZE:
10392 expand_builtin_sync_synchronize ();
10393 return const0_rtx;
10395 case BUILT_IN_ATOMIC_EXCHANGE_1:
10396 case BUILT_IN_ATOMIC_EXCHANGE_2:
10397 case BUILT_IN_ATOMIC_EXCHANGE_4:
10398 case BUILT_IN_ATOMIC_EXCHANGE_8:
10399 case BUILT_IN_ATOMIC_EXCHANGE_16:
10400 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10401 target = expand_builtin_atomic_exchange (mode, exp, target);
10402 if (target)
10403 return target;
10404 break;
10406 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10407 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10408 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10409 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10410 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
10412 unsigned int nargs, z;
10413 vec<tree, va_gc> *vec;
10415 mode =
10416 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10417 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10418 if (target)
10419 return target;
10421 /* If this is turned into an external library call, the weak parameter
10422 must be dropped to match the expected parameter list. */
10423 nargs = call_expr_nargs (exp);
10424 vec_alloc (vec, nargs - 1);
10425 for (z = 0; z < 3; z++)
10426 vec->quick_push (CALL_EXPR_ARG (exp, z));
10427 /* Skip the boolean weak parameter. */
10428 for (z = 4; z < 6; z++)
10429 vec->quick_push (CALL_EXPR_ARG (exp, z));
10430 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10431 break;
10434 case BUILT_IN_ATOMIC_LOAD_1:
10435 case BUILT_IN_ATOMIC_LOAD_2:
10436 case BUILT_IN_ATOMIC_LOAD_4:
10437 case BUILT_IN_ATOMIC_LOAD_8:
10438 case BUILT_IN_ATOMIC_LOAD_16:
10439 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10440 target = expand_builtin_atomic_load (mode, exp, target);
10441 if (target)
10442 return target;
10443 break;
10445 case BUILT_IN_ATOMIC_STORE_1:
10446 case BUILT_IN_ATOMIC_STORE_2:
10447 case BUILT_IN_ATOMIC_STORE_4:
10448 case BUILT_IN_ATOMIC_STORE_8:
10449 case BUILT_IN_ATOMIC_STORE_16:
10450 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10451 target = expand_builtin_atomic_store (mode, exp);
10452 if (target)
10453 return const0_rtx;
10454 break;
10456 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10457 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10458 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10459 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10460 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10462 enum built_in_function lib;
10463 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10464 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10465 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10466 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10467 ignore, lib);
10468 if (target)
10469 return target;
10470 break;
10472 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10473 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10474 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10475 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10476 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10478 enum built_in_function lib;
10479 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10480 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10481 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10482 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10483 ignore, lib);
10484 if (target)
10485 return target;
10486 break;
10488 case BUILT_IN_ATOMIC_AND_FETCH_1:
10489 case BUILT_IN_ATOMIC_AND_FETCH_2:
10490 case BUILT_IN_ATOMIC_AND_FETCH_4:
10491 case BUILT_IN_ATOMIC_AND_FETCH_8:
10492 case BUILT_IN_ATOMIC_AND_FETCH_16:
10494 enum built_in_function lib;
10495 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10496 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10497 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10498 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10499 ignore, lib);
10500 if (target)
10501 return target;
10502 break;
10504 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10505 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10506 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10507 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10508 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10510 enum built_in_function lib;
10511 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10512 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10513 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10514 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10515 ignore, lib);
10516 if (target)
10517 return target;
10518 break;
10520 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10521 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10522 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10523 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10524 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10526 enum built_in_function lib;
10527 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10528 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10529 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10530 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10531 ignore, lib);
10532 if (target)
10533 return target;
10534 break;
10536 case BUILT_IN_ATOMIC_OR_FETCH_1:
10537 case BUILT_IN_ATOMIC_OR_FETCH_2:
10538 case BUILT_IN_ATOMIC_OR_FETCH_4:
10539 case BUILT_IN_ATOMIC_OR_FETCH_8:
10540 case BUILT_IN_ATOMIC_OR_FETCH_16:
10542 enum built_in_function lib;
10543 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10544 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10545 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10546 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10547 ignore, lib);
10548 if (target)
10549 return target;
10550 break;
10552 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10553 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10554 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10555 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10556 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10557 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10558 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10559 ignore, BUILT_IN_NONE);
10560 if (target)
10561 return target;
10562 break;
10564 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10565 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10566 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10567 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10568 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10569 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10570 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10571 ignore, BUILT_IN_NONE);
10572 if (target)
10573 return target;
10574 break;
10576 case BUILT_IN_ATOMIC_FETCH_AND_1:
10577 case BUILT_IN_ATOMIC_FETCH_AND_2:
10578 case BUILT_IN_ATOMIC_FETCH_AND_4:
10579 case BUILT_IN_ATOMIC_FETCH_AND_8:
10580 case BUILT_IN_ATOMIC_FETCH_AND_16:
10581 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10582 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10583 ignore, BUILT_IN_NONE);
10584 if (target)
10585 return target;
10586 break;
10588 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10589 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10590 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10591 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10592 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10593 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10594 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10595 ignore, BUILT_IN_NONE);
10596 if (target)
10597 return target;
10598 break;
10600 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10601 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10602 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10603 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10604 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10605 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10606 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10607 ignore, BUILT_IN_NONE);
10608 if (target)
10609 return target;
10610 break;
10612 case BUILT_IN_ATOMIC_FETCH_OR_1:
10613 case BUILT_IN_ATOMIC_FETCH_OR_2:
10614 case BUILT_IN_ATOMIC_FETCH_OR_4:
10615 case BUILT_IN_ATOMIC_FETCH_OR_8:
10616 case BUILT_IN_ATOMIC_FETCH_OR_16:
10617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10618 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10619 ignore, BUILT_IN_NONE);
10620 if (target)
10621 return target;
10622 break;
10624 case BUILT_IN_ATOMIC_TEST_AND_SET:
10625 return expand_builtin_atomic_test_and_set (exp, target);
10627 case BUILT_IN_ATOMIC_CLEAR:
10628 return expand_builtin_atomic_clear (exp);
10630 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10631 return expand_builtin_atomic_always_lock_free (exp);
10633 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10634 target = expand_builtin_atomic_is_lock_free (exp);
10635 if (target)
10636 return target;
10637 break;
10639 case BUILT_IN_ATOMIC_THREAD_FENCE:
10640 expand_builtin_atomic_thread_fence (exp);
10641 return const0_rtx;
10643 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10644 expand_builtin_atomic_signal_fence (exp);
10645 return const0_rtx;
10647 case BUILT_IN_OBJECT_SIZE:
10648 return expand_builtin_object_size (exp);
10650 case BUILT_IN_MEMCPY_CHK:
10651 case BUILT_IN_MEMPCPY_CHK:
10652 case BUILT_IN_MEMMOVE_CHK:
10653 case BUILT_IN_MEMSET_CHK:
10654 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10655 if (target)
10656 return target;
10657 break;
10659 case BUILT_IN_STRCPY_CHK:
10660 case BUILT_IN_STPCPY_CHK:
10661 case BUILT_IN_STRNCPY_CHK:
10662 case BUILT_IN_STPNCPY_CHK:
10663 case BUILT_IN_STRCAT_CHK:
10664 case BUILT_IN_STRNCAT_CHK:
10665 case BUILT_IN_SNPRINTF_CHK:
10666 case BUILT_IN_VSNPRINTF_CHK:
10667 maybe_emit_chk_warning (exp, fcode);
10668 break;
10670 case BUILT_IN_SPRINTF_CHK:
10671 case BUILT_IN_VSPRINTF_CHK:
10672 maybe_emit_sprintf_chk_warning (exp, fcode);
10673 break;
10675 case BUILT_IN_THREAD_POINTER:
10676 return expand_builtin_thread_pointer (exp, target);
10678 case BUILT_IN_SET_THREAD_POINTER:
10679 expand_builtin_set_thread_pointer (exp);
10680 return const0_rtx;
10682 case BUILT_IN_ACC_ON_DEVICE:
10683 /* Do a library call if we failed to expand the builtin when
10684 folding. */
10685 break;
10687 case BUILT_IN_GOACC_PARLEVEL_ID:
10688 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10689 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10691 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10692 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10694 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10695 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10696 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10697 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10698 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10699 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10700 return expand_speculation_safe_value (mode, exp, target, ignore);
10702 default: /* Just do a library call if the builtin is unknown. */
10703 break;
10706 /* The switch statement above can drop through to cause the function
10707 to be called normally. */
10708 return expand_call (exp, target, ignore);
10711 /* Determine whether a tree node represents a call to a built-in
10712 function. If the tree T is a call to a built-in function with
10713 the right number of arguments of the appropriate types, return
10714 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10715 Otherwise the return value is END_BUILTINS. */
10717 enum built_in_function
10718 builtin_mathfn_code (const_tree t)
10720 const_tree fndecl, arg, parmlist;
10721 const_tree argtype, parmtype;
10722 const_call_expr_arg_iterator iter;
10724 if (TREE_CODE (t) != CALL_EXPR)
10725 return END_BUILTINS;
10727 fndecl = get_callee_fndecl (t);
10728 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10729 return END_BUILTINS;
10731 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
10732 init_const_call_expr_arg_iterator (t, &iter);
10733 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
10735 /* If a function doesn't take a variable number of arguments,
10736 the last element in the list will have type `void'. */
10737 parmtype = TREE_VALUE (parmlist);
10738 if (VOID_TYPE_P (parmtype))
10740 if (more_const_call_expr_args_p (&iter))
10741 return END_BUILTINS;
10742 return DECL_FUNCTION_CODE (fndecl);
10745 if (! more_const_call_expr_args_p (&iter))
10746 return END_BUILTINS;
10748 arg = next_const_call_expr_arg (&iter);
10749 argtype = TREE_TYPE (arg);
10751 if (SCALAR_FLOAT_TYPE_P (parmtype))
10753 if (! SCALAR_FLOAT_TYPE_P (argtype))
10754 return END_BUILTINS;
10756 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10758 if (! COMPLEX_FLOAT_TYPE_P (argtype))
10759 return END_BUILTINS;
10761 else if (POINTER_TYPE_P (parmtype))
10763 if (! POINTER_TYPE_P (argtype))
10764 return END_BUILTINS;
10766 else if (INTEGRAL_TYPE_P (parmtype))
10768 if (! INTEGRAL_TYPE_P (argtype))
10769 return END_BUILTINS;
10771 else
10772 return END_BUILTINS;
10775 /* Variable-length argument list. */
10776 return DECL_FUNCTION_CODE (fndecl);
10779 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
10780 evaluate to a constant. */
10782 static tree
10783 fold_builtin_constant_p (tree arg)
10785 /* We return 1 for a numeric type that's known to be a constant
10786 value at compile-time or for an aggregate type that's a
10787 literal constant. */
10788 STRIP_NOPS (arg);
10790 /* If we know this is a constant, return the constant 1. */
10791 if (CONSTANT_CLASS_P (arg)
10792 || (TREE_CODE (arg) == CONSTRUCTOR
10793 && TREE_CONSTANT (arg)))
10794 return integer_one_node;
10795 if (TREE_CODE (arg) == ADDR_EXPR)
10797 tree op = TREE_OPERAND (arg, 0);
10798 if (TREE_CODE (op) == STRING_CST
10799 || (TREE_CODE (op) == ARRAY_REF
10800 && integer_zerop (TREE_OPERAND (op, 1))
10801 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10802 return integer_one_node;
10805 /* If this expression has side effects, show we don't know it to be a
10806 constant. Likewise if it's a pointer or aggregate type since in
10807 those cases we only want literals, since those are only optimized
10808 when generating RTL, not later.
10809 And finally, if we are compiling an initializer, not code, we
10810 need to return a definite result now; there's not going to be any
10811 more optimization done. */
10812 if (TREE_SIDE_EFFECTS (arg)
10813 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10814 || POINTER_TYPE_P (TREE_TYPE (arg))
10815 || cfun == 0
10816 || folding_initializer
10817 || force_folding_builtin_constant_p)
10818 return integer_zero_node;
10820 return NULL_TREE;
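
/* A user-level sketch of the rules above (hypothetical driver, kept
   out of the build with #if 0): literal constants and addresses of
   string literals fold to 1; side effects, pointers and aggregates
   fold to 0.  */
#if 0
#include <stdio.h>

static int global;

int
main (void)
{
  printf ("%d\n", __builtin_constant_p (42));      /* 1: a literal constant */
  printf ("%d\n", __builtin_constant_p ("abc"));   /* 1: address of a STRING_CST */
  printf ("%d\n", __builtin_constant_p (global));  /* 0: a runtime load */
  return 0;
}
#endif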
10823 /* Create builtin_expect or builtin_expect_with_probability
10824 with PRED and EXPECTED as its arguments and return it as a truthvalue.
10825 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
10826 argument; builtin_expect_with_probability instead uses the third argument
10827 as the PROBABILITY value. */
10829 static tree
10830 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10831 tree predictor, tree probability)
10833 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10835 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10836 : BUILT_IN_EXPECT_WITH_PROBABILITY);
10837 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10838 ret_type = TREE_TYPE (TREE_TYPE (fn));
10839 pred_type = TREE_VALUE (arg_types);
10840 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10842 pred = fold_convert_loc (loc, pred_type, pred);
10843 expected = fold_convert_loc (loc, expected_type, expected);
10845 if (probability)
10846 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10847 else
10848 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10849 predictor);
10851 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10852 build_int_cst (ret_type, 0));
10855 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
10856 NULL_TREE if no simplification is possible. */
10858 tree
10859 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10860 tree arg3)
10862 tree inner, fndecl, inner_arg0;
10863 enum tree_code code;
10865 /* Distribute the expected value over short-circuiting operators.
10866 See through the cast from truthvalue_type_node to long. */
10867 inner_arg0 = arg0;
10868 while (CONVERT_EXPR_P (inner_arg0)
10869 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10870 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10871 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10873 /* If this is a builtin_expect within a builtin_expect keep the
10874 inner one. See through a comparison against a constant. It
10875 might have been added to create a truthvalue. */
10876 inner = inner_arg0;
10878 if (COMPARISON_CLASS_P (inner)
10879 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10880 inner = TREE_OPERAND (inner, 0);
10882 if (TREE_CODE (inner) == CALL_EXPR
10883 && (fndecl = get_callee_fndecl (inner))
10884 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10885 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10886 return arg0;
10888 inner = inner_arg0;
10889 code = TREE_CODE (inner);
10890 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10892 tree op0 = TREE_OPERAND (inner, 0);
10893 tree op1 = TREE_OPERAND (inner, 1);
10894 arg1 = save_expr (arg1);
10896 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10897 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10898 inner = build2 (code, TREE_TYPE (inner), op0, op1);
10900 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10903 /* If the argument isn't invariant then there's nothing else we can do. */
10904 if (!TREE_CONSTANT (inner_arg0))
10905 return NULL_TREE;
10907 /* If we expect that a comparison against the argument will fold to
10908 a constant, return the constant. In practice, this means a true
10909 constant or the address of a non-weak symbol. */
10910 inner = inner_arg0;
10911 STRIP_NOPS (inner);
10912 if (TREE_CODE (inner) == ADDR_EXPR)
10916 inner = TREE_OPERAND (inner, 0);
10918 while (TREE_CODE (inner) == COMPONENT_REF
10919 || TREE_CODE (inner) == ARRAY_REF);
10920 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10921 return NULL_TREE;
10924 /* Otherwise, ARG0 already has the proper type for the return value. */
10925 return arg0;
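
/* A sketch of the short-circuit distribution above, at the source
   level; the function is hypothetical and the conversions GCC inserts
   are omitted.  */
#if 0
long
expect_both (long a, long b)
{
  /* __builtin_expect (a && b, 1) is rewritten roughly as:  */
  return (__builtin_expect (a != 0, 1) != 0)
         && (__builtin_expect (b != 0, 1) != 0);
}
#endif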
10928 /* Fold a call to __builtin_classify_type with argument ARG. */
10930 static tree
10931 fold_builtin_classify_type (tree arg)
10933 if (arg == 0)
10934 return build_int_cst (integer_type_node, no_type_class);
10936 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10939 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10940 ARG. */
10942 static tree
10943 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10945 if (!validate_arg (arg, POINTER_TYPE))
10946 return NULL_TREE;
10947 else
10949 c_strlen_data lendata = { };
10950 tree len = c_strlen (arg, 0, &lendata);
10952 if (len)
10953 return fold_convert_loc (loc, type, len);
10955 if (!lendata.decl)
10956 c_strlen (arg, 1, &lendata);
10958 if (lendata.decl)
10960 if (EXPR_HAS_LOCATION (arg))
10961 loc = EXPR_LOCATION (arg);
10962 else if (loc == UNKNOWN_LOCATION)
10963 loc = input_location;
10964 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
10967 return NULL_TREE;
10971 /* Fold a call to __builtin_inf or __builtin_huge_val. */
10973 static tree
10974 fold_builtin_inf (location_t loc, tree type, int warn)
10976 REAL_VALUE_TYPE real;
10978 /* __builtin_inff is intended to be usable to define INFINITY on all
10979 targets. If an infinity is not available, INFINITY expands "to a
10980 positive constant of type float that overflows at translation
10981 time", footnote "In this case, using INFINITY will violate the
10982 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10983 Thus we pedwarn to ensure this constraint violation is
10984 diagnosed. */
10985 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
10986 pedwarn (loc, 0, "target format does not support infinity");
10988 real_inf (&real);
10989 return build_real (type, real);
10992 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
10993 NULL_TREE if no simplification can be made. */
10995 static tree
10996 fold_builtin_sincos (location_t loc,
10997 tree arg0, tree arg1, tree arg2)
10999 tree type;
11000 tree fndecl, call = NULL_TREE;
11002 if (!validate_arg (arg0, REAL_TYPE)
11003 || !validate_arg (arg1, POINTER_TYPE)
11004 || !validate_arg (arg2, POINTER_TYPE))
11005 return NULL_TREE;
11007 type = TREE_TYPE (arg0);
11009 /* Calculate the result when the argument is a constant. */
11010 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
11011 if (fn == END_BUILTINS)
11012 return NULL_TREE;
11014 /* Canonicalize sincos to cexpi. */
11015 if (TREE_CODE (arg0) == REAL_CST)
11017 tree complex_type = build_complex_type (type);
11018 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
11020 if (!call)
11022 if (!targetm.libc_has_function (function_c99_math_complex, type)
11023 || !builtin_decl_implicit_p (fn))
11024 return NULL_TREE;
11025 fndecl = builtin_decl_explicit (fn);
11026 call = build_call_expr_loc (loc, fndecl, 1, arg0);
11027 call = builtin_save_expr (call);
11030 tree ptype = build_pointer_type (type);
11031 arg1 = fold_convert (ptype, arg1);
11032 arg2 = fold_convert (ptype, arg2);
11033 return build2 (COMPOUND_EXPR, void_type_node,
11034 build2 (MODIFY_EXPR, void_type_node,
11035 build_fold_indirect_ref_loc (loc, arg1),
11036 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
11037 build2 (MODIFY_EXPR, void_type_node,
11038 build_fold_indirect_ref_loc (loc, arg2),
11039 fold_build1_loc (loc, REALPART_EXPR, type, call)));
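
/* What the canonicalization above means for users: sincos (x, &s, &c)
   stores the imaginary and real parts of cexpi (x).  A hypothetical
   check against the ISO C equivalent cexp (I * x); sincos itself is a
   GNU extension.  */
#if 0
#define _GNU_SOURCE
#include <math.h>
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double s, c;
  sincos (1.0, &s, &c);
  double complex z = cexp (I * 1.0);   /* cexpi (1.0) */
  printf ("%g %g\n", s - cimag (z), c - creal (z));   /* both ~0 */
  return 0;
}
#endif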
11042 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
11043 Return NULL_TREE if no simplification can be made. */
11045 static tree
11046 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11048 if (!validate_arg (arg1, POINTER_TYPE)
11049 || !validate_arg (arg2, POINTER_TYPE)
11050 || !validate_arg (len, INTEGER_TYPE))
11051 return NULL_TREE;
11053 /* If the LEN parameter is zero, return zero. */
11054 if (integer_zerop (len))
11055 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11056 arg1, arg2);
11058 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11059 if (operand_equal_p (arg1, arg2, 0))
11060 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11062 /* If len parameter is one, return an expression corresponding to
11063 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
11064 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11066 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11067 tree cst_uchar_ptr_node
11068 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11070 tree ind1
11071 = fold_convert_loc (loc, integer_type_node,
11072 build1 (INDIRECT_REF, cst_uchar_node,
11073 fold_convert_loc (loc,
11074 cst_uchar_ptr_node,
11075 arg1)));
11076 tree ind2
11077 = fold_convert_loc (loc, integer_type_node,
11078 build1 (INDIRECT_REF, cst_uchar_node,
11079 fold_convert_loc (loc,
11080 cst_uchar_ptr_node,
11081 arg2)));
11082 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11085 return NULL_TREE;
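
/* Source-level equivalent of the length-1 memcmp fold above
   (hypothetical wrapper): */
#if 0
static int
memcmp1_folded (const void *a, const void *b)
{
  /* memcmp (a, b, 1) folds to a single byte subtraction:  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif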
11088 /* Fold a call to builtin isascii with argument ARG. */
11090 static tree
11091 fold_builtin_isascii (location_t loc, tree arg)
11093 if (!validate_arg (arg, INTEGER_TYPE))
11094 return NULL_TREE;
11095 else
11097 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
11098 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11099 build_int_cst (integer_type_node,
11100 ~ (unsigned HOST_WIDE_INT) 0x7f));
11101 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11102 arg, integer_zero_node);
11106 /* Fold a call to builtin toascii with argument ARG. */
11108 static tree
11109 fold_builtin_toascii (location_t loc, tree arg)
11111 if (!validate_arg (arg, INTEGER_TYPE))
11112 return NULL_TREE;
11114 /* Transform toascii(c) -> (c & 0x7f). */
11115 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11116 build_int_cst (integer_type_node, 0x7f));
11119 /* Fold a call to builtin isdigit with argument ARG. */
11121 static tree
11122 fold_builtin_isdigit (location_t loc, tree arg)
11124 if (!validate_arg (arg, INTEGER_TYPE))
11125 return NULL_TREE;
11126 else
11128 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
11129 /* According to the C standard, isdigit is unaffected by locale.
11130 However, it definitely is affected by the target character set. */
11131 unsigned HOST_WIDE_INT target_digit0
11132 = lang_hooks.to_target_charset ('0');
11134 if (target_digit0 == 0)
11135 return NULL_TREE;
11137 arg = fold_convert_loc (loc, unsigned_type_node, arg);
11138 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11139 build_int_cst (unsigned_type_node, target_digit0));
11140 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11141 build_int_cst (unsigned_type_node, 9));
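
/* The three character-class folds above written out as plain C,
   assuming an ASCII-compatible target character set (hypothetical
   helpers): */
#if 0
static int is_ascii_folded (int c) { return (c & ~0x7f) == 0; }
static int to_ascii_folded (int c) { return c & 0x7f; }
static int is_digit_folded (int c) { return (unsigned) c - '0' <= 9; }
#endif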
11145 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
11147 static tree
11148 fold_builtin_fabs (location_t loc, tree arg, tree type)
11150 if (!validate_arg (arg, REAL_TYPE))
11151 return NULL_TREE;
11153 arg = fold_convert_loc (loc, type, arg);
11154 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11157 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
11159 static tree
11160 fold_builtin_abs (location_t loc, tree arg, tree type)
11162 if (!validate_arg (arg, INTEGER_TYPE))
11163 return NULL_TREE;
11165 arg = fold_convert_loc (loc, type, arg);
11166 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11169 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11171 static tree
11172 fold_builtin_carg (location_t loc, tree arg, tree type)
11174 if (validate_arg (arg, COMPLEX_TYPE)
11175 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11177 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11179 if (atan2_fn)
11181 tree new_arg = builtin_save_expr (arg);
11182 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11183 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11184 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11188 return NULL_TREE;
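
/* The carg fold above in ISO C form (hypothetical helper): carg (z)
   becomes atan2 of the parts, so no libm carg call remains.  */
#if 0
#include <complex.h>
#include <math.h>

static double
carg_folded (double complex z)
{
  return atan2 (cimag (z), creal (z));
}
#endif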
11191 /* Fold a call to builtin frexp, we can assume the base is 2. */
11193 static tree
11194 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11196 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11197 return NULL_TREE;
11199 STRIP_NOPS (arg0);
11201 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11202 return NULL_TREE;
11204 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11206 /* Proceed if a valid pointer type was passed in. */
11207 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11209 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11210 tree frac, exp;
11212 switch (value->cl)
11214 case rvc_zero:
11215 /* For +-0, return (*exp = 0, +-0). */
11216 exp = integer_zero_node;
11217 frac = arg0;
11218 break;
11219 case rvc_nan:
11220 case rvc_inf:
11221 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
11222 return omit_one_operand_loc (loc, rettype, arg0, arg1);
11223 case rvc_normal:
11225 /* Since the frexp function always expects base 2, and in
11226 GCC normalized significands are already in the range
11227 [0.5, 1.0), we have exactly what frexp wants. */
11228 REAL_VALUE_TYPE frac_rvt = *value;
11229 SET_REAL_EXP (&frac_rvt, 0);
11230 frac = build_real (rettype, frac_rvt);
11231 exp = build_int_cst (integer_type_node, REAL_EXP (value));
11233 break;
11234 default:
11235 gcc_unreachable ();
11238 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
11239 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11240 TREE_SIDE_EFFECTS (arg1) = 1;
11241 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11244 return NULL_TREE;
11247 /* Fold a call to builtin modf. */
11249 static tree
11250 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11252 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11253 return NULL_TREE;
11255 STRIP_NOPS (arg0);
11257 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11258 return NULL_TREE;
11260 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11262 /* Proceed if a valid pointer type was passed in. */
11263 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11265 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11266 REAL_VALUE_TYPE trunc, frac;
11268 switch (value->cl)
11270 case rvc_nan:
11271 case rvc_zero:
11272 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
11273 trunc = frac = *value;
11274 break;
11275 case rvc_inf:
11276 /* For +-Inf, return (*arg1 = arg0, +-0). */
11277 frac = dconst0;
11278 frac.sign = value->sign;
11279 trunc = *value;
11280 break;
11281 case rvc_normal:
11282 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
11283 real_trunc (&trunc, VOIDmode, value);
11284 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11285 /* If the original number was negative and already
11286 integral, then the fractional part is -0.0. */
11287 if (value->sign && frac.cl == rvc_zero)
11288 frac.sign = value->sign;
11289 break;
11292 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11293 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11294 build_real (rettype, trunc));
11295 TREE_SIDE_EFFECTS (arg1) = 1;
11296 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11297 build_real (rettype, frac));
11300 return NULL_TREE;
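
/* Observable effect of the constant-argument frexp and modf folds
   above (hypothetical driver): */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int e;
  double ip;
  double f = frexp (6.0, &e);   /* 6.0 == 0.75 * 2^3, so f == 0.75, e == 3 */
  double r = modf (2.5, &ip);   /* r == 0.5, ip == 2.0 */
  printf ("%g %d %g %g\n", f, e, r, ip);
  return 0;
}
#endif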
11303 /* Given a location LOC, an interclass builtin function decl FNDECL
11304 and its single argument ARG, return a folded expression computing
11305 the same, or NULL_TREE if we either couldn't or didn't want to fold
11306 (the latter happens if there's an RTL instruction available). */
11308 static tree
11309 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11311 machine_mode mode;
11313 if (!validate_arg (arg, REAL_TYPE))
11314 return NULL_TREE;
11316 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11317 return NULL_TREE;
11319 mode = TYPE_MODE (TREE_TYPE (arg));
11321 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11323 /* If there is no optab, try generic code. */
11324 switch (DECL_FUNCTION_CODE (fndecl))
11326 tree result;
11328 CASE_FLT_FN (BUILT_IN_ISINF):
11330 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
11331 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11332 tree type = TREE_TYPE (arg);
11333 REAL_VALUE_TYPE r;
11334 char buf[128];
11336 if (is_ibm_extended)
11338 /* NaN and Inf are encoded in the high-order double value
11339 only. The low-order value is not significant. */
11340 type = double_type_node;
11341 mode = DFmode;
11342 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11344 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11345 real_from_string (&r, buf);
11346 result = build_call_expr (isgr_fn, 2,
11347 fold_build1_loc (loc, ABS_EXPR, type, arg),
11348 build_real (type, r));
11349 return result;
11351 CASE_FLT_FN (BUILT_IN_FINITE):
11352 case BUILT_IN_ISFINITE:
11354 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
11355 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11356 tree type = TREE_TYPE (arg);
11357 REAL_VALUE_TYPE r;
11358 char buf[128];
11360 if (is_ibm_extended)
11362 /* NaN and Inf are encoded in the high-order double value
11363 only. The low-order value is not significant. */
11364 type = double_type_node;
11365 mode = DFmode;
11366 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11368 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11369 real_from_string (&r, buf);
11370 result = build_call_expr (isle_fn, 2,
11371 fold_build1_loc (loc, ABS_EXPR, type, arg),
11372 build_real (type, r));
11373 /*result = fold_build2_loc (loc, UNGT_EXPR,
11374 TREE_TYPE (TREE_TYPE (fndecl)),
11375 fold_build1_loc (loc, ABS_EXPR, type, arg),
11376 build_real (type, r));
11377 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11378 TREE_TYPE (TREE_TYPE (fndecl)),
11379 result);*/
11380 return result;
11382 case BUILT_IN_ISNORMAL:
11384 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11385 islessequal(fabs(x),DBL_MAX). */
11386 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11387 tree type = TREE_TYPE (arg);
11388 tree orig_arg, max_exp, min_exp;
11389 machine_mode orig_mode = mode;
11390 REAL_VALUE_TYPE rmax, rmin;
11391 char buf[128];
11393 orig_arg = arg = builtin_save_expr (arg);
11394 if (is_ibm_extended)
11396 /* Use double to test the normal range of IBM extended
11397 precision. Emin for IBM extended precision is
11398 different to emin for IEEE double, being 53 higher
11399 since the low double exponent is at least 53 lower
11400 than the high double exponent. */
11401 type = double_type_node;
11402 mode = DFmode;
11403 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11405 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11407 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11408 real_from_string (&rmax, buf);
11409 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11410 real_from_string (&rmin, buf);
11411 max_exp = build_real (type, rmax);
11412 min_exp = build_real (type, rmin);
11414 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11415 if (is_ibm_extended)
11417 /* Testing the high end of the range is done just using
11418 the high double, using the same test as isfinite().
11419 For the subnormal end of the range we first test the
11420 high double, then if its magnitude is equal to the
11421 limit of 0x1p-969, we test whether the low double is
11422 non-zero and opposite sign to the high double. */
11423 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11424 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11425 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11426 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11427 arg, min_exp);
11428 tree as_complex = build1 (VIEW_CONVERT_EXPR,
11429 complex_double_type_node, orig_arg);
11430 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11431 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11432 tree zero = build_real (type, dconst0);
11433 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11434 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11435 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11436 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11437 fold_build3 (COND_EXPR,
11438 integer_type_node,
11439 hilt, logt, lolt));
11440 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11441 eq_min, ok_lo);
11442 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11443 gt_min, eq_min);
11445 else
11447 tree const isge_fn
11448 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11449 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11451 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11452 max_exp, min_exp);
11453 return result;
11455 default:
11456 break;
11459 return NULL_TREE;
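
/* The generic expansions above, written for double; the IBM extended
   (composite mode) path is omitted and the helper names are
   hypothetical.  */
#if 0
#include <math.h>
#include <float.h>

static int
isinf_folded (double x)
{
  /* isinf (x) -> isgreater (fabs (x), DBL_MAX).  */
  return isgreater (fabs (x), DBL_MAX);
}

static int
isfinite_folded (double x)
{
  /* isfinite (x) -> islessequal (fabs (x), DBL_MAX).  */
  return islessequal (fabs (x), DBL_MAX);
}

static int
isnormal_folded (double x)
{
  /* isnormal (x) -> isgreaterequal (fabs (x), DBL_MIN)
     & islessequal (fabs (x), DBL_MAX); DBL_MIN is 0x1p-1022,
     the 0x1p<emin-1> constant built above.  */
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}
#endif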
11462 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11463 ARG is the argument for the call. */
11465 static tree
11466 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11468 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11470 if (!validate_arg (arg, REAL_TYPE))
11471 return NULL_TREE;
11473 switch (builtin_index)
11475 case BUILT_IN_ISINF:
11476 if (tree_expr_infinite_p (arg))
11477 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11478 if (!tree_expr_maybe_infinite_p (arg))
11479 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11480 return NULL_TREE;
11482 case BUILT_IN_ISINF_SIGN:
11484 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11485 /* In a boolean context, GCC will fold the inner COND_EXPR to
11486 1. So e.g. "if (isinf_sign(x))" would be folded to just
11487 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11488 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11489 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11490 tree tmp = NULL_TREE;
11492 arg = builtin_save_expr (arg);
11494 if (signbit_fn && isinf_fn)
11496 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11497 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11499 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11500 signbit_call, integer_zero_node);
11501 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11502 isinf_call, integer_zero_node);
11504 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11505 integer_minus_one_node, integer_one_node);
11506 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11507 isinf_call, tmp,
11508 integer_zero_node);
11511 return tmp;
11514 case BUILT_IN_ISFINITE:
11515 if (tree_expr_finite_p (arg))
11516 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11517 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11518 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11519 return NULL_TREE;
11521 case BUILT_IN_ISNAN:
11522 if (tree_expr_nan_p (arg))
11523 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11524 if (!tree_expr_maybe_nan_p (arg))
11525 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11528 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11529 if (is_ibm_extended)
11531 /* NaN and Inf are encoded in the high-order double value
11532 only. The low-order value is not significant. */
11533 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11536 arg = builtin_save_expr (arg);
11537 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11539 default:
11540 gcc_unreachable ();
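
/* The isinf_sign fold above as C, matching the comment in the code
   (hypothetical helper): */
#if 0
#include <math.h>

static int
isinf_sign_folded (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif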
11544 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11545 This builtin will generate code to return the appropriate floating
11546 point classification depending on the value of the floating point
11547 number passed in. The possible return values must be supplied as
11548 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11549 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
11550 one floating point argument which is "type generic". */
11552 static tree
11553 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11555 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11556 arg, type, res, tmp;
11557 machine_mode mode;
11558 REAL_VALUE_TYPE r;
11559 char buf[128];
11561 /* Verify the required arguments in the original call. */
11562 if (nargs != 6
11563 || !validate_arg (args[0], INTEGER_TYPE)
11564 || !validate_arg (args[1], INTEGER_TYPE)
11565 || !validate_arg (args[2], INTEGER_TYPE)
11566 || !validate_arg (args[3], INTEGER_TYPE)
11567 || !validate_arg (args[4], INTEGER_TYPE)
11568 || !validate_arg (args[5], REAL_TYPE))
11569 return NULL_TREE;
11571 fp_nan = args[0];
11572 fp_infinite = args[1];
11573 fp_normal = args[2];
11574 fp_subnormal = args[3];
11575 fp_zero = args[4];
11576 arg = args[5];
11577 type = TREE_TYPE (arg);
11578 mode = TYPE_MODE (type);
11579 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11581 /* fpclassify(x) ->
11582 isnan(x) ? FP_NAN :
11583 (fabs(x) == Inf ? FP_INFINITE :
11584 (fabs(x) >= DBL_MIN ? FP_NORMAL :
11585 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
11587 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11588 build_real (type, dconst0));
11589 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11590 tmp, fp_zero, fp_subnormal);
11592 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11593 real_from_string (&r, buf);
11594 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11595 arg, build_real (type, r));
11596 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11598 if (tree_expr_maybe_infinite_p (arg))
11600 real_inf (&r);
11601 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11602 build_real (type, r));
11603 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11604 fp_infinite, res);
11607 if (tree_expr_maybe_nan_p (arg))
11609 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11610 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11613 return res;
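
/* The nested COND_EXPR chain built above, as C for double; DBL_MIN
   stands in for the mode-dependent 0x1p<emin-1> constant and the
   helper is hypothetical.  */
#if 0
#include <math.h>
#include <float.h>

static int
fpclassify_folded (double x, int fp_nan, int fp_infinite, int fp_normal,
                   int fp_subnormal, int fp_zero)
{
  double a = fabs (x);
  return isnan (x) ? fp_nan
         : a == INFINITY ? fp_infinite
         : a >= DBL_MIN ? fp_normal
         : a == 0 ? fp_zero : fp_subnormal;
}
#endif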
11616 /* Fold a call to an unordered comparison function such as
11617 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
11618 being called and ARG0 and ARG1 are the arguments for the call.
11619 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11620 the opposite of the desired result. UNORDERED_CODE is used
11621 for modes that can hold NaNs and ORDERED_CODE is used for
11622 the rest. */
11624 static tree
11625 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11626 enum tree_code unordered_code,
11627 enum tree_code ordered_code)
11629 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11630 enum tree_code code;
11631 tree type0, type1;
11632 enum tree_code code0, code1;
11633 tree cmp_type = NULL_TREE;
11635 type0 = TREE_TYPE (arg0);
11636 type1 = TREE_TYPE (arg1);
11638 code0 = TREE_CODE (type0);
11639 code1 = TREE_CODE (type1);
11641 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11642 /* Choose the wider of two real types. */
11643 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11644 ? type0 : type1;
11645 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11646 cmp_type = type0;
11647 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11648 cmp_type = type1;
11650 arg0 = fold_convert_loc (loc, cmp_type, arg0);
11651 arg1 = fold_convert_loc (loc, cmp_type, arg1);
11653 if (unordered_code == UNORDERED_EXPR)
11655 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11656 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11657 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11658 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11659 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11662 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11663 ? unordered_code : ordered_code;
11664 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11665 fold_build2_loc (loc, code, type, arg0, arg1));
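
/* What the rewrite above amounts to for, e.g., isgreater: the result
   is the negation of a quiet UNLE comparison.  A hypothetical
   equivalent, where short-circuiting keeps the raw > away from NaN
   operands: */
#if 0
#include <math.h>

static int
isgreater_folded (double x, double y)
{
  return !isunordered (x, y) && x > y;
}
#endif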
11668 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11669 arithmetics if it can never overflow, or into internal functions that
11670 return both result of arithmetics and overflowed boolean flag in
11671 a complex integer result, or some other check for overflow.
11672 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11673 checking part of that. */
11675 static tree
11676 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11677 tree arg0, tree arg1, tree arg2)
11679 enum internal_fn ifn = IFN_LAST;
11680 /* The code of the expression corresponding to the built-in. */
11681 enum tree_code opcode = ERROR_MARK;
11682 bool ovf_only = false;
11684 switch (fcode)
11686 case BUILT_IN_ADD_OVERFLOW_P:
11687 ovf_only = true;
11688 /* FALLTHRU */
11689 case BUILT_IN_ADD_OVERFLOW:
11690 case BUILT_IN_SADD_OVERFLOW:
11691 case BUILT_IN_SADDL_OVERFLOW:
11692 case BUILT_IN_SADDLL_OVERFLOW:
11693 case BUILT_IN_UADD_OVERFLOW:
11694 case BUILT_IN_UADDL_OVERFLOW:
11695 case BUILT_IN_UADDLL_OVERFLOW:
11696 opcode = PLUS_EXPR;
11697 ifn = IFN_ADD_OVERFLOW;
11698 break;
11699 case BUILT_IN_SUB_OVERFLOW_P:
11700 ovf_only = true;
11701 /* FALLTHRU */
11702 case BUILT_IN_SUB_OVERFLOW:
11703 case BUILT_IN_SSUB_OVERFLOW:
11704 case BUILT_IN_SSUBL_OVERFLOW:
11705 case BUILT_IN_SSUBLL_OVERFLOW:
11706 case BUILT_IN_USUB_OVERFLOW:
11707 case BUILT_IN_USUBL_OVERFLOW:
11708 case BUILT_IN_USUBLL_OVERFLOW:
11709 opcode = MINUS_EXPR;
11710 ifn = IFN_SUB_OVERFLOW;
11711 break;
11712 case BUILT_IN_MUL_OVERFLOW_P:
11713 ovf_only = true;
11714 /* FALLTHRU */
11715 case BUILT_IN_MUL_OVERFLOW:
11716 case BUILT_IN_SMUL_OVERFLOW:
11717 case BUILT_IN_SMULL_OVERFLOW:
11718 case BUILT_IN_SMULLL_OVERFLOW:
11719 case BUILT_IN_UMUL_OVERFLOW:
11720 case BUILT_IN_UMULL_OVERFLOW:
11721 case BUILT_IN_UMULLL_OVERFLOW:
11722 opcode = MULT_EXPR;
11723 ifn = IFN_MUL_OVERFLOW;
11724 break;
11725 default:
11726 gcc_unreachable ();
11729 /* For the "generic" overloads, the first two arguments can have different
11730 types and the last argument determines the target type to use to check
11731 for overflow. The arguments of the other overloads all have the same
11732 type. */
11733 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
11735 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
11736 arguments are constant, attempt to fold the built-in call into a constant
11737 expression indicating whether or not it detected an overflow. */
11738 if (ovf_only
11739 && TREE_CODE (arg0) == INTEGER_CST
11740 && TREE_CODE (arg1) == INTEGER_CST)
11741 /* Perform the computation in the target type and check for overflow. */
11742 return omit_one_operand_loc (loc, boolean_type_node,
11743 arith_overflowed_p (opcode, type, arg0, arg1)
11744 ? boolean_true_node : boolean_false_node,
11745 arg2);
11747 tree intres, ovfres;
11748 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
11750 intres = fold_binary_loc (loc, opcode, type,
11751 fold_convert_loc (loc, type, arg0),
11752 fold_convert_loc (loc, type, arg1));
11753 if (TREE_OVERFLOW (intres))
11754 intres = drop_tree_overflow (intres);
11755 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
11756 ? boolean_true_node : boolean_false_node);
11758 else
11760 tree ctype = build_complex_type (type);
11761 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
11762 arg0, arg1);
11763 tree tgt = save_expr (call);
11764 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
11765 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
11766 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
11769 if (ovf_only)
11770 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
11772 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
11773 tree store
11774 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
11775 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
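
/* Observable behavior of the folds above for constant operands
   (hypothetical driver): the result and overflow flag are computed at
   compile time, and the _p variant uses its third argument only to
   select the type checked against.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int r;
  _Bool ovf = __builtin_add_overflow (2000000000, 2000000000, &r);
  _Bool p = __builtin_add_overflow_p (2000000000, 2000000000, (int) 0);
  printf ("%d %d\n", (int) ovf, (int) p);   /* 1 1: the sum overflows int */
  return 0;
}
#endif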
11778 /* Fold a call to __builtin_FILE to a constant string. */
11780 static inline tree
11781 fold_builtin_FILE (location_t loc)
11783 if (const char *fname = LOCATION_FILE (loc))
11785 /* The documentation says this builtin is equivalent to the preprocessor
11786 __FILE__ macro so it appears appropriate to use the same file prefix
11787 mappings. */
11788 fname = remap_macro_filename (fname);
11789 return build_string_literal (strlen (fname) + 1, fname);
11792 return build_string_literal (1, "");
11795 /* Fold a call to __builtin_FUNCTION to a constant string. */
11797 static inline tree
11798 fold_builtin_FUNCTION ()
11800 const char *name = "";
11802 if (current_function_decl)
11803 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11805 return build_string_literal (strlen (name) + 1, name);
11808 /* Fold a call to __builtin_LINE to an integer constant. */
11810 static inline tree
11811 fold_builtin_LINE (location_t loc, tree type)
11813 return build_int_cst (type, LOCATION_LINE (loc));
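
/* The three folds above yield constants describing the call site
   (hypothetical driver): */
#if 0
#include <stdio.h>

int
main (void)
{
  printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
          __builtin_FUNCTION ());
  return 0;
}
#endif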
11816 /* Fold a call to built-in function FNDECL with 0 arguments.
11817 This function returns NULL_TREE if no simplification was possible. */
11819 static tree
11820 fold_builtin_0 (location_t loc, tree fndecl)
11822 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11823 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11824 switch (fcode)
11826 case BUILT_IN_FILE:
11827 return fold_builtin_FILE (loc);
11829 case BUILT_IN_FUNCTION:
11830 return fold_builtin_FUNCTION ();
11832 case BUILT_IN_LINE:
11833 return fold_builtin_LINE (loc, type);
11835 CASE_FLT_FN (BUILT_IN_INF):
11836 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11837 case BUILT_IN_INFD32:
11838 case BUILT_IN_INFD64:
11839 case BUILT_IN_INFD128:
11840 return fold_builtin_inf (loc, type, true);
11842 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11843 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11844 return fold_builtin_inf (loc, type, false);
11846 case BUILT_IN_CLASSIFY_TYPE:
11847 return fold_builtin_classify_type (NULL_TREE);
11849 default:
11850 break;
11852 return NULL_TREE;
11855 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11856 This function returns NULL_TREE if no simplification was possible. */
11858 static tree
11859 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11862 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11864 if (TREE_CODE (arg0) == ERROR_MARK)
11865 return NULL_TREE;
11867 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11868 return ret;
11870 switch (fcode)
11872 case BUILT_IN_CONSTANT_P:
11874 tree val = fold_builtin_constant_p (arg0);
11876 /* Gimplification will pull the CALL_EXPR for the builtin out of
11877 an if condition. When not optimizing, we'll not CSE it back.
11878 To avoid link-error type regressions, return false now. */
11879 if (!val && !optimize)
11880 val = integer_zero_node;
11882 return val;
11885 case BUILT_IN_CLASSIFY_TYPE:
11886 return fold_builtin_classify_type (arg0);
11888 case BUILT_IN_STRLEN:
11889 return fold_builtin_strlen (loc, expr, type, arg0);
11891 CASE_FLT_FN (BUILT_IN_FABS):
11892 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11893 case BUILT_IN_FABSD32:
11894 case BUILT_IN_FABSD64:
11895 case BUILT_IN_FABSD128:
11896 return fold_builtin_fabs (loc, arg0, type);
11898 case BUILT_IN_ABS:
11899 case BUILT_IN_LABS:
11900 case BUILT_IN_LLABS:
11901 case BUILT_IN_IMAXABS:
11902 return fold_builtin_abs (loc, arg0, type);
11904 CASE_FLT_FN (BUILT_IN_CONJ):
11905 if (validate_arg (arg0, COMPLEX_TYPE)
11906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11907 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11908 break;
11910 CASE_FLT_FN (BUILT_IN_CREAL):
11911 if (validate_arg (arg0, COMPLEX_TYPE)
11912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11913 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11914 break;
11916 CASE_FLT_FN (BUILT_IN_CIMAG):
11917 if (validate_arg (arg0, COMPLEX_TYPE)
11918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11919 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11920 break;
11922 CASE_FLT_FN (BUILT_IN_CARG):
11923 return fold_builtin_carg (loc, arg0, type);
11925 case BUILT_IN_ISASCII:
11926 return fold_builtin_isascii (loc, arg0);
11928 case BUILT_IN_TOASCII:
11929 return fold_builtin_toascii (loc, arg0);
11931 case BUILT_IN_ISDIGIT:
11932 return fold_builtin_isdigit (loc, arg0);
11934 CASE_FLT_FN (BUILT_IN_FINITE):
11935 case BUILT_IN_FINITED32:
11936 case BUILT_IN_FINITED64:
11937 case BUILT_IN_FINITED128:
11938 case BUILT_IN_ISFINITE:
11940 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11941 if (ret)
11942 return ret;
11943 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11946 CASE_FLT_FN (BUILT_IN_ISINF):
11947 case BUILT_IN_ISINFD32:
11948 case BUILT_IN_ISINFD64:
11949 case BUILT_IN_ISINFD128:
11951 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11952 if (ret)
11953 return ret;
11954 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11957 case BUILT_IN_ISNORMAL:
11958 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11960 case BUILT_IN_ISINF_SIGN:
11961 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
11963 CASE_FLT_FN (BUILT_IN_ISNAN):
11964 case BUILT_IN_ISNAND32:
11965 case BUILT_IN_ISNAND64:
11966 case BUILT_IN_ISNAND128:
11967 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
11969 case BUILT_IN_FREE:
11970 if (integer_zerop (arg0))
11971 return build_empty_stmt (loc);
11972 break;
11974 default:
11975 break;
11978 return NULL_TREE;
11982 /* Folds a call EXPR (which may be null) to built-in function FNDECL
11983 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
11984 if no simplification was possible. */
11986 static tree
11987 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
11989 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11990 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11992 if (TREE_CODE (arg0) == ERROR_MARK
11993 || TREE_CODE (arg1) == ERROR_MARK)
11994 return NULL_TREE;
11996 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
11997 return ret;
11999 switch (fcode)
12001 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
12002 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
12003 if (validate_arg (arg0, REAL_TYPE)
12004 && validate_arg (arg1, POINTER_TYPE))
12005 return do_mpfr_lgamma_r (arg0, arg1, type);
12006 break;
12008 CASE_FLT_FN (BUILT_IN_FREXP):
12009 return fold_builtin_frexp (loc, arg0, arg1, type);
12011 CASE_FLT_FN (BUILT_IN_MODF):
12012 return fold_builtin_modf (loc, arg0, arg1, type);
12014 case BUILT_IN_STRSPN:
12015 return fold_builtin_strspn (loc, expr, arg0, arg1);
12017 case BUILT_IN_STRCSPN:
12018 return fold_builtin_strcspn (loc, expr, arg0, arg1);
12020 case BUILT_IN_STRPBRK:
12021 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
12023 case BUILT_IN_EXPECT:
12024 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
12026 case BUILT_IN_ISGREATER:
12027 return fold_builtin_unordered_cmp (loc, fndecl,
12028 arg0, arg1, UNLE_EXPR, LE_EXPR);
12029 case BUILT_IN_ISGREATEREQUAL:
12030 return fold_builtin_unordered_cmp (loc, fndecl,
12031 arg0, arg1, UNLT_EXPR, LT_EXPR);
12032 case BUILT_IN_ISLESS:
12033 return fold_builtin_unordered_cmp (loc, fndecl,
12034 arg0, arg1, UNGE_EXPR, GE_EXPR);
12035 case BUILT_IN_ISLESSEQUAL:
12036 return fold_builtin_unordered_cmp (loc, fndecl,
12037 arg0, arg1, UNGT_EXPR, GT_EXPR);
12038 case BUILT_IN_ISLESSGREATER:
12039 return fold_builtin_unordered_cmp (loc, fndecl,
12040 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
12041 case BUILT_IN_ISUNORDERED:
12042 return fold_builtin_unordered_cmp (loc, fndecl,
12043 arg0, arg1, UNORDERED_EXPR,
12044 NOP_EXPR);
12046 /* We do the folding for va_start in the expander. */
12047 case BUILT_IN_VA_START:
12048 break;
12050 case BUILT_IN_OBJECT_SIZE:
12051 return fold_builtin_object_size (arg0, arg1);
12053 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12054 return fold_builtin_atomic_always_lock_free (arg0, arg1);
12056 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12057 return fold_builtin_atomic_is_lock_free (arg0, arg1);
12059 default:
12060 break;
12062 return NULL_TREE;
12065 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12066 and ARG2.
12067 This function returns NULL_TREE if no simplification was possible. */
12069 static tree
12070 fold_builtin_3 (location_t loc, tree fndecl,
12071 tree arg0, tree arg1, tree arg2)
12073 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12074 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12076 if (TREE_CODE (arg0) == ERROR_MARK
12077 || TREE_CODE (arg1) == ERROR_MARK
12078 || TREE_CODE (arg2) == ERROR_MARK)
12079 return NULL_TREE;
12081 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12082 arg0, arg1, arg2))
12083 return ret;
12085 switch (fcode)
12088 CASE_FLT_FN (BUILT_IN_SINCOS):
12089 return fold_builtin_sincos (loc, arg0, arg1, arg2);
12091 CASE_FLT_FN (BUILT_IN_REMQUO):
12092 if (validate_arg (arg0, REAL_TYPE)
12093 && validate_arg (arg1, REAL_TYPE)
12094 && validate_arg (arg2, POINTER_TYPE))
12095 return do_mpfr_remquo (arg0, arg1, arg2);
12096 break;
12098 case BUILT_IN_MEMCMP:
12099 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12101 case BUILT_IN_EXPECT:
12102 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12104 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12105 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12107 case BUILT_IN_ADD_OVERFLOW:
12108 case BUILT_IN_SUB_OVERFLOW:
12109 case BUILT_IN_MUL_OVERFLOW:
12110 case BUILT_IN_ADD_OVERFLOW_P:
12111 case BUILT_IN_SUB_OVERFLOW_P:
12112 case BUILT_IN_MUL_OVERFLOW_P:
12113 case BUILT_IN_SADD_OVERFLOW:
12114 case BUILT_IN_SADDL_OVERFLOW:
12115 case BUILT_IN_SADDLL_OVERFLOW:
12116 case BUILT_IN_SSUB_OVERFLOW:
12117 case BUILT_IN_SSUBL_OVERFLOW:
12118 case BUILT_IN_SSUBLL_OVERFLOW:
12119 case BUILT_IN_SMUL_OVERFLOW:
12120 case BUILT_IN_SMULL_OVERFLOW:
12121 case BUILT_IN_SMULLL_OVERFLOW:
12122 case BUILT_IN_UADD_OVERFLOW:
12123 case BUILT_IN_UADDL_OVERFLOW:
12124 case BUILT_IN_UADDLL_OVERFLOW:
12125 case BUILT_IN_USUB_OVERFLOW:
12126 case BUILT_IN_USUBL_OVERFLOW:
12127 case BUILT_IN_USUBLL_OVERFLOW:
12128 case BUILT_IN_UMUL_OVERFLOW:
12129 case BUILT_IN_UMULL_OVERFLOW:
12130 case BUILT_IN_UMULLL_OVERFLOW:
12131 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12133 default:
12134 break;
12136 return NULL_TREE;
12139 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12140 ARGS is an array of NARGS arguments. IGNORE is true if the result
12141 of the function call is ignored. This function returns NULL_TREE
12142 if no simplification was possible. */
12144 static tree
12145 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12146 int nargs, bool)
12148 tree ret = NULL_TREE;
12150 switch (nargs)
12152 case 0:
12153 ret = fold_builtin_0 (loc, fndecl);
12154 break;
12155 case 1:
12156 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12157 break;
12158 case 2:
12159 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12160 break;
12161 case 3:
12162 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12163 break;
12164 default:
12165 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12166 break;
12168 if (ret)
12170 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12171 SET_EXPR_LOCATION (ret, loc);
12172 return ret;
12174 return NULL_TREE;
12177 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12178 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12179 of arguments in ARGS to be omitted. OLDNARGS is the number of
12180 elements in ARGS. */
12182 static tree
12183 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12184 int skip, tree fndecl, int n, va_list newargs)
12186 int nargs = oldnargs - skip + n;
12187 tree *buffer;
12189 if (n > 0)
12191 int i, j;
12193 buffer = XALLOCAVEC (tree, nargs);
12194 for (i = 0; i < n; i++)
12195 buffer[i] = va_arg (newargs, tree);
12196 for (j = skip; j < oldnargs; j++, i++)
12197 buffer[i] = args[j];
12199 else
12200 buffer = args + skip;
12202 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12205 /* Return true if FNDECL shouldn't be folded right now.
12206 If a built-in function has an inline attribute always_inline
12207 wrapper, defer folding it until after always_inline functions have
12208 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12209 might not be performed. */
12211 bool
12212 avoid_folding_inline_builtin (tree fndecl)
12214 return (DECL_DECLARED_INLINE_P (fndecl)
12215 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12216 && cfun
12217 && !cfun->always_inline_functions_inlined
12218 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12221 /* A wrapper function for builtin folding that prevents warnings for
12222 "statement without effect" and the like, caused by removing the
12223 call node earlier than the warning is generated. */
12225 tree
12226 fold_call_expr (location_t loc, tree exp, bool ignore)
12228 tree ret = NULL_TREE;
12229 tree fndecl = get_callee_fndecl (exp);
12230 if (fndecl && fndecl_built_in_p (fndecl)
12231 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12232 yet. Defer folding until we see all the arguments
12233 (after inlining). */
12234 && !CALL_EXPR_VA_ARG_PACK (exp))
12236 int nargs = call_expr_nargs (exp);
12238 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12239 instead the last argument is __builtin_va_arg_pack (). Defer folding
12240 even in that case, until arguments are finalized. */
12241 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12243 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12244 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12245 return NULL_TREE;
12248 if (avoid_folding_inline_builtin (fndecl))
12249 return NULL_TREE;
12251 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12252 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12253 CALL_EXPR_ARGP (exp), ignore);
12254 else
12256 tree *args = CALL_EXPR_ARGP (exp);
12257 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12258 if (ret)
12259 return ret;
12262 return NULL_TREE;
12265 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12266 N arguments are passed in the array ARGARRAY. Return a folded
12267 expression or NULL_TREE if no simplification was possible. */
12269 tree
12270 fold_builtin_call_array (location_t loc, tree,
12271 tree fn,
12272 int n,
12273 tree *argarray)
12275 if (TREE_CODE (fn) != ADDR_EXPR)
12276 return NULL_TREE;
12278 tree fndecl = TREE_OPERAND (fn, 0);
12279 if (TREE_CODE (fndecl) == FUNCTION_DECL
12280 && fndecl_built_in_p (fndecl))
12282 /* If last argument is __builtin_va_arg_pack (), arguments to this
12283 function are not finalized yet. Defer folding until they are. */
12284 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12286 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12287 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12288 return NULL_TREE;
12290 if (avoid_folding_inline_builtin (fndecl))
12291 return NULL_TREE;
12292 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12293 return targetm.fold_builtin (fndecl, n, argarray, false);
12294 else
12295 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12298 return NULL_TREE;
12301 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12302 along with N new arguments specified as the "..." parameters. SKIP
12303 is the number of arguments in EXP to be omitted. This function is used
12304 to do varargs-to-varargs transformations. */
12306 static tree
12307 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12309 va_list ap;
12310 tree t;
12312 va_start (ap, n);
12313 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12314 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12315 va_end (ap);
12317 return t;
12320 /* Validate a single argument ARG against a tree code CODE representing
12321 a type. Return true when argument is valid. */
12323 static bool
12324 validate_arg (const_tree arg, enum tree_code code)
12326 if (!arg)
12327 return false;
12328 else if (code == POINTER_TYPE)
12329 return POINTER_TYPE_P (TREE_TYPE (arg));
12330 else if (code == INTEGER_TYPE)
12331 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12332 return code == TREE_CODE (TREE_TYPE (arg));
12335 /* This function validates the types of a function call argument list
12336 against a specified list of tree_codes. If the last specifier is a 0,
12337 that represents an ellipsis, otherwise the last specifier must be a
12338 VOID_TYPE.
12340 This is the GIMPLE version of validate_arglist. Eventually we want to
12341 completely convert builtins.c to work from GIMPLEs and the tree based
12342 validate_arglist will then be removed. */
12344 bool
12345 validate_gimple_arglist (const gcall *call, ...)
12347 enum tree_code code;
12348 bool res = 0;
12349 va_list ap;
12350 const_tree arg;
12351 size_t i;
12353 va_start (ap, call);
12354 i = 0;
12358 code = (enum tree_code) va_arg (ap, int);
12359 switch (code)
12361 case 0:
12362 /* This signifies an ellipsis; any further arguments are all OK. */
12363 res = true;
12364 goto end;
12365 case VOID_TYPE:
12366 /* This signifies an endlink; if no arguments remain, return
12367 true, otherwise return false. */
12368 res = (i == gimple_call_num_args (call));
12369 goto end;
12370 default:
12371 /* If no parameters remain or the parameter's code does not
12372 match the specified code, return false. Otherwise continue
12373 checking any remaining arguments. */
12374 arg = gimple_call_arg (call, i++);
12375 if (!validate_arg (arg, code))
12376 goto end;
12377 break;
12380 while (1);
12382 /* We need gotos here since we can only have one VA_CLOSE in a
12383 function. */
12384 end: ;
12385 va_end (ap);
12387 return res;
12390 /* Default target-specific builtin expander that does nothing. */
12392 rtx
12393 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12394 rtx target ATTRIBUTE_UNUSED,
12395 rtx subtarget ATTRIBUTE_UNUSED,
12396 machine_mode mode ATTRIBUTE_UNUSED,
12397 int ignore ATTRIBUTE_UNUSED)
12399 return NULL_RTX;
12402 /* Returns true if EXP represents data that would potentially reside
12403 in a readonly section. */
12405 bool
12406 readonly_data_expr (tree exp)
12408 STRIP_NOPS (exp);
12410 if (TREE_CODE (exp) != ADDR_EXPR)
12411 return false;
12413 exp = get_base_address (TREE_OPERAND (exp, 0));
12414 if (!exp)
12415 return false;
12417 /* Make sure we call decl_readonly_section only for trees it
12418 can handle (since it returns true for everything it doesn't
12419 understand). */
12420 if (TREE_CODE (exp) == STRING_CST
12421 || TREE_CODE (exp) == CONSTRUCTOR
12422 || (VAR_P (exp) && TREE_STATIC (exp)))
12423 return decl_readonly_section (exp, 0);
12424 else
12425 return false;
12428 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12429 to the call, and TYPE is its return type.
12431 Return NULL_TREE if no simplification was possible, otherwise return the
12432 simplified form of the call as a tree.
12434 The simplified form may be a constant or other expression which
12435 computes the same value, but in a more efficient manner (including
12436 calls to other builtin functions).
12438 The call may contain arguments which need to be evaluated, but
12439 which are not useful to determine the result of the call. In
12440 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12441 COMPOUND_EXPR will be an argument which must be evaluated.
12442 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12443 COMPOUND_EXPR in the chain will contain the tree for the simplified
12444 form of the builtin function call. */
12446 static tree
12447 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12449 if (!validate_arg (s1, POINTER_TYPE)
12450 || !validate_arg (s2, POINTER_TYPE))
12451 return NULL_TREE;
12453 tree fn;
12454 const char *p1, *p2;
12456 p2 = c_getstr (s2);
12457 if (p2 == NULL)
12458 return NULL_TREE;
12460 p1 = c_getstr (s1);
12461 if (p1 != NULL)
12463 const char *r = strpbrk (p1, p2);
12464 tree tem;
12466 if (r == NULL)
12467 return build_int_cst (TREE_TYPE (s1), 0);
12469 /* Return an offset into the constant string argument. */
12470 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12471 return fold_convert_loc (loc, type, tem);
12474 if (p2[0] == '\0')
12475 /* strpbrk(x, "") == NULL.
12476 Evaluate and ignore s1 in case it had side-effects. */
12477 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12479 if (p2[1] != '\0')
12480 return NULL_TREE; /* Really call strpbrk. */
12482 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12483 if (!fn)
12484 return NULL_TREE;
12486 /* New argument list transforming strpbrk(s1, s2) to
12487 strchr(s1, s2[0]). */
12488 return build_call_expr_loc (loc, fn, 2, s1,
12489 build_int_cst (integer_type_node, p2[0]));
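
/* The two cheap strpbrk cases above (hypothetical wrappers): */
#if 0
#include <string.h>

static char *
strpbrk_empty (char *s)
{
  return strpbrk (s, "");    /* folds to NULL; s is still evaluated */
}

static char *
strpbrk_single (char *s)
{
  return strpbrk (s, "x");   /* folds to strchr (s, 'x') */
}
#endif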
12492 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12493 to the call.
12495 Return NULL_TREE if no simplification was possible, otherwise return the
12496 simplified form of the call as a tree.
12498 The simplified form may be a constant or other expression which
12499 computes the same value, but in a more efficient manner (including
12500 calls to other builtin functions).
12502 The call may contain arguments which need to be evaluated, but
12503 which are not useful to determine the result of the call. In
12504 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12505 COMPOUND_EXPR will be an argument which must be evaluated.
12506 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12507 COMPOUND_EXPR in the chain will contain the tree for the simplified
12508 form of the builtin function call. */
12510 static tree
12511 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12513 if (!validate_arg (s1, POINTER_TYPE)
12514 || !validate_arg (s2, POINTER_TYPE))
12515 return NULL_TREE;
12517 if (!check_nul_terminated_array (expr, s1)
12518 || !check_nul_terminated_array (expr, s2))
12519 return NULL_TREE;
12521 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12523 /* If either argument is "", the result is zero. */
12524 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12525 /* Evaluate and ignore both arguments in case either one has
12526 side-effects. */
12527 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12528 s1, s2);
12529 return NULL_TREE;
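/* Illustrative example (an editor's sketch): since no character can
   be spanned against an empty set,
     strspn ("", s)  and  strspn (s, "")
   both fold to (size_t) 0, with the other argument still evaluated
   for side effects via omit_two_operands_loc.  */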
12532 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12533 to the call.
12535 Return NULL_TREE if no simplification was possible, otherwise return the
12536 simplified form of the call as a tree.
12538 The simplified form may be a constant or other expression which
12539 computes the same value, but in a more efficient manner (including
12540 calls to other builtin functions).
12542 The call may contain arguments which need to be evaluated, but
12543 which are not useful to determine the result of the call. In
12544 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12545 COMPOUND_EXPR will be an argument which must be evaluated.
12546 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12547 COMPOUND_EXPR in the chain will contain the tree for the simplified
12548 form of the builtin function call. */
12550 static tree
12551 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12553 if (!validate_arg (s1, POINTER_TYPE)
12554 || !validate_arg (s2, POINTER_TYPE))
12555 return NULL_TREE;
12557 if (!check_nul_terminated_array (expr, s1)
12558 || !check_nul_terminated_array (expr, s2))
12559 return NULL_TREE;
12561 /* If the first argument is "", the result is zero. */
12562 const char *p1 = c_getstr (s1);
12563 if (p1 && *p1 == '\0')
12565 /* Evaluate and ignore argument s2 in case it has
12566 side-effects. */
12567 return omit_one_operand_loc (loc, size_type_node,
12568 size_zero_node, s2);
12571 /* If the second argument is "", return __builtin_strlen(s1). */
12572 const char *p2 = c_getstr (s2);
12573 if (p2 && *p2 == '\0')
12575 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12577 /* If the replacement _DECL isn't initialized, don't do the
12578 transformation. */
12579 if (!fn)
12580 return NULL_TREE;
12582 return build_call_expr_loc (loc, fn, 1, s1);
12584 return NULL_TREE;
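/* Illustrative examples (an editor's sketch):
     strcspn ("", s)  folds to (size_t) 0, with S still evaluated;
     strcspn (s, "")  folds to strlen (s), because with an empty
     reject set the initial segment runs to the terminating nul.  */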
12587 /* Fold the next_arg or va_start call EXP. Return true if an error was
12588 produced, false otherwise. This is done so that we don't output the
12589 error or warning twice or three times. */
12591 bool
12592 fold_builtin_next_arg (tree exp, bool va_start_p)
12594 tree fntype = TREE_TYPE (current_function_decl);
12595 int nargs = call_expr_nargs (exp);
12596 tree arg;
12597 /* There is a good chance the current input_location points inside the
12598 definition of the va_start macro (perhaps on the token for
12599 builtin) in a system header, so warnings will not be emitted.
12600 Use the location in real source code. */
12601 location_t current_location =
12602 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12603 NULL);
12605 if (!stdarg_p (fntype))
12607 error ("%<va_start%> used in function with fixed arguments");
12608 return true;
12611 if (va_start_p)
12613 if (va_start_p && (nargs != 2))
12615 error ("wrong number of arguments to function %<va_start%>");
12616 return true;
12618 arg = CALL_EXPR_ARG (exp, 1);
12620 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12621 once we have checked the arguments and, if needed, issued a warning. */
12622 else
12624 if (nargs == 0)
12626 /* Evidently an out of date version of <stdarg.h>; can't validate
12627 va_start's second argument, but can still work as intended. */
12628 warning_at (current_location,
12629 OPT_Wvarargs,
12630 "%<__builtin_next_arg%> called without an argument");
12631 return true;
12633 else if (nargs > 1)
12635 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12636 return true;
12638 arg = CALL_EXPR_ARG (exp, 0);
12641 if (TREE_CODE (arg) == SSA_NAME
12642 && SSA_NAME_VAR (arg))
12643 arg = SSA_NAME_VAR (arg);
12645 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12646 or __builtin_next_arg (0) the first time we see it, after checking
12647 the arguments and if needed issuing a warning. */
12648 if (!integer_zerop (arg))
12650 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12652 /* Strip off all nops for the sake of the comparison. This
12653 is not quite the same as STRIP_NOPS. It does more.
12654 We must also strip off INDIRECT_REF for C++ reference
12655 parameters. */
12656 while (CONVERT_EXPR_P (arg)
12657 || TREE_CODE (arg) == INDIRECT_REF)
12658 arg = TREE_OPERAND (arg, 0);
12659 if (arg != last_parm)
12661 /* FIXME: Sometimes with the tree optimizers we can get something
12662 other than the last argument even though the user used the last
12663 argument. We just warn and set the arg to be the last
12664 argument so that we will not get wrong code because of
12665 it. */
12666 warning_at (current_location,
12667 OPT_Wvarargs,
12668 "second parameter of %<va_start%> not last named argument");
12671 /* Undefined by C99 7.15.1.4p4 (va_start):
12672 "If the parameter parmN is declared with the register storage
12673 class, with a function or array type, or with a type that is
12674 not compatible with the type that results after application of
12675 the default argument promotions, the behavior is undefined."
12677 else if (DECL_REGISTER (arg))
12679 warning_at (current_location,
12680 OPT_Wvarargs,
12681 "undefined behavior when second parameter of "
12682 "%<va_start%> is declared with %<register%> storage");
12685 /* We want to verify the second parameter just once before the tree
12686 optimizers are run and then avoid keeping it in the tree,
12687 as otherwise we could warn even for correct code like:
12688 void foo (int i, ...)
12689 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12690 if (va_start_p)
12691 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12692 else
12693 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12695 return false;
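/* Example of code the check above diagnoses (an editor's sketch,
   assuming -Wvarargs is enabled):
     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter of va_start
                           // not last named argument
       va_end (ap);
     }
   Passing B, the last named parameter, is the intended usage.  */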
12699 /* Expand a call EXP to __builtin_object_size. */
12701 static rtx
12702 expand_builtin_object_size (tree exp)
12704 tree ost;
12705 int object_size_type;
12706 tree fndecl = get_callee_fndecl (exp);
12708 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12710 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
12711 exp, fndecl);
12712 expand_builtin_trap ();
12713 return const0_rtx;
12716 ost = CALL_EXPR_ARG (exp, 1);
12717 STRIP_NOPS (ost);
12719 if (TREE_CODE (ost) != INTEGER_CST
12720 || tree_int_cst_sgn (ost) < 0
12721 || compare_tree_int (ost, 3) > 0)
12723 error ("%Klast argument of %qD is not integer constant between 0 and 3",
12724 exp, fndecl);
12725 expand_builtin_trap ();
12726 return const0_rtx;
12729 object_size_type = tree_to_shwi (ost);
12731 return object_size_type < 2 ? constm1_rtx : const0_rtx;
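/* Illustrative example (an editor's sketch): when earlier passes
   could not fold the call, this expander emits the documented
   failure values, e.g. for a pointer P to an unknown object
     __builtin_object_size (p, 0)  expands to (size_t) -1
     __builtin_object_size (p, 2)  expands to (size_t) 0.  */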
12734 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12735 FCODE is the BUILT_IN_* to use.
12736 Return NULL_RTX if we failed; the caller should emit a normal call,
12737 otherwise try to get the result in TARGET, if convenient (and in
12738 mode MODE if that's convenient). */
12740 static rtx
12741 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
12742 enum built_in_function fcode)
12744 if (!validate_arglist (exp,
12745 POINTER_TYPE,
12746 fcode == BUILT_IN_MEMSET_CHK
12747 ? INTEGER_TYPE : POINTER_TYPE,
12748 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12749 return NULL_RTX;
12751 tree dest = CALL_EXPR_ARG (exp, 0);
12752 tree src = CALL_EXPR_ARG (exp, 1);
12753 tree len = CALL_EXPR_ARG (exp, 2);
12754 tree size = CALL_EXPR_ARG (exp, 3);
12756 /* FIXME: Set access mode to write only for memset et al. */
12757 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12758 /*srcstr=*/NULL_TREE, size, access_read_write);
12760 if (!tree_fits_uhwi_p (size))
12761 return NULL_RTX;
12763 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12765 /* Avoid transforming the checking call to an ordinary one when
12766 an overflow has been detected or when the call couldn't be
12767 validated because the size is not constant. */
12768 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12769 return NULL_RTX;
12771 tree fn = NULL_TREE;
12772 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12773 mem{cpy,pcpy,move,set} is available. */
12774 switch (fcode)
12776 case BUILT_IN_MEMCPY_CHK:
12777 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12778 break;
12779 case BUILT_IN_MEMPCPY_CHK:
12780 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12781 break;
12782 case BUILT_IN_MEMMOVE_CHK:
12783 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12784 break;
12785 case BUILT_IN_MEMSET_CHK:
12786 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12787 break;
12788 default:
12789 break;
12792 if (! fn)
12793 return NULL_RTX;
12795 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12796 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12797 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12798 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12800 else if (fcode == BUILT_IN_MEMSET_CHK)
12801 return NULL_RTX;
12802 else
12804 unsigned int dest_align = get_pointer_alignment (dest);
12806 /* If DEST is not a pointer type, call the normal function. */
12807 if (dest_align == 0)
12808 return NULL_RTX;
12810 /* If SRC and DEST are the same (and not volatile), do nothing. */
12811 if (operand_equal_p (src, dest, 0))
12813 tree expr;
12815 if (fcode != BUILT_IN_MEMPCPY_CHK)
12817 /* Evaluate and ignore LEN in case it has side-effects. */
12818 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12819 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12822 expr = fold_build_pointer_plus (dest, len);
12823 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12826 /* __memmove_chk special case. */
12827 if (fcode == BUILT_IN_MEMMOVE_CHK)
12829 unsigned int src_align = get_pointer_alignment (src);
12831 if (src_align == 0)
12832 return NULL_RTX;
12834 /* If src is categorized for a readonly section we can use
12835 normal __memcpy_chk. */
12836 if (readonly_data_expr (src))
12838 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12839 if (!fn)
12840 return NULL_RTX;
12841 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12842 dest, src, len, size);
12843 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12844 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12845 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12848 return NULL_RTX;
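/* Illustrative example (an editor's sketch): with a constant length
   and object size the checking call is strength-reduced, e.g.
     __builtin___memcpy_chk (d, s, 8, 16)   -> memcpy (d, s, 8)
   whereas a detected overflow such as
     __builtin___memcpy_chk (d, s, 32, 16)
   is diagnosed by check_access and left for the library so the
   runtime check can still abort.  */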
12852 /* Emit warning if a buffer overflow is detected at compile time. */
12854 static void
12855 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12857 /* The source string. */
12858 tree srcstr = NULL_TREE;
12859 /* The size of the destination object returned by __builtin_object_size. */
12860 tree objsize = NULL_TREE;
12861 /* The string that is being concatenated with (as in __strcat_chk)
12862 or null if it isn't. */
12863 tree catstr = NULL_TREE;
12864 /* The maximum length of the source sequence in a bounded operation
12865 (such as __strncat_chk) or null if the operation isn't bounded
12866 (such as __strcat_chk). */
12867 tree maxread = NULL_TREE;
12868 /* The exact size of the access (such as in __strncpy_chk). */
12869 tree size = NULL_TREE;
12870 /* The access by the function that's checked. Except for snprintf
12871 both writing and reading are checked. */
12872 access_mode mode = access_read_write;
12874 switch (fcode)
12876 case BUILT_IN_STRCPY_CHK:
12877 case BUILT_IN_STPCPY_CHK:
12878 srcstr = CALL_EXPR_ARG (exp, 1);
12879 objsize = CALL_EXPR_ARG (exp, 2);
12880 break;
12882 case BUILT_IN_STRCAT_CHK:
12883 /* For __strcat_chk the warning will be emitted only if overflowing
12884 by at least strlen (dest) + 1 bytes. */
12885 catstr = CALL_EXPR_ARG (exp, 0);
12886 srcstr = CALL_EXPR_ARG (exp, 1);
12887 objsize = CALL_EXPR_ARG (exp, 2);
12888 break;
12890 case BUILT_IN_STRNCAT_CHK:
12891 catstr = CALL_EXPR_ARG (exp, 0);
12892 srcstr = CALL_EXPR_ARG (exp, 1);
12893 maxread = CALL_EXPR_ARG (exp, 2);
12894 objsize = CALL_EXPR_ARG (exp, 3);
12895 break;
12897 case BUILT_IN_STRNCPY_CHK:
12898 case BUILT_IN_STPNCPY_CHK:
12899 srcstr = CALL_EXPR_ARG (exp, 1);
12900 size = CALL_EXPR_ARG (exp, 2);
12901 objsize = CALL_EXPR_ARG (exp, 3);
12902 break;
12904 case BUILT_IN_SNPRINTF_CHK:
12905 case BUILT_IN_VSNPRINTF_CHK:
12906 maxread = CALL_EXPR_ARG (exp, 1);
12907 objsize = CALL_EXPR_ARG (exp, 3);
12908 /* The only checked access is the write to the destination. */
12909 mode = access_write_only;
12910 break;
12911 default:
12912 gcc_unreachable ();
12915 if (catstr && maxread)
12917 /* Check __strncat_chk. There is no way to determine the length
12918 of the string to which the source string is being appended so
12919 just warn when the length of the source string is not known. */
12920 check_strncat_sizes (exp, objsize);
12921 return;
12924 check_access (exp, size, maxread, srcstr, objsize, mode);
12927 /* Emit warning if a buffer overflow is detected at compile time
12928 in __sprintf_chk/__vsprintf_chk calls. */
12930 static void
12931 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12933 tree size, len, fmt;
12934 const char *fmt_str;
12935 int nargs = call_expr_nargs (exp);
12937 /* Verify the required arguments in the original call. */
12939 if (nargs < 4)
12940 return;
12941 size = CALL_EXPR_ARG (exp, 2);
12942 fmt = CALL_EXPR_ARG (exp, 3);
12944 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12945 return;
12947 /* Check whether the format is a literal string constant. */
12948 fmt_str = c_getstr (fmt);
12949 if (fmt_str == NULL)
12950 return;
12952 if (!init_target_chars ())
12953 return;
12955 /* If the format doesn't contain % args or %%, we know its size. */
12956 if (strchr (fmt_str, target_percent) == 0)
12957 len = build_int_cstu (size_type_node, strlen (fmt_str));
12958 /* If the format is "%s" and first ... argument is a string literal,
12959 we know it too. */
12960 else if (fcode == BUILT_IN_SPRINTF_CHK
12961 && strcmp (fmt_str, target_percent_s) == 0)
12963 tree arg;
12965 if (nargs < 5)
12966 return;
12967 arg = CALL_EXPR_ARG (exp, 4);
12968 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12969 return;
12971 len = c_strlen (arg, 1);
12972 if (!len || ! tree_fits_uhwi_p (len))
12973 return;
12975 else
12976 return;
12978 /* Add one for the terminating nul. */
12979 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
12981 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12982 access_write_only);
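/* Example of code the function above warns about (an editor's
   sketch):
     char buf[4];
     __builtin___sprintf_chk (buf, 0, 4, "%s", "abcdef");
   The output needs 6 characters plus the terminating nul, which is
   known at compile time to overflow the 4-byte destination.  */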
12985 /* Return true if FNDECL is an allocation function. Unless
12986 ALL_ALLOC is set, consider only functions that return dynamically
12987 allocated objects. Otherwise return true even for all forms of
12988 alloca (including VLA). */
12990 static bool
12991 fndecl_alloc_p (tree fndecl, bool all_alloc)
12993 if (!fndecl)
12994 return false;
12996 /* A call to operator new isn't recognized as one to a built-in. */
12997 if (DECL_IS_OPERATOR_NEW_P (fndecl))
12998 return true;
13000 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13002 switch (DECL_FUNCTION_CODE (fndecl))
13004 case BUILT_IN_ALLOCA:
13005 case BUILT_IN_ALLOCA_WITH_ALIGN:
13006 return all_alloc;
13007 case BUILT_IN_ALIGNED_ALLOC:
13008 case BUILT_IN_CALLOC:
13009 case BUILT_IN_GOMP_ALLOC:
13010 case BUILT_IN_MALLOC:
13011 case BUILT_IN_REALLOC:
13012 case BUILT_IN_STRDUP:
13013 case BUILT_IN_STRNDUP:
13014 return true;
13015 default:
13016 break;
13020 /* A function is considered an allocation function if it's declared
13021 with attribute malloc with an argument naming its associated
13022 deallocation function. */
13023 tree attrs = DECL_ATTRIBUTES (fndecl);
13024 if (!attrs)
13025 return false;
13027 for (tree allocs = attrs;
13028 (allocs = lookup_attribute ("malloc", allocs));
13029 allocs = TREE_CHAIN (allocs))
13031 tree args = TREE_VALUE (allocs);
13032 if (!args)
13033 continue;
13035 if (TREE_VALUE (args))
13036 return true;
13039 return false;
13042 /* Return true if STMT is a call to an allocation function. A wrapper
13043 around fndecl_alloc_p. */
13045 static bool
13046 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13048 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13051 /* Return the zero-based number corresponding to the argument being
13052 deallocated if EXP is a call to a deallocation function or UINT_MAX
13053 if it isn't. */
13055 static unsigned
13056 call_dealloc_argno (tree exp)
13058 tree fndecl = get_callee_fndecl (exp);
13059 if (!fndecl)
13060 return UINT_MAX;
13062 return fndecl_dealloc_argno (fndecl);
13065 /* Return the zero-based number corresponding to the argument being
13066 deallocated if FNDECL is a deallocation function or UINT_MAX
13067 if it isn't. */
13069 unsigned
13070 fndecl_dealloc_argno (tree fndecl)
13072 /* A call to operator delete isn't recognized as one to a built-in. */
13073 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13074 return 0;
13076 /* TODO: Handle user-defined functions with attribute malloc? Handle
13077 known non-built-ins like fopen? */
13078 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13080 switch (DECL_FUNCTION_CODE (fndecl))
13082 case BUILT_IN_FREE:
13083 case BUILT_IN_REALLOC:
13084 return 0;
13085 default:
13086 break;
13088 return UINT_MAX;
13091 tree attrs = DECL_ATTRIBUTES (fndecl);
13092 if (!attrs)
13093 return UINT_MAX;
13095 for (tree atfree = attrs;
13096 (atfree = lookup_attribute ("*dealloc", atfree));
13097 atfree = TREE_CHAIN (atfree))
13099 tree alloc = TREE_VALUE (atfree);
13100 if (!alloc)
13101 continue;
13103 tree pos = TREE_CHAIN (alloc);
13104 if (!pos)
13105 return 0;
13107 pos = TREE_VALUE (pos);
13108 return TREE_INT_CST_LOW (pos) - 1;
13111 return UINT_MAX;
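/* Illustrative example (an editor's sketch, using the documented
   two-argument form of attribute malloc):
     void release (int, void *);
     void *acquire (size_t)
       __attribute__ ((malloc (release, 2)));
   GCC records an internal "*dealloc" attribute on RELEASE, so
   fndecl_dealloc_argno (release) yields 1, the zero-based index of
   the pointer argument being deallocated.  */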
13114 /* Return true if DELC doesn't refer to an operator delete that's
13115 suitable to call with a pointer returned from the operator new
13116 described by NEWC. */
13118 static bool
13119 new_delete_mismatch_p (const demangle_component &newc,
13120 const demangle_component &delc)
13122 if (newc.type != delc.type)
13123 return true;
13125 switch (newc.type)
13127 case DEMANGLE_COMPONENT_NAME:
13129 int len = newc.u.s_name.len;
13130 const char *news = newc.u.s_name.s;
13131 const char *dels = delc.u.s_name.s;
13132 if (len != delc.u.s_name.len || memcmp (news, dels, len))
13133 return true;
13135 if (news[len] == 'n')
13137 if (news[len + 1] == 'a')
13138 return dels[len] != 'd' || dels[len + 1] != 'a';
13139 if (news[len + 1] == 'w')
13140 return dels[len] != 'd' || dels[len + 1] != 'l';
13142 return false;
13145 case DEMANGLE_COMPONENT_OPERATOR:
13146 /* Operator mismatches are handled above. */
13147 return false;
13149 case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13150 if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13151 return true;
13152 return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13153 *delc.u.s_extended_operator.name);
13155 case DEMANGLE_COMPONENT_FIXED_TYPE:
13156 if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13157 || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13158 return true;
13159 return new_delete_mismatch_p (*newc.u.s_fixed.length,
13160 *delc.u.s_fixed.length);
13162 case DEMANGLE_COMPONENT_CTOR:
13163 if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13164 return true;
13165 return new_delete_mismatch_p (*newc.u.s_ctor.name,
13166 *delc.u.s_ctor.name);
13168 case DEMANGLE_COMPONENT_DTOR:
13169 if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13170 return true;
13171 return new_delete_mismatch_p (*newc.u.s_dtor.name,
13172 *delc.u.s_dtor.name);
13174 case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13176 /* The demangler API provides no better way to compare built-in
13177 types than by comparing their demangled names. */
13178 size_t nsz, dsz;
13179 demangle_component *pnc = const_cast<demangle_component *>(&newc);
13180 demangle_component *pdc = const_cast<demangle_component *>(&delc);
13181 char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13182 char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13183 if (!nts != !dts)
13184 return true;
13185 bool mismatch = nts && strcmp (nts, dts);
13186 free (nts);
13187 free (dts);
13188 return mismatch;
13191 case DEMANGLE_COMPONENT_SUB_STD:
13192 if (newc.u.s_string.len != delc.u.s_string.len)
13193 return true;
13194 return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13195 newc.u.s_string.len);
13197 case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13198 case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13199 return newc.u.s_number.number != delc.u.s_number.number;
13201 case DEMANGLE_COMPONENT_CHARACTER:
13202 return newc.u.s_character.character != delc.u.s_character.character;
13204 case DEMANGLE_COMPONENT_DEFAULT_ARG:
13205 case DEMANGLE_COMPONENT_LAMBDA:
13206 if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13207 return true;
13208 return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13209 *delc.u.s_unary_num.sub);
13210 default:
13211 break;
13214 if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13215 return true;
13217 if (!newc.u.s_binary.left)
13218 return false;
13220 if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13221 || !newc.u.s_binary.right != !delc.u.s_binary.right)
13222 return true;
13224 if (newc.u.s_binary.right)
13225 return new_delete_mismatch_p (*newc.u.s_binary.right,
13226 *delc.u.s_binary.right);
13227 return false;
13230 /* Return true if DELETE_DECL is an operator delete that's not suitable
13231 to call with a pointer returned from NEW_DECL. */
13233 static bool
13234 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13236 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13237 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13239 /* valid_new_delete_pair_p() returns a conservative result (currently
13240 it only handles global operators). A true result is reliable but
13241 a false result doesn't necessarily mean the operators don't match. */
13242 if (valid_new_delete_pair_p (new_name, delete_name))
13243 return false;
13245 /* For anything not handled by valid_new_delete_pair_p() such as member
13246 operators compare the individual demangled components of the mangled
13247 name. */
13248 const char *new_str = IDENTIFIER_POINTER (new_name);
13249 const char *del_str = IDENTIFIER_POINTER (delete_name);
13251 void *np = NULL, *dp = NULL;
13252 demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13253 demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13254 bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13255 free (np);
13256 free (dp);
13257 return mismatch;
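/* Example of a mismatch the predicates above detect (an editor's
   sketch, diagnosed under -Wmismatched-new-delete):
     int *p = new int[4];
     delete p;            // should be: delete [] p;
   The array form of operator new ("na") paired with the singleton
   form of operator delete ("dl") is reported as a mismatch, while
   new/delete and new[]/delete[] pairs are accepted.  */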
13260 /* ALLOC_DECL and DEALLOC_DECL are a pair of allocation and deallocation
13261 functions. Return true if the latter is suitable to deallocate objects
13262 allocated by calls to the former. */
13264 static bool
13265 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13267 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13268 a built-in deallocator. */
13269 enum class alloc_kind_t { none, builtin, user }
13270 alloc_dealloc_kind = alloc_kind_t::none;
13272 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13274 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13275 /* Return true iff both functions are of the same array or
13276 singleton form and false otherwise. */
13277 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13279 /* Return false for deallocation functions that are known not
13280 to match. */
13281 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13282 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13283 return false;
13284 /* Otherwise proceed below to check the deallocation function's
13285 "*dealloc" attributes to look for one that mentions this operator
13286 new. */
13288 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13290 switch (DECL_FUNCTION_CODE (alloc_decl))
13292 case BUILT_IN_ALLOCA:
13293 case BUILT_IN_ALLOCA_WITH_ALIGN:
13294 return false;
13296 case BUILT_IN_ALIGNED_ALLOC:
13297 case BUILT_IN_CALLOC:
13298 case BUILT_IN_GOMP_ALLOC:
13299 case BUILT_IN_MALLOC:
13300 case BUILT_IN_REALLOC:
13301 case BUILT_IN_STRDUP:
13302 case BUILT_IN_STRNDUP:
13303 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13304 return false;
13306 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13307 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13308 return true;
13310 alloc_dealloc_kind = alloc_kind_t::builtin;
13311 break;
13313 default:
13314 break;
13318 /* Set if DEALLOC_DECL both allocates and deallocates. */
13319 alloc_kind_t realloc_kind = alloc_kind_t::none;
13321 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13323 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13324 if (dealloc_code == BUILT_IN_REALLOC)
13325 realloc_kind = alloc_kind_t::builtin;
13327 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13328 (amats = lookup_attribute ("malloc", amats));
13329 amats = TREE_CHAIN (amats))
13331 tree args = TREE_VALUE (amats);
13332 if (!args)
13333 continue;
13335 tree fndecl = TREE_VALUE (args);
13336 if (!fndecl || !DECL_P (fndecl))
13337 continue;
13339 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13340 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13341 return true;
13345 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13346 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13348 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13349 of its associated allocation functions for ALLOC_DECL.
13350 If the corresponding ALLOC_DECL is found they're a matching pair,
13351 otherwise they're not.
13352 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13353 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13354 (ddats = lookup_attribute ("*dealloc", ddats));
13355 ddats = TREE_CHAIN (ddats))
13357 tree args = TREE_VALUE (ddats);
13358 if (!args)
13359 continue;
13361 tree alloc = TREE_VALUE (args);
13362 if (!alloc)
13363 continue;
13365 if (alloc == DECL_NAME (dealloc_decl))
13366 realloc_kind = alloc_kind_t::user;
13368 if (DECL_P (alloc))
13370 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13372 switch (DECL_FUNCTION_CODE (alloc))
13374 case BUILT_IN_ALIGNED_ALLOC:
13375 case BUILT_IN_CALLOC:
13376 case BUILT_IN_GOMP_ALLOC:
13377 case BUILT_IN_MALLOC:
13378 case BUILT_IN_REALLOC:
13379 case BUILT_IN_STRDUP:
13380 case BUILT_IN_STRNDUP:
13381 realloc_dealloc_kind = alloc_kind_t::builtin;
13382 break;
13383 default:
13384 break;
13387 if (!alloc_builtin)
13388 continue;
13390 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13391 continue;
13393 return true;
13396 if (alloc == DECL_NAME (alloc_decl))
13397 return true;
13400 if (realloc_kind == alloc_kind_t::none)
13401 return false;
13403 hash_set<tree> common_deallocs;
13404 /* Special handling for deallocators. Iterate over both the allocator's
13405 and the reallocator's associated deallocator functions looking for
13406 the first one in common. If one is found, the de/reallocator is
13407 a match for the allocator even though the latter isn't directly
13408 associated with the former. This simplifies declarations in system
13409 headers.
13410 With AMATS set to the Allocator's Malloc ATtributes,
13411 and RMATS set to Reallocator's Malloc ATtributes... */
13412 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13413 rmats = DECL_ATTRIBUTES (dealloc_decl);
13414 (amats = lookup_attribute ("malloc", amats))
13415 || (rmats = lookup_attribute ("malloc", rmats));
13416 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13417 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13419 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13420 if (tree adealloc = TREE_VALUE (args))
13422 if (DECL_P (adealloc)
13423 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13425 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13426 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13428 if (realloc_kind == alloc_kind_t::builtin)
13429 return true;
13430 alloc_dealloc_kind = alloc_kind_t::builtin;
13432 continue;
13435 common_deallocs.add (adealloc);
13438 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13439 if (tree ddealloc = TREE_VALUE (args))
13441 if (DECL_P (ddealloc)
13442 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13444 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13445 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13447 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13448 return true;
13449 realloc_dealloc_kind = alloc_kind_t::builtin;
13451 continue;
13454 if (common_deallocs.add (ddealloc))
13455 return true;
13459 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13460 a built-in deallocator. */
13461 return (alloc_dealloc_kind == alloc_kind_t::builtin
13462 && realloc_dealloc_kind == alloc_kind_t::builtin);
13465 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13466 objects allocated by the ALLOC call. */
13468 static bool
13469 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13471 tree alloc_decl = gimple_call_fndecl (alloc);
13472 if (!alloc_decl)
13473 return true;
13475 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13478 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13479 includes a nonzero offset. Such a pointer cannot refer to the beginning
13480 of an allocated object. A negative offset may refer to it only if
13481 the target pointer is unknown. */
13483 static bool
13484 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13486 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13487 return false;
13489 tree dealloc_decl = get_callee_fndecl (exp);
13490 if (!dealloc_decl)
13491 return false;
13493 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13494 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13496 /* A call to a user-defined operator delete with a pointer plus offset
13497 may be valid if it's returned from an unknown function (i.e., one
13498 that's not operator new). */
13499 if (TREE_CODE (aref.ref) == SSA_NAME)
13501 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13502 if (is_gimple_call (def_stmt))
13504 tree alloc_decl = gimple_call_fndecl (def_stmt);
13505 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13506 return false;
13511 char offstr[80];
13512 offstr[0] = '\0';
13513 if (wi::fits_shwi_p (aref.offrng[0]))
13515 if (aref.offrng[0] == aref.offrng[1]
13516 || !wi::fits_shwi_p (aref.offrng[1]))
13517 sprintf (offstr, " %lli",
13518 (long long)aref.offrng[0].to_shwi ());
13519 else
13520 sprintf (offstr, " [%lli, %lli]",
13521 (long long)aref.offrng[0].to_shwi (),
13522 (long long)aref.offrng[1].to_shwi ());
13525 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13526 "%K%qD called on pointer %qE with nonzero offset%s",
13527 exp, dealloc_decl, aref.ref, offstr))
13528 return false;
13530 if (DECL_P (aref.ref))
13531 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13532 else if (TREE_CODE (aref.ref) == SSA_NAME)
13534 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13535 if (is_gimple_call (def_stmt))
13537 location_t def_loc = gimple_location (def_stmt);
13538 tree alloc_decl = gimple_call_fndecl (def_stmt);
13539 if (alloc_decl)
13540 inform (def_loc,
13541 "returned from %qD", alloc_decl);
13542 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13543 inform (def_loc,
13544 "returned from %qT", alloc_fntype);
13545 else
13546 inform (def_loc, "obtained here");
13550 return true;
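/* Example of code the function above warns about (an editor's
   sketch, assuming -Wfree-nonheap-object):
     char *p = (char *) malloc (8);
     free (p + 1);   // warning: free called on pointer with
                     // nonzero offset 1
   A pointer with a known positive offset cannot point at the start
   of an allocated object.  */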
13553 /* Issue a warning if a deallocation function such as free, realloc,
13554 or C++ operator delete is called with an argument not returned by
13555 a matching allocation function such as malloc or the corresponding
13556 form of C++ operator new. */
13558 void
13559 maybe_emit_free_warning (tree exp)
13561 tree fndecl = get_callee_fndecl (exp);
13562 if (!fndecl)
13563 return;
13565 unsigned argno = call_dealloc_argno (exp);
13566 if ((unsigned) call_expr_nargs (exp) <= argno)
13567 return;
13569 tree ptr = CALL_EXPR_ARG (exp, argno);
13570 if (integer_zerop (ptr))
13571 return;
13573 access_ref aref;
13574 if (!compute_objsize (ptr, 0, &aref))
13575 return;
13577 tree ref = aref.ref;
13578 if (integer_zerop (ref))
13579 return;
13581 tree dealloc_decl = get_callee_fndecl (exp);
13582 location_t loc = tree_inlined_location (exp);
13584 if (DECL_P (ref) || EXPR_P (ref))
13586 /* Diagnose freeing a declared object. */
13587 if (aref.ref_declared ()
13588 && warning_at (loc, OPT_Wfree_nonheap_object,
13589 "%K%qD called on unallocated object %qD",
13590 exp, dealloc_decl, ref))
13592 loc = (DECL_P (ref)
13593 ? DECL_SOURCE_LOCATION (ref)
13594 : EXPR_LOCATION (ref));
13595 inform (loc, "declared here");
13596 return;
13599 /* Diagnose freeing a pointer that includes a positive offset.
13600 Such a pointer cannot refer to the beginning of an allocated
13601 object. A negative offset may refer to it. */
13602 if (aref.sizrng[0] != aref.sizrng[1]
13603 && warn_dealloc_offset (loc, exp, aref))
13604 return;
13606 else if (CONSTANT_CLASS_P (ref))
13608 if (warning_at (loc, OPT_Wfree_nonheap_object,
13609 "%K%qD called on a pointer to an unallocated "
13610 "object %qE", exp, dealloc_decl, ref))
13612 if (TREE_CODE (ptr) == SSA_NAME)
13614 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13615 if (is_gimple_assign (def_stmt))
13617 location_t loc = gimple_location (def_stmt);
13618 inform (loc, "assigned here");
13621 return;
13624 else if (TREE_CODE (ref) == SSA_NAME)
13626 /* Also warn if the pointer argument refers to the result
13627 of an allocation call like alloca or VLA. */
13628 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13629 if (is_gimple_call (def_stmt))
13631 bool warned = false;
13632 if (gimple_call_alloc_p (def_stmt))
13634 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13636 if (warn_dealloc_offset (loc, exp, aref))
13637 return;
13639 else
13641 tree alloc_decl = gimple_call_fndecl (def_stmt);
13642 int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13643 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13644 ? OPT_Wmismatched_new_delete
13645 : OPT_Wmismatched_dealloc);
13646 warned = warning_at (loc, opt,
13647 "%K%qD called on pointer returned "
13648 "from a mismatched allocation "
13649 "function", exp, dealloc_decl);
13652 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13653 || gimple_call_builtin_p (def_stmt,
13654 BUILT_IN_ALLOCA_WITH_ALIGN))
13655 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13656 "%K%qD called on pointer to "
13657 "an unallocated object",
13658 exp, dealloc_decl);
13659 else if (warn_dealloc_offset (loc, exp, aref))
13660 return;
13662 if (warned)
13664 tree fndecl = gimple_call_fndecl (def_stmt);
13665 inform (gimple_location (def_stmt),
13666 "returned from %qD", fndecl);
13667 return;
13670 else if (gimple_nop_p (def_stmt))
13672 ref = SSA_NAME_VAR (ref);
13673 /* Diagnose freeing a pointer that includes a positive offset. */
13674 if (TREE_CODE (ref) == PARM_DECL
13675 && !aref.deref
13676 && aref.sizrng[0] != aref.sizrng[1]
13677 && aref.offrng[0] > 0 && aref.offrng[1] > 0
13678 && warn_dealloc_offset (loc, exp, aref))
13679 return;
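/* Examples of calls the function above diagnoses (an editor's
   sketch):
     char buf[8];
     free (buf);                            // unallocated object
     char *q = (char *) __builtin_alloca (8);
     free (q);                              // result of alloca
   Both draw -Wfree-nonheap-object warnings, with a note pointing
   at the declaration or the allocating call.  */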
13684 /* Fold a call to __builtin_object_size with arguments PTR and OST,
13685 if possible. */
13687 static tree
13688 fold_builtin_object_size (tree ptr, tree ost)
13690 unsigned HOST_WIDE_INT bytes;
13691 int object_size_type;
13693 if (!validate_arg (ptr, POINTER_TYPE)
13694 || !validate_arg (ost, INTEGER_TYPE))
13695 return NULL_TREE;
13697 STRIP_NOPS (ost);
13699 if (TREE_CODE (ost) != INTEGER_CST
13700 || tree_int_cst_sgn (ost) < 0
13701 || compare_tree_int (ost, 3) > 0)
13702 return NULL_TREE;
13704 object_size_type = tree_to_shwi (ost);
13706 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
13707 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
13708 and (size_t) 0 for types 2 and 3. */
13709 if (TREE_SIDE_EFFECTS (ptr))
13710 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
13712 if (TREE_CODE (ptr) == ADDR_EXPR)
13714 compute_builtin_object_size (ptr, object_size_type, &bytes);
13715 if (wi::fits_to_tree_p (bytes, size_type_node))
13716 return build_int_cstu (size_type_node, bytes);
13718 else if (TREE_CODE (ptr) == SSA_NAME)
13720 /* If object size is not known yet, delay folding until
13721 later. Maybe subsequent passes will help determining
13722 it. */
13723 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
13724 && wi::fits_to_tree_p (bytes, size_type_node))
13725 return build_int_cstu (size_type_node, bytes);
13728 return NULL_TREE;
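/* Illustrative example (an editor's sketch):
     char buf[10];
     __builtin_object_size (&buf[4], 0)   folds to (size_t) 6
   For an SSA_NAME pointer whose target is not yet known the fold is
   deferred in the hope that later passes can determine it.  */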
13731 /* Builtins with folding operations that operate on "..." arguments
13732 need special handling; we need to store the arguments in a convenient
13733 data structure before attempting any folding. Fortunately there are
13734 only a few builtins that fall into this category. FNDECL is the
13735 function and ARGS is the array of its NARGS arguments. */
13737 static tree
13738 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
13740 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13741 tree ret = NULL_TREE;
13743 switch (fcode)
13745 case BUILT_IN_FPCLASSIFY:
13746 ret = fold_builtin_fpclassify (loc, args, nargs);
13747 break;
13749 default:
13750 break;
13752 if (ret)
13754 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13755 SET_EXPR_LOCATION (ret, loc);
13756 TREE_NO_WARNING (ret) = 1;
13757 return ret;
13759 return NULL_TREE;
13762 /* Initialize format string characters in the target charset. */
13764 bool
13765 init_target_chars (void)
13767 static bool init;
13768 if (!init)
13770 target_newline = lang_hooks.to_target_charset ('\n');
13771 target_percent = lang_hooks.to_target_charset ('%');
13772 target_c = lang_hooks.to_target_charset ('c');
13773 target_s = lang_hooks.to_target_charset ('s');
13774 if (target_newline == 0 || target_percent == 0 || target_c == 0
13775 || target_s == 0)
13776 return false;
13778 target_percent_c[0] = target_percent;
13779 target_percent_c[1] = target_c;
13780 target_percent_c[2] = '\0';
13782 target_percent_s[0] = target_percent;
13783 target_percent_s[1] = target_s;
13784 target_percent_s[2] = '\0';
13786 target_percent_s_newline[0] = target_percent;
13787 target_percent_s_newline[1] = target_s;
13788 target_percent_s_newline[2] = target_newline;
13789 target_percent_s_newline[3] = '\0';
13791 init = true;
13793 return true;
13796 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13797 and no overflow/underflow occurred. INEXACT is true if M was not
13798 exactly calculated. TYPE is the tree type for the result. This
13799 function assumes that the caller cleared the MPFR flags before
13800 calculating M, so that any flag set during the calculation is still
13801 visible on entry here. Return NULL_TREE if any checks fail. */
13803 static tree
13804 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13806 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13807 overflow/underflow occurred. If -frounding-math, proceed iff the
13808 result of calling FUNC was exact. */
13809 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13810 && (!flag_rounding_math || !inexact))
13812 REAL_VALUE_TYPE rr;
13814 real_from_mpfr (&rr, m, type, MPFR_RNDN);
13815 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13816 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13817 but the mpfr_t is not, then we underflowed in the
13818 conversion. */
13819 if (real_isfinite (&rr)
13820 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13822 REAL_VALUE_TYPE rmode;
13824 real_convert (&rmode, TYPE_MODE (type), &rr);
13825 /* Proceed iff the specified mode can hold the value. */
13826 if (real_identical (&rmode, &rr))
13827 return build_real (type, rmode);
13830 return NULL_TREE;
13833 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13834 number and no overflow/underflow occurred. INEXACT is true if M
13835 was not exactly calculated. TYPE is the tree type for the result.
13836 This function assumes that the caller cleared the MPFR flags before
13837 calculating M, so that any flag set during the calculation is still
13838 visible on entry here. Return NULL_TREE if any checks fail; if
13839 FORCE_CONVERT is true, bypass the checks. */
13841 static tree
13842 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13844 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13845 overflow/underflow occurred. If -frounding-math, proceed iff the
13846 result of calling FUNC was exact. */
13847 if (force_convert
13848 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13849 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13850 && (!flag_rounding_math || !inexact)))
13852 REAL_VALUE_TYPE re, im;
13854 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
13855 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
13856 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13857 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13858 but the mpfr_t is not, then we underflowed in the
13859 conversion. */
13860 if (force_convert
13861 || (real_isfinite (&re) && real_isfinite (&im)
13862 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13863 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13865 REAL_VALUE_TYPE re_mode, im_mode;
13867 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13868 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13869 /* Proceed iff the specified mode can hold the value. */
13870 if (force_convert
13871 || (real_identical (&re_mode, &re)
13872 && real_identical (&im_mode, &im)))
13873 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13874 build_real (TREE_TYPE (type), im_mode));
13877 return NULL_TREE;
13880 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13881 the pointer *(ARG_QUO) and return the result. The type is taken
13882 from the type of ARG0 and is used for setting the precision of the
13883 calculation and results. */
13885 static tree
13886 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13888 tree const type = TREE_TYPE (arg0);
13889 tree result = NULL_TREE;
13891 STRIP_NOPS (arg0);
13892 STRIP_NOPS (arg1);
13894 /* To proceed, MPFR must exactly represent the target floating point
13895 format, which only happens when the target base equals two. */
13896 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13897 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13898 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13900 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13901 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13903 if (real_isfinite (ra0) && real_isfinite (ra1))
13905 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13906 const int prec = fmt->p;
13907 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
13908 tree result_rem;
13909 long integer_quo;
13910 mpfr_t m0, m1;
13912 mpfr_inits2 (prec, m0, m1, NULL);
13913 mpfr_from_real (m0, ra0, MPFR_RNDN);
13914 mpfr_from_real (m1, ra1, MPFR_RNDN);
13915 mpfr_clear_flags ();
13916 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13917 /* Remquo is independent of the rounding mode, so pass
13918 inexact=0 to do_mpfr_ckconv(). */
13919 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13920 mpfr_clears (m0, m1, NULL);
13921 if (result_rem)
13923 /* MPFR calculates quo in the host's long so it may
13924 return more bits in quo than the target int can hold
13925 if sizeof(host long) > sizeof(target int). This can
13926 happen even for native compilers in LP64 mode. In
13927 these cases, modulo the quo value with the largest
13928 number that the target int can hold while leaving one
13929 bit for the sign. */
13930 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13931 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13933 /* Dereference the quo pointer argument. */
13934 arg_quo = build_fold_indirect_ref (arg_quo);
13935 /* Proceed iff a valid pointer type was passed in. */
13936 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13938 /* Set the value. */
13939 tree result_quo
13940 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13941 build_int_cst (TREE_TYPE (arg_quo),
13942 integer_quo));
13943 TREE_SIDE_EFFECTS (result_quo) = 1;
13944 /* Combine the quo assignment with the rem. */
13945 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13946 result_quo, result_rem));
13951 return result;
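/* Illustrative example (an editor's sketch): with constant
   arguments such as
     int q;
     double r = __builtin_remquo (5.0, 2.0, &q);
   the call folds to the compound expression (q = 2, 1.0): the
   quotient is stored through the pointer and the remainder becomes
   the value of the call.  */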
13954 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13955 resulting value as a tree with type TYPE. The mpfr precision is
13956 set to the precision of TYPE. We assume that this mpfr function
13957 returns zero if the result could be calculated exactly within the
13958 requested precision. In addition, the integer pointer represented
13959 by ARG_SG will be dereferenced and set to the appropriate signgam
13960 (-1,1) value. */
13962 static tree
13963 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13965 tree result = NULL_TREE;
13967 STRIP_NOPS (arg);
13969 /* To proceed, MPFR must exactly represent the target floating point
13970 format, which only happens when the target base equals two. Also
13971 verify ARG is a constant and that ARG_SG is an int pointer. */
13972 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13973 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13974 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13975 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13977 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13979 /* In addition to NaN and Inf, the argument cannot be zero or a
13980 negative integer. */
13981 if (real_isfinite (ra)
13982 && ra->cl != rvc_zero
13983 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13985 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13986 const int prec = fmt->p;
13987 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
13988 int inexact, sg;
13989 mpfr_t m;
13990 tree result_lg;
13992 mpfr_init2 (m, prec);
13993 mpfr_from_real (m, ra, MPFR_RNDN);
13994 mpfr_clear_flags ();
13995 inexact = mpfr_lgamma (m, &sg, m, rnd);
13996 result_lg = do_mpfr_ckconv (m, type, inexact);
13997 mpfr_clear (m);
13998 if (result_lg)
14000 tree result_sg;
14002 /* Dereference the arg_sg pointer argument. */
14003 arg_sg = build_fold_indirect_ref (arg_sg);
14004 /* Assign the signgam value into *arg_sg. */
14005 result_sg = fold_build2 (MODIFY_EXPR,
14006 TREE_TYPE (arg_sg), arg_sg,
14007 build_int_cst (TREE_TYPE (arg_sg), sg));
14008 TREE_SIDE_EFFECTS (result_sg) = 1;
14009 /* Combine the signgam assignment with the lgamma result. */
14010 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14011 result_sg, result_lg));
14016 return result;
14019 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14020 mpc function FUNC on it and return the resulting value as a tree
14021 with type TYPE. The mpfr precision is set to the precision of
14022 TYPE. We assume that function FUNC returns zero if the result
14023 could be calculated exactly within the requested precision. If
14024 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14025 in the arguments and/or results. */
14027 tree
14028 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14029 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14031 tree result = NULL_TREE;
14033 STRIP_NOPS (arg0);
14034 STRIP_NOPS (arg1);
14036 /* To proceed, MPFR must exactly represent the target floating point
14037 format, which only happens when the target base equals two. */
14038 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14040 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14041 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14042 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14044 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14045 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14046 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14047 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14049 if (do_nonfinite
14050 || (real_isfinite (re0) && real_isfinite (im0)
14051 && real_isfinite (re1) && real_isfinite (im1)))
14053 const struct real_format *const fmt =
14054 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14055 const int prec = fmt->p;
14056 const mpfr_rnd_t rnd = fmt->round_towards_zero
14057 ? MPFR_RNDZ : MPFR_RNDN;
14058 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14059 int inexact;
14060 mpc_t m0, m1;
14062 mpc_init2 (m0, prec);
14063 mpc_init2 (m1, prec);
14064 mpfr_from_real (mpc_realref (m0), re0, rnd);
14065 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14066 mpfr_from_real (mpc_realref (m1), re1, rnd);
14067 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14068 mpfr_clear_flags ();
14069 inexact = func (m0, m0, m1, crnd);
14070 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14071 mpc_clear (m0);
14072 mpc_clear (m1);
14076 return result;
14079 /* A wrapper function for builtin folding that prevents warnings for
14080 "statement without effect" and the like, caused by removing the
14081 call node earlier than the warning is generated. */
14083 tree
14084 fold_call_stmt (gcall *stmt, bool ignore)
14086 tree ret = NULL_TREE;
14087 tree fndecl = gimple_call_fndecl (stmt);
14088 location_t loc = gimple_location (stmt);
14089 if (fndecl && fndecl_built_in_p (fndecl)
14090 && !gimple_call_va_arg_pack_p (stmt))
14092 int nargs = gimple_call_num_args (stmt);
14093 tree *args = (nargs > 0
14094 ? gimple_call_arg_ptr (stmt, 0)
14095 : &error_mark_node);
14097 if (avoid_folding_inline_builtin (fndecl))
14098 return NULL_TREE;
14099 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14101 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14103 else
14105 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
14106 if (ret)
14108 /* Propagate location information from original call to
14109 expansion of builtin. Otherwise things like
14110 maybe_emit_chk_warning, that operate on the expansion
14111 of a builtin, will use the wrong location information. */
14112 if (gimple_has_location (stmt))
14114 tree realret = ret;
14115 if (TREE_CODE (ret) == NOP_EXPR)
14116 realret = TREE_OPERAND (ret, 0);
14117 if (CAN_HAVE_LOCATION_P (realret)
14118 && !EXPR_HAS_LOCATION (realret))
14119 SET_EXPR_LOCATION (realret, loc);
14120 return realret;
14122 return ret;
14126 return NULL_TREE;
14129 /* Look up the function in builtin_decl that corresponds to DECL
14130 and set ASMSPEC as its user assembler name. DECL must be a
14131 function decl that declares a builtin. */
14133 void
14134 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14136 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
14137 && asmspec != 0);
14139 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14140 set_user_assembler_name (builtin, asmspec);
14142 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
14143 && INT_TYPE_SIZE < BITS_PER_WORD)
14145 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
14146 set_user_assembler_libfunc ("ffs", asmspec);
14147 set_optab_libfunc (ffs_optab, mode, "ffs");
14151 /* Return true if DECL is a builtin that expands to a constant or similarly
14152 simple code. */
14153 bool
14154 is_simple_builtin (tree decl)
14156 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
14157 switch (DECL_FUNCTION_CODE (decl))
14159 /* Builtins that expand to constants. */
14160 case BUILT_IN_CONSTANT_P:
14161 case BUILT_IN_EXPECT:
14162 case BUILT_IN_OBJECT_SIZE:
14163 case BUILT_IN_UNREACHABLE:
14164 /* Simple register moves or loads from stack. */
14165 case BUILT_IN_ASSUME_ALIGNED:
14166 case BUILT_IN_RETURN_ADDRESS:
14167 case BUILT_IN_EXTRACT_RETURN_ADDR:
14168 case BUILT_IN_FROB_RETURN_ADDR:
14169 case BUILT_IN_RETURN:
14170 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14171 case BUILT_IN_FRAME_ADDRESS:
14172 case BUILT_IN_VA_END:
14173 case BUILT_IN_STACK_SAVE:
14174 case BUILT_IN_STACK_RESTORE:
14175 /* Exception state returns or moves registers around. */
14176 case BUILT_IN_EH_FILTER:
14177 case BUILT_IN_EH_POINTER:
14178 case BUILT_IN_EH_COPY_VALUES:
14179 return true;
14181 default:
14182 return false;
14185 return false;
14188 /* Return true if DECL is a builtin that is not expensive, i.e., it is
14189 most probably expanded inline into reasonably simple code. This is a
14190 superset of is_simple_builtin. */
14191 bool
14192 is_inexpensive_builtin (tree decl)
14194 if (!decl)
14195 return false;
14196 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14197 return true;
14198 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14199 switch (DECL_FUNCTION_CODE (decl))
14201 case BUILT_IN_ABS:
14202 CASE_BUILT_IN_ALLOCA:
14203 case BUILT_IN_BSWAP16:
14204 case BUILT_IN_BSWAP32:
14205 case BUILT_IN_BSWAP64:
14206 case BUILT_IN_BSWAP128:
14207 case BUILT_IN_CLZ:
14208 case BUILT_IN_CLZIMAX:
14209 case BUILT_IN_CLZL:
14210 case BUILT_IN_CLZLL:
14211 case BUILT_IN_CTZ:
14212 case BUILT_IN_CTZIMAX:
14213 case BUILT_IN_CTZL:
14214 case BUILT_IN_CTZLL:
14215 case BUILT_IN_FFS:
14216 case BUILT_IN_FFSIMAX:
14217 case BUILT_IN_FFSL:
14218 case BUILT_IN_FFSLL:
14219 case BUILT_IN_IMAXABS:
14220 case BUILT_IN_FINITE:
14221 case BUILT_IN_FINITEF:
14222 case BUILT_IN_FINITEL:
14223 case BUILT_IN_FINITED32:
14224 case BUILT_IN_FINITED64:
14225 case BUILT_IN_FINITED128:
14226 case BUILT_IN_FPCLASSIFY:
14227 case BUILT_IN_ISFINITE:
14228 case BUILT_IN_ISINF_SIGN:
14229 case BUILT_IN_ISINF:
14230 case BUILT_IN_ISINFF:
14231 case BUILT_IN_ISINFL:
14232 case BUILT_IN_ISINFD32:
14233 case BUILT_IN_ISINFD64:
14234 case BUILT_IN_ISINFD128:
14235 case BUILT_IN_ISNAN:
14236 case BUILT_IN_ISNANF:
14237 case BUILT_IN_ISNANL:
14238 case BUILT_IN_ISNAND32:
14239 case BUILT_IN_ISNAND64:
14240 case BUILT_IN_ISNAND128:
14241 case BUILT_IN_ISNORMAL:
14242 case BUILT_IN_ISGREATER:
14243 case BUILT_IN_ISGREATEREQUAL:
14244 case BUILT_IN_ISLESS:
14245 case BUILT_IN_ISLESSEQUAL:
14246 case BUILT_IN_ISLESSGREATER:
14247 case BUILT_IN_ISUNORDERED:
14248 case BUILT_IN_VA_ARG_PACK:
14249 case BUILT_IN_VA_ARG_PACK_LEN:
14250 case BUILT_IN_VA_COPY:
14251 case BUILT_IN_TRAP:
14252 case BUILT_IN_SAVEREGS:
14253 case BUILT_IN_POPCOUNTL:
14254 case BUILT_IN_POPCOUNTLL:
14255 case BUILT_IN_POPCOUNTIMAX:
14256 case BUILT_IN_POPCOUNT:
14257 case BUILT_IN_PARITYL:
14258 case BUILT_IN_PARITYLL:
14259 case BUILT_IN_PARITYIMAX:
14260 case BUILT_IN_PARITY:
14261 case BUILT_IN_LABS:
14262 case BUILT_IN_LLABS:
14263 case BUILT_IN_PREFETCH:
14264 case BUILT_IN_ACC_ON_DEVICE:
14265 return true;
14267 default:
14268 return is_simple_builtin (decl);
14271 return false;
14274 /* Return true if T is a constant and the value cast to a target char
14275 can be represented by a host char.
14276 Store the resulting char constant in *P if so. */
14278 bool
14279 target_char_cst_p (tree t, char *p)
14281 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
14282 return false;
14284 *p = (char)tree_to_uhwi (t);
14285 return true;
14288 /* Return true if the builtin DECL is implemented in a standard library.
14289 Otherwise return false, which does not guarantee that it is not (thus
14290 the list of handled builtins below may be incomplete). */
14292 bool
14293 builtin_with_linkage_p (tree decl)
14295 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14296 switch (DECL_FUNCTION_CODE (decl))
14298 CASE_FLT_FN (BUILT_IN_ACOS):
14299 CASE_FLT_FN (BUILT_IN_ACOSH):
14300 CASE_FLT_FN (BUILT_IN_ASIN):
14301 CASE_FLT_FN (BUILT_IN_ASINH):
14302 CASE_FLT_FN (BUILT_IN_ATAN):
14303 CASE_FLT_FN (BUILT_IN_ATANH):
14304 CASE_FLT_FN (BUILT_IN_ATAN2):
14305 CASE_FLT_FN (BUILT_IN_CBRT):
14306 CASE_FLT_FN (BUILT_IN_CEIL):
14307 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
14308 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14309 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
14310 CASE_FLT_FN (BUILT_IN_COS):
14311 CASE_FLT_FN (BUILT_IN_COSH):
14312 CASE_FLT_FN (BUILT_IN_ERF):
14313 CASE_FLT_FN (BUILT_IN_ERFC):
14314 CASE_FLT_FN (BUILT_IN_EXP):
14315 CASE_FLT_FN (BUILT_IN_EXP2):
14316 CASE_FLT_FN (BUILT_IN_EXPM1):
14317 CASE_FLT_FN (BUILT_IN_FABS):
14318 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
14319 CASE_FLT_FN (BUILT_IN_FDIM):
14320 CASE_FLT_FN (BUILT_IN_FLOOR):
14321 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
14322 CASE_FLT_FN (BUILT_IN_FMA):
14323 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
14324 CASE_FLT_FN (BUILT_IN_FMAX):
14325 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
14326 CASE_FLT_FN (BUILT_IN_FMIN):
14327 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
14328 CASE_FLT_FN (BUILT_IN_FMOD):
14329 CASE_FLT_FN (BUILT_IN_FREXP):
14330 CASE_FLT_FN (BUILT_IN_HYPOT):
14331 CASE_FLT_FN (BUILT_IN_ILOGB):
14332 CASE_FLT_FN (BUILT_IN_LDEXP):
14333 CASE_FLT_FN (BUILT_IN_LGAMMA):
14334 CASE_FLT_FN (BUILT_IN_LLRINT):
14335 CASE_FLT_FN (BUILT_IN_LLROUND):
14336 CASE_FLT_FN (BUILT_IN_LOG):
14337 CASE_FLT_FN (BUILT_IN_LOG10):
14338 CASE_FLT_FN (BUILT_IN_LOG1P):
14339 CASE_FLT_FN (BUILT_IN_LOG2):
14340 CASE_FLT_FN (BUILT_IN_LOGB):
14341 CASE_FLT_FN (BUILT_IN_LRINT):
14342 CASE_FLT_FN (BUILT_IN_LROUND):
14343 CASE_FLT_FN (BUILT_IN_MODF):
14344 CASE_FLT_FN (BUILT_IN_NAN):
14345 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14346 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
14347 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
14348 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
14349 CASE_FLT_FN (BUILT_IN_POW):
14350 CASE_FLT_FN (BUILT_IN_REMAINDER):
14351 CASE_FLT_FN (BUILT_IN_REMQUO):
14352 CASE_FLT_FN (BUILT_IN_RINT):
14353 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
14354 CASE_FLT_FN (BUILT_IN_ROUND):
14355 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
14356 CASE_FLT_FN (BUILT_IN_SCALBLN):
14357 CASE_FLT_FN (BUILT_IN_SCALBN):
14358 CASE_FLT_FN (BUILT_IN_SIN):
14359 CASE_FLT_FN (BUILT_IN_SINH):
14360 CASE_FLT_FN (BUILT_IN_SINCOS):
14361 CASE_FLT_FN (BUILT_IN_SQRT):
14362 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
14363 CASE_FLT_FN (BUILT_IN_TAN):
14364 CASE_FLT_FN (BUILT_IN_TANH):
14365 CASE_FLT_FN (BUILT_IN_TGAMMA):
14366 CASE_FLT_FN (BUILT_IN_TRUNC):
14367 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
14368 return true;
14369 default:
14370 break;
14372 return false;
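/* Example (illustrative): CASE_FLT_FN expands to the float, double and
   long double case labels at once, so the switch above matches e.g.
   BUILT_IN_SQRTF, BUILT_IN_SQRT and BUILT_IN_SQRTL together:

     tree fndecl = builtin_decl_explicit (BUILT_IN_SQRTF);
     if (fndecl)
       gcc_checking_assert (builtin_with_linkage_p (fndecl));

   builtin_decl_explicit may return NULL_TREE when the builtin is
   unavailable, hence the check before use.  */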
/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
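/* Worked example (illustrative): on a target where ptrdiff_t is 64 bits
   the valid range is [-2^63, 2^63 - 1], so an offset range such as
   [0, 4096] is bounded, while a conservatively computed range like
   [-2^64, 2^64] (representable in the wider offset_int of OFFRNG but
   not in ptrdiff_t) is not.  */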
/* Return the fnspec string describing the known side effects of the
   builtin CALLEE, or the empty fnspec if nothing is known.
   See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally read memory pointed to by the first
	 argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
	return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
	return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
	return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
	return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
	return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
	return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
	return ".cO313";
      case BUILT_IN_BCOPY:
	return ".c23O3";
      case BUILT_IN_BZERO:
	return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
	return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
	return ".cR ";
      case BUILT_IN_STRNLEN:
	return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
	return "mCR ";
      case BUILT_IN_STRNDUP:
	return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
	return "mC";
      CASE_BUILT_IN_ALLOCA:
	return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
	return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
	return ".co ";
      case BUILT_IN_VA_END:
	return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
	return ".Cw ";
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	return ".cO ";
      case BUILT_IN_STACK_SAVE:
	return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
	return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
	return ".cOt";

      default:
	return "";
    }
}
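/* Decoding example (illustrative; attr-fnspec.h has the authoritative
   grammar): memcpy's fnspec "1cO313" reads as
     '1'  - the call returns its first argument,
     'c'  - the function is const apart from the argument effects below,
   followed by one two-character entry per argument:
     "O3" - argument 1 is only written, with the access size given by
	    argument 3,
     "13" - argument 2 is only read and its contents are copied to the
	    memory pointed to by argument 1, again with size given by
	    argument 3.
   Similarly, the 't' in entries like "Ot" above marks an access whose
   size is that of the pointed-to type, and ' ' leaves the corresponding
   property unspecified.  */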